/**
 * Open the client: validate the configuration, open the local RPC and
 * streaming ports, connect them to the remote server ports, and retrieve
 * the sensor metadata once through the Thrift RPC interface.
 *
 * Required config keys: "remote" (server port prefix) and "local" (client
 * port prefix). Optional: "timeout" (double, seconds, default 0.01), used
 * by the streaming port.
 *
 * @param config searchable holding the parameters listed above
 * @return true on success; false on missing/ill-typed parameters or any
 *         port open/connect failure (already-opened ports are closed).
 */
bool MultipleAnalogSensorsClient::open(yarp::os::Searchable& config)
{
    if (!config.check("remote"))
    {
        // FIX: the message used to say "missing name parameter" even though
        // this check is for the "remote" parameter.
        yError("MultipleAnalogSensorsClient: missing remote parameter, exiting.");
        return false;
    }

    if (!config.check("local"))
    {
        yError("MultipleAnalogSensorsClient: missing local parameter, exiting.");
        return false;
    }

    if (config.check("timeout") && !(config.find("timeout").isFloat64()))
    {
        yError("MultipleAnalogSensorsClient: timeout parameter is present but is not double, exiting.");
        return false;
    }

    std::string remote = config.find("remote").asString();
    std::string local = config.find("local").asString();

    // Optional timeout parameter
    m_streamingPort.timeoutInSeconds = config.check("timeout", yarp::os::Value(0.01), "Timeout parameter").asFloat64();

    m_localRPCPortName = local + "/rpc:i";
    m_localStreamingPortName = local + "/measures:i";
    m_remoteRPCPortName = remote + "/rpc:o";
    m_remoteStreamingPortName = remote + "/measures:o";

    // TODO(traversaro) : as soon as the method for checking port names validity
    // are available in YARP ( https://github.com/robotology/yarp/pull/1508 ) add some checks

    // Open ports
    bool ok = m_rpcPort.open(m_localRPCPortName);
    if (!ok)
    {
        yError("MultipleAnalogSensorsClient: Failure to open the port %s.", m_localRPCPortName.c_str());
        close();
        return false;
    }

    ok = m_streamingPort.open(m_localStreamingPortName);
    if (!ok)
    {
        yError("MultipleAnalogSensorsClient: Failure to open the port %s.", m_localStreamingPortName.c_str());
        close();
        return false;
    }
    // FIX: enable the callback only after the port is known to be open
    // (it was previously enabled before the open() result was checked).
    m_streamingPort.useCallback();

    // Connect ports
    ok = yarp::os::Network::connect(m_localRPCPortName, m_remoteRPCPortName);
    if (!ok)
    {
        yError("MultipleAnalogSensorsClient: Failure connecting port %s to %s.", m_localRPCPortName.c_str(), m_remoteRPCPortName.c_str());
        yError("MultipleAnalogSensorsClient: Check that the specified MultipleAnalogSensorsServer is up.");
        close();
        return false;
    }
    m_RPCConnectionActive = true;

    ok = yarp::os::Network::connect(m_remoteStreamingPortName, m_localStreamingPortName);
    if (!ok)
    {
        yError("MultipleAnalogSensorsClient: Failure connecting port %s to %s.", m_remoteStreamingPortName.c_str(), m_localStreamingPortName.c_str());
        yError("MultipleAnalogSensorsClient: Check that the specified MultipleAnalogSensorsServer is up.");
        close();
        return false;
    }
    m_StreamingConnectionActive = true;

    // Once the connection is active, we fetch the metadata only once.
    ok = m_RPCInterface.yarp().attachAsClient(m_rpcPort);
    if (!ok)
    {
        yError("MultipleAnalogSensorsClient: Failure opening Thrift-based RPC interface.");
        return false;
    }

    // TODO(traversaro): there is a limitation on the thrift-generated
    // YARP structures related to how to get connection errors during the call
    // If this is ever solved at YARP level, we should properly handle errors
    // here
    m_sensorsMetadata = m_RPCInterface.getMetadata();

    return true;
}
/**
 * Select and set up the double-touch task that matches the skin part that
 * was touched: builds the solver, seeds its initial guess from the home
 * posture, and binds the master/slave motor-interface aliases to the
 * proper arm.
 *
 * @return true on success; false if curTaskType is not an admissible value
 *         or if alignJointsBounds() fails.
 */
bool doubleTouchThread::selectTask()
{
    // Map the contacted skin part to the task label. "H" variants target a
    // hand, the others a forearm; the leading letter is the touched side.
    // NOTE(review): no default case — if cntctSkinPart is none of these four
    // values, curTaskType keeps whatever it held before and the final else
    // below rejects it.
    switch(cntctSkinPart)
    {
        case SKIN_LEFT_HAND:
            curTaskType = "LHtoR";
            break;
        case SKIN_LEFT_FOREARM:
            curTaskType = "LtoR";
            break;
        case SKIN_RIGHT_HAND:
            curTaskType = "RHtoL";
            break;
        case SKIN_RIGHT_FOREARM:
            curTaskType = "RtoL";
            break;
    }

    // Build the solver and the variable holders sized to its problem.
    slv = new doubleTouch_Solver(curTaskType);
    gue = new doubleTouch_Variables(slv->probl->getNVars()); // guess
    sol = new doubleTouch_Variables(slv->probl->getNVars()); // solution

    solution.resize(slv->probl->getNVars(),0.0);
    nDOF = solution.size();

    // Seed the initial guess with values derived from the arm home posture.
    if (curTaskType == "LtoR" || curTaskType == "RtoL")
    {
        // Forearm tasks.
        gue->joints[1] = -armPossHome[3];
        gue->joints[3] = -armPossHome[1];
        gue->joints[4] = -armPossHome[0];
        gue->joints[5] = -armPossHome[0];
        gue->joints[6] = armPossHome[1];
        gue->joints[8] = armPossHome[3];
    }
    else if (curTaskType == "LHtoR" || curTaskType == "RHtoL")
    {
        // Hand tasks: same seeding shifted by two joints.
        // NOTE(review): joints[5+2] uses +armPossHome[0] while the forearm
        // branch uses -armPossHome[0] at joints[5] — confirm this sign
        // difference is intentional.
        gue->joints[1+2] = -armPossHome[3];
        gue->joints[3+2] = -armPossHome[1];
        gue->joints[4+2] = -armPossHome[0];
        gue->joints[5+2] = armPossHome[0];
        gue->joints[6+2] = armPossHome[1];
        gue->joints[8+2] = armPossHome[3];
    }
    sol->clone(*gue);

    slv->probl->limb.setAng(gue->joints);
    testLimb = new iCubCustomLimb(curTaskType);

    // Bind the master (M, moving) and slave (S) interface aliases:
    // "L*" tasks drive with the right arm, "R*" tasks with the left.
    if (curTaskType=="LHtoR" || curTaskType=="LtoR")
    {
        iencsM = iencsR;
        imodeM = imodeR;
        iposM = iposR;
        encsM = encsR;
        ilimM = ilimR;
        jntsM = jntsR;
        armM = armR;

        iencsS = iencsL;
        imodeS = imodeL;
        iposS = iposL;
        encsS = encsL;
        ilimS = ilimL;
        jntsS = jntsL;
        armS = armL;
    }
    else if (curTaskType=="RHtoL" || curTaskType=="RtoL")
    {
        iencsM = iencsL;
        imodeM = imodeL;
        iposM = iposL;
        encsM = encsL;
        ilimM = ilimL;
        jntsM = jntsL;
        armM = armL;

        iencsS = iencsR;
        imodeS = imodeR;
        iposS = iposR;
        encsS = encsR;
        ilimS = ilimR;
        jntsS = jntsR;
        armS = armR;
    }
    else
    {
        yError("[doubleTouch] current task type is none of the admissible values!");
        return false;
    }

    if (!alignJointsBounds())
    {
        yError("[doubleTouch] alignJointsBounds failed!!!\n");
        return false;
    }

    // Read the master arm encoders and compute the index-finger transform,
    // which is installed as the end-effector frame (HN) of the solver limb
    // and of the test limb.
    Vector joints;
    iencsM->getEncoders(encsM->data());
    slv->probl->index.getChainJoints(*encsM,joints);
    Matrix HIndex=slv->probl->index.getH(joints*iCub::ctrl::CTRL_DEG2RAD);
    slv->probl->limb.setHN(HIndex);
    testLimb->setHN(HIndex);
    printMessage(1,"Index type: %s \t HIndex:\n%s\n", slv->probl->index.getType().c_str(), HIndex.toString(3,3).c_str());

    return true;
}
/**
 * Configure the agentDetector module: read parameters, connect to the OPC
 * and the reference-frame handler (both waits block until the connection
 * succeeds), open the kinect client and all output ports, optionally open
 * the debug OpenCV windows and the face-recognizer ports, and finally open
 * and attach the module RPC port.
 *
 * @param rf resource finder carrying the command-line/config parameters
 * @return true on success; false if the kinect client cannot be opened.
 */
bool AgentDetector::configure(ResourceFinder &rf)
{
    // --- parameters -------------------------------------------------------
    period = rf.check("period",Value(0.03)).asDouble();
    int verbosity=rf.check("verbosity",Value(0)).asInt();
    string name=rf.check("name",Value("agentDetector")).asString().c_str();
    useFaceRecognition = rf.check("useFaceRecognition");
    handleMultiplePlayers = rf.check("multiplePlayers");
    isMounted = !rf.check("isFixed");
    string show=rf.check("showImages",Value("false")).asString().c_str();
    showMode=rf.check("showMode",Value("rgb+depth+skeleton+players")).asString().c_str();
    // NOTE(review): the default here is the *string* Value("3.0"); verify
    // that asDouble() on a string default yields the intended 3.0 rather
    // than 0.0 on this YARP version.
    dThresholdDisparition = rf.check("dThresholdDisparition",Value("3.0")).asDouble();

    // initialise timing in case of misrecognition
    dTimingLastApparition = clock();

    //Open the OPC Client
    partner_default_name=rf.check("partner_default_name",Value("partner")).asString().c_str();
    string opcName=rf.check("opc",Value("OPC")).asString().c_str();
    opc = new OPCClient(name);
    dSince = 0.0;
    // Blocks until the OPC server is reachable.
    while (!opc->connect(opcName))
    {
        cout<<"Waiting connection to OPC..."<<endl;
        Time::delay(1.0);
    }
    opc->checkout();

    // If the OPC already holds a non-icub agent, reuse its name as the
    // default partner name (the last matching entity wins).
    list<shared_ptr<Entity>> entityList = opc->EntitiesCacheCopy();
    for(auto e : entityList) {
        if(e->entity_type() == "agent" && e->name() != "icub") {
            partner_default_name = e->name();
        }
    }

    partner = opc->addOrRetrieveEntity<Agent>(partner_default_name);
    partner->m_present = 0.0;
    opc->commit(partner);

    //Retrieve the calibration matrix from RFH
    string rfhName=rf.check("rfh",Value("referenceFrameHandler")).asString().c_str();
    string rfhLocal = "/";
    rfhLocal+=name;
    rfhLocal+="/rfh:o";
    rfh.open(rfhLocal.c_str());

    string rfhRemote = "/";
    rfhRemote += rfhName ;
    rfhRemote += "/rpc";
    // Blocks until the reference-frame handler is reachable.
    while (!Network::connect(rfhLocal.c_str(),rfhRemote.c_str()))
    {
        cout<<"Waiting connection to RFH..."<<endl;
        Time::delay(1.0);
    }

    isCalibrated=false;
    if(!checkCalibration())
        yWarning() << " ========================= KINECT NEED TO BE CALIBRATED ============================" ;

    // --- output ports -----------------------------------------------------
    string clientName = name;
    clientName += "/kinect";
    outputSkeletonPort.open(("/"+name+"/skeleton:o").c_str());
    depthPort.open( ("/"+clientName+"/depthPort:o").c_str());
    imagePort.open(("/"+clientName+"/imagePort:o").c_str());
    playersPort.open(("/"+clientName+"/playersPort:o").c_str());
    skeletonPort.open(("/"+clientName+"/skeletonPort:o").c_str());
    agentLocOutPort.open(("/"+clientName+"/agentLoc:o").c_str());

    // --- kinect client ----------------------------------------------------
    Property options;
    options.put("carrier","tcp");
    options.put("remote","kinectServer");
    options.put("local",clientName.c_str());
    options.put("verbosity",verbosity);

    // The kinect client is the only hard failure of this configure().
    if (!client.open(options))
        return false;

    // Query the server for the image/depth geometry and size the buffers.
    Property opt;
    client.getInfo(opt);

    showImages=(show=="true")?true:false;
    int xPos = rf.check("x",Value(10)).asInt();
    int yPos = rf.check("y",Value(10)).asInt();
    int img_width=opt.find("img_width").asInt();
    int img_height=opt.find("img_height").asInt();
    int depth_width=opt.find("depth_width").asInt();
    int depth_height=opt.find("depth_height").asInt();

    rgb.resize(img_width, img_height);
    depth.resize(depth_width,depth_height);
    depthToDisplay.resize(depth_width,depth_height);
    playersImage.resize(depth_width,depth_height);
    skeletonImage.resize(depth_width,depth_height);

    depthTmp=cvCreateImage(cvSize(depth_width,depth_height),IPL_DEPTH_32F,1);
    rgbTmp=cvCreateImage(cvSize(img_width,img_height),IPL_DEPTH_8U,3);

    // --- optional debug windows ------------------------------------------
    // showMode is a '+'-separated list parsed by showImageParser(); each
    // known submode opens one OpenCV window at most once.
    if (showImages)
    {
        vector<bool> alreadyOpen(4,false);
        string mode=showMode;
        string submode;
        while (!mode.empty())
        {
            if (showImageParser(mode,submode))
            {
                if (submode=="rgb")
                {
                    if (!alreadyOpen[0])
                    {
                        int x_rgb=rf.check("x-rgb",Value(xPos)).asInt();
                        int y_rgb=rf.check("y-rgb",Value(yPos)).asInt();
                        yInfo("\"rgb\" window selected in (%d,%d)",x_rgb,y_rgb);
                        cvNamedWindow("rgb",CV_WINDOW_AUTOSIZE);
                        cvMoveWindow("rgb",x_rgb,y_rgb);
                        alreadyOpen[0]=true;
                    }
                    else
                        yWarning("\"rgb\" window already open");
                }
                else if (submode=="depth")
                {
                    if (!alreadyOpen[1])
                    {
                        int x_depth=rf.check("x-depth",Value(xPos+300)).asInt();
                        int y_depth=rf.check("y-depth",Value(yPos)).asInt();
                        yInfo("\"depth\" window selected in (%d,%d)",x_depth,y_depth);
                        cvNamedWindow("depth",CV_WINDOW_AUTOSIZE);
                        cvMoveWindow("depth",x_depth,y_depth);
                        // Clicking the depth window feeds click_callback with
                        // the displayed depth image.
                        cvSetMouseCallback("depth",AgentDetector::click_callback, (void*)depthToDisplay.getIplImage());
                        alreadyOpen[1]=true;
                    }
                    else
                        yWarning("\"depth\" window already open");
                }
                else if (submode=="skeleton")
                {
                    if (!alreadyOpen[2])
                    {
                        int x_skeleton=rf.check("x-skeleton",Value(xPos)).asInt();
                        int y_skeleton=rf.check("y-skeleton",Value(yPos+300)).asInt();
                        yInfo("\"skeleton\" window selected in (%d,%d)",x_skeleton,y_skeleton);
                        cvNamedWindow("skeleton",CV_WINDOW_AUTOSIZE);
                        cvMoveWindow("skeleton",x_skeleton,y_skeleton);
                        alreadyOpen[2]=true;
                    }
                    else
                        yWarning("\"skeleton\" window already open");
                }
                else if (submode=="players")
                {
                    if (!alreadyOpen[3])
                    {
                        int x_players=rf.check("x-players",Value(xPos+300)).asInt();
                        int y_players=rf.check("y-players",Value(yPos+300)).asInt();
                        yInfo("\"players\" window selected in (%d,%d)",x_players,y_players);
                        cvNamedWindow("players",CV_WINDOW_AUTOSIZE);
                        cvMoveWindow("players",x_players,y_players);
                        alreadyOpen[3]=true;
                    }
                    else
                        yWarning("\"players\" window already open");
                }
                else
                    yError("unrecognized show mode!");
            }
        }
    }

    //Initialise Face Recognizer
    if (useFaceRecognition)
    {
        string faceRecognName = "/";
        faceRecognName+=name;
        faceRecognName+="/faceRecognizer:rpc";
        faceRecognizerModule.open(faceRecognName.c_str());

        string faceRecognResName = "/";
        faceRecognResName+=name;
        faceRecognResName+="/faceRecognizer/results:i";
        faceRecognizerModuleResults.open(faceRecognResName.c_str());

        // WARNING: Do not use getContextPath if that ever should be used again!
        //recognizer->loadTrainingSet(rf.getContextPath().c_str());
        //recognizer->Train();
        //cout<<"Loading recognizer: "
        //    <<recognizer->loadRecognizer("defaultFaces.tpc")<<endl;
    }
    else
    {
        //Load the skeletonPatterns
    }

    // --- module RPC port --------------------------------------------------
    string rpcName = "/";
    rpcName+=name;
    rpcName+="/rpc";
    rpc.open(rpcName.c_str());
    attach(rpc);

    pointsCnt=0;
    return true;
}
/**
 * Configure the trajectoryPlayer module: open the RPC port, bring up the
 * robot driver, start the working thread, apply the optional "execute" and
 * "period" settings, and (optionally) load a trajectory file sized by the
 * mandatory-with-filename "joints" parameter.
 *
 * @param rf resource finder with: name, robot, part, execute, period,
 *           filename, joints
 * @return true on success; false on driver/connection failure, missing
 *         "joints" when "filename" is given, too many requested joints, or
 *         an unparsable trajectory file.
 */
virtual bool configure(ResourceFinder &rf)
{
    Time::turboBoost();

    if (rf.check("name"))
        name=string("/")+rf.find("name").asString().c_str();
    else
        name="/trajectoryPlayer";

    rpcPort.open((name+"/rpc").c_str());
    attach(rpcPort);

    Property portProp;
    portProp.put("robot", rf.find("robot"));
    portProp.put("part", rf.find("part"));

    //*** start the robot driver
    if (!robot.configure(portProp))
    {
        yError() << "Error configuring position controller, check parameters";
        return false;
    }

    if (!robot.init())
    {
        yError() << "Error cannot connect to remote ports" ;
        return false;
    }

    //*** attach the robot driver to the thread and start it
    w_thread.attachRobotDriver(&robot);
    b_thread.attachRobotDriver(&robot);
    // A failed working thread is logged but not fatal: the module still
    // runs, only the queue will be inactive.
    if (!w_thread.start())
    {
        yError() << "Working thread did not start, queue will not work";
    }
    else
    {
        yInfo() << "Working thread started";
    }

    if (rf.check("execute")==true)
    {
        // FIX: typo "Enablig" -> "Enabling"
        yInfo() << "Enabling iPid->setReference() controller";
        w_thread.enable_execute_joint_command = true;
    }
    else
    {
        yInfo() << "Not using iPid->setReference() controller";
        w_thread.enable_execute_joint_command = false;
    }

    if (rf.check("period")==true)
    {
        int period = rf.find("period").asInt();
        yInfo() << "Thread period set to " << period << "ms";
        w_thread.setRate(period);
    }
    yInfo() << "Using parameters:" << rf.toString();

    //*** open the position file
    yInfo() << "opening file...";
    if (rf.check("filename")==true)
    {
        string filename = rf.find("filename").asString().c_str();
        int req_joints = 0; //default value
        // "joints" (number of joints to control) is mandatory whenever a
        // trajectory file is supplied.
        if (rf.check("joints"))
        {
            req_joints = rf.find("joints").asInt();
        }
        else
        {
            yError() << "Missing parameter 'joints' (number of joints to control)";
            return false;
        }

        if (req_joints < robot.n_joints)
        {
            // Fewer joints requested than available: shrink the working set.
            yWarning () << "changing n joints from" << robot.n_joints << "to" << req_joints;
            robot.n_joints = req_joints;
        }
        else if (req_joints > robot.n_joints)
        {
            yError() << "Requested number of joints exceeds the number of available joints on the robot!";
            return false;
        }

        if (!w_thread.actions.openFile(filename,robot.n_joints))
        {
            yError() << "Unable to parse file " << filename;
            return false;
        }
    }
    else
    {
        // FIX: grammar, was "no sequence files load."
        yWarning() << "no sequence file loaded.";
    }

    // FIX: typo "succesffully" -> "successfully"
    yInfo() << "module successfully configured. ready.";
    return true;
}
void cartControlReachAvoidThread::doReach() { if (useLeftArm || useRightArm) { if (state==STATE_REACH) { Vector xdhat, odhat, qdhat; Vector torsoAndArmJointDeltas; //!torso already in motor control order // iKin / cartControl pitch, roll, yaw; motorInterface - yaw, roll, pitch Vector qdotReach, qdotAvoid, qdotReachAndAvoid; //3 torso joint velocities (but in motor interface order! yaw, roll, pitch) + 7 arm joints Vector qdotArm, qdotTorso; //joint velocities in motor interface format /**** init ***************/ qdotArm.resize(armIdx.size(),0.0); //size 7 qdotTorso.resize(torsoAxes,0.0); torsoAndArmJointDeltas.resize(cartNrDOF,0.0); qdotReach.resize(cartNrDOF,0.0); qdotAvoid.resize(cartNrDOF,0.0); qdotReachAndAvoid.resize(cartNrDOF,0.0); /******* solve for optimal reaching configuration **********************************/ //optionally set up weights - setRestWeights - optionally to give more value current joint pos (10 values, torso + arm)) Vector x=R.transposed()*(targetPos); limitRange(x); x=R*x; yDebug("doReach(): reach target x %s.\n",x.toString().c_str()); cartArm->askForPosition(x,xdhat,odhat,qdhat); //7+3, including torso, probably even if torso is off // qdhat now contains target joint pos yDebug("xdhat from cart solver %s\n",xdhat.toString().c_str()); yDebug("qdhat from cart solver %s\n",qdhat.toString().c_str()); //*********** now the controller... 
***************************/ //use minJerkVelCtrl - instance of minJerkVelCtrlForIdealPlant bool ret=encArm->getEncoders(arm.data()); bool ret2 = encTorso->getEncoders(torso.data()); if ((!ret) && (!ret2)) { yError("Error reading encoders, check connectivity with the robot\n"); } else { /* use encoders */ yDebug("There are %d arm encoder values: %s\n",armAxes,arm.toString().c_str()); yDebug("There are %d torso encoder values: %s\n",torsoAxes,torso.toString().c_str()); torsoAndArmJointDeltas[0] = qdhat[2]-torso[0]; torsoAndArmJointDeltas[1] = qdhat[1]-torso[1]; torsoAndArmJointDeltas[2] = qdhat[0]-torso[2]; torsoAndArmJointDeltas[3]=qdhat[3]-arm[0]; // shoulder pitch yDebug("Shoulder pitch difference torsoAndArmJointDeltas[3]=qdhat[3]-encodersArm[0], %f = %f - % f\n", torsoAndArmJointDeltas[3],qdhat[3],arm[0]); torsoAndArmJointDeltas[4]=qdhat[4]-arm[1]; // shoulder roll torsoAndArmJointDeltas[5]=qdhat[5]-arm[2]; // shoulder yaw torsoAndArmJointDeltas[6]=qdhat[6]-arm[3]; // elbow torsoAndArmJointDeltas[7]=qdhat[7]-arm[4]; // wrist pronosupination torsoAndArmJointDeltas[8]=qdhat[8]-arm[5]; // wrist pitch torsoAndArmJointDeltas[9]=qdhat[9]-arm[6]; // wrist yaw yDebug("torsoAndArmJointDeltas: %s\n",torsoAndArmJointDeltas.toString().c_str()); qdotReach = minJerkVelCtrl->computeCmd(2.0,torsoAndArmJointDeltas); //2 seconds to reach //the above probably gives only the first command which will eventually be part of a minimum jerk trajectory - //- but that will not be instantiated here because we always ask again? 
yDebug("qdotReach: %s\n",qdotReach.toString().c_str()); } //so far, qdot based only on reaching; now combine magically with avoidance /* for each avoidance vector skinPart gives FoR, which is link after the point - move to first joint/link before the point express point in last proximal joint; create extra link - rototranslation */ //TODO - so far takes only first avoidanceVector if (!avoidanceVectors.empty()){ // Instantiate new chain iCub::iKin::iKinChain from iCub::iKin::iCubArm iCub::iKin::iKinChain *chain; iCub::iKin::iCubArm *icub_arm; if (armSel==LEFTARM) { icub_arm = new iCub::iKin::iCubArm("left"); icub_arm->setAng(arm*CTRL_DEG2RAD); chain = icub_arm->asChain(); } else if (armSel==RIGHTARM) { icub_arm = new iCub::iKin::iCubArm("right"); icub_arm->setAng(arm*CTRL_DEG2RAD); chain = icub_arm->asChain(); } else { yError() << "armSel neither left or right; abort"; return; } // Block all the more distal joints after the joint //unsigned int dof = chain -> getDOF(); //should be 10 by default (3 + 7, with torso) // if the skin part is a hand, no need to block any joints if ((avoidanceVectors[0].skin_part == SKIN_LEFT_FOREARM) || (avoidanceVectors[0].skin_part == SKIN_RIGHT_FOREARM)){ chain->blockLink(9); chain->blockLink(8);//wrist joints yDebug("obstacle threatening skin part %s, blocking links 9 and 10 (wrist; indexes 8,9)",SkinPart_s[avoidanceVectors[0].skin_part].c_str()); } else if ((avoidanceVectors[0].skin_part == SKIN_LEFT_UPPER_ARM) || (avoidanceVectors[0].skin_part == SKIN_RIGHT_UPPER_ARM)){ chain->blockLink(9); chain->blockLink(8);chain->blockLink(7);chain->blockLink(6); //wrist joints + elbow joints yDebug("obstacle threatening skin part %s, blocking links 7-10 (wrist+elbow; indexes 6-9)",SkinPart_s[avoidanceVectors[0].skin_part].c_str()); } //or: //for (int i = dof; i > Skin_2_Link(avoidanceVectors[0].skin_part)+torsoAxes+1; --i) //that is, i from 10, block all joint up to link nr. 
of specific skin part, + 3 for the extra torso joints in the chain //so, for forearm, this is blocking 10,9... up to 7, where nr. 7 = 4 (link nr. of forearm) + 3 will be first free joint), //effectively blocking the wrist joints + elbow joints // SetHN to move the end effector toward the point to be controlled Matrix HN = eye(4); computeFoR(avoidanceVectors[0].x,avoidanceVectors[0].n,HN); yDebug("HN matrix: %s",HN.toString().c_str()); chain -> setHN(HN); // GeoJacobian to get J, then from x_dot = J * q_dot //q_dot_avoidance = pimv(J) * x_dot //(maybe also here the weights) // Ask for geoJacobian Matrix J = chain -> GeoJacobian(); //6 rows, n columns for every active DOF (excluding the blocked) yDebug("GeoJacobian matrix: %s",J.toString().c_str()); Vector xdotAvoid(3,0.0); // velocity vector = speed (10cm/s) * direction of motion (the unitary version of the normal vector) xdotAvoid = 0.01 * (avoidanceVectors[0].n / yarp::math::norm(avoidanceVectors[0].n)); yDebug("xdotAvoid: %s",xdotAvoid.toString().c_str()); // Compute the q_dot_avoidance qdotAvoid = yarp::math::pinv(J) * xdotAvoid; yDebug("qdotAvoid: %s",qdotAvoid.toString().c_str()); delete icub_arm; //(optionally, if you want avoidance to act more locally, you can 0 some joints from q_dot_avoidance) } /* end of avoidance *****************************/ // combine q_dot_reach with q_dot_avoidance; such as weighted sum using gains from allostasis for(int i=0;i<cartNrDOF;i++){ qdotReachAndAvoid[i] = reachingGain*qdotReach[i] + avoidanceGain*qdotAvoid[i]; yDebug("qdotReachAndAvoid[%d] = reachingGain*qdotReach[%d] + avoidanceGain*qdotAvoid[%d]",i,i,i); yDebug("%f = %f * %f + %f * %f",qdotReachAndAvoid[i],reachingGain,qdotReach[i],avoidanceGain,qdotAvoid[i]); } yDebug("qdotReachAndAvoid: %s",qdotReachAndAvoid.toString().c_str()); /**** do the movement - send velocities ************************************/ //need to pass from the cartArm format (3 for torso, 7 for arm) to torso (3 joints) + arm motor control (16 
joints) qdotTorso[0]=qdotReachAndAvoid[0]; qdotTorso[1]=qdotReachAndAvoid[1]; qdotTorso[2]=qdotReachAndAvoid[2]; qdotArm[0]=qdotReachAndAvoid[3]; qdotArm[1]=qdotReachAndAvoid[4]; qdotArm[2]=qdotReachAndAvoid[5]; qdotArm[3]=qdotReachAndAvoid[6]; qdotArm[4]=qdotReachAndAvoid[7]; qdotArm[5]=qdotReachAndAvoid[8]; qdotArm[6]=qdotReachAndAvoid[9]; //for(int j=7;j<armAxes;j++){ //qdotArm[j]=0.0; //no movement for the rest of the joints //} yDebug("velocityMove(qdotArm.data()), qdotArm: %s\n", qdotArm.toString().c_str()); velArm->velocityMove(armIdx.size(),armIdx.getFirst(),qdotArm.data()); yDebug("velocityMove(qdotTorso.data()), qdotTorso: %s\n", qdotTorso.toString().c_str()); velTorso->velocityMove(qdotTorso.data()); } } }
/**
 * Periodic step: poll all trackers (optical flow, pf3dTracker, fingertip
 * tracker, double touch), turn their readings into IncomingEvents with a
 * velocity estimate, drive the gaze to the most relevant target, and either
 * publish the collected events or — after one silent second — reset all
 * velocity estimators.
 *
 * NOTE: the assignments inside the if-conditions below are intentional:
 * each read(false) is non-blocking and yields NULL when no data is pending.
 */
void vtWThread::run()
{
    optFlowPos.resize(3,0.0);
    pf3dTrackerPos.resize(3,0.0);
    doubleTouchPos.resize(3,0.0);
    fgtTrackerPos.resize(3,0.0);

    bool isTarget = false;
    events.clear();

    // process the optFlow
    if (optFlowBottle = optFlowPort.read(false))
    {
        if (optFlowBottle->size()>=3)
        {
            yDebug("Computing data from the optFlowTracker %g\n",getEstUsed());
            optFlowPos.zero();
            optFlowPos[0]=optFlowBottle->get(0).asDouble();
            optFlowPos[1]=optFlowBottle->get(1).asDouble();
            optFlowPos[2]=optFlowBottle->get(2).asDouble();
            AWPolyElement el(optFlowPos,Time::now());
            optFlowVelEstimate=linEst_optFlow->estimate(el);
            events.push_back(IncomingEvent(optFlowPos,optFlowVelEstimate,0.05,"optFlow"));
            isTarget=true;
        }
    }

    // process the pf3dTracker
    if (pf3dTrackerBottle = pf3dTrackerPort.read(false))
    {
        if (pf3dTrackerBottle->size()>6)
        {
            // Element 6 is a validity flag; the position is used only when
            // it equals 1.0.
            if (pf3dTrackerBottle->get(6).asDouble()==1.0)
            {
                // Homogeneous point in the left-eye frame.
                Vector fp(4);
                fp[0]=pf3dTrackerBottle->get(0).asDouble();
                fp[1]=pf3dTrackerBottle->get(1).asDouble();
                fp[2]=pf3dTrackerBottle->get(2).asDouble();
                fp[3]=1.0;
                if (!gsl_isnan(fp[0]) && !gsl_isnan(fp[1]) && !gsl_isnan(fp[2]))
                {
                    yDebug("Computing data from the pf3dTracker %g\n",getEstUsed());
                    // Map the point to the root frame via the left-eye pose.
                    Vector x,o;
                    igaze->getLeftEyePose(x,o);
                    Matrix T=axis2dcm(o);
                    T(0,3)=x[0];
                    T(1,3)=x[1];
                    T(2,3)=x[2];
                    pf3dTrackerPos=T*fp;
                    pf3dTrackerPos.pop_back();
                    AWPolyElement el(pf3dTrackerPos,Time::now());
                    pf3dTrackerVelEstimate=linEst_pf3dTracker->estimate(el);
                    events.push_back(IncomingEvent(pf3dTrackerPos,pf3dTrackerVelEstimate,0.05,"pf3dTracker"));
                    isTarget=true;
                }
            }
        }
    }

    if (!rf->check("noDoubleTouch"))
    {
        // processes the fingertipTracker
        if(fgtTrackerBottle = fgtTrackerPort.read(false))
        {
            if (doubleTouchBottle = doubleTouchPort.read(false))
            {
                if(fgtTrackerBottle != NULL && doubleTouchBottle != NULL)
                {
                    if (doubleTouchBottle->get(3).asString() != "" && fgtTrackerBottle->get(0).asInt() != 0)
                    {
                        yDebug("Computing data from the fingertipTracker %g\n",getEstUsed());
                        doubleTouchStep = doubleTouchBottle->get(0).asInt();
                        fgtTrackerPos[0] = fgtTrackerBottle->get(1).asDouble();
                        fgtTrackerPos[1] = fgtTrackerBottle->get(2).asDouble();
                        fgtTrackerPos[2] = fgtTrackerBottle->get(3).asDouble();
                        AWPolyElement el2(fgtTrackerPos,Time::now());
                        fgtTrackerVelEstimate=linEst_fgtTracker->estimate(el2);

                        if(doubleTouchStep<=1)
                        {
                            // Early steps: just re-center the gaze.
                            Vector ang(3,0.0);
                            igaze -> lookAtAbsAngles(ang);
                        }
                        else if(doubleTouchStep>1 && doubleTouchStep<8)
                        {
                            // Fingertip events replace anything gathered so far.
                            events.clear();
                            events.push_back(IncomingEvent(fgtTrackerPos,fgtTrackerVelEstimate,-1.0,"fingertipTracker"));
                            isTarget=true;
                        }
                    }
                }
            }
        }

        // processes the doubleTouch !rf->check("noDoubleTouch")
        // NOTE(review): this reads doubleTouchPort a second time in the same
        // cycle (the first read happens above, gated on the fingertip
        // tracker) — confirm that consuming two bottles per cycle is the
        // intended behavior.
        if(doubleTouchBottle = doubleTouchPort.read(false))
        {
            if(doubleTouchBottle != NULL)
            {
                if (doubleTouchBottle->get(3).asString() != "")
                {
                    Matrix T = eye(4);
                    Vector fingertipPos(4,0.0);
                    doubleTouchPos.resize(4,0.0);

                    currentTask = doubleTouchBottle->get(3).asString();
                    doubleTouchStep = doubleTouchBottle->get(0).asInt();
                    fingertipPos = iCub::skinDynLib::matrixFromBottle(*doubleTouchBottle,20,4,4).subcol(0,3,3); // fixed translation from the palm
                    fingertipPos.push_back(1.0); // homogeneous coordinates

                    if(doubleTouchStep<=1)
                    {
                        Vector ang(3,0.0);
                        igaze -> lookAtAbsAngles(ang);
                    }
                    else if(doubleTouchStep>1 && doubleTouchStep<8)
                    {
                        if(currentTask=="LtoR" || currentTask=="LHtoR") //right to left -> the right index finger will be generating events
                        {
                            iencsR->getEncoders(encsR->data());
                            Vector qR=encsR->subVector(0,6);
                            armR -> setAng(qR*CTRL_DEG2RAD);
                            T = armR -> getH(3+6, true); // torso + up to wrist
                            doubleTouchPos = T * fingertipPos;
                            //optionally, get the finger encoders and get the fingertip position using iKin Finger based on the current joint values
                            //http://wiki.icub.org/iCub/main/dox/html/icub_cartesian_interface.html#sec_cart_tipframe
                            doubleTouchPos.pop_back(); //take out the last dummy value from homogenous form
                        }
                        else if(currentTask=="RtoL" || currentTask=="RHtoL") //left to right -> the left index finger will be generating events
                        {
                            iencsL->getEncoders(encsL->data());
                            Vector qL=encsL->subVector(0,6);
                            armL -> setAng(qL*CTRL_DEG2RAD);
                            T = armL -> getH(3+6, true); // torso + up to wrist
                            doubleTouchPos = T * fingertipPos;
                            //optionally, get the finger encoders and get the fingertip position using iKin Finger based on the current joint values
                            //http://wiki.icub.org/iCub/main/dox/html/icub_cartesian_interface.html#sec_cart_tipframe
                            doubleTouchPos.pop_back(); //take out the last dummy value from homogenous form
                        }
                        else
                        {
                            yError(" [vtWThread] Unknown task received from the double touch thread!");
                        }

                        yDebug("Computing data from the doubleTouch %g\n",getEstUsed());
                        AWPolyElement el2(doubleTouchPos,Time::now());
                        doubleTouchVelEstimate=linEst_doubleTouch->estimate(el2);
                        events.push_back(IncomingEvent(doubleTouchPos,doubleTouchVelEstimate,-1.0,"doubleTouch"));
                        isTarget=true;
                    }
                }
            }
        }
    }

    // Gaze priority: pf3dTracker, then doubleTouch, then optFlow
    // (an all-zero position means "no reading this cycle").
    if (pf3dTrackerPos[0]!=0.0 && pf3dTrackerPos[1]!=0.0 && pf3dTrackerPos[2]!=0.0)
        igaze -> lookAtFixationPoint(pf3dTrackerPos);
    else if (doubleTouchPos[0]!=0.0 && doubleTouchPos[1]!=0.0 && doubleTouchPos[2]!=0.0)
        igaze -> lookAtFixationPoint(doubleTouchPos);
    else if (optFlowPos[0]!=0.0 && optFlowPos[1]!=0.0 && optFlowPos[2]!=0.0)
        igaze -> lookAtFixationPoint(optFlowPos);

    if (isTarget)
    {
        // Publish all events collected this cycle and refresh the timestamp.
        Bottle& eventsBottle = eventsPort.prepare();
        eventsBottle.clear();
        for (size_t i = 0; i < events.size(); i++)
        {
            eventsBottle.addList()= events[i].toBottle();
        }
        eventsPort.write();
        timeNow = yarp::os::Time::now();
        sendGuiTarget();
    }
    else if (yarp::os::Time::now() - timeNow > 1.0)
    {
        // Nothing seen for a second: reset estimators so stale history does
        // not bias the next velocity estimate.
        yDebug("No significant event in the last second. Resetting the velocity estimators..");
        timeNow = yarp::os::Time::now();
        linEst_optFlow -> reset();
        linEst_pf3dTracker -> reset();
        linEst_doubleTouch -> reset();
        linEst_fgtTracker -> reset();
        deleteGuiTarget();
    }
}
/**
 * Open the iKart motor control layer: run the base-class open, view all
 * required motor interfaces on the control-board driver, and read the robot
 * geometry (geom_r, geom_L, g_angle) from the [ROBOT_GEOMETRY] group.
 *
 * @param _options configuration; must contain "local", a [ROBOT_GEOMETRY]
 *                 group with geom_r/geom_L/g_angle, and a [GENERAL] group
 * @return true on success, false on any missing interface or parameter
 */
bool iKart_MotorControl::open(Property &_options)
{
    ctrl_options = _options;
    localName = ctrl_options.find("local").asString();

    //the base class open
    if (!MotorControl::open(_options))
    {
        yError() << "Error in MotorControl::open()";
        return false;
    }

    // open the interfaces for the control boards
    bool ok = true;
    ok = ok & control_board_driver->view(ivel);
    ok = ok & control_board_driver->view(ienc);
    ok = ok & control_board_driver->view(ipwm);
    ok = ok & control_board_driver->view(ipid);
    ok = ok & control_board_driver->view(iamp);
    ok = ok & control_board_driver->view(icmd);
    if(!ok)
    {
        yError("One or more devices has not been viewed, returning\n");
        return false;
    }

    //get robot geometry
    Bottle geometry_group = ctrl_options.findGroup("ROBOT_GEOMETRY");
    if (geometry_group.isNull())
    {
        // FIX: the message previously named iKart_Odometry::open, which is
        // a different class and made the log misleading.
        yError("iKart_MotorControl::open Unable to find ROBOT_GEOMETRY group!");
        return false;
    }
    if (!geometry_group.check("geom_r"))
    {
        yError("Missing param geom_r in [ROBOT_GEOMETRY] group");
        return false;
    }
    if (!geometry_group.check("geom_L"))
    {
        yError("Missing param geom_L in [ROBOT_GEOMETRY] group");
        return false;
    }
    if (!geometry_group.check("g_angle"))
    {
        yError("Missing param g_angle in [ROBOT_GEOMETRY] group");
        return false;
    }
    geom_r = geometry_group.find("geom_r").asDouble();
    geom_L = geometry_group.find("geom_L").asDouble();
    g_angle = geometry_group.find("g_angle").asDouble();

    // [GENERAL] must exist even though nothing is read from it here.
    // (FIX: the unused findGroup("GENERAL") local and the duplicated
    // localName assignment were removed.)
    if (!ctrl_options.check("GENERAL"))
    {
        yError() << "Missing [GENERAL] section";
        return false;
    }

    return true;
}
int utManagerThread::run_with_templateTracker_SFM() { int kalState = -1; switch (stateFlag) { case 0: yDebug("Looking for motion...\n"); timeNow = yarp::os::Time::now(); oldMcutPoss.clear(); stateFlag++; deleteGuiTarget(); break; case 1: // state #01: check the motionCUT and see if there's something interesting. // if so, initialize the tracker, and step up. if (processMotion()) { yDebug("Initializing tracker...\n"); timeNow = yarp::os::Time::now(); initializeTracker(); stateFlag++; } break; case 2: // state #02: read data from the Tracker and use the SFM to retrieve a 3D point. // with that, initialize the kalman filter, and then step up. readFromTracker(); if (getPointFromStereo()) { yDebug("Initializing Kalman filter...\n"); kalThrd -> setKalmanState(KALMAN_INIT); kalThrd -> kalmanInit(SFMPos); stateFlag++; } break; case 3: // state #03: keep reading data from the Tracker and retrieving the 3D point from the SFM // With this info, keep feeding the kalman filter until it thinks that the object is still // tracked. If not, go back from the initial state. printMessage(2,"Reading from tracker and SFM...\n"); readFromTracker(); if (getPointFromStereo()) { kalThrd -> setKalmanInput(SFMPos); } kalThrd -> getKalmanState(kalState); sendGuiTarget(); if (kalState == KALMAN_STOPPED) { yDebug("For some reasons, the kalman filters stopped. Going back to initial state.\n"); stateFlag = 0; } break; default: yError(" utManagerThread should never be here!!!\nState: %d\n",stateFlag); Time::delay(1); break; } return kalState; }
bool ControlThread::threadInit() { //open the joystick port port_joystick_control.open(localName + "/joystick:i"); //try to connect to joystickCtrl output if (rf.check("joystick_connect")) { int joystick_trials = 0; do { yarp::os::Time::delay(1.0); if (yarp::os::Network::connect("/joystickCtrl:o", localName+"/joystick:i")) { yInfo("Joystick has been automatically connected"); break; } else { yWarning("Unable to find the joystick port, retrying (%d/5)...",joystick_trials); joystick_trials++; } if (joystick_trials>=5) { yError("Unable to find the joystick port, giving up"); break; } } while (1); } // open the control board driver yInfo("Opening the motors interface...\n"); int trials = 0; double start_time = yarp::os::Time::now(); Property control_board_options("(device remote_controlboard)"); control_board_options.put("remote", remoteName.c_str()); control_board_options.put("local", localName.c_str()); do { double current_time = yarp::os::Time::now(); //remove previously existing drivers if (control_board_driver) { delete control_board_driver; control_board_driver = 0; } //creates the new device driver control_board_driver = new PolyDriver(control_board_options); bool connected = control_board_driver->isValid(); //check if the driver is connected if (connected) break; //check if the timeout (10s) is expired if (current_time - start_time > 10.0) { yError("It is not possible to instantiate the device driver. 
I tried %d times!", trials); if (control_board_driver) { delete control_board_driver; control_board_driver = 0; } return false; } yarp::os::Time::delay(0.5); trials++; yWarning("Unable to connect the device driver, trying again..."); } while (true); control_board_driver->view(iDir); control_board_driver->view(iVel); control_board_driver->view(iEnc); control_board_driver->view(iPos); control_board_driver->view(iCmd); if (iDir == 0 || iEnc == 0 || iVel == 0 || iCmd == 0) { yError() << "Failed to open interfaces"; return false; } yarp::os::Time::delay(1.0); iEnc->getEncoder(0, &enc_init_elong); iEnc->getEncoder(1, &enc_init_roll); iEnc->getEncoder(2, &enc_init_pitch); iVel ->setRefAcceleration (0,10000000); iVel ->setRefAcceleration (1,10000000); iVel ->setRefAcceleration (2,10000000); yDebug() << "Initial vals" << enc_init_elong << enc_init_roll << enc_init_pitch; return true; }
/*
* Show best augmented so far for the current agent, no matter the instance.
* Queries the ABM for the best-ranked feedback row for img_provider_port
* (restricted to agentName when resume == "agent"), replays it through
* triggerStreaming, then blocks until the stream status returns "none".
*/
bool abmInteraction::showBestOfAllForAgent()
{
    Bottle bResult;
    ostringstream osRequest;

    //main select querry
    osRequest << "SELECT feedback.augmented_port, feedback.original_time, feedback.augmented_time, feedback.value FROM visualdata, feedback WHERE augmented IS NOT NULL AND img_provider_port = '" << img_provider_port << "' AND ";

    //foreign key to join feedback/visualdata
    osRequest << "augmented_port = img_provider_port AND original_time = \"time\" AND feedback.augmented_time = visualdata.augmented_time ";

    if (resume == "agent"){
        // not proposing instance with feedback from the agent already.
        osRequest << "AND agent = '" << agentName << "' ";
    }

    //just the best feedback value
    osRequest << " ORDER BY value DESC LIMIT 1;";

    bResult = iCub->getABMClient()->requestFromString(osRequest.str());

    if (bResult.toString() != "NULL" && bResult.size() > 0)
    {
        //take the data from best
        string augmented_port = bResult.get(0).asList()->get(0).toString();
        string original_time = bResult.get(0).asList()->get(1).toString();
        string augmented_time = bResult.get(0).asList()->get(2).toString();
        int value = atoi(bResult.get(0).asList()->get(3).toString().c_str());
        bResult.clear();

        //find the corresponding instance number
        osRequest.str("");
        osRequest << "SELECT instance FROM visualdata WHERE visualdata.time = '" << original_time << "' AND img_provider_port = '" << augmented_port << "' AND augmented_time = '" << augmented_time << "';";
        bResult = iCub->getABMClient()->requestFromString(osRequest.str());

        if (bResult.toString() != "NULL" && bResult.size() > 0){
            int instance = atoi(bResult.get(0).asList()->get(0).toString().c_str());
            yInfo() << "[showBestOfAll] best feedback : " << " instance = " << instance << " augmented_time = " << augmented_time << " with rank = " << value;

            //show the best with triggerstreaming
            Bottle bRpc, bSubRealtime, bSubAugmentedTimes;
            bRpc.addString("triggerStreaming");
            bRpc.addInt(instance);
            bSubRealtime.addString("realtime");
            bSubRealtime.addInt(1);
            bSubAugmentedTimes.addString("augmentedTimes");
            bSubAugmentedTimes.addString(augmented_time);

            //Ask for showing the current testing augmented + the best one if relevant
            bRpc.addList() = bSubRealtime;
            bRpc.addList() = bSubAugmentedTimes;

            ostringstream osResponse;
            osResponse.str("");
            osResponse << "The current best structure you have ever ranked is shown. The rank is " << value; //<< " for time = " << bestAugmentedTime ;
            iCub->say(osResponse.str().c_str(), false);
            yInfo() << "iCub says : " << osResponse.str();

            iCub->getABMClient()->rpcCommand(bRpc);
        }
        else {
            yError() << " [showBestOfAll] : could not find the instance from the best augmented : something is wrong!!!";
        }
    }
    else {
        // No stored feedback yet for this agent/provider.
        yInfo() << "This is the first time " << agentName << " give feedback!";
    }

    //check that the streaming is finished before going next
    // NOTE(review): polls the ABM every second with no timeout -- if the
    // stream status never becomes "none" this blocks forever; confirm that
    // getStreamStatus is guaranteed to terminate.
    bResult.clear();
    Bottle bRpc;
    bRpc.addString("getStreamStatus");
    do{
        bResult = iCub->getABMClient()->rpcCommand(bRpc);
        yarp::os::Time::delay(1);
    } while (bResult.get(0).asString() != "none");

    return true;
}
// Builds the skin-mesh GUI thread from configuration: opens the raw and
// "virtual" skin input ports, reads the drawing options (size, color,
// calibration, threshold) and parses the [SENSORS] group into sensor[].
SkinMeshThreadPort::SkinMeshThreadPort(Searchable& config,int period) : RateThread(period),mutex(1)
{
    yDebug("SkinMeshThreadPort running at %g ms.",getRate());

    mbSimpleDraw=config.check("light");

    sensorsNum=0;
    for (int t=0; t<MAX_SENSOR_NUM; ++t)
    {
        sensor[t]=NULL;
    }

    // Port names: /skinGui/<robotPart>:i by default, /<name>/<robotPart>:i
    // when --name is given; the virtual port adds a "_virtual" suffix.
    std::string part="/skinGui/";
    std::string part_virtual="";
    if (config.check("name"))
    {
        part ="/";
        part+=config.find("name").asString().c_str();
        part+="/";
    }
    part.append(config.find("robotPart").asString());
    part_virtual = part;
    part_virtual.append("_virtual");
    part.append(":i");
    part_virtual.append(":i");

    skin_port.open(part.c_str());

    // Ideally, we would use a --virtual flag. since this would make the skinmanager xml file unflexible,
    // let's keep the code structure without incurring in any problem whatsoever
    // if (config.check("virtual"))
    if (true)
    {
        skin_port_virtual.open(part_virtual.c_str());
    }

    int width =config.find("width" ).asInt();
    int height=config.find("height").asInt();

    bool useCalibration = config.check("useCalibration");
    if (useCalibration==true)
        yInfo("Using calibrated skin values (0-255)");
    else
        yDebug("Using raw skin values (255-0)");

    // Optional "color" (r g b) list; defaults to red on absence or bad input.
    Bottle *color = config.find("color").asList();
    unsigned char r=255, g=0, b=0;
    if(color)
    {
        if(color->size()<3 || !color->get(0).isInt() || !color->get(1).isInt() || !color->get(2).isInt())
        {
            yError("Error while reading the parameter color: three integer values should be specified (%s).", color->toString().c_str());
        }
        else
        {
            r = color->get(0).asInt();
            g = color->get(1).asInt();
            b = color->get(2).asInt();
            yInfo("Using specified color: %d %d %d", r, g, b);
        }
    }
    else
    {
        yDebug("Using red as default color.");
    }
    defaultColor.push_back(r);
    defaultColor.push_back(g);
    defaultColor.push_back(b);

    skinThreshold = config.check("skinThreshold")?config.find("skinThreshold").asDouble():SKIN_THRESHOLD;
    yDebug("Skin Threshold set to %g", skinThreshold);

    // Each [SENSORS] entry is: type id xc yc th gain lrMirror layoutNum
    yarp::os::Bottle sensorSetConfig=config.findGroup("SENSORS").tail();

    for (int t=0; t<sensorSetConfig.size(); ++t)
    {
        yarp::os::Bottle sensorConfig(sensorSetConfig.get(t).toString());

        std::string type(sensorConfig.get(0).asString());
        if (type=="triangle" || type=="fingertip" || type=="fingertip2L" || type=="fingertip2R" || type=="triangle_10pad" || type=="quad16" || type=="palmR" || type=="palmL")
        {
            int id=sensorConfig.get(1).asInt();
            double xc=sensorConfig.get(2).asDouble();
            double yc=sensorConfig.get(3).asDouble();
            double th=sensorConfig.get(4).asDouble();
            double gain=sensorConfig.get(5).asDouble();
            int lrMirror=sensorConfig.get(6).asInt();
            int layoutNum=sensorConfig.get(7).asInt();

            yDebug("%s %d %f",type.c_str(),id,gain);

            if (id>=0 && id<MAX_SENSOR_NUM)
            {
                if (sensor[id])
                {
                    yError("Triangle %d already exists.",id);
                }
                else
                {
                    // Instantiate the patch matching the declared type.
                    if (type=="triangle")
                    {
                        sensor[id]=new Triangle(xc,yc,th,gain,layoutNum,lrMirror);
                    }
                    else if (type=="triangle_10pad")
                    {
                        sensor[id]=new Triangle_10pad(xc,yc,th,gain,layoutNum,lrMirror);
                    }
                    else if (type=="fingertip")
                    {
                        sensor[id]=new Fingertip(xc,yc,th,gain,layoutNum,lrMirror);
                    }
                    else if (type=="fingertip2L")
                    {
                        sensor[id]=new Fingertip2L(xc,yc,th,gain,layoutNum,lrMirror);
                    }
                    else if (type=="fingertip2R")
                    {
                        sensor[id]=new Fingertip2R(xc,yc,th,gain,layoutNum,lrMirror);
                    }
                    else if (type=="quad16")
                    {
                        sensor[id]=new Quad16(xc,yc,th,gain,layoutNum,lrMirror);
                    }
                    else if (type=="palmR")
                    {
                        sensor[id]=new PalmR(xc,yc,th,gain,layoutNum,lrMirror);
                    }
                    else if (type=="palmL")
                    {
                        sensor[id]=new PalmL(xc,yc,th,gain,layoutNum,lrMirror);
                    }
                    sensor[id]->setCalibrationFlag(useCalibration);
                    ++sensorsNum;
                }
            }
            else
            {
                yWarning(" %d is invalid triangle Id [0:%d].",id, MAX_SENSOR_NUM-1);
            }
        }
        else
        {
            yWarning(" sensor type %s unknown, discarded.",type.c_str());
        }
    }

    // Assign each configured patch a contiguous taxel range.
    int max_tax=0;
    for (int t=0; t<MAX_SENSOR_NUM; ++t)
    {
        if (sensor[t])
        {
            sensor[t]->min_tax=max_tax;
            max_tax = sensor[t]->min_tax+sensor[t]->get_nTaxels();
            sensor[t]->max_tax=max_tax-1;
            sensor[t]->setColor(r, g, b);
        }
        else
        {
            //this deals with the fact that some traingles can be not present,
            //but they anyway broadcast an array of zeros...
            max_tax += 12;
        }
    }

    resize(width,height);
}
/*
* Node to ask Human to give feedback on quality of augmented kinematic structure image.
* Shows the current augmented image (plus the best-ranked one when available),
* asks for a 1-10 spoken score, confirms it, stores it in the ABM and updates
* bestTimeAndRank. Recurses on misrecognition and on "another one?" = yes.
* NOTE(review): recursion depth is unbounded if recognition keeps failing.
*/
void abmInteraction::nodeFeedback(bool tryAgain, pair<string,int> & bestTimeAndRank)
{
    ostringstream osError;          // Error message
    Bottle bOutput;

    Bottle bRecognized, //recceived FROM speech recog with transfer information (1/0 (bAnswer))
        bAnswer, //response from speech recog without transfer information, including raw sentence
        bSemantic, // semantic information of the content of the recognition
        bMessenger; //to be send TO speech recog

    ostringstream osResponse;

    /************************************ Usual feedback : show an image and ask for feedback ************************************/
    if (tryAgain == false){
        yInfo() << " Current time = " << *it_augmentedTime;
        iCub->say("Note this kinematic structure between 1 and 10 please", false);
        yInfo() << " iCub says : Note this kinematic structure between 1 and 10 please";

        //feedback likert 1-5
        /*iCub->say("Note this kinematic structure, Likert 1-5 quality");
        yInfo() << " iCub says : Note this kinematic structure, Likert 1-5 quality" ;*/

        //Preparing bottle to trigger the augmenting remembering
        Bottle bRpc, bSubRealtime, bSubAugmentedTimes;
        bRpc.addString("triggerStreaming");
        bRpc.addInt(rememberedInstance);
        bSubRealtime.addString("realtime");
        bSubRealtime.addInt(1);
        bSubAugmentedTimes.addString("augmentedTimes");
        bSubAugmentedTimes.addString(*it_augmentedTime);

        //If we have a previously best rank
        if (bestTimeAndRank.second != 0)
        {
            osResponse.str("");
            osResponse << "The current best structure is shown at left. The rank is " << bestTimeAndRank.second; //<< " for time = " << bestAugmentedTime ;
            iCub->say(osResponse.str().c_str(), false);
            yInfo() << "iCub says : " << osResponse.str();
            bSubAugmentedTimes.addString(bestTimeAndRank.first);
        }

        //Ask for showing the current testing augmented + the best one if relevant
        bRpc.addList() = bSubRealtime;
        bRpc.addList() = bSubAugmentedTimes;
        iCub->getABMClient()->rpcCommand(bRpc);
    }
    else {
        iCub->say("Can you repeat your feedback please?", false);
        yInfo() << "iCub says : Can you repeat your feedback please?";
    }

    /************************************ Retrieve Human spoken feedback ************************************/
    bRecognized = iCub->getRecogClient()->recogFromGrammarLoop(grammarToString(nameGrammarHumanFeedback));

    if (bRecognized.get(0).asInt() == 0)
    {
        return;
    }

    bAnswer = *bRecognized.get(1).asList();
    // bAnswer is the result of the regognition system (first element is the raw sentence, 2nd is the list of semantic element)

    if (bAnswer.get(0).asString() == "stop")
    {
        osError.str("");
        osError << " | STOP called";
        bOutput.addString(osError.str());
        cout << osError.str() << endl;
    }

    yInfo() << "bRecognized " << bRecognized.toString();
    cout << bRecognized.get(1).toString() << endl;

    string sQuestionKind = bAnswer.get(1).asList()->get(0).toString();

    //feedback 1-10
    if (sQuestionKind == "FEEDBACK") {
        yInfo() << "FEEDBACK received from Human!";
    }
    else {
        // Wrong sentence type: retry with tryAgain set.
        yError() << " The sentence type is not recognized, waiting for FEEDBACK";
        tryAgain = true;
        nodeFeedback(tryAgain, bestTimeAndRank);
        return;
    }

    // semantic is the list of the semantic elements of the sentence except the type ef sentence
    bSemantic = *bAnswer.get(1).asList()->get(1).asList();

    //feedback number 1-10
    string sFeedback10 = bSemantic.check("feedback10", Value("0")).asString();
    int iFeedback10 = atoi(sFeedback10.c_str());

    //feedback likert 1-5 quality
    /*string sFeedback10 = bAnswer.get(1).asList()->get(0).asString() ;
    int iFeedback10 = atoi(sFeedback10.c_str())*2 ;*/

    /************************************ Ask confirmation for the feedback (i.e. if misrecognition) ************************************/
    osResponse.str("");
    osResponse << "So for you, this kinematic structure has a score of " << iFeedback10 << ", right?";
    iCub->say(osResponse.str().c_str(), false);
    yInfo() << "iCub says : " << osResponse.str();

    if (!nodeYesNo())
    {
        // Human rejected the recognized score: ask again.
        osResponse.str("");
        osResponse << "Oups, I am sorry";
        tryAgain = true;
        iCub->say(osResponse.str().c_str(), false);
        yInfo() << "iCub says : " << osResponse.str();
        nodeFeedback(tryAgain, bestTimeAndRank);
        return;
    }
    else {
        tryAgain = false;
    }

    /************************************ put the feedback into ABM, feedback table ************************************/
    list<pair<string, string> > lArgument;
    lArgument.push_back(pair<string, string>("Bob", "agent"));
    lArgument.push_back(pair<string, string>("kinematic structure", "about"));
    iCub->getABMClient()->sendActivity("action", "sentence", "feedback", lArgument, true);

    Bottle bResult;
    ostringstream osRequest;
    //only augmented_time is needed but better clarity for the print
    osRequest << "SELECT instance FROM main WHERE activitytype = 'feedback' ORDER BY \"time\" DESC LIMIT 1 ;";
    bResult = iCub->getABMClient()->requestFromString(osRequest.str().c_str());
    int feedbackInstance = -1;
    feedbackInstance = atoi(bResult.get(0).asList()->get(0).toString().c_str());
    yInfo() << "Feedback instance stored in main (from Bottle) : " << bResult.get(0).asList()->get(0).toString().c_str();
    yInfo() << "Feedback instance stored in main (from feedbackInstance) : " << feedbackInstance;

    //insert the feedback to the SQL database
    insertFeedback(iFeedback10, feedbackInstance);

    /************************************ Update best feedback ************************************/
    if (iFeedback10 > bestTimeAndRank.second)
    {
        bestTimeAndRank.first = *it_augmentedTime;
        bestTimeAndRank.second = iFeedback10;
        osResponse.str("");
        osResponse << "Yes, I have improved my skills : best rank is now " << bestTimeAndRank.second;
        iCub->say(osResponse.str().c_str(), false);
        yInfo() << "iCub says : " << osResponse.str();
    }
    else {
        osResponse.str("");
        osResponse << "Erf, too bad";
        iCub->say(osResponse.str().c_str(), false);
        yInfo() << "iCub says : " << osResponse.str();
    }

    //Check that we still have augmented feedback to do
    if (++it_augmentedTime == vAugmentedTime.end()){
        osResponse.str("");
        osResponse << "I have no more augmented to check, thank you for your feedback";
        iCub->say(osResponse.str().c_str(), false);
        yInfo() << "iCub says : " << osResponse.str();
        return;
    }

    osResponse.str("");
    osResponse << "Another one?";
    iCub->say(osResponse.str().c_str(), false);
    yInfo() << "iCub says : " << osResponse.str();

    if (nodeYesNo())
    {
        nodeFeedback(tryAgain, bestTimeAndRank);
        return;
    }
    else
    {
        osResponse.str("");
        osResponse << "Ok, thanks anyway, bye";
        iCub->say(osResponse.str().c_str(), false);
        yInfo() << "iCub says : " << osResponse.str();

        //set back default value
        bestTimeAndRank.first = "";
        bestTimeAndRank.second = 0;
        return;
    }
}
// RPC handler. Supported commands:
//   set (rememberedInstance int) (img_provider_port string) (agentName string) (resume yes|no|agent)
//   runFeedback   -- runs the whole feedback session, then stops the module
//   help / quit
// Returning false stops the module (quit and runFeedback do this).
bool abmInteraction::respond(const Bottle& bCommand, Bottle& bReply)
{
    string helpMessage = string(getName().c_str()) +
        " commands are: \n" +
        "help \n" +
        "quit \n";

    Bottle bError;
    bReply.clear();

    if (bCommand.get(0) == "set")
    {
        bool changeSomething = false;
        if (bCommand.size() > 1)
        {
            // Each known key is applied only when present and of the right type.
            Value vRememberedInstance = bCommand.find("rememberedInstance");
            if (!vRememberedInstance.isNull() && vRememberedInstance.isInt())
            {
                rememberedInstance = vRememberedInstance.asInt();
                changeSomething = true;
            }

            Value vImgProviderPort = bCommand.find("img_provider_port");
            if (!vImgProviderPort.isNull() && vImgProviderPort.isString())
            {
                img_provider_port = vImgProviderPort.asString();
                changeSomething = true;
            }

            Value vAgentName = bCommand.find("agentName");
            if (!vAgentName.isNull() && vAgentName.isString())
            {
                agentName = vAgentName.asString();
                changeSomething = true;
            }

            //resum : check for augmented_time with empty feedback from the agentName. Another one needed to check empty feedback, no matter the agent?
            Value vResume = bCommand.find("resume");
            if (!vResume.isNull() && vResume.isString())
            {
                if(vResume != "agent" && vResume != "yes" && vResume != "no"){
                    string sError = "[set]: Wrong resume keyWord : should be 1) yes 2) no or 3) agent (for agent specific resume)";
                    yError() << sError;
                    bError.addString(sError);
                    bReply = bError;
                }
                else {
                    resume = vResume.asString() ;
                    changeSomething = true;
                }
            }

            yDebug() << "rememberedInstance: " << rememberedInstance;
            yDebug() << "img_provider_port: " << img_provider_port;
            yDebug() << "agentName: " << agentName;
            yDebug() << "resume: " << resume;

            // NOTE(review): "ack" is appended even when the resume keyword was
            // rejected above -- the reply may then contain both error and ack.
            bReply.addString("ack");
        }
        else {
            string sError = "[set]: Wrong Bottle => set (rememberedInstance int) (img_provider_port string) (agentName string)";
            yError() << sError;
            bError.addString(sError);
            bReply = bError;
        }

        if (!changeSomething)
        {
            string sError = "Nothing has been changed, check the Bottle";
            yError() << sError;
            bError.addString(sError);
            bReply = bError;
        }
    }

    if (bCommand.get(0).asString() == "runFeedback")
    {
        bReply.addString("runFeedback");
        pair <string, int> bestTimeAndRank;
        showBestOfAllForAgent();
        createAugmentedTimeVector(bestTimeAndRank) ;

        if (bestTimeAndRank.second == -1){
            yError() << " Something is wrong with the augmented memories! quit";
            return false;
        }

        nodeFeedback(false, bestTimeAndRank);
        // Session over: returning false stops the module.
        return false;
    }

    if (bCommand.get(0).asString() == "quit") {
        bReply.addString("quitting");
        return false;
    }
    else if (bCommand.get(0).asString() == "help") {
        cout << helpMessage;
        bReply.addString("ok");
    }

    return true;
}
bool SpeechRecognizerModule::updateModule() { cout<<"."; USES_CONVERSION; CSpEvent event; // Process all of the recognition events while (event.GetFrom(m_cpRecoCtxt) == S_OK) { switch (event.eEventId) { case SPEI_SOUND_START: { m_bInSound = TRUE; yInfo() << "Sound in..."; break; } case SPEI_SOUND_END: if (m_bInSound) { m_bInSound = FALSE; if (!m_bGotReco) { // The sound has started and ended, // but the engine has not succeeded in recognizing anything yWarning() << "Chunk of sound detected: Recognition is null"; } m_bGotReco = FALSE; } break; case SPEI_RECOGNITION: // There may be multiple recognition results, so get all of them { m_bGotReco = TRUE; static const WCHAR wszUnrecognized[] = L"<Unrecognized>"; CSpDynamicString dstrText; if (SUCCEEDED(event.RecoResult()->GetText(SP_GETWHOLEPHRASE, SP_GETWHOLEPHRASE, TRUE, &dstrText, NULL))) { SPPHRASE* pPhrase = NULL; bool successGetPhrase = SUCCEEDED(event.RecoResult()->GetPhrase(&pPhrase)); int confidence=pPhrase->Rule.Confidence; string fullSentence = ws2s(dstrText); yInfo() <<"Recognized "<<fullSentence<<" with confidence "<<confidence ; //Send over yarp Bottle bOut; bOut.addString(fullSentence.c_str()); bOut.addInt(confidence); m_portContinuousRecognition.write(bOut); //Treat the semantic if (successGetPhrase) { //Send sound if (m_forwardSound) { yarp::sig::Sound& rawSnd = m_portSound.prepare(); rawSnd = toSound(event.RecoResult()); m_portSound.write(); } //--------------------------------------------------- 1. 1st subBottle : raw Sentence -----------------------------------------------// int wordCount = pPhrase->Rule.ulCountOfElements; string rawPhrase = ""; for(int i=0; i< wordCount; i++){ rawPhrase += ws2s(pPhrase->pElements[i].pszDisplayText) + " "; yDebug() << "word : " << ws2s(pPhrase->pElements[i].pszDisplayText) ; } yInfo() <<"Raw sentence: "<<rawPhrase ; if (&pPhrase->Rule == NULL) { yError() <<"Cannot parse the sentence!"; return true; } //--------------------------------------------------- 2. 
2nd subottle : Word/Role ---------------------------------------------------// Bottle bOutGrammar; bOutGrammar.addString(rawPhrase.c_str()); bOutGrammar.addList()=toBottle(pPhrase,&pPhrase->Rule); yInfo() << "Sending semantic bottle : " << bOutGrammar.toString().c_str() ; m_portContinuousRecognitionGrammar.write(bOutGrammar); ::CoTaskMemFree(pPhrase); } if (m_useTalkBack) say(fullSentence); } } break; } } return true; }
/* Called periodically every getPeriod() seconds.
 * When listening is enabled, runs one blocking recognition with the main
 * grammar, maps the recognized sentence type to a (command, objectType,
 * object) triple and publishes it on both the target and behavior ports. */
bool ears::updateModule()
{
    LockGuard lg(mutex);

    if (bShouldListen)
    {
        yDebug() << "bListen";
        Bottle bRecognized, //recceived FROM speech recog with transfer information (1/0 (bAnswer))
            bAnswer, //response from speech recog without transfer information, including raw sentence
            bSemantic; // semantic information of the content of the recognition

        bRecognized = iCub->getRecogClient()->recogFromGrammarLoop(grammarToString(MainGrammar), 1, true);
        //bShouldListen=true;

        if (bRecognized.get(0).asInt() == 0)
        {
            yWarning() << " error in ears::updateModule | Error in speechRecog";
            return true;
        }

        bAnswer = *bRecognized.get(1).asList();

        if (bAnswer.get(0).asString() == "stop")
        {
            yInfo() << " in abmHandler::node1 | stop called";
            return true;
        }

        // bAnswer is the result of the regognition system (first element is the raw sentence, 2nd is the list of semantic element)
        bSemantic = *(*bAnswer.get(1).asList()).get(1).asList();
        cout << bSemantic.toString() << endl;

        string sObject, sAction;
        string sQuestionKind = bAnswer.get(1).asList()->get(0).toString();
        //string sPredicate = bSemantic.check("predicate", Value("none")).asString();

        // Map the recognized sentence type to a behavior command + target.
        string sObjectType, sCommand;
        if(sQuestionKind == "SENTENCEOBJECT") {
            sObject = bSemantic.check("object", Value("none")).asString();
            sAction = bSemantic.check("predicateObject", Value("none")).asString();
            sCommand = "followingOrder";
            sObjectType = "object";
        }
        else if(sQuestionKind == "SENTENCEBODYPART") {
            sObject = bSemantic.check("bodypart", Value("none")).asString();
            sCommand = "touchingOrder";
            sObjectType = "bodypart";
        }
        else if(sQuestionKind == "SENTENCENARRATIVE") {
            sCommand = "followingOrder";
            sAction = "narrate";
            sObjectType = "";
            sObject = "";
        }
        else{
            // Unknown sentence type: sCommand etc. stay empty but are still sent below.
            yError() << "[ears] Unknown predicate";
        }

        // Publish the target triple (action, type, name) ...
        Bottle &bToTarget = portTarget.prepare();
        bToTarget.clear();
        bToTarget.addString(sAction);
        bToTarget.addString(sObjectType);
        bToTarget.addString(sObject);
        portTarget.write();

        // ... and the behavior trigger.
        Bottle bCondition;
        bCondition.addString(sCommand);
        //bCondition.addString(sAction);
        bCondition.addString(sObjectType);
        bCondition.addString(sObject);
        portToBehavior.write(bCondition);
        yDebug() << "Sending " + bCondition.toString();
    }
    else
    {
        yDebug() << "Not bListen";
    }
    return true;
}
bool Localizer::triangulatePoint(const Vector &pxl, const Vector &pxr, Vector &x) { if ((pxl.length()<2) || (pxr.length()<2)) { yError("Not enough values given for the pixels!"); return false; } if (PrjL && PrjR) { Vector torso=commData->get_torso(); Vector head=commData->get_q(); Vector qL(8); qL[0]=torso[0]; qL[1]=torso[1]; qL[2]=torso[2]; qL[3]=head[0]; qL[4]=head[1]; qL[5]=head[2]; qL[6]=head[3]; qL[7]=head[4]+head[5]/2.0; Vector qR=qL; qR[7]-=head[5]; mutex.lock(); Matrix HL=SE3inv(eyeL->getH(qL)); Matrix HR=SE3inv(eyeR->getH(qR)); mutex.unlock(); Matrix tmp=zeros(3,4); tmp(2,2)=1.0; tmp(0,2)=pxl[0]; tmp(1,2)=pxl[1]; Matrix AL=(*PrjL-tmp)*HL; tmp(0,2)=pxr[0]; tmp(1,2)=pxr[1]; Matrix AR=(*PrjR-tmp)*HR; Matrix A(4,3); Vector b(4); for (int i=0; i<2; i++) { b[i]=-AL(i,3); b[i+2]=-AR(i,3); for (int j=0; j<3; j++) { A(i,j)=AL(i,j); A(i+2,j)=AR(i,j); } } // solve the least-squares problem x=pinv(A)*b; return true; } else { yError("Unspecified projection matrix for at least one camera!"); return false; } }
// Main periodic control step of the gaze controller: reads joint feedback,
// updates the kinematic chains, manages the control on/off switching logic,
// computes neck/eyes velocity commands (with optional IMU stabilization),
// sends them to the robot, publishes debug/state ports and dispatches events.
// Statement order and the mutexCtrl/mutexData/mutexChain locking pattern are
// load-bearing; do not reorder.
void Controller::run()
{
    LockGuard guard(mutexRun);

    // joints sanity check (under the control mutex)
    mutexCtrl.lock();
    bool jointsHealthy=areJointsHealthyAndSet();
    mutexCtrl.unlock();

    if (!jointsHealthy)
    {
        stopControlHelper();
        commData->port_xd->get_new()=false;
    }

    string event="none";

    // verify if any saccade is still underway
    mutexCtrl.lock();
    if (commData->saccadeUnderway && (Time::now()-saccadeStartTime>=Ts))
    {
        bool done;
        posHead->checkMotionDone(eyesJoints.size(),eyesJoints.getFirst(),&done);
        commData->saccadeUnderway=!done;
        if (!commData->saccadeUnderway)
            notifyEvent("saccade-done");
    }
    mutexCtrl.unlock();

    // get data
    double x_stamp;
    Vector xd=commData->get_xd();
    Vector x=commData->get_x(x_stamp);
    qd=commData->get_qd();

    // read feedbacks
    q_stamp=Time::now();
    if (!getFeedback(fbTorso,fbHead,drvTorso,drvHead,commData,&q_stamp))
    {
        yError("Communication timeout detected!");
        notifyEvent("comm-timeout");
        suspend();
        return;
    }

    // update pose information
    {
        mutexChain.lock();

        for (int i=0; i<nJointsTorso; i++)
        {
            chainNeck->setAng(i,fbTorso[i]);
            chainEyeL->setAng(i,fbTorso[i]);
            chainEyeR->setAng(i,fbTorso[i]);
        }
        for (int i=0; i<3; i++)
        {
            chainNeck->setAng(nJointsTorso+i,fbHead[i]);
            chainEyeL->setAng(nJointsTorso+i,fbHead[i]);
            chainEyeR->setAng(nJointsTorso+i,fbHead[i]);
        }
        // eyes: shared tilt; pan is version +/- half the vergence (fbHead[5])
        chainEyeL->setAng(nJointsTorso+3,fbHead[3]);
        chainEyeL->setAng(nJointsTorso+4,fbHead[4]+fbHead[5]/2.0);
        chainEyeR->setAng(nJointsTorso+3,fbHead[3]);
        chainEyeR->setAng(nJointsTorso+4,fbHead[4]-fbHead[5]/2.0);

        txInfo_pose.update(q_stamp);

        mutexChain.unlock();
    }

    IntState->reset(fbHead);

    fbNeck=fbHead.subVector(0,2);
    fbEyes=fbHead.subVector(3,5);

    // residual errors feeding the switch-off condition; the condition is
    // disabled for a few slots right after control activation
    double errNeck=norm(qd.subVector(0,2)-fbNeck);
    double errEyes=norm(qd.subVector(3,qd.length()-1)-fbEyes);
    bool swOffCond=(Time::now()-ctrlActiveRisingEdgeTime<GAZECTRL_SWOFFCOND_DISABLESLOT*(0.001*getRate())) ?
                   false :
                   (!commData->saccadeUnderway &&
                    (errNeck<GAZECTRL_MOTIONDONE_NECK_QTHRES*CTRL_DEG2RAD) &&
                    (errEyes<GAZECTRL_MOTIONDONE_EYES_QTHRES*CTRL_DEG2RAD));

    // verify control switching conditions
    if (commData->ctrlActive)
    {
        // manage new target while controller is active
        if (commData->port_xd->get_new())
        {
            reliableGyro=true;

            event="motion-onset";

            mutexData.lock();
            motionOngoingEventsCurrent=motionOngoingEvents;
            mutexData.unlock();

            ctrlActiveRisingEdgeTime=Time::now();
            commData->port_xd->get_new()=false;
        }
        // switch-off condition
        else if (swOffCond)
        {
            if (commData->trackingModeOn)
            {
                if (!motionDone)
                    event="motion-done";
                motionDone=true;
            }
            else
            {
                event="motion-done";

                mutexCtrl.lock();
                stopLimb(false);
                mutexCtrl.unlock();
            }
        }
    }
    else if (jointsHealthy)
    {
        // inhibition is cleared upon new target arrival
        if (ctrlInhibited)
            ctrlInhibited=!commData->port_xd->get_new();

        // switch-on condition
        commData->ctrlActive=commData->port_xd->get_new() ||
                             (!ctrlInhibited && (commData->neckSolveCnt>0));

        // reset controllers
        if (commData->ctrlActive)
        {
            ctrlActiveRisingEdgeTime=Time::now();
            commData->port_xd->get_new()=false;
            commData->neckSolveCnt=0;

            if (stabilizeGaze)
            {
                mjCtrlNeck->reset(vNeck);
                mjCtrlEyes->reset(vEyes);
            }
            else
            {
                mjCtrlNeck->reset(zeros(fbNeck.length()));
                mjCtrlEyes->reset(zeros(fbEyes.length()));
                IntStabilizer->reset(zeros(vNeck.length()));
                IntPlan->reset(fbNeck);
            }

            reliableGyro=true;

            event="motion-onset";

            mutexData.lock();
            motionOngoingEventsCurrent=motionOngoingEvents;
            mutexData.unlock();
        }
    }

    // on motion onset: reconfigure joint control modes and latch the start pose
    mutexCtrl.lock();
    if (event=="motion-onset")
    {
        setJointsCtrlMode();
        jointsToSet.clear();
        motionDone=false;
        q0=fbHead;
    }
    mutexCtrl.unlock();

    if (commData->trackingModeOn || stabilizeGaze)
    {
        mutexCtrl.lock();
        setJointsCtrlMode();
        mutexCtrl.unlock();
    }

    qdNeck=qd.subVector(0,2);
    qdEyes=qd.subVector(3,5);

    // compute current point [%] in the path
    double dist=norm(qd-q0);
    pathPerc=(dist>IKIN_ALMOST_ZERO)?norm(fbHead-q0)/dist:1.0;
    pathPerc=sat(pathPerc,0.0,1.0);

    if (commData->ctrlActive)
    {
        // control
        vNeck=mjCtrlNeck->computeCmd(neckTime,qdNeck-fbNeck);

        if (unplugCtrlEyes)
        {
            if (Time::now()-saccadeStartTime>=Ts)
                vEyes=commData->get_counterv();
        }
        else
            vEyes=mjCtrlEyes->computeCmd(eyesTime,qdEyes-fbEyes)+commData->get_counterv();

        // stabilization
        if (commData->stabilizationOn)
        {
            Vector gyro=CTRL_DEG2RAD*commData->get_imu().subVector(6,8);
            Vector dx=computedxFP(imu->getH(cat(fbTorso,fbNeck)),zeros(fbNeck.length()),gyro,x);
            Vector imuNeck=computeNeckVelFromdxFP(x,dx);

            if (reliableGyro)
            {
                vNeck=commData->stabilizationGain*IntStabilizer->integrate(vNeck-imuNeck);

                // only if the speed is low and we are close to the target
                if ((norm(vNeck)<commData->gyro_noise_threshold) && (pathPerc>0.9))
                    reliableGyro=false;
            }
            // hysteresis
            else if ((norm(imuNeck)>1.5*commData->gyro_noise_threshold) || (pathPerc<0.9))
            {
                IntStabilizer->reset(zeros(vNeck.length()));
                reliableGyro=true;
            }
        }

        IntPlan->integrate(vNeck);
    }
    else if (stabilizeGaze)
    {
        // gaze stabilization only: counteract the measured head motion
        Vector gyro=CTRL_DEG2RAD*commData->get_imu().subVector(6,8);
        Vector dx=computedxFP(imu->getH(cat(fbTorso,fbNeck)),zeros(fbNeck.length()),gyro,x);
        Vector imuNeck=computeNeckVelFromdxFP(x,dx);

        vNeck=commData->stabilizationGain*IntStabilizer->integrate(-1.0*imuNeck);
        vEyes=computeEyesVelFromdxFP(dx);

        IntPlan->integrate(vNeck);
    }
    else
    {
        vNeck=0.0;
        vEyes=0.0;
    }

    v.setSubvector(0,vNeck);
    v.setSubvector(3,vEyes);

    // apply bang-bang just in case to compensate
    // for unachievable low velocities
    for (size_t i=0; i<v.length(); i++)
        if ((v[i]!=0.0) && (v[i]>-min_abs_vel) && (v[i]<min_abs_vel))
            v[i]=yarp::math::sign(qd[i]-fbHead[i])*min_abs_vel;

    // convert to degrees
    mutexData.lock();
    qddeg=CTRL_RAD2DEG*qd;
    qdeg =CTRL_RAD2DEG*fbHead;
    vdeg =CTRL_RAD2DEG*v;
    mutexData.unlock();

    // send commands to the robot
    if (commData->ctrlActive || stabilizeGaze)
    {
        mutexCtrl.lock();

        Vector posdeg;
        if (commData->neckPosCtrlOn)
        {
            // neck in position mode, eyes in velocity mode
            posdeg=(CTRL_RAD2DEG)*IntPlan->get();
            posNeck->setPositions(neckJoints.size(),neckJoints.getFirst(),posdeg.data());
            velHead->velocityMove(eyesJoints.size(),eyesJoints.getFirst(),vdeg.subVector(3,5).data());
        }
        else
            velHead->velocityMove(vdeg.data());

        // optional per-joint debug stream (pos_i/vel_i pairs)
        if (commData->debugInfoEnabled && (port_debug.getOutputCount()>0))
        {
            Bottle info;
            int j=0;

            if (commData->neckPosCtrlOn)
            {
                for (size_t i=0; i<posdeg.length(); i++)
                {
                    ostringstream ss;
                    ss<<"pos_"<<i;
                    info.addString(ss.str().c_str());
                    info.addDouble(posdeg[i]);
                }

                j=eyesJoints[0];
            }

            for (size_t i=j; i<vdeg.length(); i++)
            {
                ostringstream ss;
                ss<<"vel_"<<i;
                info.addString(ss.str().c_str());
                info.addDouble(vdeg[i]);
            }

            port_debug.prepare()=info;
            txInfo_debug.update(q_stamp);
            port_debug.setEnvelope(txInfo_debug);
            port_debug.writeStrict();
        }

        mutexCtrl.unlock();
    }

    // print info
    if (commData->verbose)
        printIter(xd,x,qddeg,qdeg,vdeg,1.0);

    // send x,q through YARP ports (q = torso feedback + head, in degrees)
    Vector q(nJointsTorso+nJointsHead);
    int j;
    for (j=0; j<nJointsTorso; j++)
        q[j]=CTRL_RAD2DEG*fbTorso[j];
    for (; (size_t)j<q.length(); j++)
        q[j]=qdeg[j-nJointsTorso];

    txInfo_x.update(x_stamp);
    if (port_x.getOutputCount()>0)
    {
        port_x.prepare()=x;
        port_x.setEnvelope(txInfo_x);
        port_x.write();
    }

    txInfo_q.update(q_stamp);
    if (port_q.getOutputCount()>0)
    {
        port_q.prepare()=q;
        port_q.setEnvelope(txInfo_q);
        port_q.write();
    }

    // event dispatching
    if (event=="motion-onset")
        notifyEvent(event);

    motionOngoingEventsHandling();

    if (event=="motion-done")
    {
        motionOngoingEventsFlush();
        notifyEvent(event);
    }

    // update joints angles
    fbHead=IntState->integrate(v);
    commData->set_q(fbHead);
    commData->set_torso(fbTorso);
    commData->set_v(v);
}
bool ShmemOutputStreamImpl::Resize(int newsize) { ++m_ResizeNum; //printf("output stream resize %d to %d\n", m_ResizeNum, newsize); //fflush(stdout); ACE_Shared_Memory* pNewMap; m_pHeader->resize = true; m_pHeader->newsize = newsize; #ifdef ACE_LACKS_SYSV_SHMEM char file_path[1024]; if (ACE::get_temp_dir(file_path, 1024) == -1) { yError("ShmemHybridStream: no temp directory found."); return false; } char file_name[1024]; sprintf(file_name, "%sSHMEM_FILE_%d_%d", file_path, m_Port, m_ResizeNum); pNewMap = new ACE_Shared_Memory_MM(file_name, //const ACE_TCHAR *filename, newsize + sizeof(ShmemHeader_t), //int len = -1, O_RDWR | O_CREAT, //int flags = O_RDWR | O_CREAT, ACE_DEFAULT_FILE_PERMS, //int mode = ACE_DEFAULT_FILE_PERMS, PROT_RDWR, //int prot = PROT_RDWR, ACE_MAP_SHARED); //int share = ACE_MAP_PRIVATE, #else int shmemkey = (m_ResizeNum << 16) + m_Port; pNewMap = new ACE_Shared_Memory_SV(shmemkey, newsize + sizeof(ShmemHeader_t), ACE_Shared_Memory_SV::ACE_CREATE); #endif if (!pNewMap) { yError("ShmemOutputStream can't create shared memory"); return false; } ShmemHeader_t* pNewHeader = (ShmemHeader_t*)pNewMap->malloc(); char* pNewData = (char*)(pNewHeader + 1); pNewHeader->size = newsize; pNewHeader->resize = false; pNewHeader->close = m_pHeader->close; pNewHeader->tail = 0; pNewHeader->head = pNewHeader->avail = m_pHeader->avail; pNewHeader->waiting = m_pHeader->waiting; if (m_pHeader->avail) { // one or two blocks in circular queue? if (m_pHeader->tail < m_pHeader->head) { memcpy(pNewData, m_pData + m_pHeader->tail, m_pHeader->avail); } else { int firstchunk = m_pHeader->size - m_pHeader->tail; memcpy(pNewData, m_pData + m_pHeader->tail, firstchunk); memcpy(pNewData + firstchunk, m_pData, m_pHeader->head); } } m_pMap->close(); delete m_pMap; m_pMap = pNewMap; m_pHeader = pNewHeader; m_pData = pNewData; return true; }
// Thread initialization: opens the module's YARP ports, connects them to the
// external tracker/GUI modules, acquires the gaze controller and (unless the
// "noDoubleTouch" option is present) the encoder interfaces of both arms.
// Returns false when a mandatory device (gaze controller, arm PolyDriver or
// its encoder view) cannot be opened.
bool vtWThread::threadInit()
{
    // Input ports fed by the external perception modules.
    optFlowPort.open(("/"+name+"/optFlow:i").c_str());
    pf3dTrackerPort.open(("/"+name+"/pf3dTracker:i").c_str());
    doubleTouchPort.open(("/"+name+"/doubleTouch:i").c_str());
    fgtTrackerPort.open(("/"+name+"/fingertipTracker:i").c_str());

    // Output ports (iCubGui objects, events, depth2kin queries).
    outPortGui.open(("/"+name+"/gui:o").c_str());
    eventsPort.open(("/"+name+"/events:o").c_str());
    depth2kinPort.open(("/"+name+"/depth2kin:o").c_str());

    // Best-effort connections: return values are deliberately ignored so the
    // thread can start even when some peer modules are not up yet.
    // NOTE(review): ultimateTracker events are routed into optFlow:i --
    // presumably same payload format, but worth confirming.
    Network::connect("/ultimateTracker/Manager/events:o",("/"+name+"/optFlow:i").c_str());
    Network::connect("/pf3dTracker/data:o",("/"+name+"/pf3dTracker:i").c_str());
    Network::connect("/doubleTouch/status:o",("/"+name+"/doubleTouch:i").c_str());
    Network::connect("/fingertipTracker/out:o",("/"+name+"/fingertipTracker:i").c_str());
    Network::connect(("/"+name+"/events:o").c_str(),"/visuoTactileRF/events:i");
    Network::connect(("/"+name+"/gui:o").c_str(),"/iCubGui/objects");

    // Gaze controller client (mandatory).
    Property OptGaze;
    OptGaze.put("device","gazecontrollerclient");
    OptGaze.put("remote","/iKinGazeCtrl");
    OptGaze.put("local",("/"+name+"/gaze").c_str());

    if ((!ddG.open(OptGaze)) || (!ddG.view(igaze))){
        yError(" could not open the Gaze Controller!");
        return false;
    }

    // Save the controller context so it can be restored on shutdown, then
    // configure the gaze behavior used by this module.
    igaze -> storeContext(&contextGaze);
    igaze -> setSaccadesMode(false);
    igaze -> setNeckTrajTime(0.75);
    igaze -> setEyesTrajTime(0.5);

    /**************************/
    if (!rf->check("noDoubleTouch"))
    {
        // Remote control board of the right arm; only encoders are viewed.
        Property OptR;
        OptR.put("robot",  robot.c_str());
        OptR.put("part",   "right_arm");
        OptR.put("device", "remote_controlboard");
        OptR.put("remote",("/"+robot+"/right_arm").c_str());
        OptR.put("local", ("/"+name +"/right_arm").c_str());

        if (!ddR.open(OptR))
        {
            yError(" could not open right_arm PolyDriver!");
            return false;
        }

        bool okR = 1;
        if (ddR.isValid())
        {
            okR = okR && ddR.view(iencsR);
        }
        if (!okR)
        {
            yError(" Problems acquiring right_arm interfaces!!!!");
            return false;
        }
        iencsR->getAxes(&jntsR);
        encsR = new yarp::sig::Vector(jntsR,0.0);

        // Remote control board of the left arm; only encoders are viewed.
        Property OptL;
        OptL.put("robot",  robot.c_str());
        OptL.put("part",   "left_arm");
        OptL.put("device", "remote_controlboard");
        OptL.put("remote",("/"+robot+"/left_arm").c_str());
        OptL.put("local", ("/"+name +"/left_arm").c_str());

        if (!ddL.open(OptL))
        {
            yError(" could not open left_arm PolyDriver!");
            return false;
        }

        bool okL = 1;
        if (ddL.isValid())
        {
            okL = okL && ddL.view(iencsL);
        }
        if (!okL)
        {
            yError(" Problems acquiring left_arm interfaces!!!!");
            return false;
        }
        iencsL->getAxes(&jntsL);
        encsL = new yarp::sig::Vector(jntsL,0.0);
    }

    // One adaptive-window linear estimator per input source (ctor args:
    // presumably max window length 16 and noise threshold 0.05 -- see
    // AWLinEstimator docs).
    linEst_optFlow     = new AWLinEstimator(16,0.05);
    linEst_pf3dTracker = new AWLinEstimator(16,0.05);
    linEst_doubleTouch = new AWLinEstimator(16,0.05);
    linEst_fgtTracker  = new AWLinEstimator(16,0.05);

    timeNow = yarp::os::Time::now();

    return true;
}
bool ShmemOutputStreamImpl::open(int port, int size) { m_pAccessMutex = m_pWaitDataMutex = nullptr; m_pMap = nullptr; m_pData = nullptr; m_pHeader = nullptr; m_ResizeNum = 0; m_Port = port; char obj_name[1024]; char temp_dir_path[1024]; if (ACE::get_temp_dir(temp_dir_path, 1024) == -1) { yError("ShmemHybridStream: no temp directory found."); return false; } #ifdef ACE_LACKS_SYSV_SHMEM sprintf(obj_name, "%sSHMEM_FILE_%d_0", temp_dir_path, port); m_pMap = new ACE_Shared_Memory_MM(obj_name, //const ACE_TCHAR *filename, size + sizeof(ShmemHeader_t), //int len = -1, O_RDWR | O_CREAT, //int flags = O_RDWR | O_CREAT, ACE_DEFAULT_FILE_PERMS, //int mode = ACE_DEFAULT_FILE_PERMS, PROT_RDWR, //int prot = PROT_RDWR, ACE_MAP_SHARED); //int share = ACE_MAP_PRIVATE, #else m_pMap = new ACE_Shared_Memory_SV(port, size + sizeof(ShmemHeader_t), ACE_Shared_Memory_SV::ACE_CREATE); #endif m_pHeader = (ShmemHeader_t*)m_pMap->malloc(); m_pData = (char*)(m_pHeader + 1); #ifdef _ACE_USE_SV_SEM sprintf(obj_name, "%sSHMEM_ACCESS_MUTEX_%d", temp_dir_path, port); m_pAccessMutex = new ACE_Mutex(USYNC_PROCESS, obj_name); sprintf(obj_name, "%sSHMEM_WAITDATA_MUTEX_%d", temp_dir_path, port); m_pWaitDataMutex = new ACE_Mutex(USYNC_PROCESS, obj_name); #else sprintf(obj_name, "SHMEM_ACCESS_MUTEX_%d", port); m_pAccessMutex = new ACE_Process_Mutex(obj_name); sprintf(obj_name, "SHMEM_WAITDATA_MUTEX_%d", port); m_pWaitDataMutex = new ACE_Process_Mutex(obj_name); #endif m_pAccessMutex->acquire(); m_pHeader->resize = false; m_pHeader->close = false; m_pHeader->avail = 0; m_pHeader->head = 0; m_pHeader->size = size; m_pHeader->tail = 0; m_pHeader->waiting = 0; m_pAccessMutex->release(); m_bOpen = true; return true; }
void run() { mutex.wait(); double current_time = yarp::os::Time::now(); if (actions.current_status==ACTION_IDLE) { // do nothing } else if (actions.current_status == ACTION_STOP) { int nj = actions.action_vector[0].get_n_joints(); actions.current_status = ACTION_IDLE; } else if (actions.current_status == ACTION_RESET) { int nj = actions.action_vector[0].get_n_joints(); for (int j = 0; j < nj; j++) { driver->icmd_ll->setControlMode(j, VOCAB_CM_POSITION); } actions.current_status = ACTION_IDLE; } else if (actions.current_status == ACTION_RUNNING) { size_t last_action = actions.action_vector.size(); if (last_action == 0) { yError("sequence empty!"); actions.current_status=ACTION_RESET; return; } //if it's not the last action if (actions.current_action < last_action-1) { //if enough time is passed from the previous action //double duration = actions.action_vector[actions.current_action+1].time - // actions.action_vector[actions.current_action].time; double elapsed_time = actions.action_vector[actions.current_action].time; if (current_time-start_time > elapsed_time) { //printf("%d %+6.6f \n", actions.current_action, current_time-start_time); //last_time = current_time; actions.current_action++; compute_and_send_command(actions.current_action); yDebug("Executing action: %4d/%4d", actions.current_action , last_action); //printf("EXECUTING %d, elapsed_time:%.5f requested_time:%.5f\n", actions.current_action, current_time-last_time, duration); } else { //printf("WAITING %d, elapsed_time:%.5f requested_time:%.5f\n", actions.current_action, current_time-last_time, duration); } } else { if (actions.forever==false) { yInfo("sequence complete"); actions.current_status=ACTION_RESET; } else { yInfo("sequence complete, restarting"); actions.current_action=0; start_time = yarp::os::Time::now(); } } } else if (actions.current_status==ACTION_START) { if (actions.action_vector.size()>0) { double *ll = actions.action_vector[0].q_joints; int nj = actions.action_vector[0].get_n_joints(); 
for (int j = 0; j < nj; j++) { driver->icmd_ll->setControlMode(j, VOCAB_CM_POSITION); } yarp::os::Time::delay(0.1); for (int j = 0; j < nj; j++) { driver->ipos_ll->positionMove(j, ll[j]); } yInfo() << "going to to home position"; double enc[50]; int loops = 100; bool check = true; do { check = true; for (int j = 0; j < nj; j++) { driver->ienc_ll->getEncoder(j, &enc[j]); double err = fabs(enc[j] - ll[j]); check &= (err < 2.0); } yarp::os::Time::delay(0.1); loops--; } while (!check && loops>0); if (check) { yInfo() << "done"; for (int j = 0; j <nj; j++) { driver->icmd_ll->setControlMode(j, VOCAB_CM_POSITION_DIRECT); } yarp::os::Time::delay(0.1); compute_and_send_command(0); actions.current_status = ACTION_RUNNING; start_time = yarp::os::Time::now(); } else { yError() << "unable to reach start position!"; if (0) { //very strict behavior! if your are controlling fingers, you will probably end here actions.current_status = ACTION_STOP; } else { for (int j = 0; j <nj; j++) { driver->icmd_ll->setControlMode(j, VOCAB_CM_POSITION_DIRECT); } yarp::os::Time::delay(0.1); compute_and_send_command(0); actions.current_status = ACTION_RUNNING; start_time = yarp::os::Time::now(); } } } else { yWarning("no sequence in memory"); actions.current_status=ACTION_STOP; } } else { yError() << "unknown current_status"; } mutex.post(); }
int getDataToDump(Property p, std::string *listOfData, int n, bool *needDebug) { std::string availableDataToDump[NUMBER_OF_AVAILABLE_STANDARD_DATA_TO_DUMP]; std::string availableDebugDataToDump[NUMBER_OF_AVAILABLE_DEBUG_DATA_TO_DUMP]; int j; // standard availableDataToDump[0] = ConstString("getEncoders"); availableDataToDump[1] = ConstString("getEncoderSpeeds"); availableDataToDump[2] = ConstString("getEncoderAccelerations"); availableDataToDump[3] = ConstString("getPositionErrors"); availableDataToDump[4] = ConstString("getOutputs"); availableDataToDump[5] = ConstString("getCurrents"); availableDataToDump[6] = ConstString("getTorques"); availableDataToDump[7] = ConstString("getTorqueErrors"); availableDataToDump[8] = ConstString("getPosPidReferences"); availableDataToDump[9] = ConstString("getTrqPidReferences"); availableDataToDump[10] = ConstString("getControlModes"); availableDataToDump[11] = ConstString("getInteractionModes"); availableDataToDump[12] = ConstString("getMotorEncoders"); availableDataToDump[13] = ConstString("getMotorSpeeds"); availableDataToDump[14] = ConstString("getMotorAccelerations"); availableDataToDump[15] = ConstString("getTemperature"); // debug availableDebugDataToDump[0] = ConstString("getRotorPositions"); availableDebugDataToDump[1] = ConstString("getRotorSpeeds"); availableDebugDataToDump[2] = ConstString("getRotorAccelerations"); if (!p.check("dataToDump")) { yError("Missing option 'dataToDump' in given config file\n"); return 0; } Value& list = p.find("dataToDump"); Bottle *pList = list.asList(); for (int i = 0; i < n; i++) { listOfData[i] = pList->get(i).toString(); // check if the requested data is a standard one for(j = 0; j< NUMBER_OF_AVAILABLE_STANDARD_DATA_TO_DUMP; j++) { if(listOfData[i] == (availableDataToDump[j])) break; } // check if the requested data is a debug one for(j = 0; j< NUMBER_OF_AVAILABLE_DEBUG_DATA_TO_DUMP; j++) { if(listOfData[i] == (availableDebugDataToDump[j])) { *needDebug = true; break; } } // if not 
found if(j == (NUMBER_OF_AVAILABLE_STANDARD_DATA_TO_DUMP + NUMBER_OF_AVAILABLE_DEBUG_DATA_TO_DUMP)) { yError("Illegal values for 'dataToDump': %s does not exist!\n",listOfData[i].c_str()); return 0; } } return 1; }
// RPC handler: interprets the first element of `command` as a keyword
// (help/start/stop/reset/clear/add/load/forever/list) and drives the
// sequencer accordingly. Replies "ack" on success, "nack" on an empty or
// unknown command (returning false in that case). The shared action list is
// protected by w_thread.mutex for the whole duration of the call.
virtual bool respond(const Bottle &command, Bottle &reply)
{
    bool ret=true;
    this->w_thread.mutex.wait();

    if (command.size()!=0)
    {
        string cmdstring = command.get(0).asString().c_str();
        {
            if  (cmdstring == "help")
            {
                // Usage summary, printed on the console and in the reply.
                cout << "Available commands:"          << endl;
                cout << "start" << endl;
                cout << "stop"  << endl;
                cout << "reset" << endl;
                cout << "clear" << endl;
                cout << "add"   << endl;
                cout << "load"  << endl;
                cout << "forever"  << endl;
                cout << "list"  << endl;
                reply.addVocab(Vocab::encode("many"));
                reply.addVocab(Vocab::encode("ack"));
                reply.addString("Available commands:");
                reply.addString("start");
                reply.addString("stop");
                reply.addString("reset");
                reply.addString("clear");
                reply.addString("add");
                reply.addString("load");
                reply.addString("forever");
                reply.addString("list");
            }
            else if  (cmdstring == "start")
            {
                // One-shot playback: ACTION_START (go to the first pose
                // first) only when the sequence is at its beginning,
                // otherwise resume from the current action.
                if (this->w_thread.actions.current_action == 0)
                    this->w_thread.actions.current_status = ACTION_START;
                else
                    this->w_thread.actions.current_status = ACTION_RUNNING;
                this->w_thread.actions.forever = false;
                this->b_thread.attachActions(&w_thread.actions);
                if (this->b_thread.isRunning()==false)
                    b_thread.start();
                reply.addVocab(Vocab::encode("ack"));
            }
            else if  (cmdstring == "forever")
            {
                // Looping playback (run() restarts the sequence when done).
                if (this->w_thread.actions.current_action == 0)
                    this->w_thread.actions.current_status = ACTION_START;
                else
                    this->w_thread.actions.current_status = ACTION_RUNNING;
                w_thread.actions.forever = true;
                reply.addVocab(Vocab::encode("ack"));
            }
            else if  (cmdstring == "stop")
            {
                this->w_thread.actions.current_status = ACTION_STOP;
                if (this->b_thread.isRunning()==true)
                    b_thread.askToStop();
                reply.addVocab(Vocab::encode("ack"));
            }
            else if  (cmdstring == "clear")
            {
                this->w_thread.actions.clear();
                reply.addVocab(Vocab::encode("ack"));
            }
            else if  (cmdstring == "add")
            {
                // Append a single action parsed from the command's second
                // element.
                if(!w_thread.actions.parseCommandLine(command.get(1).asString().c_str(), -1, robot.n_joints))
                {
                    yError() << "Unable to parse command";
                    reply.addVocab(Vocab::encode("ERROR Unable to parse file"));
                }
                else
                {
                    yInfo() << "Command added";
                    reply.addVocab(Vocab::encode("ack"));
                }
            }
            else if  (cmdstring == "load")
            {
                // Load a whole sequence from the file named in the second
                // element.
                string filename = command.get(1).asString().c_str();
                if (!w_thread.actions.openFile(filename, robot.n_joints))
                {
                    yError() << "Unable to parse file";
                    reply.addVocab(Vocab::encode("ERROR Unable to parse file"));
                }
                else
                {
                    yInfo() << "File opened";
                    reply.addVocab(Vocab::encode("ack"));
                }
            }
            else if  (cmdstring == "reset")
            {
                // Rewind to the first action and stop the secondary thread.
                this->w_thread.actions.current_status = ACTION_RESET;
                this->w_thread.actions.current_action = 0;
                if (this->b_thread.isRunning()==true)
                    b_thread.askToStop();
                reply.addVocab(Vocab::encode("ack"));
            }
            else if  (cmdstring == "list")
            {
                this->w_thread.actions.print();
                reply.addVocab(Vocab::encode("ack"));
            }
            else
            {
                reply.addVocab(Vocab::encode("nack"));
                ret = false;
            }
        }
    }
    else
    {
        reply.addVocab(Vocab::encode("nack"));
        ret = false;
    }
    this->w_thread.mutex.post();

    return ret;
}
virtual bool open(Searchable &s) { Time::turboBoost(); // get command line options options.fromString(s.toString()); if (!options.check("robot") || !options.check("part")) { printf("Missing either --robot or --part options. Quitting!\n"); return false; } std::string dumpername; // get dumepr name if (options.check("name")) { dumpername = options.find("name").asString(); dumpername += "/"; } else { dumpername = "/controlBoardDumper/"; } Value& robot = options.find("robot"); Value& part = options.find("part"); configFileRobotPart = "config_"; configFileRobotPart = configFileRobotPart + robot.asString(); configFileRobotPart = configFileRobotPart + "_"; configFileRobotPart = configFileRobotPart + part.asString(); configFileRobotPart = configFileRobotPart + ".ini"; //get the joints to be dumped getRate(options, rate); if (getNumberUsedJoints(options, nJoints) == 0) return false; thetaMap = new int[nJoints]; if (getUsedJointsMap(options, nJoints, thetaMap) == 0) return false; //get the type of data to be dumped if (getNumberDataToDump(options, nData) == 0) return false; dataToDump = new std::string[nData]; if (getDataToDump(options, dataToDump, nData, &useDebugClient) == 0) return false; // Printing configuration to the user yInfo("Running with the following configuration:\n"); yInfo("Selected rate is: %d\n", rate); yInfo("Data selected to be dumped are:\n"); for (int i = 0; i < nData; i++) yInfo("\t%s \n", dataToDump[i].c_str()); //open remote control board ddBoardOptions.put("device", "remote_controlboard"); ddDebugOptions.put("device", "debugInterfaceClient"); ConstString localPortName = name; ConstString localDebugPortName = name; localPortName = localPortName + dumpername.c_str(); localDebugPortName = localPortName + "debug/"; //localPortName = localPortName + robot.asString(); localPortName = localPortName + part.asString(); localDebugPortName = localDebugPortName + part.asString(); ddBoardOptions.put("local", localPortName.c_str()); ddDebugOptions.put("local", 
localDebugPortName.c_str()); ConstString remotePortName = "/"; ConstString remoteDebugPortName; remotePortName = remotePortName + robot.asString(); remotePortName = remotePortName + "/"; remotePortName = remotePortName + part.asString(); ddBoardOptions.put("remote", remotePortName.c_str()); remoteDebugPortName = remotePortName + "/debug"; ddDebugOptions.put("remote", remoteDebugPortName.c_str()); // create a device ddBoard.open(ddBoardOptions); if (!ddBoard.isValid()) { printf("Device not available.\n"); Network::fini(); return false; } if (useDebugClient ) { ddDebug.open(ddDebugOptions); if (!ddDebug.isValid()) { yError("\n-----------------------------------------\n"); yError("Debug Interface is mandatory to run this module with the '--dataToDumpAll' option or to dump any of the getRotorxxx data.\n"); yError("Please Verify the following 2 conditions are satisfied:\n\n"); yError("1) Check 'debugInterfaceClient' is available using 'yarpdev --list' command\n"); // yError("%s", Drivers::factory().toString().c_str()); std::string deviceList, myDev; deviceList.clear(); deviceList.append(Drivers::factory().toString().c_str()); myDev = "debugInterfaceClient"; if(deviceList.find(myDev) != std::string::npos) yError("\t--> Seems OK\n"); else yError("\t--> Seems NOT OK. The device was not found, please activate the compilation using the corrisponding CMake flag.\n"); yError("\n2) Check if the robot has the 'debugInterfaceWrapper' device up and running. \n You should see from 'yarp name list' output, a port called\n"); yError("\t/robotName/part_name/debug/rpc:i\n If not, fix the robot configuration files to instantiate the 'debugInterfaceWrapper' device.\n"); yError("\nQuickFix: If you set the --dataToDumpAll and do not need the advanced debug feature (getRotorxxx) just remove this option. 
See help for more information.\n"); yError("------------- END ERROR MESSAGE ---------------\n\n"); Network::fini(); return false; } } bool logToFile = false; if (options.check("logToFile")) logToFile = true; portPrefix= dumpername.c_str() + part.asString() + "/"; //boardDumperThread *myDumper = new boardDumperThread(&dd, rate, portPrefix, dataToDump[0]); //myDumper->setThetaMap(thetaMap, nJoints); myDumper = new boardDumperThread[nData]; for (int i = 0; i < nData; i++) { if (dataToDump[i] == "getEncoders" ) { if (ddBoard.view(ienc)) { yInfo("Initializing a getEncs thread\n"); myDumper[i].setDevice(&ddBoard, &ddDebug, rate, portPrefix, dataToDump[i], logToFile); myDumper[i].setThetaMap(thetaMap, nJoints); myGetEncs.setInterface(ienc); if (ddBoard.view(istmp)) { yInfo("getEncoders::The time stamp initalization interfaces was successfull! \n"); myGetEncs.setStamp(istmp); } else yError("Problems getting the time stamp interfaces \n"); myDumper[i].setGetter(&myGetEncs); } } else if (dataToDump[i] == "getEncoderSpeeds") { if (ddBoard.view(ienc)) { yInfo("Initializing a getSpeeds thread\n"); myDumper[i].setDevice(&ddBoard, &ddDebug, rate, portPrefix, dataToDump[i], logToFile); myDumper[i].setThetaMap(thetaMap, nJoints); myGetSpeeds.setInterface(ienc); if (ddBoard.view(istmp)) { yInfo("getEncodersSpeed::The time stamp initalization interfaces was successfull! \n"); myGetSpeeds.setStamp(istmp); } else yError("Problems getting the time stamp interfaces \n"); myDumper[i].setGetter(&myGetSpeeds); } } else if (dataToDump[i] == "getEncoderAccelerations") { if (ddBoard.view(ienc)) { yInfo("Initializing a getAccs thread\n"); myDumper[i].setDevice(&ddBoard, &ddDebug, rate, portPrefix, dataToDump[i], logToFile); myDumper[i].setThetaMap(thetaMap, nJoints); myGetAccs.setInterface(ienc); if (ddBoard.view(istmp)) { yInfo("getEncoderAccelerations::The time stamp initalization interfaces was successfull! 
\n"); myGetAccs.setStamp(istmp); } else yError("Problems getting the time stamp interfaces \n"); myDumper[i].setGetter(&myGetAccs); } } else if (dataToDump[i] == "getPosPidReferences") { if (ddBoard.view(ipid)) { yInfo("Initializing a getErrs thread\n"); myDumper[i].setDevice(&ddBoard, &ddDebug, rate, portPrefix, dataToDump[i], logToFile); myDumper[i].setThetaMap(thetaMap, nJoints); myGetPidRefs.setInterface(ipid); if (ddBoard.view(istmp)) { yInfo("getPosPidReferences::The time stamp initalization interfaces was successfull! \n"); myGetPidRefs.setStamp(istmp); } else yError("Problems getting the time stamp interfaces \n"); myDumper[i].setGetter(&myGetPidRefs); } } else if (dataToDump[i] == "getTrqPidReferences") { if (ddBoard.view(itrq)) { yInfo("Initializing a getErrs thread\n"); myDumper[i].setDevice(&ddBoard, &ddDebug, rate, portPrefix, dataToDump[i], logToFile); myDumper[i].setThetaMap(thetaMap, nJoints); myGetTrqRefs.setInterface(itrq); if (ddBoard.view(istmp)) { yInfo("getTrqPidReferences::The time stamp initalization interfaces was successfull! \n"); myGetTrqRefs.setStamp(istmp); } else yError("Problems getting the time stamp interfaces \n"); myDumper[i].setGetter(&myGetTrqRefs); } } else if (dataToDump[i] == "getControlModes") { if (ddBoard.view(icmod)) { yInfo("Initializing a getErrs thread\n"); myDumper[i].setDevice(&ddBoard, &ddDebug, rate, portPrefix, dataToDump[i], logToFile); myDumper[i].setThetaMap(thetaMap, nJoints); myGetControlModes.setInterface(icmod, nJoints); if (ddBoard.view(istmp)) { yInfo("getControlModes::The time stamp initalization interfaces was successfull! 
\n"); myGetControlModes.setStamp(istmp); } else yError("Problems getting the time stamp interfaces \n"); myDumper[i].setGetter(&myGetControlModes); } } else if (dataToDump[i] == "getInteractionModes") { if (ddBoard.view(iimod)) { yInfo("Initializing a getErrs thread\n"); myDumper[i].setDevice(&ddBoard, &ddDebug, rate, portPrefix, dataToDump[i], logToFile); myDumper[i].setThetaMap(thetaMap, nJoints); myGetInteractionModes.setInterface(iimod, nJoints); if (ddBoard.view(istmp)) { yInfo("getInteractionModes::The time stamp initalization interfaces was successfull! \n"); myGetInteractionModes.setStamp(istmp); } else yError("Problems getting the time stamp interfaces \n"); myDumper[i].setGetter(&myGetInteractionModes); } } else if (dataToDump[i] == "getPositionErrors") { if (ddBoard.view(ipid)) { yInfo("Initializing a getErrs thread\n"); myDumper[i].setDevice(&ddBoard, &ddDebug, rate, portPrefix, dataToDump[i], logToFile); myDumper[i].setThetaMap(thetaMap, nJoints); myGetPosErrs.setInterface(ipid); if (ddBoard.view(istmp)) { yInfo("getPositionErrors::The time stamp initalization interfaces was successfull! \n"); myGetPosErrs.setStamp(istmp); } else yError("Problems getting the time stamp interfaces \n"); myDumper[i].setGetter(&myGetPosErrs); } } else if (dataToDump[i] == "getOutputs") { if (ddBoard.view(ipid)) { yInfo("Initializing a getOuts thread\n"); myDumper[i].setDevice(&ddBoard, &ddDebug, rate, portPrefix, dataToDump[i], logToFile); myDumper[i].setThetaMap(thetaMap, nJoints); myGetOuts.setInterface(ipid); if (ddBoard.view(istmp)) { yInfo("getOutputs::The time stamp initalization interfaces was successfull! 
\n"); myGetOuts.setStamp(istmp); } else yError("Problems getting the time stamp interfaces \n"); myDumper[i].setGetter(&myGetOuts); } } else if (dataToDump[i] == "getCurrents") { if (ddBoard.view(iamp)) { yInfo("Initializing a getCurrs thread\n"); myDumper[i].setDevice(&ddBoard, &ddDebug, rate, portPrefix, dataToDump[i], logToFile); myDumper[i].setThetaMap(thetaMap, nJoints); myGetCurrs.setInterface(iamp); if (ddBoard.view(istmp)) { yInfo("getCurrents::The time stamp initalization interfaces was successfull! \n"); myGetCurrs.setStamp(istmp); } else yError("Problems getting the time stamp interfaces \n"); myDumper[i].setGetter(&myGetCurrs); } } else if (dataToDump[i] == "getTorques") { if (ddBoard.view(itrq)) { yInfo("Initializing a getTorques thread\n"); myDumper[i].setDevice(&ddBoard, &ddDebug, rate, portPrefix, dataToDump[i], logToFile); myDumper[i].setThetaMap(thetaMap, nJoints); myGetTrqs.setInterface(itrq); if (ddBoard.view(istmp)) { yInfo("getTorques::The time stamp initalization interfaces was successfull! \n"); myGetTrqs.setStamp(istmp); } else yError("Problems getting the time stamp interfaces \n"); myDumper[i].setGetter(&myGetTrqs); } } else if (dataToDump[i] == "getTorqueErrors") { if (ddBoard.view(itrq)) { yInfo("Initializing a getTorqueErrors thread\n"); myDumper[i].setDevice(&ddBoard, &ddDebug, rate, portPrefix, dataToDump[i], logToFile); myDumper[i].setThetaMap(thetaMap, nJoints); myGetTrqErrs.setInterface(itrq); if (ddBoard.view(istmp)) { yInfo("getTorqueErrors::The time stamp initalization interfaces was successfull! 
\n"); myGetTrqErrs.setStamp(istmp); } else yError("Problems getting the time stamp interfaces \n"); myDumper[i].setGetter(&myGetTrqErrs); } } else if (dataToDump[i] == "getRotorPositions") { { if (idbg==0 && ddDebug.isValid()) ddDebug.view(idbg); if (idbg!=0) { yInfo("Initializing a getRotorPosition thread\n"); myDumper[i].setDevice(&ddBoard, &ddDebug, rate, portPrefix, dataToDump[i], logToFile); myDumper[i].setThetaMap(thetaMap, nJoints); myGetRotorPoss.setInterface(idbg); if (ddDebug.view(istmp)) { yInfo("getRotorPositions::The time stamp initalization interfaces was successfull! \n"); myGetRotorPoss.setStamp(istmp); } else yError("Problems getting the time stamp interfaces \n"); myDumper[i].setGetter(&myGetRotorPoss); } else { yError("Debug Interface not available, cannot dump %s.\n", dataToDump[i].c_str()); Network::fini(); return false; } } } else if (dataToDump[i] == "getRotorSpeeds") { if (idbg==0 && ddDebug.isValid()) ddDebug.view(idbg); if (idbg!=0) { yInfo("Initializing a getRotorSpeed thread\n"); myDumper[i].setDevice(&ddBoard, &ddDebug, rate, portPrefix, dataToDump[i], logToFile); myDumper[i].setThetaMap(thetaMap, nJoints); myGetRotorVels.setInterface(idbg); if (ddDebug.view(istmp)) { yInfo("getRotorSpeeds::The time stamp initalization interfaces was successfull! 
\n"); myGetRotorVels.setStamp(istmp); } else yError("Problems getting the time stamp interfaces \n"); myDumper[i].setGetter(&myGetRotorVels); } else { printf("Debug Interface not available, cannot dump %s.\n", dataToDump[i].c_str()); Network::fini(); return false; } } else if (dataToDump[i] == "getRotorAccelerations") { if (idbg==0 && ddDebug.isValid()) ddDebug.view(idbg); if (idbg!=0) { yInfo("Initializing a getRotorAcceleration thread\n"); myDumper[i].setDevice(&ddBoard, &ddDebug, rate, portPrefix, dataToDump[i], logToFile); myDumper[i].setThetaMap(thetaMap, nJoints); myGetRotorAccs.setInterface(idbg); if (ddDebug.view(istmp)) { yInfo("getRotorAccelerations::The time stamp initalization interfaces was successfull! \n"); myGetRotorAccs.setStamp(istmp); } else yError("Problems getting the time stamp interfaces \n"); myDumper[i].setGetter(&myGetRotorAccs); } else { printf("Debug Interface not available, cannot dump %s.\n", dataToDump[i].c_str()); Network::fini(); return false; } } else if (dataToDump[i] == "getTemperatures") { if (ddBoard.view(imotenc)) { yInfo("Initializing a getTemps thread\n"); myDumper[i].setDevice(&ddBoard, &ddDebug, rate, portPrefix, dataToDump[i], logToFile); myDumper[i].setThetaMap(thetaMap, nJoints); myGetTemps.setInterface(imot); if (ddBoard.view(istmp)) { yInfo("getEncoders::The time stamp initalization interfaces was successfull! \n"); myGetTemps.setStamp(istmp); } else yError("Problems getting the time stamp interfaces \n"); myDumper[i].setGetter(&myGetTemps); } } else if (dataToDump[i] == "getMotorEncoders") { if (ddBoard.view(imotenc)) { yInfo("Initializing a getEncs thread\n"); myDumper[i].setDevice(&ddBoard, &ddDebug, rate, portPrefix, dataToDump[i], logToFile); myDumper[i].setThetaMap(thetaMap, nJoints); myGetMotorEncs.setInterface(imotenc); if (ddBoard.view(istmp)) { yInfo("getEncoders::The time stamp initalization interfaces was successfull! 
\n"); myGetMotorEncs.setStamp(istmp); } else yError("Problems getting the time stamp interfaces \n"); myDumper[i].setGetter(&myGetMotorEncs); } } else if (dataToDump[i] == "getMotorEncoderSpeeds") { if (ddBoard.view(imotenc)) { yInfo("Initializing a getSpeeds thread\n"); myDumper[i].setDevice(&ddBoard, &ddDebug, rate, portPrefix, dataToDump[i], logToFile); myDumper[i].setThetaMap(thetaMap, nJoints); myGetMotorSpeeds.setInterface(imotenc); if (ddBoard.view(istmp)) { yInfo("getEncodersSpeed::The time stamp initalization interfaces was successfull! \n"); myGetMotorSpeeds.setStamp(istmp); } else yError("Problems getting the time stamp interfaces \n"); myDumper[i].setGetter(&myGetMotorSpeeds); } } else if (dataToDump[i] == "getMotorEncoderAccelerations") { if (ddBoard.view(imotenc)) { yInfo("Initializing a getAccs thread\n"); myDumper[i].setDevice(&ddBoard, &ddDebug, rate, portPrefix, dataToDump[i], logToFile); myDumper[i].setThetaMap(thetaMap, nJoints); myGetMotorAccs.setInterface(imotenc); if (ddBoard.view(istmp)) { yInfo("getEncoderAccelerations::The time stamp initalization interfaces was successfull! \n"); myGetMotorAccs.setStamp(istmp); } else yError("Problems getting the time stamp interfaces \n"); myDumper[i].setGetter(&myGetMotorAccs); } } } Time::delay(1); for (int i = 0; i < nData; i++) myDumper[i].start(); return true; }
// Main state machine of the double-touch task, executed once per thread tick.
// `step` tracks the phase: wait for a skin contact (0-1), solve the IK and
// reach for the contacted taxel (2-5), verify/record the touch (6-7), go back
// to rest (8-9) and start waiting again. A transition only happens once the
// previous motion has completed (checkMotionDone()).
void doubleTouchThread::run()
{
    // Non-blocking read: skinContacts is nullptr when nothing arrived.
    skinContactList *skinContacts  = skinPort -> read(false);
    sendOutput();

    if (checkMotionDone())
    {
        switch (step)
        {
            case 0:
                // Park the arms and start waiting for a skin contact.
                steerArmsHome();
                yInfo("[doubleTouch] WAITING FOR CONTACT...\n");
                step++;
                break;
            case 1:
                // Wait for a valid contact; on detection make joints 2 and 3
                // compliant before reaching toward the body.
                if(skinContacts)
                {
                    printMessage(2,"Waiting for contact..\n");
                    if (detectContact(skinContacts))
                    {
                        yInfo("[doubleTouch] CONTACT!!! skinPart: %s Link: %i Position: %s NormDir: %s",
                               SkinPart_s[cntctSkinPart].c_str(), cntctLinkNum,cntctPosLink.toString(3,3).c_str(),
                               cntctNormDir.toString(3,3).c_str());
                        printMessage(1,"Switching to impedance position mode..\n");
                        imodeS -> setInteractionMode(2,VOCAB_IM_COMPLIANT);
                        imodeS -> setInteractionMode(3,VOCAB_IM_COMPLIANT);
                        step++;
                    }
                }
                break;
            case 2:
                // Solve the inverse kinematics toward the contacted taxel.
                solveIK();
                yInfo("[doubleTouch] Going to taxel... Desired EE: %s\n",(sol->ee).toString(3,3).c_str());
                printMessage(1,"Desired joint configuration:  %s\n",(sol->joints*iCub::ctrl::CTRL_RAD2DEG).toString(3,3).c_str());
                step++;
                recFlag = 1;
                break;
            case 3:
                // Move the first arm; which arm leads depends on the task type.
                configureHands();
                if (record != 0)
                {
                    Time::delay(2.0);
                }
                if (curTaskType == "LHtoR" || curTaskType == "RHtoL")
                {
                    goToTaxelMaster();
                }
                else
                {
                    goToTaxelSlave();
                }
                step++;
                break;
            case 4:
                // Let the first motion settle.
                Time::delay(2.0);
                step++;
                break;
            case 5:
                // Move the second arm (the complement of case 3).
                if (curTaskType == "LHtoR" || curTaskType == "RHtoL")
                {
                    goToTaxelSlave();
                }
                else
                {
                    goToTaxelMaster();
                }
                step++;
                break;
            case 6:
                // Evaluate the touch; without recording (record == 0) the
                // skin-based verification of case 7 is skipped (step += 2).
                recFlag = 0;
                bool flag;
                if (record == 0)
                {
                    Time::delay(3.0);
                    flag=1;
                    if (flag == 1)
                    {
                        testAchievement();
                        step += 2;
                    }
                }
                else
                {
                    testAchievement();
                    printMessage(0,"Waiting for the event to go back.\n");
                    step++;
                }
                break;
            case 7:
                // Recording modes: wait for the skin event (or the deadlock
                // escape on the real robot) before going back.
                if(skinContacts)
                {
                    if (record == 1)
                    {
                        if (testAchievement2(skinContacts))
                            step++;
                        else if (robot == "icub" && exitFromDeadlock(skinContacts))
                            step++;
                    }
                    else if (record == 2)
                    {
                        if (exitFromDeadlock(skinContacts))
                            step++;
                        else
                            testAchievement2(skinContacts);
                    }
                }
                break;
            case 8:
                // NOTE(review): when `dontgoback` is set, step never advances
                // here, so the thread keeps re-entering case 8 -- confirm
                // this is intended.
                if (!dontgoback)
                {
                    printMessage(0,"Going to rest...\n");
                    clearTask();
                    steerArmsHomeMasterSlave();
                    step++;
                }
                break;
            case 9:
                // Restore stiff mode and restart waiting for a new contact
                // (back to case 1, not 0: arms are already home).
                printMessage(1,"Switching to position mode..\n");
                imodeS -> setInteractionMode(2,VOCAB_IM_STIFF);
                imodeS -> setInteractionMode(3,VOCAB_IM_STIFF);
                yInfo("[doubleTouch] WAITING FOR CONTACT...\n");
                step = 1;
                break;
            default:
                yError("[doubleTouch] doubleTouchThread should never be here!!!\nStep: %d",step);
                Time::delay(2.0);
                break;
        }
    }
}
// Bridge for the embObj C layer: forwards an error message to the YARP error
// log. The signature (non-const char*) is kept as-is to preserve the symbol's
// mangled name for existing callers.
void embObjPrintError(char *string)
{
    yError("%s", string);
}
bool doubleTouchThread::threadInit() { skinPort -> open(("/"+name+"/contacts:i").c_str()); outPort -> open(("/"+name+"/status:o").c_str()); if (autoconnect) { yInfo("[doubleTouch] Autoconnect flag set to ON"); if (!Network::connect("/skinManager/skin_events:o",("/"+name+"/contacts:i").c_str())) { Network::connect("/virtualContactGeneration/virtualContacts:o", ("/"+name+"/contacts:i").c_str()); } } Network::connect(("/"+name+"/status:o").c_str(),"/visuoTactileRF/input:i"); Network::connect(("/"+name+"/status:o").c_str(),"/visuoTactileWrapper/doubleTouch:i"); Property OptR; OptR.put("robot", robot.c_str()); OptR.put("part", "right_arm"); OptR.put("device", "remote_controlboard"); OptR.put("remote",("/"+robot+"/right_arm").c_str()); OptR.put("local", ("/"+name +"/right_arm").c_str()); if (!ddR.open(OptR)) { yError("[doubleTouch] Could not open right_arm PolyDriver!"); return false; } Property OptL; OptL.put("robot", robot.c_str()); OptL.put("part", "left_arm"); OptL.put("device", "remote_controlboard"); OptL.put("remote",("/"+robot+"/left_arm").c_str()); OptL.put("local", ("/"+name +"/left_arm").c_str()); if (!ddL.open(OptL)) { yError("[doubleTouch] Could not open left_arm PolyDriver!"); return false; } bool ok = 1; // Left arm is the master, right arm is the slave if (ddR.isValid()) { ok = ok && ddR.view(iencsR); ok = ok && ddR.view(iposR); ok = ok && ddR.view(imodeR); ok = ok && ddR.view(iimpR); ok = ok && ddR.view(ilimR); } iencsR->getAxes(&jntsR); encsR = new Vector(jntsR,0.0); if (ddL.isValid()) { ok = ok && ddL.view(iencsL); ok = ok && ddL.view(iposL); ok = ok && ddL.view(imodeL); ok = ok && ddL.view(iimpL); ok = ok && ddL.view(ilimL); } iencsL->getAxes(&jntsL); encsL = new Vector(jntsL,0.0); if (!ok) { yError("[doubleTouch] Problems acquiring either left_arm or right_arm interfaces!!!!\n"); return false; } if (robot == "icub") { ok = 1; ok = ok && iimpL->setImpedance(0, 0.4, 0.03); ok = ok && iimpL->setImpedance(1, 0.35, 0.03); ok = ok && iimpL->setImpedance(2, 
0.35, 0.03); ok = ok && iimpL->setImpedance(3, 0.2, 0.02); ok = ok && iimpL->setImpedance(4, 0.2, 0.00); ok = ok && iimpL->setImpedance(0, 0.4, 0.03); ok = ok && iimpL->setImpedance(1, 0.35, 0.03); ok = ok && iimpL->setImpedance(2, 0.35, 0.03); ok = ok && iimpL->setImpedance(3, 0.2, 0.02); ok = ok && iimpL->setImpedance(4, 0.2, 0.00); if (!ok) { yError("[doubleTouch] Problems settings impedance values for either left_arm or right_arm!!!\n"); return false; } } return true; }
void embObjPrintFatal(char *string) { yError("EMS received the following FATAL error: %s", string); }
// Periodic update: grabs depth/RGB/player/skeleton data from the Kinect
// client, republishes the images on YARP ports, optionally displays them,
// handles click-based kinect->iCub calibration, and streams the tracked
// agent(s) to the OPC. Always returns true to keep the module running.
bool AgentDetector::updateModule() {
    LockGuard lg(m);
    // Fetch the latest Kinect frames; isRefreshed says whether new data arrived.
    bool isRefreshed = client.getDepthAndPlayers(depth,players);
    client.getRgb(rgb);
    bool tracked;
    if (handleMultiplePlayers)
        tracked=client.getJoints(joints);
    else
        tracked=client.getJoints(joint, EFAA_KINECT_CLOSEST_PLAYER);
    //cout<<"Tracking value = "<<tracked<<endl;
    if (tracked) {
        // In single-player mode, keep 'joints' as a one-element list so the
        // processing loop below works for both modes.
        if (handleMultiplePlayers)
            client.getSkeletonImage(joints,skeletonImage);
        else {
            client.getSkeletonImage(joint,skeletonImage);
            joints.clear();
            joints.push_back(joint);
        }
    }
    client.getPlayersImage(players,playersImage);
    client.getDepthImage(depth,depthToDisplay);
    // Republish each image only when someone is connected to its port.
    if (depthPort.getOutputCount()>0) {
        depthPort.prepare()=depthToDisplay;
        depthPort.write();
    }
    if (imagePort.getOutputCount()>0) {
        imagePort.prepare()=rgb;
        imagePort.write();
    }
    if (playersPort.getOutputCount()>0) {
        playersPort.prepare()=playersImage;
        playersPort.write();
    }
    if (skeletonPort.getOutputCount()>0) {
        skeletonPort.prepare()=skeletonImage;
        skeletonPort.write();
    }
    if (showImages) {
        // Depth scaled to [0,1] for display; RGB converted for OpenCV.
        cvConvertScale((IplImage*)depthToDisplay.getIplImage(),depthTmp,1.0/255);
        cvCvtColor((IplImage*)rgb.getIplImage(),rgbTmp,CV_BGR2RGB);
        // showMode holds a list of view names parsed one token at a time.
        string mode=showMode;
        string submode;
        while (!mode.empty()) {
            if (showImageParser(mode,submode)) {
                if (submode=="rgb")
                    cvShowImage("rgb",rgbTmp);
                else if (submode=="depth")
                    cvShowImage("depth",depthTmp);
                else if (submode=="skeleton")
                    cvShowImage("skeleton",(IplImage*)skeletonImage.getIplImage());
                else if (submode=="players")
                    cvShowImage("players",(IplImage*)playersImage.getIplImage());
                else
                    yError("unrecognized show mode!");
            }
        }
        cvWaitKey(1);
    }
    //Send the players information to the OPC
    //Allow click calibration
    if (!checkCalibration()) {
        // Left click: collect one (kinect, icub) point pair for calibration.
        if (AgentDetector::clicked==clicked_left) {
            AgentDetector::clicked=idle;
            //Get the clicked point coordinate in Kinect space
            Vector clickedPoint(3);
            cout<<"Processing a click on ("<<AgentDetector::clickX<<" "<<AgentDetector::clickY<<") --> ";
            client.get3DPoint((int)AgentDetector::clickX,(int)AgentDetector::clickY,clickedPoint);
            cout<<clickedPoint.toString(3,3)<<endl;
            // Build an OPC query: present entities that are objects or RTobjects.
            Bottle bCond;
            Bottle bObject;
            Bottle bRTObject;
            bObject.addString(EFAA_OPC_ENTITY_TAG);
            bObject.addString("==");
            bObject.addString(EFAA_OPC_ENTITY_OBJECT);
            bRTObject.addString(EFAA_OPC_ENTITY_TAG);
            bRTObject.addString("==");
            bRTObject.addString(EFAA_OPC_ENTITY_RTOBJECT);
            Bottle bPresent;
            bPresent.addString(EFAA_OPC_OBJECT_PRESENT_TAG);
            bPresent.addString("==");
            bPresent.addDouble(1.0);
            bCond.addList()=bObject;
            bCond.addString("&&");
            bCond.addList()=bPresent;
            bCond.addString("||");
            bCond.addList()=bRTObject;
            bCond.addString("&&");
            bCond.addList()=bPresent;
            opc->checkout();
            opc->isVerbose=true;
            list<Entity*> presentObjects=opc->Entities(bCond);
            opc->isVerbose=false;
            // Calibration target: the single present object, or — if several
            // are present — the one explicitly named "target".
            Object *o=nullptr;
            if (presentObjects.size()==1) {
                o=dynamic_cast<Object*>(presentObjects.front());
            }
            else {
                for(auto& presentObject : presentObjects) {
                    if(presentObject->name() == "target") {
                        o=dynamic_cast<Object*>(presentObject);
                        break;
                    }
                }
            }
            if(o) {
                // Send the coordinate pair to the reference-frame handler so
                // it can estimate the kinect->icub transformation.
                Bottle botRPH, botRPHRep;
                botRPH.addString("add");
                botRPH.addString("kinect");
                Bottle &cooKinect=botRPH.addList();
                cooKinect.addDouble(clickedPoint[0]);
                cooKinect.addDouble(clickedPoint[1]);
                cooKinect.addDouble(clickedPoint[2]);
                Bottle &cooiCub=botRPH.addList();
                cooiCub.addDouble(o->m_ego_position[0]);
                cooiCub.addDouble(o->m_ego_position[1]);
                cooiCub.addDouble(o->m_ego_position[2]);
                rfh.write(botRPH,botRPHRep);
                cout<<"Sent to RFH: "<<botRPH.toString().c_str()<<endl;
                cout<<"Got from RFH: "<<botRPHRep.toString().c_str()<<endl;
                pointsCnt++;
            }
            else {
                yWarning("There should be 1 and only 1 object on the table");
                yWarning("If there is more than one object, the object you want");
                yWarning("to calibrate must be called \"target\"");
            }
        }
        // Right click: run the calibration once enough pairs were collected,
        // then persist it.
        else if (AgentDetector::clicked==clicked_right) {
            AgentDetector::clicked=idle;
            if (pointsCnt>=3) {
                Bottle calibBottle,calibReply;
                calibBottle.addString("cal");
                calibBottle.addString("kinect");
                rfh.write(calibBottle,calibReply);
                cout<<"Calibrated ! "<<calibReply.toString().c_str()<<endl;
                calibBottle.clear();
                calibBottle.addString("save");
                rfh.write(calibBottle,calibReply);
                cout<<"Saved to file ! "<<calibReply.toString().c_str()<<endl;
                checkCalibration();
            }
            else
                yWarning("Unable to calibrate with less than 3 points pairs collected");
        }
    }
    if (isRefreshed) {
        // yInfo() << " refreshed";
        //////////////////////////////////////////////////////////////////
        //Clear the previous agents
        //for(map<int, Agent*>::iterator pA=identities.begin(); pA!=identities.end() ; pA++)
        //{
        //    pA->second->m_present = 0.0;
        //}
        //partner->m_present = 0.0;
        // check if last apparition was more than dThreshlodDisaparition ago
        if (tracked) {
            //Go through all skeletons
            for(deque<Player>::iterator p=joints.begin(); p!=joints.end(); p++) {
                //check if this skeleton is really tracked: at least one joint
                //must have a non-zero position.
                bool reallyTracked = false;
                for(map<string,Joint>::iterator jnt = p->skeleton.begin() ; jnt != p->skeleton.end() ; jnt++) {
                    if (jnt->second.x != 0 && jnt->second.y != 0 && jnt->second.z != 0) {
                        reallyTracked = true;
                        break;
                    }
                }
                if (reallyTracked) {
                    dSince = (clock() - dTimingLastApparition) / (double) CLOCKS_PER_SEC;
                    //yInfo() << " is REALLY tracked";
                    string playerName = partner_default_name;
                    //If the skeleton is tracked we dont identify
                    if (identities.find(p->ID) != identities.end()) {
                        playerName = identities[p->ID];
                    }
                    else {
                        //Check if we should learn this face
                        if (currentTrainingFace != "") {
                            setIdentity(*p,currentTrainingFace);
                            currentTrainingFace = "";
                        }
                        //if (useFaceRecognition)
                        playerName = getIdentity(*p);
                    }
                    //We interact with OPC only if the calibration is done
                    if (isCalibrated) {
                        //main bottle to be streamed with loc of all agent body part
                        Bottle& bAgentLoc = agentLocOutPort.prepare();
                        bAgentLoc.clear();
                        //Retrieve this player in OPC or create if does not exist
                        opc->checkout();
                        partner = opc->addOrRetrieveEntity<Agent>(partner_default_name);
                        partner->m_present = 1.0;
                        // reset the timing.
                        dTimingLastApparition = clock();
                        // First sighting of this skeleton ID: bind it to a
                        // named agent in the OPC.
                        if (identities.find(p->ID) == identities.end()) {
                            cout<<"Assigning name "<<playerName<<" to skeleton "<<p->ID<<endl;
                            //Agent* specificAgent = opc->addEntity<Agent>(playerName);
                            Agent* specificAgent = opc->addOrRetrieveEntity<Agent>(playerName);
                            if(specificAgent == nullptr) {
                                yError() << "SHIT specificAgent";
                            }
                            else {
                                identities[p->ID] = specificAgent->name();
                                specificAgent->m_present = 1.0;
                                yInfo() << " specific agent is commited";
                                opc->commit(specificAgent);
                                yInfo() << " specific agent is commited done";
                            }
                        }
                        // Relation r(partner->name(),"named",playerName);
                        // opc->addRelation(r,1.0);
                        // cout<<"Commiting : "<<r.toString()<<endl;
                        yarp::os::Bottle &skeleton = outputSkeletonPort.prepare();
                        skeleton.clear();
                        //Convert the skeleton into efaaHelpers body. We loose orientation in the process...
                        for(map<string,Joint>::iterator jnt = p->skeleton.begin() ; jnt != p->skeleton.end() ; jnt++) {
                            Bottle bBodyPartLoc;
                            // Homogeneous kinect-frame coordinate of this joint.
                            Vector kPosition(4);
                            kPosition[0] = jnt->second.x;
                            kPosition[1] = jnt->second.y;
                            kPosition[2] = jnt->second.z;
                            kPosition[3] = 1;
                            // Map into the iCub frame via the calibration matrix.
                            Vector icubPos = kinect2icub * kPosition;
                            Vector irPos = icubPos.subVector(0,2);
                            if (isMounted) {
                                irPos = transform2IR(irPos);
                                Bottle jntBtl;
                                jntBtl.clear();
                                jntBtl.addString(jnt->first);
                                jntBtl.addDouble(jnt->second.x);
                                jntBtl.addDouble(jnt->second.y);
                                jntBtl.addDouble(jnt->second.z);
                                skeleton.addList() = jntBtl;
                            }
                            if (jnt->first == EFAA_OPC_BODY_PART_TYPE_HEAD) {
                                partner->m_ego_position = irPos;
                            }
                            partner->m_body.m_parts[jnt->first] = irPos;
                            bBodyPartLoc.addString(jnt->first);
                            bBodyPartLoc.addString(irPos.toString());
                            bAgentLoc.addList() = bBodyPartLoc;
                        }
                        agentLocOutPort.write();
                        opc->commit(partner);
                        // cout << skeleton.toString()<< endl;
                        outputSkeletonPort.write();
                        //opc->commit(agent);
                    }
                    // cout<<'1'<<endl;
                }
            }
        }
        else {
            // No skeleton tracked: mark the partner absent once it has been
            // gone longer than the disparition threshold.
            if (dSince > dThresholdDisparition) {
                opc->checkout();
                partner = opc->addOrRetrieveEntity<Agent>(partner_default_name);
                partner->m_present = 0.0;
                opc->commit(partner);
            }
            else {
                //yInfo() << " clock is: " << clock() << "\t last apparition: " << dTimingLastApparition << "\t dSince: " << dSince;
                //yInfo() << " agent dissapeared but not for too long.";
            }
        }
    }
    return true;
}
// Reads module options from the ResourceFinder, announces the enabled
// processing stages, configures the event manager and — when undistortion is
// requested — loads the camera calibration parameters.
// Returns false if required calibration groups cannot be loaded; otherwise
// returns the result of starting the event manager.
bool vPreProcessModule::configure(yarp::os::ResourceFinder &rf) {
    // Each flag is on only when present on the command line AND not explicitly
    // set to false (e.g. "--pepper false" disables the filter).
    bool pepper = rf.check("pepper") && rf.check("pepper", yarp::os::Value(true)).asBool();
    bool rectify = rf.check("rectify") && rf.check("rectify", yarp::os::Value(true)).asBool();
    bool undistort = rf.check("undistort") && rf.check("undistort", yarp::os::Value(true)).asBool();
    bool truncate = rf.check("truncate") && rf.check("truncate", yarp::os::Value(true)).asBool();
    bool flipx = rf.check("flipx") && rf.check("flipx", yarp::os::Value(true)).asBool();
    bool flipy = rf.check("flipy") && rf.check("flipy", yarp::os::Value(true)).asBool();
    bool precheck = rf.check("precheck") && rf.check("precheck", yarp::os::Value(true)).asBool();
    bool split = rf.check("split") && rf.check("split", yarp::os::Value(true)).asBool();
    bool local_stamp = rf.check("local_stamp") && rf.check("local_stamp", yarp::os::Value(true)).asBool();

    // Announce the enabled processing stages.
    if(precheck)
        yInfo() << "Performing precheck for event corruption";
    if(flipx)
        yInfo() << "Flipping vision horizontally";
    if(flipy)
        yInfo() << "Flipping vision vertically";
    if(pepper)
        yInfo() << "Applying salt and pepper filter";
    if(rectify)
        yInfo() << "Rectifying image pairs using extrinsic parameters";
    if(undistort && truncate)
        yInfo() << "Applying camera undistortion - truncating to sensor size";
    if(undistort && !truncate)
        yInfo() << "Applying camera undistortion - without truncation";
    if(split)
        yInfo() << "Splitting into left/right streams";

    eventManager.initBasic(rf.check("name", yarp::os::Value("/vPreProcess")).asString(),
                           rf.check("height", yarp::os::Value(240)).asInt(),
                           rf.check("width", yarp::os::Value(304)).asInt(),
                           precheck, flipx, flipy, pepper, rectify, undistort, split, local_stamp);
    if(pepper) {
        // temporalSize is given in seconds and converted to the event timestamp
        // clock via vtsscaler.
        eventManager.initPepper(rf.check("spatialSize", yarp::os::Value(1)).asDouble(),
                                rf.check("temporalSize", yarp::os::Value(0.1)).asDouble() * vtsHelper::vtsscaler);
    }
    if(undistort) {
        // Load the intrinsic (and, for rectification, extrinsic) camera
        // parameters from the calibration context/config file.
        yarp::os::ResourceFinder calibfinder;
        calibfinder.setVerbose();
        calibfinder.setDefaultContext(rf.check("calibContext", yarp::os::Value("cameraCalib")).asString().c_str());
        calibfinder.setDefaultConfigFile(rf.check("calibFile", yarp::os::Value("iCubEyes-ATIS.ini")).asString().c_str());
        calibfinder.configure(0, 0);

        yarp::os::Bottle &leftParams = calibfinder.findGroup("CAMERA_CALIBRATION_LEFT");
        yarp::os::Bottle &rightParams = calibfinder.findGroup("CAMERA_CALIBRATION_RIGHT");
        yarp::os::Bottle &stereoParams = calibfinder.findGroup("STEREO_DISPARITY");
        // Intrinsics are mandatory for undistortion; extrinsics only when
        // rectification was requested.
        if(leftParams.isNull() || rightParams.isNull()) {
            yError() << "Could not load intrinsic camera parameters";
            return false;
        }
        if (rectify && stereoParams.isNull()) {
            yError() << "Could not load extrinsic camera parameters";
            return false;
        }
        std::cout << leftParams.toString() << std::endl;
        std::cout << rightParams.toString() << std::endl;
        std::cout << stereoParams.toString() << std::endl;
        eventManager.initUndistortion(leftParams, rightParams, stereoParams, truncate);
    }
    // Launch the event-processing thread.
    return eventManager.start();
}