Example #1
    virtual void run()
    {
        t=Time::now();

        generateTarget();

        if (state==STATE_TRACK)
        {
            // look at the target (streaming)
            igaze->lookAtFixationPoint(fp);

            // some verbosity
            printStatus();

            // from time to time we collect some
            // interesting points (POI) to look at
            // again shortly afterwards
            storeInterestingPoint();

            if (t-t2>=SWITCH_STATE_PER)
            {
                // switch state
                state=STATE_RECALL;
            }
        }

        if (state==STATE_RECALL)
        {
            // pick up the first POI
            // and clear the list
            Vector ang=poiList.front();
            poiList.clear();

            fprintf(stdout,"Retrieving POI #0 ... (%s) [deg]\n",
                    ang.toString(3,3).c_str());

            // register the motion-done event, attaching the callback
            // that will be executed as soon as the gaze is accomplished
            igaze->registerEvent(*this);

            // look at the chosen POI
            igaze->lookAtAbsAngles(ang);

            // switch state
            state=STATE_WAIT;
        }

        if (state==STATE_STILL)
        {
            if (t-t4>=STILL_STATE_TIME)
            {
                fprintf(stdout,"done\n");

                t1=t2=t3=t;

                // switch state
                state=STATE_TRACK;
            }
        }
    }
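Example #1 registers the running thread itself as a listener for the motion-done event (igaze->registerEvent(*this)). For that call to be legal, the thread class presumably also derives from yarp::dev::GazeEvent and overrides gazeEventCallback(), as Example #15 shows. A minimal sketch of such a class skeleton, with an illustrative class name and thread period, could look like this:

    // sketch only: a thread that also acts as a GazeEvent so it can be
    // notified when a gaze movement is accomplished
    class GazeThread : public yarp::os::RateThread, public yarp::dev::GazeEvent
    {
    protected:
        yarp::dev::IGazeControl *igaze;

    public:
        GazeThread() : RateThread(20)
        {
            // ask for the "motion-done" notification
            gazeEventParameters.type="motion-done";
        }

        // called by the gaze controller once the movement is over,
        // provided the object was registered with igaze->registerEvent(*this)
        virtual void gazeEventCallback()
        {
            // e.g. switch state as in Example #15
        }

        virtual void run()
        {
            // state machine as in Example #1
        }
    };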
Example #2
    bool threadInit()
    {
        string name=rf.find("name").asString().c_str();
        double period=rf.find("period").asDouble();
        setRate(int(1000.0*period));

        // open a client interface to connect to the gaze server
        // we suppose that:
        // 1 - the iCub simulator (icubSim) is running
        // 2 - the gaze server iKinGazeCtrl is running and
        //     launched with the following options:
        //     --robot icubSim --context cameraCalibration/conf --config icubSimEyes.ini
        Property optGaze("(device gazecontrollerclient)");
        optGaze.put("remote","/iKinGazeCtrl");
        optGaze.put("local",("/"+name+"/gaze").c_str());

        if (!clientGaze.open(optGaze))
            return false;

        // open the view
        clientGaze.view(igaze);

        // latch the controller context in order to preserve
        // it after closing the module
        // the context contains the tracking mode, the neck limits and so on.
        igaze->storeContext(&startup_context_id);

        // set trajectory time:
        igaze->setNeckTrajTime(0.8);
        igaze->setEyesTrajTime(0.4);

        port.open(("/"+name+"/target:i").c_str());

        return true;
    }
Example #3
    bool updateModule()
    {
        if (imgOutPort.getOutputCount()>0)
        {
            if (ImageOf<PixelBgr> *pImgBgrIn=imgInPort.read(false))
            {
                Vector xa,oa;
                iarm->getPose(xa,oa);

                Matrix Ha=axis2dcm(oa);
                Ha.setSubcol(xa,0,3);

                Vector v(4,0.0); v[3]=1.0;
                Vector c=Ha*v;

                v=0.0; v[0]=0.05; v[3]=1.0;
                Vector x=Ha*v;

                v=0.0; v[1]=0.05; v[3]=1.0;
                Vector y=Ha*v;

                v=0.0; v[2]=0.05; v[3]=1.0;
                Vector z=Ha*v;

                v=solution; v.push_back(1.0);
                Vector t=Ha*v;

                Vector pc,px,py,pz,pt;
                int camSel=(eye=="left")?0:1;
                igaze->get2DPixel(camSel,c,pc);
                igaze->get2DPixel(camSel,x,px);
                igaze->get2DPixel(camSel,y,py);
                igaze->get2DPixel(camSel,z,pz);
                igaze->get2DPixel(camSel,t,pt);

                CvPoint point_c=cvPoint((int)pc[0],(int)pc[1]);
                CvPoint point_x=cvPoint((int)px[0],(int)px[1]);
                CvPoint point_y=cvPoint((int)py[0],(int)py[1]);
                CvPoint point_z=cvPoint((int)pz[0],(int)pz[1]);
                CvPoint point_t=cvPoint((int)pt[0],(int)pt[1]);

                cvCircle(pImgBgrIn->getIplImage(),point_c,4,cvScalar(0,255,0),4);
                cvCircle(pImgBgrIn->getIplImage(),point_t,4,cvScalar(255,0,0),4);

                cvLine(pImgBgrIn->getIplImage(),point_c,point_x,cvScalar(0,0,255),2);
                cvLine(pImgBgrIn->getIplImage(),point_c,point_y,cvScalar(0,255,0),2);
                cvLine(pImgBgrIn->getIplImage(),point_c,point_z,cvScalar(255,0,0),2);
                cvLine(pImgBgrIn->getIplImage(),point_c,point_t,cvScalar(255,255,255),2);

                tip.clear();
                tip.addInt(point_t.x);
                tip.addInt(point_t.y);

                imgOutPort.prepare()=*pImgBgrIn;
                imgOutPort.write();
            }
        }

        return true;
    }
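Example #3 maps Cartesian points into image coordinates with get2DPixel(). The gaze interface also provides the inverse mapping; a minimal sketch, assuming igaze is an already opened IGazeControl view and using illustrative pixel coordinates and an assumed 0.5 m depth, might be:

    // sketch only: back-project an image pixel into the root frame,
    // given a guess of the point's distance from the camera plane
    Vector px(2), x3d;
    px[0]=160.0; px[1]=120.0;   // pixel of interest (illustrative values)
    int camSel=0;               // 0: left camera, 1: right camera
    if (igaze->get3DPoint(camSel,px,0.5,x3d))
        fprintf(stdout,"point in root frame = (%s) [m]\n",x3d.toString(3,3).c_str());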
Example #4
    virtual bool threadInit()
    {
        string name=rf.check("name",Value("faceTracker")).asString().c_str();
        string robot=rf.check("robot",Value("icub")).asString().c_str();
        int period=rf.check("period",Value(50)).asInt();
        eye=rf.check("eye",Value("left")).asString().c_str();
        arm=rf.check("arm",Value("right")).asString().c_str();
        eyeDist=rf.check("eyeDist",Value(1.0)).asDouble();
        holdoff=rf.check("holdoff",Value(3.0)).asDouble();

        Property optGaze("(device gazecontrollerclient)");
        optGaze.put("remote","/iKinGazeCtrl");
        optGaze.put("local",("/"+name+"/gaze_client").c_str());

        if (!clientGaze.open(optGaze))
            return false;

        clientGaze.view(igaze);
        igaze->storeContext(&startup_gazeContext_id);
        igaze->blockNeckRoll(0.0);

        if (arm!="none")
        {
            Property optArm("(device cartesiancontrollerclient)");
            optArm.put("remote",("/"+robot+"/cartesianController/"+arm+"_arm").c_str());
            optArm.put("local",("/"+name+"/arm_client").c_str());
    
            if (!clientArm.open(optArm))
                return false;
    
            clientArm.view(iarm);
            iarm->storeContext(&startup_armContext_id);
        }

        portImgIn.open(("/"+name+"/img:i").c_str());
        portImgOut.open(("/"+name+"/img:o").c_str());
        portTopDown.open(("/"+name+"/topdown:i").c_str());
        portSetFace.open(("/"+name+"/setFace:rpc").c_str());

        if (!fd.init(rf.findFile("descriptor").c_str()))
        {
            fprintf(stdout,"Cannot load descriptor!\n");
            return false;
        }

        Rand::init();

        resetState();
        armCmdState=0;
        queuedFaceExprFlag=false;

        setRate(period);
        cvSetNumThreads(1);

        t0=Time::now();

        return true;
    }
Example #5
    virtual bool threadInit()
    {
        // open a client interface to connect to the gaze server
        // we suppose that:
        // 1 - the iCub simulator (icubSim) is running
        // 2 - the gaze server iKinGazeCtrl is running and
        //     launched with --robot icubSim option
        Property optGaze("(device gazecontrollerclient)");
        optGaze.put("remote","/iKinGazeCtrl");
        optGaze.put("local","/gaze_client");

        clientGaze=new PolyDriver;
        if (!clientGaze->open(optGaze))
        {
            delete clientGaze;    
            return false;
        }

        // open the view
        clientGaze->view(igaze);

        // set trajectory time:
        // we'll go like hell since we're using the simulator :)
        igaze->setNeckTrajTime(0.4);
        igaze->setEyesTrajTime(0.1);

        // put the gaze in tracking mode, so that
        // when the torso moves, the gaze controller 
        // will compensate for it
/*pramod modified starts
        igaze->setTrackingMode(true);

        Property optTorso("(device remote_controlboard)");
        optTorso.put("remote","/icubSim/torso");
        optTorso.put("local","/torso_client");

        clientTorso=new PolyDriver;
        if (!clientTorso->open(optTorso))
        {
            delete clientTorso;    
            return false;
        }

        // open the view
        clientTorso->view(ienc);
        clientTorso->view(ipos);
        ipos->setRefSpeed(0,10.0);
pramod modified ends */
        fp.resize(3);

        state=STATE_TRACK;

        t=t0=t1=t2=t3=t4=Time::now();

        return true;
    }
Example #6
    virtual bool threadInit()
    {
        // open a client interface to connect to the gaze server
        // we suppose that:
        // 1 - the iCub simulator is running;
        // 2 - the gaze server iKinGazeCtrl is running and
        //     launched with the following options: "--from configSim.ini"
        Property optGaze("(device gazecontrollerclient)");
        optGaze.put("remote","/iKinGazeCtrl");
        optGaze.put("local","/gaze_client");

        if (!clientGaze.open(optGaze))
            return false;

        // open the view
        clientGaze.view(igaze);

        // latch the controller context in order to preserve
        // it after closing the module
        // the context contains the tracking mode, the neck limits and so on.
        igaze->storeContext(&startup_context_id);

        // set trajectory time:
        igaze->setNeckTrajTime(0.8);
        igaze->setEyesTrajTime(0.4);

        // put the gaze in tracking mode, so that
        // when the torso moves, the gaze controller 
        // will compensate for it
        igaze->setTrackingMode(true);

        // print out some info about the controller
        Bottle info;
        igaze->getInfo(info);
        fprintf(stdout,"info = %s\n",info.toString().c_str());

        Property optTorso("(device remote_controlboard)");
        optTorso.put("remote","/icubSim/torso");
        optTorso.put("local","/torso_client");

        if (!clientTorso.open(optTorso))
            return false;

        // open the view
        clientTorso.view(ienc);
        clientTorso.view(ipos);
        ipos->setRefSpeed(0,10.0);

        fp.resize(3);

        state=STATE_TRACK;

        t=t0=t1=t2=t3=t4=Time::now();

        return true;
    }
Example #7
    virtual void threadRelease()
    {    
        // we require an immediate stop
        // before closing the client, for safety reasons
        igaze->stopControl();

        // it's a good rule to restore the controller
        // context as it was before opening the module
        igaze->restoreContext(startup_context_id);

        clientGaze.close();
        clientTorso.close();
    }
Example #8
    bool read(ConnectionReader &connection)
    {
        Bottle data;
        data.read(connection);
        if ((data.size()>=2) && enabled)
        {
            Vector xa,oa;
            iarm->getPose(xa,oa);

            Vector xe,oe;
            if (eye=="left")
                igaze->getLeftEyePose(xe,oe);
            else
                igaze->getRightEyePose(xe,oe);

            Matrix Ha=axis2dcm(oa);
            Ha.setSubcol(xa,0,3);

            Matrix He=axis2dcm(oe);
            He.setSubcol(xe,0,3);

            Matrix H=Prj*SE3inv(He)*Ha;
            Vector p(2);
            p[0]=data.get(0).asDouble();
            p[1]=data.get(1).asDouble();

            if (logPort.getOutputCount()>0)
            {
                Vector &log=logPort.prepare();

                log=p;
                for (int i=0; i<H.rows(); i++)
                    log=cat(log,H.getRow(i));
                for (int i=0; i<Prj.rows(); i++)
                    log=cat(log,Prj.getRow(i));
                for (int i=0; i<Ha.rows(); i++)
                    log=cat(log,Ha.getRow(i));
                for (int i=0; i<He.rows(); i++)
                    log=cat(log,He.getRow(i));

                logPort.write();
            }

            mutex.wait();
            solver.addItem(p,H);
            mutex.post();
        }

        return true;
    }
Example #9
    virtual void threadRelease()
    {    
        // we require an immediate stop
        // before closing the client, for safety reasons
        // (this is done internally in the destructor anyway)
        igaze->stopControl();

        // it's a good rule to reinstate the tracking mode
        // as it was
        igaze->setTrackingMode(false);

        delete clientGaze;
 //       delete clientTorso; //pramod
    }
Example #10
	virtual void threadRelease() {

		/* Stop and close ports */
		cportL->interrupt();
		cportR->interrupt();
		susPort->interrupt();

		cportL->close();
		cportR->close();
		susPort->close();

		delete cportL;
		delete cportR;
		delete susPort;

		/* stop motor interfaces and close */
		gaze->stopControl();
		clientGazeCtrl.close();
		carm->stopControl();
		clientArmCart.close();

		robotTorso.close();
		robotHead.close();
		robotArm.close();

	}
Example #11
 bool interruptModule()
 {
     imgLPortIn.interrupt();
     imgRPortIn.interrupt();
     igaze->stopControl();
     iarm->stopControl();
     return true;
 }
Example #12
 void look_down()
 {
     // look at a point 0.3 m in front of the robot,
     // below eye level, so that the gaze tilts down
     Vector pos(3);
     pos[0] = -0.3;
     pos[1] =  0.0;
     pos[2] =  0.0;
     igaze->lookAtFixationPoint(pos);
 }
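lookAtFixationPoint() is non-blocking, so look_down() returns before the head actually arrives. When the caller needs to wait, the controller's waitMotionDone()/checkMotionDone() calls (already used in Examples #25 and #30) can be added; a small sketch with a hypothetical helper name:

 void look_down_and_wait()
 {
     // same request as look_down(), but block until the gaze
     // movement is accomplished (poll every 0.1 s, give up after 5 s)
     Vector pos(3);
     pos[0] = -0.3;
     pos[1] =  0.0;
     pos[2] =  0.0;
     igaze->lookAtFixationPoint(pos);
     igaze->waitMotionDone(0.1, 5.0);
 }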
Example #13
    void gazeRest()
    {
        Vector ang(3);
        ang[0]=GAZE_REST_AZI;
        ang[1]=GAZE_REST_ELE;
        ang[2]=GAZE_REST_VER;

        igaze->lookAtAbsAngles(ang);
    }
Example #14
    void generateTarget()
    {   
        // translational target part: a circular trajectory
        // in the yz plane centered at [-0.3,-0.1,0.1] with radius=0.2 m
        // and frequency 0.1 Hz
//        xd[0]=-0.3;
//        xd[1]=-0.1+0.2*cos(2.0*M_PI*0.1*(t-t0));
//        xd[2]=+0.1+0.2*sin(2.0*M_PI*0.1*(t-t0));            
   
        // we keep the orientation of the left arm constant:
        // we want the middle finger to point forward (end-effector x-axis)
        // with the palm turned down (end-effector y-axis points leftward);
        // to achieve that it is enough to rotate the root frame of pi around z-axis
        od[0]=0.0; od[1]=0.0; od[2]=1.0; od[3]=M_PI;
        
        // target location obtained through vision:
        // pixel coordinates of the target coming from both cameras
        Bottle *target = incomingPort.read();
        Vector r(2), l(2);
        r[0] = target->get(0).asDouble();
        r[1] = target->get(1).asDouble();
        l[0] = target->get(3).asDouble();
        l[1] = target->get(4).asDouble();
        
        if(r[0]==0.0 && r[1]==0.0 && l[0]==0.0 && l[1]==0.0){
            xd[0]=-0.3;
            xd[1]=-.1;
            xd[2]=.1;
            
        }
        else{
            igaze->lookAtStereoPixels(l,r);
            igaze->triangulate3DPoint(l,r,xd);
        }
    }
Example #15
    // the motion-done callback
    virtual void gazeEventCallback()
    {
        Vector ang;
        igaze->getAngles(ang);

        fprintf(stdout,"Actual gaze configuration: (%s) [deg]\n",
                ang.toString(3,3).c_str());

        fprintf(stdout,"Moving the torso; check if the gaze is compensated ...\n");

        // move the torso yaw
        double val;
        ienc->getEncoder(0,&val);
        ipos->positionMove(0,val>0.0?-30.0:30.0);

        t4=t;
        
        // detach the callback
        igaze->unregisterEvent(*this);

        // switch state
        state=STATE_STILL;
    }
Example #16
    bool close()
    {
        igaze->restoreContext(startup_context_gaze);
        drvGaze.close();

        iarm->restoreContext(startup_context_arm);
        drvArm.close();

        imgLPortIn.close();
        imgRPortIn.close();
        imgLPortOut.close();
        imgRPortOut.close();
        rpcPort.close();
        return true;
    }
Example #17
    bool close()
    {
        handlerPort.close();
		objectsPort.close();

		igaze->stopControl();
		igaze->restoreContext(startupGazeContextID);
		igaze->deleteContext(startupGazeContextID);
		igaze->deleteContext(currentGazeContextID);

		if (clientGazeCtrl.isValid())
			clientGazeCtrl.close();

		icartLeft->stopControl();
		icartLeft->restoreContext(startupArmLeftContextID);
		icartLeft->deleteContext(startupArmLeftContextID);
		icartLeft->deleteContext(currentArmLeftContextID);

		if (clientArmLeft.isValid())
			clientArmLeft.close();

		icartRight->stopControl();
		icartRight->restoreContext(startupArmRightContextID);
		icartRight->deleteContext(startupArmRightContextID);
		icartRight->deleteContext(currentArmRightContextID);

		if (clientArmRight.isValid())
			clientArmRight.close();

		itorsoVelocity->stop();
		
		if (clientTorso.isValid())
			clientTorso.close();
		
        return true;
    }
Example #18
    void printStatus()
    {        
        if (t-t1>=PRINT_STATUS_PER)
        {
            // we get the current fixation point in the
            // operational space
            Vector x;
            igaze->getFixationPoint(x);

            fprintf(stdout,"+++++++++\n");
            fprintf(stdout,"fp         [m] = (%s)\n",fp.toString(3,3).c_str());
            fprintf(stdout,"x          [m] = (%s)\n",x.toString(3,3).c_str());
            fprintf(stdout,"norm(fp-x) [m] = %g\n",norm(fp-x));
            fprintf(stdout,"---------\n\n");

            t1=t;
        }
    }
Example #19
    void run()
    {
        Bottle *pTarget=port.read(false);
        if (pTarget!=NULL)
        {
            if (pTarget->size()>2)
            {
                if (pTarget->get(2).asInt()!=0)
                {
                    Vector px(2);
                    px[0]=pTarget->get(0).asDouble();
                    px[1]=pTarget->get(1).asDouble();

                    // track the moving target within
                    // the camera image
                    igaze->lookAtMonoPixel(0,px);   // 0: left image plane, 1: right image plane
                }
            }
        }
    }
Example #20
    bool lookForFaces()
    {
        yarp::sig::Vector pxl(2,0.0);
        yarp::sig::Vector pxr(2,0.0);

        while (true)
        {
            imageInR = imagePortInR -> read(false);
            imageInL = imagePortInL -> read(false);

            if (imageInL)
            {
                ImageOf<PixelBgr> imageOutL;
                detectorL->loop(imageInL,imageOutL);
                imagePortOutL.write(imageOutL);
            }

            if (imageInR)
            {
               ImageOf<PixelBgr> imageOutR;
               detectorR->loop(imageInR,imageOutR);
               imagePortOutR.write(imageOutR);
            }
            printMessage(2,"counterL: %i counterR: %i\n",detectorL->counter,detectorR->counter);
            if (detectorL->counter > certainty &&
                detectorR->counter > certainty)
            {
                pxl=detectorL->getCenterOfFace();
                pxr=detectorR->getCenterOfFace();
                printMessage(1,"I have found a face! pxl: %s pxr: %s\n",pxl.toString().c_str(),pxr.toString().c_str());
                if (pxl(0)!=0.0 && pxr(0)!=0.0)
                {
                    igaze->lookAtStereoPixels(pxl,pxr); 
                }
                return true;
            }
            Time::delay(0.1);
        }

        return false;
    }
Example #21
    void storeInterestingPoint()
    {
        if (t-t3>=STORE_POI_PER)
        {
            Vector ang;

            // we store the current azimuth, elevation
            // and vergence wrt the absolute reference frame
            // The absolute reference frame for the azimuth/elevation couple
            // is head-centered with the robot in rest configuration
            // (i.e. torso and head angles zeroed). 
            igaze->getAngles(ang);            

            fprintf(stdout,"Storing POI #%d ... %s [deg]\n",
                    poiList.size(),ang.toString().c_str());

            poiList.push_back(ang);

            t3=t;
        }
    }
Example #22
 void armTrack()
 {
     if (arm!="none")
     {
         if (ARMISTRACKING(armCmdState))
         {
             Vector fp;
             igaze->getFixationPoint(fp);
 
             if (fp[0]>REACH_X_MAX)
                 fp[0]=REACH_X_MAX;
 
             fp[2]+=REACH_OFFS_Z;
 
             iarm->goToPosition(fp);
 
             fprintf(stdout,"Reaching for: (%.1f,%.1f,%.1f) m\n",fp[0],fp[1],fp[2]);
         }
         else
             armCmdState++;
     }
 }
Example #23
    virtual void threadRelease()
    {        
        armRest();
        gazeRest();

        igaze->restoreContext(startup_gazeContext_id);
        clientGaze.close();

        if (arm!="none")
        {
            iarm->restoreContext(startup_armContext_id);
            clientArm.close();
        }

        portImgIn.interrupt();
        portImgOut.interrupt();
        portTopDown.interrupt();
        portSetFace.interrupt();

        portImgIn.close();
        portImgOut.close();
        portTopDown.close();
        portSetFace.close();
    }
Example #24
    bool close()
    {
        iarm->stopControl();
        iarm->restoreContext(startup_context);
        drvCart.close();

        ivel->stop(joints.size(),joints.getFirst());
        for (size_t i=0; i<modes.size(); i++)
            modes[i]=VOCAB_CM_POSITION;
        imod->setControlModes(joints.size(),joints.getFirst(),modes.getFirst());
        drvHand.close();

        if (simulator)
        {
            Bottle cmd,reply;
            cmd.addString("world");
            cmd.addString("del");
            cmd.addString("all");
            simPort.write(cmd,reply);

            simPort.close();
        }

        if (gaze)
        {
            igaze->stopControl();
            drvGaze.close();
        }

        igeo->stopFeedback();
        igeo->setTransformation(eye(4,4));
        drvGeomagic.close();
        forceFbPort.close();

        return true;
    }
Example #25
	virtual void run() {

		while (!isStopping()) {

			/* poll the click ports containers to see if we have left/right ready to go */
			bfL.lock();	bfR.lock();
			if (bfL.size() == 2 && bfR.size() == 2) {

				printf("got a hit!\n");

				/* if they are, raise the flag that action is beginning, save current joint configuration */
				Bottle susmsg;
				susmsg.addInt(1);
				susPort->write(susmsg);

				//get the current joint configuration for the torso, head, and arm
				tang.clear(); tang.resize(3);
				tEnc->getEncoders(tang.data());
				hang.clear(); hang.resize(6);
				hEnc->getEncoders(hang.data());
				aang.clear(); aang.resize(16);
				aEnc->getEncoders(aang.data());

				/* get the xyz location of the gaze point */
				Vector bvL(2); Vector bvR(2);
				bvL[0] = bfL.get(0).asDouble(); bvL[1] = bfL.get(1).asDouble();
				bvR[0] = bfR.get(0).asDouble(); bvR[1] = bfR.get(1).asDouble();
				objPos.clear(); objPos.resize(3);
				gaze->triangulate3DPoint(bvL,bvR,objPos);

				/* servo the head to gaze at that point */
				//gaze->lookAtStereoPixels(bvL,bvR);
				gaze->lookAtFixationPoint(objPos);
				gaze->waitMotionDone(1.0,10.0);
				gaze->stopControl();

				printf("object position estimated as: %f, %f, %f\n", objPos[0], objPos[1], objPos[2]);
				printf("is this ok?\n");
				string posResp = Network::readString().c_str();

				if (posResp == "yes" || posResp == "y") {

					/* move to hover the hand over the XY position of the target: [X, Y, Z=0.1], with palm upright */
					objPos[2] = 0.1;

					Vector axa(4);
					axa.zero();
					if (armInUse) {
						axa[2] = 1.0; axa[3] = M_PI;
					} else {
						axa[1] = 1.0; axa[3] = M_PI;
					}

					carm->goToPoseSync(objPos,axa);
					carm->waitMotionDone(1.0,10.0);
					Time::delay(2.0);

					//curl fingers and thumb slightly to hold object
					Vector armCur(16);
					aEnc->getEncoders(armCur.data());
					armCur[8] = 3; armCur[10] = 25;
					armCur[11] = 25; armCur[12] = 25;
					armCur[13] = 25; armCur[14] = 25;
					armCur[15] = 55;
					aPos->positionMove(armCur.data());
					Time::delay(2.0);

					/* wait for terminal signal from user that object has been moved to the hand */
					bool validTarg = false;
					printf("object position reached, place in hand and enter target xy position\n");
					while (!validTarg) {

						string objResp = Network::readString().c_str();

						/* ask the user to enter in an XY target location, or confirm use of previous one */
						Bottle btarPos(objResp.c_str());
						if (btarPos.size() < 2) {

							//if user enters no target position, try to use last entered position
							if (targetPos.length() != 3) {
								printf("no previous target position available, please re-enter:\n");
							} else {
								validTarg = true;
							}

						} else {

							targetPos.clear(); targetPos.resize(3);
							targetPos[0] = btarPos.get(0).asDouble();
							targetPos[1] = btarPos.get(1).asDouble();
							targetPos[2] = 0.1;
							validTarg = true;

						}
					}

					/* move the arm to above the target location */
					axa.zero();
					if (armInUse) {
						axa[2] = 1.0; axa[3] = M_PI;
					} else {
						axa[1] = 1.0; axa[3] = M_PI;
					}
					carm->goToPoseSync(targetPos,axa);
					//carm->goToPosition(targetPos);
					carm->waitMotionDone(1.0,10.0);
					Time::delay(2.0);

					/* wait for user signal that the object has been removed */
					printf("object has been moved to target location. please remove object and hit enter\n");
					string tarResp = Network::readString().c_str();


				}
				/* return to saved motor configuration, clear click buffers, lower flag signaling action done */

				printf("gaze done, attempting reset\n");
				tPos->positionMove(tang.data());
				hPos->positionMove(hang.data());
				aPos->positionMove(aang.data());

				bfL.clear(); bfR.clear();
				bfL.unlock(); bfR.unlock();

				susmsg.clear();
				susmsg.addInt(0);
				susPort->write(susmsg);


			}
			else {

				bfL.unlock(); bfR.unlock();

			}
		}
	}
Example #26
	virtual bool threadInit() {


		/* Read in parameters from resource finder */
		loadParams();

		/* Start up gaze control client interface */
		Property option("(device gazecontrollerclient)");
		option.put("remote","/iKinGazeCtrl");
		option.put("local","/client/gaze");
		clientGazeCtrl.open(option);
		gaze=NULL;
		if (clientGazeCtrl.isValid()) {
			clientGazeCtrl.view(gaze);
		} else {
			printf("could not initialize gaze control interface, failing...\n");
			return false;
		}

		//set gaze control interface params
		gaze->setNeckTrajTime(neckTT);
		gaze->setEyesTrajTime(eyeTT);

		gaze->bindNeckPitch(-30,30);
		gaze->bindNeckYaw(-25,25);
		gaze->bindNeckRoll(-10,10);

		/* Start up arm cartesian control client interface */
		Property optiona("(device cartesiancontrollerclient)");
		if (armInUse) {
			string tmpcrname = "/" + robotname + "/cartesianController/right_arm";
			optiona.put("remote",tmpcrname.c_str());
			optiona.put("local","/cartesian_client/right_arm");
		}
		else {
			string tmpcrname = "/" + robotname + "/cartesianController/left_arm";
			optiona.put("remote",tmpcrname.c_str());
			optiona.put("local","/cartesian_client/left_arm");
		}

		clientArmCart.open(optiona);
		carm = NULL;
		if (clientArmCart.isValid())
		{
			clientArmCart.view(carm);
		} else {
			printf("could not initialize arm cartesian control interface, failing...\n");
			return false;
		}

		// set trajectory time
		carm->setTrajTime(trajTime);	//slow for safety

		// get the torso dofs
		Vector newDof, curDof;
		carm->getDOF(curDof);
		newDof=curDof;

		//enable torso pitch and yaw, also wrist movements
		newDof[0]=1;
		newDof[1]=0;
		newDof[2]=1;
		newDof[7]=1;
		newDof[8]=1;
		newDof[9]=1;

		// impose some restriction on the torso pitch
		double tpmin, tpmax;
		carm->getLimits(0,&tpmin,&tpmax);
		carm->setLimits(0,tpmin,maxPitch);

		// send the request for dofs reconfiguration
		carm->setDOF(newDof,curDof);


		string lname, rname;
		Property doption;
		doption.put("device", "remote_controlboard");

		lname = "/"+name+"/torso"; rname = "/"+robotname+"/torso";
		doption.put("local", lname.c_str());
		doption.put("remote", rname.c_str());

		robotTorso.open(doption);
		if (!robotTorso.isValid()) {
			printf("could not initialize torso control interface, failing...\n");
			return false;
		}
		robotTorso.view(tPos); robotTorso.view(tEnc);

		lname = "/"+name+"/head"; rname = "/"+robotname+"/head";
		doption.put("local", lname.c_str());
		doption.put("remote", rname.c_str());

		robotHead.open(doption);
		if (!robotHead.isValid()) {
			printf("could not initialize head control interface, failing...\n");
			return false;
		}
		robotHead.view(hPos); robotHead.view(hEnc);

		lname = "/"+name+"/"+armname+"_arm"; rname = "/"+robotname+"/"+armname+"_arm";
		doption.put("local", lname.c_str());
		doption.put("remote", rname.c_str());

		robotArm.open(doption);
		if (!robotArm.isValid()) {
			printf("could not initialize arm control interface, failing...\n");
			return false;
		}
		robotArm.view(aPos); robotArm.view(aEnc);

		for (int i=0; i<7; i++) {
			aPos->setRefSpeed(i, 10.0);
		}

		/* Create and open ports */
		cportL=new ClickPort(bfL);
		string cplName="/"+name+"/clk:l";
		cportL->open(cplName.c_str());
		cportL->useCallback();

		cportR=new ClickPort(bfR);
		string cprName="/"+name+"/clk:r";
		cportR->open(cprName.c_str());
		cportR->useCallback();

		susPort=new Port;
		string suspName="/"+name+"/sus:o";
		susPort->open(suspName.c_str());

		return true;

	}
Example #27
    bool updateModule()
    {
        ImageOf<PixelRgb> *imgIn=imgInPort.read(true);
        if (imgIn==NULL)
            return false;

        ImageOf<PixelRgb> &imgOut=imgOutPort.prepare();
        imgOut=*imgIn;

        cv::Mat img=cv::cvarrToMat(imgOut.getIplImage());

        Vector xa,oa;
        iarm->getPose(xa,oa);

        Matrix Ha=axis2dcm(oa);
        xa.push_back(1.0);
        Ha.setCol(3,xa);

        Vector pc;
        igaze->get2DPixel(camSel,xa,pc);

        cv::Point point_c((int)pc[0],(int)pc[1]);
        cv::circle(img,point_c,4,cv::Scalar(0,255,0),4);

        Vector analogs,encs(nEncs),joints;
        if (ianalog!=NULL)
            ianalog->read(analogs);
        iencs->getEncoders(encs.data());

        for (int i=0; i<3; i++)
        {
            if (ianalog!=NULL)
                finger[i].getChainJoints(encs,analogs,joints);
            else
                finger[i].getChainJoints(encs,joints);
            finger[i].setAng(CTRL_DEG2RAD*joints);
        }

        for (int fng=0; fng<3; fng++)
        {
            deque<cv::Point> point_f;
            for (int i=-1; i<(int)finger[fng].getN(); i++)
            {
                Vector fc;
                igaze->get2DPixel(camSel,Ha*(i<0?finger[fng].getH0().getCol(3):
                                                 finger[fng].getH(i,true).getCol(3)),fc);
                point_f.push_front(cv::Point((int)fc[0],(int)fc[1]));
                cv::circle(img,point_f.front(),3,cv::Scalar(0,0,255),4);

                if (i>=0)
                {
                    cv::line(img,point_f.front(),point_f.back(),cv::Scalar(255,255,255),2);
                    point_f.pop_back();
                }
                else
                    cv::line(img,point_c,point_f.front(),cv::Scalar(255,0,0),2);
            }
        }

        imgOutPort.writeStrict();
        return true;
    }
Example #28
    /* 
    * Configure function. Receive a previously initialized
    * resource finder object. Use it to configure your module.
    * Open port and attach it to message handler.
    */
    bool configure(yarp::os::ResourceFinder &rf)
    {
/*
        list.push_back("The market is open.");          // 4
        list.push_back("The orange is sweet.");         // 4
        list.push_back("I love to play guitar.");       // 5
        list.push_back("He is in the kitchen.");        // 5
        list.push_back("I can speak English.");         // 4
        list.push_back("He is going home.");            // 4
        list.push_back("Tom is a funny man.");          // 5
        list.push_back("I have three apples.");         // 4

        list.push_back("The music; was good.");         // 4
        list.push_back("I am not a robot.");            // 5
        list.push_back("She is very pretty.");          // 4
        list.push_back("This song is great.");          // 4
        list.push_back("My friend has a horse.");       // 5
        list.push_back("The apple tasted good.");       // 4
        list.push_back("I like red flowers.");          // 4
        list.push_back("My horse runs very fast.");     // 5

        list.push_back("I have a nice house.");         // 5
        list.push_back("Jill wants a doll.");           // 4
        list.push_back("I have a computer.");           // 4
        list.push_back("Tom is stronger than Dan.");    // 5
        list.push_back("We, sing; a song.");            // 4
        list.push_back("I was very happy yesterday.");  // 5
        list.push_back("He eats white bread.");         // 4
        list.push_back("Jack wants a toy.");            // 4

        list.push_back("She is in the shower.");        // 5
        list.push_back("The baby plays with toys.");    // 5
        list.push_back("She is a teacher.");            // 4
        list.push_back("I have a nice box.");           // 5
        list.push_back("You look very happy.");         // 4
        list.push_back("The baby fell asleep.");        // 4
        list.push_back("They are; my friends.");        // 4
        list.push_back("I went to school.");            // 4
*/

	english = 1;
	if(english)
	{
		list.push_back("Benvenuti.");
		list.push_back("Io sono AICAB.");
		list.push_back("Sono felice di vederti.");
		list.push_back("Ciao.");
		list.push_back("Come va?");
		list.push_back("Questa e casa mia.");
		list.push_back("Io sono un robot.");
		list.push_back("Ciao.");

/*		list.push_back("Hi, how are you?");
		list.push_back("Hello.");
		list.push_back("I'm looking at you.");
		list.push_back("How is it going?");
		list.push_back("Hello, I'm eye cub.");
		list.push_back("Welcome to my home.");
		list.push_back("Hi there.");
		list.push_back("Hello.");
*/
/*		list.push_back("We sing a song.");
		list.push_back("I was very happy yesterday.");
		list.push_back("I have a computer.");
		list.push_back("Jill wants a doll.");
		list.push_back("He eats white bread.");
		list.push_back("I love to play guitar.");
		list.push_back("He is in the kitchen.");
		list.push_back("She has a nice bike.");

		list.push_back("I went to school.");
		list.push_back("Tom is stronger than Dan.");
		list.push_back("She is a teacher.");
		list.push_back("I like red flowers.");
		list.push_back("The music was good.");
		list.push_back("The apple tasted good.");
		list.push_back("You look very happy.");
		list.push_back("I have three apples.");

		list.push_back("Jack wants a toy.");
		list.push_back("The baby plays with toys.");
		list.push_back("I have a nice box.");
		list.push_back("This song is great.");
		list.push_back("Tom is a funny man.");
		list.push_back("My friend has a horse.");
		list.push_back("The baby fell asleep.");
		list.push_back("I can speak English.");

		list.push_back("I am not a robot.");
		list.push_back("My horse runs very fast.");
		list.push_back("They are going home.");
		list.push_back("She is very pretty.");
		list.push_back("The market is open.");
		list.push_back("She is in the shower.");
		list.push_back("They are; my friends.");
		list.push_back("The apple is sweet.");*/
	}
	else
	{
		list.push_back("Il bar è aperto.");
		list.push_back("Mi piace molto ballare."); 
		list.push_back("Non bevo il caffè.");
		list.push_back("Marco ha tanti cani.");
		list.push_back("Il mare è calmo.");
		list.push_back("Il cane abbaia spesso.");
		list.push_back("Sono allergico al latte.");
		list.push_back("La sedia è in camera.");

		list.push_back("La luce è rossa.");
		list.push_back("Lui ha sempre ragione.");
		list.push_back("Giovedí scorso era festa.");
		list.push_back("Oggi splende il sole.");
		list.push_back("Ho comprato il pane.");
		list.push_back("Guido tutti i giorni.");
		list.push_back("Vado spesso al mare.");
		list.push_back("La notte è buia.");

		list.push_back("Mi piace il gelato.");
		list.push_back("Devo scrivere un tema.");
		list.push_back("Lei ama cucire.");
		list.push_back("Io ballo la polka.");
		list.push_back("Gino canta molto bene.");
		list.push_back("Vado a dormire presto.");
		list.push_back("La lettera è firmata.");
		list.push_back("Gioco spesso a carte.");

		list.push_back("Mi piace l'opera.");
		list.push_back("Ho mangiato le mele.");
		list.push_back("Sua nonna sta bene.");
		list.push_back("Ho una maglia blu.");
		list.push_back("Il piatto è sul tavolo.");
		list.push_back("La giornata è piovosa.");
		list.push_back("Il film dura due ore.");
		list.push_back("La bottiglia è piena.");

	}
        gazeCount=0;
        speech_counter=0;
		online=1;
        prev = Time::now();
        waittime=5.0;
        
        withgaze=1;
        prevyes=0;
        state=1;
        order=2;
        initstate=1;
        prevStr = "quiet";
        donespeaking=1;
        wordnumber=0;
	charnumber=0;
        prevgaze=0;
        facecounter=0;
        gazelength=0.0;
	gazelen=0.0;
        motorson=1;
        firstspeech=0;

        if(motorson==1)
        {
            Property optGaze("(device gazecontrollerclient)");
            optGaze.put("remote","/iKinGazeCtrl");
            optGaze.put("local","/icub_eyetrack/gaze");
                printf("\nHello.\n");
            if (!clientGaze.open(optGaze))
            {
                printf("\nGAZE FAILED\n");
                return false;
            }
            else
                printf("\nGAZE OPEN\n");

            clientGaze.view(igaze);
	    igaze->blockNeckRoll(0.0);
            igaze->setNeckTrajTime(0.8);
            igaze->setEyesTrajTime(0.4);
	    yarp::sig::Vector azelr(3);
///	    azelr[0] = -30.0;
///	    azelr[1] = 0.0;
	    azelr[0] = 0.0;
	    azelr[1] = 20.0;
	    azelr[2] = 0.0;
            igaze->lookAtAbsAngles(azelr);
        }
        handlerPort.open("/dictationcontroller");
        gazeIn.open( "/dictationcontroller/gaze:i" );
        speechOut.open( "/dictationcontroller/speech:o" );
        logOut.open( "/dictationcontroller/log:o" );
        attach(handlerPort);
        Network::connect("/dlibgazer/out", "/dictationcontroller/gaze:i");
        Network::connect("/dictationcontroller/speech:o", "/iSpeak");
        speechStatusPort.open("/dictationcontroller/iSpeakrpc");
        Network::connect("/dictationcontroller/iSpeakrpc", "/iSpeak/rpc");

	frame_counter=0;
        cout<<"Done configuring!" << endl;
        return true;
    }
Example #29
    bool updateModule()
    {
        now = Time::now();
        double dT = now - prev;

        Bottle ispeakreply, ispeakcmd;
        ispeakcmd.add("stat");
        speechStatusPort.write(ispeakcmd, ispeakreply);
        if (prevStr=="quiet" && ispeakreply.toString()=="speaking")         // starting to speak
        {
            if(firstspeech)     // there was no previous writing
            {
                gazelength=0.0;
             //   prevgazetime = Time::now();
            }
            else  // sum up previous writing
            {
                if(prevgaze)
                {
                    gazelength = gazelength + Time::now() - gazestart;
                    gazestart = Time::now();
//                    gazelength = gazelength + Time::now() - prevgazetime;
//                    prevgazetime = Time::now();
                }
                stringstream logstream1;
                logstream1 << "Gaze time is: " << gazelength;
                log(now, logstream1.str());
                gazelength=0.0;
            }
            log(now,"Starting to speak.");
            firstspeech=0;
        }
        if (prevStr=="speaking" && ispeakreply.toString()=="quiet")         // starting to write
        {
            if(prevgaze)
            {
                gazelength = gazelength + Time::now() - gazestart;
                gazestart = Time::now();
                //                gazelength = gazelength + Time::now() - prevgazetime;
            }
            stringstream logstream1;
            logstream1 << "Gaze time is: " << gazelength;
            log(now, logstream1.str());
//            log(now,"Done speaking.");
            log(now,"Starting to write.");
            gazelength=0.0;
        }

        Bottle* out=gazeIn.read(false);
        Bottle mutgaze;
        if(out!=NULL)                               // is there gaze information
        {
            //cerr << out->toString() << endl;
            Bottle& faces = out->findGroup("faces");
            //cerr << faces.toString() << endl;
            Bottle& face = faces.findGroup("face");
            mutgaze = face.findGroup("mutualgaze");
            Bottle& facerect = face.findGroup("facerect");
            int facex = facerect.get(1).asInt();
            int facey = facerect.get(2).asInt();
            int faceheight = facerect.get(3).asInt();
            int facewidth = facerect.get(4).asInt();
            int facecenterx = facex + facewidth+10;
            int facecentery = facey + faceheight;

            if(faceheight>0)
            {
                facecounter++;
                if(facecounter==30)                // the robot updates its gaze target periodically (here every 30 frames)
                {
                    facecounter=0;
                    Vector face_center;
                    face_center.resize(2);
                    face_center[0]=facecenterx;
                    face_center[1]=facecentery-60;
                    if(frame_counter % 20 == 0)
                    {
                        if(motorson)
                        {
				cout << "Face center: " << face_center[0] << " " << face_center[1] << endl;
                            igaze->lookAtMonoPixel(0,face_center);
                        }
                    }
                }
            }
        }

        if(state==1 || state==4 || state==7 || state==10)
        {
            if(dT>13.0 && initstate==1)
            {
                if(english)
                {
                    saythis("Hello I'm eye cub.");
                    initstate=1;
                    prev = Time::now();
                    state++;
                    gazelength=0.0;
                }
///             saythis("Hello I'm eye cub. Today we will perform a dictation.");
                else
                    saythis("Ciao, sono aicab. Oggi faremo un dettato.");
                if(state==1 || state==10)
                {
                    if(order==1)
                    {
                        if(english)
                            saythis("This is procedure Alpha.");
                        else
                            saythis("Questa è la procedura Alpha.");
                        withgaze=0;
                    }
                    else
                    {
                        if(english)
                            saythis("This is procedure Beta.");
                        else
                            saythis("Questa è la procedura Beta.");
                        withgaze=1;
                    }

                }
                if(state==4 || state == 7)
                {
                    if(order==1)
                    {
                        if(english)
                            saythis("This is procedure Beta.");
                        else
                            saythis("Questa è la procedura Beta.");
                        withgaze=1;
                    }
                    else
                    {
                        if(english)
                            saythis("This is procedure Alpha.");
                        else
                            saythis("Questa è la procedura Alpha.");

                        withgaze=0;
                    }
                }
                initstate++;
                gazelength=0.0;
            }
            if(dT>22.0 && initstate==2)
            {
/*		if(english)
                	saythis("Please; write on the board the following sentences.");
		else                
			saythis("Per favore scriva sulla lavagna le seguenti frasi."); */
		if(english)
			saythis("Please, take your pen and be ready to write down the following sentences.");
		else
			saythis("Per favore, prendi il pennarello e scrivi le frasi che ti detto.");
                initstate++;
                gazelength=0.0;
            }
            if(dT>32.0 && initstate==3)
            {
                initstate=1;
                prev = Time::now();
                state++;
                gazelength=0.0;
            }
            firstspeech=1;
        }

        if(state==3 || state==6 || state==9)        // waiting for experimenter's button press to continue with next stage
        {
            log(now, "Waiting.");
            std::cin.clear();
            std::fflush(stdin);
            std::cin.get ();    // get c-string
            log(now, "Done waiting.");
            prev = Time::now();
            state++;
            gazelength=0.0;
	    yarp::sig::Vector azelr(3);
///	    azelr[0] = -30.0;
///	    azelr[1] = 0.0;
	    azelr[0] = 0.0;
	    azelr[1] = 20.0;
	    azelr[2] = 0.0;
            igaze->lookAtAbsAngles(azelr);
        }

        if(state==2 || state==5 || state==8 || state==11)
        {
            if(withgaze)                            // procedure Beta - with gaze
            {
                if(prevyes || initstate==1)
                {
                    initstate++;

/*                    stringstream logstream2;
                    logstream2 << "Gaze time is: " << gazelength;
                    log(now, logstream2.str());
*/
                    std::string hullo;
                    hullo = list.back();
                    list.pop_back();
                    saythis(hullo);
///		    saythis("Hi. How are you?");
                    prev = Time::now();
                    prevyes=0;
                }
            }
            else                                // procedure Alpha - without gaze
            {
                if (prevStr=="speaking" && ispeakreply.toString()=="quiet")
                {
                    donespeaking=1;
//                    waittime = 2.6*(wordnumber);
//                    waittime = 2.1*(wordnumber);
		    waittime = 0.55 * charnumber;          // 0.55 = 2.4
                    stringstream logstream;
                    logstream << "Wait time is: " << waittime;
                    log(now, logstream.str());
                    prev = Time::now();
                    dT = now - prev;
                }
                std::string hullo;
                if((donespeaking && dT>waittime) || initstate==1)
                {
                    initstate++;

/*                    stringstream logstream1;
                    logstream1 << "Gaze time is: " << gazelength;
                    log(now, logstream1.str());
*/

                    if(initstate==10)
                     {
                         state++;

                         stringstream logstream1;
                         logstream1 << "Gaze time is: " << gazelength;
                         log(now, logstream1.str());

                         gazelength=0.0;
			 if(english)
                         	saythis("End of procedure Alpha.");
			 else
				saythis("Fine della procedura Alpha.");
                         initstate=1;
                         prev = Time::now();
                     }
                     else
                     {
                        hullo = list.back();
                        wordnumber = std::count(hullo.begin(), hullo.end(), ' ')+1;
 			charnumber = hullo.length() - std::count(hullo.begin(), hullo.end(), '.');
                        list.pop_back();
                        saythis(hullo);
                        donespeaking=0;
                     }
                }
            }

            if(out!=NULL)                   // if message received from gaze module
            {
                int ismutualgaze = mutgaze.get(1).asInt();
                if(ismutualgaze==1 && prevgaze==0)
                {
                    log(now, "glance on");
                    gazestart = Time::now();
                }
                if(ismutualgaze==0 && prevgaze==1)
                {

                    log(now, "glance off");
                    gazelength = gazelength + Time::now() - gazestart;
                }
                prevgaze=ismutualgaze;
                if (ismutualgaze)
                {
                    if(withgaze && prevyes==0 && dT>3.0)
                    {
                        gazeCount++;
//                        if(gazeCount==10 && prevyes==0)
			gazelen = Time::now() - gazestart;
                        if(gazelen>0.15 && prevyes==0)
                        {
//                            log(now, "Gaze event.");
                            stringstream logstream1;
                            logstream1 << "Trigger gaze length: " << gazelen;
                            log(now, logstream1.str());

                            gazeCount=0;
                            prevyes = 1;
                            if(initstate==9)
                            {
//                                state++;

                                stringstream logstream1;
                                logstream1 << "Gaze time is: " << gazelength;
                                log(now, logstream1.str());

                                gazelength=0.0;
                                initstate=1;
				cout << "Blah" << endl; 
		list.push_back("Benvenuti.");
		list.push_back("Io sono AICAB.");
		list.push_back("Sono felice di vederti.");
		list.push_back("Ciao.");
		list.push_back("Come va?");
		list.push_back("Questa e casa mia.");
		list.push_back("Io sono un robot.");
		list.push_back("Ciao.");
/*				if(english)
                                	saythis("End of procedure Beta");
				else
					saythis("Fine della procedura Beta.");
                                prev = Time::now();
                                if(order==1)
                                    withgaze=0;
                                else
                                    withgaze=1;*/
                            }
                        }
                    }
                }
            }

        }
        prevStr = ispeakreply.toString();
        return true;
    }
Example #30
	virtual void run(){
		tmp = command;
		(*command)[0] = -60*(gsl_rng_uniform(r));
		(*command)[1] = 100*(gsl_rng_uniform(r));
		(*command)[2] = -35 + 95*(gsl_rng_uniform(r));
		(*command)[3] = 10 + 90*(gsl_rng_uniform(r));
		printf("%.1lf %.1lf %.1lf %.1lf\n", (*command)[0], (*command)[1], (*command)[2], (*command)[3]);
		//above 0 doesn't seem to be safe for joint 0
		if ((*command)[0] > 0 || (*command)[0] < -60){
			(*command)[0] = (*tmp)[0];
		}
		if ((*command)[1] > 100 || (*command)[1] < 0){
			(*command)[1] = (*tmp)[1];
		}
		if ((*command)[2] > 60 || (*command)[2] < -35){
			(*command)[2] = (*tmp)[2];
		}
		if ((*command)[3] > 100 || (*command)[3] < 10){
			(*command)[3] = (*tmp)[3];
		}
		//use fwd kin to find end effector position
		Bottle plan, pred;
		for (int i = 0; i < nj; i++){
			plan.add((*command)[i]);
		}
		armPlan->write(plan);
		armPred->read(pred);
		Vector commandCart(3);
		for (int i = 0; i < 3; i++){
			commandCart[i] = pred.get(i).asDouble();
		}
		printf("Cartesian safety check\n");
		double rad = sqrt(commandCart[0]*commandCart[0]+commandCart[1]*commandCart[1]);
		// safety radius back to 30 cm
		if (rad > 0.3){
			pos->positionMove(command->data());
			bool done = false;
			while(!done){
				pos->checkMotionDone(&done);
				Time::delay(0.1);
			}
			printf("Moved arm to new location\n");
			Vector &armJ = armLocJ->prepare();
			Vector encoders(nj);
			enc->getEncoders(encoders.data());
			armJ = encoders;
			Vector noisyArm(3);
			for(int i = 0; i < 3; i++){
				//noisyArm[i] = commandCart[i] + 0.01*(2*gsl_rng_uniform(r)-1);
				//sanity check
				noisyArm[i] = commandCart[i] + 0.005*(2*gsl_rng_uniform(r)-1);
			}
			//insert here:
			//read off peak saliences
			//fixate there
			//calculate cartesian value, compare to real cart. value of arm


			printf("Looking at arm\n");
			igaze->lookAtFixationPoint(noisyArm);
			done = false;
			while(!done){
				igaze->checkMotionDone(&done);
				Time::delay(0.5);
			}
			//igaze->waitMotionDone(0.1,30);

			printf("Saw arm\n");

			Vector &headAng = headLoc->prepare();
			igaze->getAngles(headAng);
			Bottle tStamp;
			tStamp.clear();
			tStamp.add(Time::now());
			headLoc->setEnvelope(tStamp);


			headLoc->write();
			armLocJ->write();
			headLoc->unprepare();
			armLocJ->unprepare();

		}
		else{
			printf("Self collision detected!\n");
		}
	}