Example #1
0
fakestdbool_t feat_manage_skin_data(FEAT_boardnumber_t boardnum, eOprotID32_t id32, void *arrayofcandata)
{   
    static int error = 0;
    IethResource* skin;
    ethFeatType_t type;

    if (_interface2ethManager == NULL)
        return fakestdbool_false;

    bool ret = _interface2ethManager->getHandle(boardnum, id32, &skin, &type);

    if(!ret)
    {
        yDebug("EMS callback was unable to get access to the embObjSkin");
        return fakestdbool_false;
    }

    if(type != ethFeatType_Skin)
    {   // the ethmanager does not know this object yet or the dynamic cast failed because it is not an embObjSkin
        char nvinfo[128];
        eoprot_ID2information(id32, nvinfo, sizeof(nvinfo));
        if(0 == (error%1000) )
            yWarning() << "feat_manage_skin_data() received a request from BOARD" << boardnum << "with ID:" << nvinfo << "but no class was jet instatiated for it";

        error++;
        return fakestdbool_false;
    }
    else if(false == skin->initialised())
    {   // the ethmanager has the object, but the device was not fully opened yet. cannot use it
        return fakestdbool_false;
    }
    else
    {   // the object exists and is completed: it can be used
        skin->update(id32, yarp::os::Time::now(), (void *)arrayofcandata);
    }

    return fakestdbool_true;
}
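
As an aside, the modulo throttle above (one warning per 1000 hits of the error counter) can be distilled into a standalone helper. The sketch below is illustrative only; throttledWarning is a hypothetical name, not part of the embObj code.

#include <cstdio>

// Illustrative sketch of the rate-limited warning pattern used above:
// emit at most one message every 'every' occurrences.
static void throttledWarning(const char *msg, int every = 1000)
{
    static int counter = 0;               // occurrences seen so far
    if (0 == (counter % every))
        std::printf("WARNING: %s (occurrence %d)\n", msg, counter);
    counter++;
}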
Example #2
0
bool ears::configure(yarp::os::ResourceFinder &rf)
{
    string moduleName = rf.check("name", Value("ears")).asString().c_str();
    setName(moduleName.c_str());

    yInfo() << moduleName << " : finding configuration files...";
    period = rf.check("period", Value(0.1)).asDouble();

    //Create an iCub Client and check that all dependencies are here before starting
    bool isRFVerbose = false;
    iCub = new ICubClient(moduleName, "ears", "client.ini", isRFVerbose);
    iCub->opc->isVerbose = false;
    if (!iCub->connect())
    {
        yInfo() << " iCubClient : Some dependencies are not running...";
        Time::delay(1.0);
    }
    rpc.open(("/" + moduleName + "/rpc").c_str());
    attach(rpc);

    portToBehavior.open("/" + moduleName + "/behavior:o");
    while (!Network::connect(portToBehavior.getName(),"/BehaviorManager/trigger:i")) {
        yWarning() << " Behavior is not reachable";
        yarp::os::Time::delay(0.5);
    }

    portTarget.open("/" + moduleName + "/target:o");

    MainGrammar = rf.findFileByName(rf.check("MainGrammar", Value("MainGrammar.xml")).toString());

    bShouldListen = true;

    yInfo() << "\n \n" << "----------------------------------------------" << "\n \n" << moduleName << " ready ! \n \n ";
    
    return true;
}
bool JointTorqueControl::setTorqueOffset(int j, double v)
{
    //yarp::os::LockGuard(this->controlMutex);
    yWarning("JointTorqueControl::setTorqueOffset not implemented");
    return false;
}
bool JointTorqueControl::enableTorquePid(int j)
{
    //yarp::os::LockGuard(this->controlMutex);
    yWarning("JointTorqueControl::enableTorquePid not implemented");
    return false;
}
Example #5
0
bool SpeechRecognizerModule::updateModule()
{
    cout<<".";
    USES_CONVERSION;
    CSpEvent event;

    // Process all of the recognition events
    while (event.GetFrom(m_cpRecoCtxt) == S_OK)
    {
        switch (event.eEventId)
        {
            case SPEI_SOUND_START:
                {
                    m_bInSound = TRUE;
				    yInfo() << "Sound in...";
                    break;
                }

            case SPEI_SOUND_END:
                if (m_bInSound)
                {
                    m_bInSound = FALSE;
                    if (!m_bGotReco)
                    {
                        // The sound has started and ended, 
                        // but the engine has not succeeded in recognizing anything
						yWarning() << "Chunk of sound detected: Recognition is null";
                    }
                    m_bGotReco = FALSE;
                }
                break;

            case SPEI_RECOGNITION:
                // There may be multiple recognition results, so get all of them
                {
                    m_bGotReco = TRUE;
                    static const WCHAR wszUnrecognized[] = L"<Unrecognized>";

                    CSpDynamicString dstrText;
                    if (SUCCEEDED(event.RecoResult()->GetText(SP_GETWHOLEPHRASE, SP_GETWHOLEPHRASE, TRUE, 
                                                            &dstrText, NULL)))
                    {
                        SPPHRASE* pPhrase = NULL;
                        bool successGetPhrase = SUCCEEDED(event.RecoResult()->GetPhrase(&pPhrase));
                        // guard against a failed GetPhrase() before dereferencing pPhrase
                        int confidence = (successGetPhrase && pPhrase) ? pPhrase->Rule.Confidence : 0;

                        string fullSentence = ws2s(dstrText);
                        yInfo() << "Recognized " << fullSentence << " with confidence " << confidence;
                        

                        //Send over yarp
                        Bottle bOut;
                        bOut.addString(fullSentence.c_str());
                        bOut.addInt(confidence);
                        m_portContinuousRecognition.write(bOut);

                        //Treat the semantic
                        if (successGetPhrase)
                        {
                            //Send sound
                            if (m_forwardSound)
                            {
                                yarp::sig::Sound& rawSnd = m_portSound.prepare();
                                rawSnd = toSound(event.RecoResult());
                                m_portSound.write();
                            }

                            //--------------------------------------------------- 1. 1st subBottle : raw Sentence -----------------------------------------------//
                            int wordCount = pPhrase->Rule.ulCountOfElements;
                            string rawPhrase = "";
                            for(int i=0; i< wordCount; i++){
                                rawPhrase += ws2s(pPhrase->pElements[i].pszDisplayText) + " ";
                                yDebug() << "word : " <<  ws2s(pPhrase->pElements[i].pszDisplayText) ;
                            }
                            yInfo() <<"Raw sentence: "<<rawPhrase ;
                            if (pPhrase->Rule.pszName == NULL)
                            {
                                yError() <<"Cannot parse the sentence!";
                                return true;
                            }
                            //--------------------------------------------------- 2. 2nd subottle : Word/Role ---------------------------------------------------//
                            Bottle bOutGrammar;
                            bOutGrammar.addString(rawPhrase.c_str());
                            bOutGrammar.addList()=toBottle(pPhrase,&pPhrase->Rule);
							yInfo() << "Sending semantic bottle : " << bOutGrammar.toString().c_str() ;
                            m_portContinuousRecognitionGrammar.write(bOutGrammar);
                            ::CoTaskMemFree(pPhrase);
                        }

                        if (m_useTalkBack)
                            say(fullSentence);
                    }
                }
                break;
        }
    }
    return true;
}
void embObjPrintWarning(char *string)
{
    yWarning("%s", string);
}
Example #7
0
bool AgentDetector::configure(ResourceFinder &rf)
{
    period = rf.check("period",Value(0.03)).asDouble();
    int verbosity=rf.check("verbosity",Value(0)).asInt();
    string name=rf.check("name",Value("agentDetector")).asString().c_str();
    useFaceRecognition = rf.check("useFaceRecognition");
    handleMultiplePlayers = rf.check("multiplePlayers");
    isMounted = !rf.check("isFixed");
    string show=rf.check("showImages",Value("false")).asString().c_str();
    showMode=rf.check("showMode",Value("rgb+depth+skeleton+players")).asString().c_str();
    dThresholdDisparition = rf.check("dThresholdDisparition",Value("3.0")).asDouble();    

    // initialise timing in case of misrecognition
    dTimingLastApparition = clock();

    //Open the OPC Client
    partner_default_name=rf.check("partner_default_name",Value("partner")).asString().c_str();

    string opcName=rf.check("opc",Value("OPC")).asString().c_str();
    opc = new OPCClient(name);
    dSince = 0.0;
    while (!opc->connect(opcName))
    {
        cout<<"Waiting connection to OPC..."<<endl;
        Time::delay(1.0);
    }
    opc->checkout();

    list<shared_ptr<Entity>> entityList = opc->EntitiesCacheCopy();
    for(auto e : entityList) {
        if(e->entity_type() == "agent" && e->name() != "icub") {
            partner_default_name = e->name();
        }
    }

    partner = opc->addOrRetrieveEntity<Agent>(partner_default_name);
    partner->m_present = 0.0;
    opc->commit(partner);

    //Retrieve the calibration matrix from RFH
    string rfhName=rf.check("rfh",Value("referenceFrameHandler")).asString().c_str();
    string rfhLocal = "/";
    rfhLocal+=name;
    rfhLocal+="/rfh:o";
    rfh.open(rfhLocal.c_str());

    string rfhRemote = "/";
    rfhRemote += rfhName ;
    rfhRemote += "/rpc";

    while (!Network::connect(rfhLocal.c_str(),rfhRemote.c_str()))
    {
        cout<<"Waiting connection to RFH..."<<endl;
        Time::delay(1.0);
    }

    isCalibrated=false;
    if(!checkCalibration())
        yWarning() << " ========================= KINECT NEED TO BE CALIBRATED ============================" ;

    string clientName = name;
    clientName += "/kinect";

    outputSkeletonPort.open(("/"+name+"/skeleton:o").c_str());
    depthPort.open( ("/"+clientName+"/depthPort:o").c_str());
    imagePort.open(("/"+clientName+"/imagePort:o").c_str());
    playersPort.open(("/"+clientName+"/playersPort:o").c_str());
    skeletonPort.open(("/"+clientName+"/skeletonPort:o").c_str());

    agentLocOutPort.open(("/"+clientName+"/agentLoc:o").c_str());

    Property options;
    options.put("carrier","tcp");
    options.put("remote","kinectServer");
    options.put("local",clientName.c_str());
    options.put("verbosity",verbosity);

    if (!client.open(options))
        return false;

    Property opt;
    client.getInfo(opt);

    showImages=(show=="true")?true:false;
    int xPos = rf.check("x",Value(10)).asInt();
    int yPos = rf.check("y",Value(10)).asInt();

    int img_width=opt.find("img_width").asInt();
    int img_height=opt.find("img_height").asInt();
    int depth_width=opt.find("depth_width").asInt();
    int depth_height=opt.find("depth_height").asInt();

    rgb.resize(img_width, img_height);
    depth.resize(depth_width,depth_height);
    depthToDisplay.resize(depth_width,depth_height);
    playersImage.resize(depth_width,depth_height);
    skeletonImage.resize(depth_width,depth_height);

    depthTmp=cvCreateImage(cvSize(depth_width,depth_height),IPL_DEPTH_32F,1);
    rgbTmp=cvCreateImage(cvSize(img_width,img_height),IPL_DEPTH_8U,3);

    if (showImages)
    {
        vector<bool> alreadyOpen(4,false); 
        string mode=showMode;
        string submode;
        while (!mode.empty())
        {
            if (showImageParser(mode,submode))
            {            
                if (submode=="rgb")
                {
                    if (!alreadyOpen[0])
                    {
                        int x_rgb=rf.check("x-rgb",Value(xPos)).asInt(); 
                        int y_rgb=rf.check("y-rgb",Value(yPos)).asInt();
                        yInfo("\"rgb\" window selected in (%d,%d)",x_rgb,y_rgb);

                        cvNamedWindow("rgb",CV_WINDOW_AUTOSIZE);
                        cvMoveWindow("rgb",x_rgb,y_rgb);
                        alreadyOpen[0]=true;
                    }
                    else
                        yWarning("\"rgb\" window already open");
                }
                else if (submode=="depth")
                {
                    if (!alreadyOpen[1])
                    {
                        int x_depth=rf.check("x-depth",Value(xPos+300)).asInt();
                        int y_depth=rf.check("y-depth",Value(yPos)).asInt();
                        yInfo("\"depth\" window selected in (%d,%d)",x_depth,y_depth);

                        cvNamedWindow("depth",CV_WINDOW_AUTOSIZE);
                        cvMoveWindow("depth",x_depth,y_depth);
                        cvSetMouseCallback("depth",AgentDetector::click_callback,
                                           (void*)depthToDisplay.getIplImage());
                        alreadyOpen[1]=true;
                    }
                    else
                        yWarning("\"depth\" window already open");
                }
                else if (submode=="skeleton")
                {
                    if (!alreadyOpen[2])
                    {
                        int x_skeleton=rf.check("x-skeleton",Value(xPos)).asInt(); 
                        int y_skeleton=rf.check("y-skeleton",Value(yPos+300)).asInt();
                        yInfo("\"skeleton\" window selected in (%d,%d)",x_skeleton,y_skeleton);

                        cvNamedWindow("skeleton",CV_WINDOW_AUTOSIZE);
                        cvMoveWindow("skeleton",x_skeleton,y_skeleton);
                        alreadyOpen[2]=true;
                    }
                    else
                        yWarning("\"skeleton\" window already open");
                }
                else if (submode=="players")
                {
                    if (!alreadyOpen[3])
                    {
                        int x_players=rf.check("x-players",Value(xPos+300)).asInt();
                        int y_players=rf.check("y-players",Value(yPos+300)).asInt();
                        yInfo("\"players\" window selected in (%d,%d)",x_players,y_players);

                        cvNamedWindow("players",CV_WINDOW_AUTOSIZE);
                        cvMoveWindow("players",x_players,y_players);
                        alreadyOpen[3]=true;
                    }
                    else
                        yWarning("\"players\" window already open");
                }
                else
                    yError("unrecognized show mode!");
            }
        }
    }

    //Initialise Face Recognizer
    if (useFaceRecognition)
    {    
        string faceRecognName = "/";
        faceRecognName+=name;
        faceRecognName+="/faceRecognizer:rpc";
        faceRecognizerModule.open(faceRecognName.c_str());

        string faceRecognResName = "/";
        faceRecognResName+=name;
        faceRecognResName+="/faceRecognizer/results:i";
        faceRecognizerModuleResults.open(faceRecognResName.c_str());

        // WARNING: Do not use getContextPath if that ever should be used again!
        //recognizer->loadTrainingSet(rf.getContextPath().c_str());
        //recognizer->Train();
        //cout<<"Loading recognizer: "
        //    <<recognizer->loadRecognizer("defaultFaces.tpc")<<endl;
    }
    else
    {
        //Load the skeletonPatterns
    }

    string rpcName = "/";
    rpcName+=name;
    rpcName+="/rpc";
    rpc.open(rpcName.c_str());
    attach(rpc);

    pointsCnt=0;
    return true;
}
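
For reference, the showMode string ("rgb+depth+skeleton+players") is consumed one submode at a time by showImageParser(), which is not shown here. A minimal, hypothetical tokenizer using the same '+' delimiter could look like this sketch (splitShowMode is an invented name, not the actual parser):

#include <sstream>
#include <string>
#include <vector>

// Illustrative sketch: split a showMode string such as
// "rgb+depth+skeleton+players" into its submodes.
static std::vector<std::string> splitShowMode(const std::string &mode)
{
    std::vector<std::string> submodes;
    std::istringstream iss(mode);
    std::string token;
    while (std::getline(iss, token, '+'))
        if (!token.empty())
            submodes.push_back(token);
    return submodes;
}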
Example #8
0
    void run()
    {
        mutex.wait();
        double current_time = yarp::os::Time::now();
        if (actions.current_status==ACTION_IDLE)
        {
            // do nothing
        }
        else if (actions.current_status == ACTION_STOP)
        {
            int nj = actions.action_vector[0].get_n_joints();
            actions.current_status = ACTION_IDLE;
        }
        else if (actions.current_status == ACTION_RESET)
        {
            int nj = actions.action_vector[0].get_n_joints();
            for (int j = 0; j < nj; j++)
            {
                driver->icmd_ll->setControlMode(j, VOCAB_CM_POSITION);
            }
            actions.current_status = ACTION_IDLE;
        }
        else if (actions.current_status == ACTION_RUNNING)
        {
            size_t last_action = actions.action_vector.size();
            if (last_action == 0)
            {
                yError("sequence empty!");
                actions.current_status=ACTION_RESET;
                return;
            }

            //if it's not the last action
            if (actions.current_action < last_action-1)
            {
                //if enough time is passed from the previous action
                //double duration = actions.action_vector[actions.current_action+1].time -
                //                  actions.action_vector[actions.current_action].time;
                double elapsed_time = actions.action_vector[actions.current_action].time;
                if (current_time-start_time > elapsed_time)
                {
                    //printf("%d %+6.6f \n", actions.current_action, current_time-start_time);
                    //last_time = current_time;
                    actions.current_action++;
                    compute_and_send_command(actions.current_action);
                    yDebug("Executing action: %4d/%4d", actions.current_action , last_action);
                    //printf("EXECUTING %d, elapsed_time:%.5f requested_time:%.5f\n", actions.current_action, current_time-last_time, duration);
                }
                else
                {
                    //printf("WAITING %d, elapsed_time:%.5f requested_time:%.5f\n", actions.current_action, current_time-last_time, duration);
                }
            }
            else
            {
                if (actions.forever==false)
                {
                    yInfo("sequence complete");
                    actions.current_status=ACTION_RESET;
                }
                else
                {
                    yInfo("sequence complete, restarting");
                    actions.current_action=0;
                    start_time = yarp::os::Time::now();
                }
            }
        }
        else if (actions.current_status==ACTION_START)
        {
            if (actions.action_vector.size()>0)
            {
                double *ll = actions.action_vector[0].q_joints;
                int nj = actions.action_vector[0].get_n_joints();
                for (int j = 0; j < nj; j++)
                {
                    driver->icmd_ll->setControlMode(j, VOCAB_CM_POSITION);
                }
                yarp::os::Time::delay(0.1);
                for (int j = 0; j < nj; j++)
                {
                    driver->ipos_ll->positionMove(j, ll[j]);
                }
                
                yInfo() << "going to to home position";
                double enc[50];
                int loops = 100;
                bool check = true;
                do
                {
                    check = true;
                    for (int j = 0; j < nj; j++)
                    {
                        driver->ienc_ll->getEncoder(j, &enc[j]);
                        double err = fabs(enc[j] - ll[j]);
                        check &= (err < 2.0);
                    }
                    yarp::os::Time::delay(0.1);
                    loops--;
                } while (!check && loops>0);

                if (check)
                {
                    yInfo() << "done";

                    for (int j = 0; j <nj; j++)
                    {
                        driver->icmd_ll->setControlMode(j, VOCAB_CM_POSITION_DIRECT);
                    }
                    yarp::os::Time::delay(0.1);
                    compute_and_send_command(0);

                    actions.current_status = ACTION_RUNNING;
                    start_time = yarp::os::Time::now();
                }
                else
                {
                    yError() << "unable to reach start position!";
                    if (0) 
                    {
                        //very strict behavior! if you are controlling fingers, you will probably end here
                        actions.current_status = ACTION_STOP;
                    }
                    else
                    {
                        for (int j = 0; j <nj; j++)
                        {
                            driver->icmd_ll->setControlMode(j, VOCAB_CM_POSITION_DIRECT);
                        }
                        yarp::os::Time::delay(0.1);
                        compute_and_send_command(0);

                        actions.current_status = ACTION_RUNNING;
                        start_time = yarp::os::Time::now();
                    }
                }
            }
            else
            {
                yWarning("no sequence in memory");
                actions.current_status=ACTION_STOP;
            }
        }
        else
        {
            yError() << "unknown current_status";
        }
        mutex.post();
    }
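
The sequencing rule in the ACTION_RUNNING branch above boils down to: advance to the next action once the time elapsed since the sequence started exceeds the current action's timestamp. Below is a standalone sketch of just that rule, with a hypothetical TimedAction type standing in for the action vector elements.

#include <cstdio>
#include <vector>

// Minimal sketch of the time-based sequencing rule used above.
struct TimedAction { double time; };   // seconds from sequence start

static size_t advanceAction(const std::vector<TimedAction> &actions,
                            size_t current, double start_time, double now)
{
    if (current + 1 < actions.size() &&
        now - start_time > actions[current].time)
    {
        ++current;
        std::printf("Executing action: %zu/%zu\n", current, actions.size());
    }
    return current;
}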
Example #9
0
bool ControlThread::threadInit()
{
    //open the joystick port
    port_joystick_control.open(localName + "/joystick:i");

    //try to connect to joystickCtrl output
    if (rf.check("joystick_connect"))
    {
        int joystick_trials = 0;
        do
        {
            yarp::os::Time::delay(1.0);
            if (yarp::os::Network::connect("/joystickCtrl:o", localName+"/joystick:i"))
            {
                yInfo("Joystick has been automatically connected");
                break;
            }
            else
            {
                yWarning("Unable to find the joystick port, retrying (%d/5)...",joystick_trials);
                joystick_trials++;
            }

            if (joystick_trials>=5)
            {
                yError("Unable to find the joystick port, giving up");
                break;
            }
        }
        while (1);
    }

    // open the control board driver
    yInfo("Opening the motors interface...\n");
    int trials = 0;
    double start_time = yarp::os::Time::now();
    Property control_board_options("(device remote_controlboard)");
    control_board_options.put("remote", remoteName.c_str());
    control_board_options.put("local", localName.c_str());

    do
    {
        double current_time = yarp::os::Time::now();

        //remove previously existing drivers
        if (control_board_driver)
        {
            delete control_board_driver;
            control_board_driver = 0;
        }

        //creates the new device driver
        control_board_driver = new PolyDriver(control_board_options);
        bool connected = control_board_driver->isValid();

        //check if the driver is connected
        if (connected) break;

        //check if the timeout (10s) is expired
        if (current_time - start_time > 10.0)
        {
            yError("It is not possible to instantiate the device driver. I tried %d times!", trials);
            if (control_board_driver)
            {
                delete control_board_driver;
                control_board_driver = 0;
            }
            return false;
        }

        yarp::os::Time::delay(0.5);
        trials++;
        yWarning("Unable to connect the device driver, trying again...");
    } while (true);

    control_board_driver->view(iDir);
    control_board_driver->view(iVel);
    control_board_driver->view(iEnc);
    control_board_driver->view(iPos);
    control_board_driver->view(iCmd);
    if (iDir == 0 || iEnc == 0 || iVel == 0 || iCmd == 0)
    {
        yError() << "Failed to open interfaces";
        return false;
    }

    yarp::os::Time::delay(1.0);
    iEnc->getEncoder(0, &enc_init_elong);
    iEnc->getEncoder(1, &enc_init_roll);
    iEnc->getEncoder(2, &enc_init_pitch);
    iVel ->setRefAcceleration (0,10000000);
    iVel ->setRefAcceleration (1,10000000);
    iVel ->setRefAcceleration (2,10000000);

    yDebug() << "Initial vals" << enc_init_elong << enc_init_roll << enc_init_pitch;
    return true;
}
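
The PolyDriver open loop above (retry every 0.5 s, give up after 10 s) is a generic retry-until-timeout pattern. The following sketch is illustrative only; tryOpen is a hypothetical callback standing in for constructing and validating the device driver.

#include <chrono>
#include <cstdio>
#include <functional>
#include <thread>

// Generic sketch of the retry-until-timeout pattern used above: keep calling
// tryOpen() until it succeeds or the timeout expires, waiting between attempts.
static bool openWithTimeout(const std::function<bool()> &tryOpen,
                            double timeout_s = 10.0, double retry_s = 0.5)
{
    auto start = std::chrono::steady_clock::now();
    int trials = 0;
    while (!tryOpen())
    {
        std::chrono::duration<double> elapsed =
            std::chrono::steady_clock::now() - start;
        if (elapsed.count() > timeout_s)
        {
            std::fprintf(stderr, "giving up after %d trials\n", trials);
            return false;
        }
        ++trials;
        std::this_thread::sleep_for(std::chrono::duration<double>(retry_s));
    }
    return true;
}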
bool PlannerThread::checkFailure()
{
    loadObjs();
    string line;
    int horizon = 0;
    vector<string> config_data;
    configFile.open(configFileName.c_str());
    if (!configFile.is_open())
    {
        yWarning("unable to open config file!");
        return false;
    }
    while (getline(configFile, line)){
        config_data.push_back(line);
    }
    for (int i=0; i<config_data.size(); ++i){
        if (config_data[i].find("[PRADA]") != std::string::npos){
            horizon = 5;
            break;
        }
    }
    configFile.close();
    vector<string> not_comp_goals, fail_obj, aux_fail_obj;
    string temp_str;
    if (horizon > 15){
        not_comp_goals.clear();
        for (int t = 0; t < goal.size(); ++t){
            if (find_element(state, goal[t]) == 0){
                not_comp_goals.push_back(goal[t]);
            }
        }
        temp_str = "";
        for (int i = 0; i < not_comp_goals.size(); ++i){
            temp_str = temp_str + not_comp_goals[i] + " ";
        }
        for (int i = 0; i < object_IDs.size(); ++i){
            if (temp_str.find(object_IDs[i][0]) != std::string::npos){
                fail_obj.push_back(object_IDs[i][0]);
            }
        }
        for (int u = 0; u < fail_obj.size(); ++u){
            if (fail_obj[u] != "11" && fail_obj[u] != "12" && find_element(toolhandle,fail_obj[u]) == 0){
                for (int t = 0; t < object_IDs.size(); ++t){
                    if (find_element(object_IDs[t], fail_obj[u]) == 1){
                        aux_fail_obj.push_back(object_IDs[t][1]);
                        break;
                    }
                }
            }
        }
        Bottle& prax_bottle_out = prax_yarp.prepare();
        prax_bottle_out.clear();
        prax_bottle_out.addString("FAIL");
        for (int u = 0; u < aux_fail_obj.size(); ++u){
            for (int inde = 0; inde < object_IDs.size(); ++inde){
                if (object_IDs[inde][0] == aux_fail_obj[u]){
                    if (object_IDs[inde][1] != "rake" && object_IDs[inde][1] != "stick" && object_IDs[inde][1] != "left" && object_IDs[inde][1] != "right"){
                        prax_bottle_out.addString(object_IDs[inde][1]);
                    }
                }
            }
        }
        yInfo(" %s", prax_bottle_out.toString().c_str());
        prax_yarp.write();
        restartPlan = true;
        return true;
    }
    return true;
}
bool PlannerThread::compileGoal()
{
    if (goal_yarp.getOutputCount() == 0) {
        yWarning("Goal Compiler module not connected, unable to compile goals");
        return false;
    }
    Bottle& goal_bottle_out = goal_yarp.prepare();
    goal_bottle_out.clear();
    goal_bottle_out.addString("praxicon");
    goal_yarp.write();
    yInfo("waiting for praxicon...");
    while (true) {
        goal_bottle_in = goal_yarp.read(false);
        if (goal_bottle_in){
            if (goal_bottle_in->toString() == "done")
            {
                yInfo("Praxicon instruction received, compiling...");
                break;
            }
            else if (goal_bottle_in->toString() == "failed")
            {
                yWarning("Praxicon disconnected or crashed, compiling failed.");
                return false;
            }
            else if (goal_bottle_in->toString() == "unknown")
            {
                yWarning("Unknown object in Praxicon message, unable to compile.");
                return false;
            }
            else
            {
                yWarning("non-standard message received, something failed with the Goal Compiler module.");
                return false;
            }
        }
        if (goal_yarp.getOutputCount() == 0)
        {
            yWarning("Goal compiler module crashed");
            return false;
        }
        Time::delay(0.5);
    }
    goal_bottle_out = goal_yarp.prepare();
    goal_bottle_out.clear();
    goal_bottle_out.addString("update");
    goal_yarp.write();
    while (true) {
        goal_bottle_in = goal_yarp.read(false);
        if (goal_bottle_in)
        {
            if (goal_bottle_in->toString() != "" && goal_bottle_in->toString() != "()")
            {
                yInfo("Goal Compiling is complete!");
                break;
            }
            else
            {
                yWarning("empty bottle received, something might be wrong with the Goal Compiler module.");
                return false;
            }
        }
        if (goal_yarp.getOutputCount() == 0)
        {
            yWarning("Goal compiler module crashed");
            return false;
        }
        Time::delay(0.5);
    }
    return true;
}
bool PlannerThread::groundRules()
{
    string command, data;
    if (geo_yarp.getOutputCount() == 0) {
        yWarning("geometric grounding module not connected, unable to ground rules!");
        return false;
    }
    if (aff_yarp.getOutputCount() == 0) {
        yWarning("Affordances communication module not connected, unable to ground rules!");
        return false;
    }
    Bottle& aff_bottle_out = aff_yarp.prepare();
    aff_bottle_out.clear();
    aff_bottle_out.addString("start");
    aff_yarp.write();
    Time::delay(0.3);
    aff_bottle_out = aff_yarp.prepare();
    aff_bottle_out.clear();
    aff_bottle_out.addString("update");
    aff_yarp.write();
    while (true) {
        aff_bottle_in = aff_yarp.read(false);
        if (aff_bottle_in){
            break;
        }
        if (aff_yarp.getOutputCount() == 0)
        {
            yWarning("Affordances communication module crashed");
            return false;
        }
        Time::delay(0.1);
    }
    Bottle& geo_bottle_out = geo_yarp.prepare();
    geo_bottle_out.clear();
    geo_bottle_out.addString("update");
    geo_yarp.write();
    yInfo("grounding...");
    while (true) {
        geo_bottle_in = geo_yarp.read(false);
        if (geo_bottle_in != NULL){
            command = geo_bottle_in->toString();
            if (command == "ready"){
                yInfo("Grounding Complete!");
                break;
            }
            else {
                yWarning("something went wrong with the geometric grounding module.");
                return false;
            }
        }
        if (geo_yarp.getOutputCount() == 0)
        {
            yWarning("Geometric Grounding module crashed");
            return false;
        }
        Time::delay(0.5);
    }
    aff_bottle_out = aff_yarp.prepare();
    aff_bottle_out.clear();
    aff_bottle_out.addString("query");
    aff_yarp.write();
    while (true) {
        aff_bottle_in = aff_yarp.read(false);
        if (aff_bottle_in){
            data = aff_bottle_in->toString();
            if (data == "()" || data == "")
            {
                yWarning("empty bottle received, something might be wrong with the affordances module.");
                return false;
            }
            while (true){
                if (data.find('"') != std::string::npos){
                    data.replace(data.find('"'),1,"");
                }
                else {
                    break;
                }
            }
            toolhandle = split(data,' ');
            break;
        }
        if (aff_yarp.getOutputCount() == 0)
        {
            yWarning("Affordance communication module crashed");
            return false;
        }
        Time::delay(0.1);
    }
    return true;
}
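
compileGoal() and groundRules() above repeat the same polling idiom: read the port non-blockingly, give up if the peer has disconnected, otherwise sleep and retry. Below is a generic sketch of that idiom; tryRead and peerAlive are hypothetical callbacks standing in for port.read(false) and the getOutputCount() check.

#include <chrono>
#include <functional>
#include <string>
#include <thread>

// Generic sketch of the polling pattern used in compileGoal()/groundRules().
static bool pollUntilReply(const std::function<bool(std::string&)> &tryRead,
                           const std::function<bool()> &peerAlive,
                           std::string &reply, double period_s = 0.5)
{
    while (true)
    {
        if (tryRead(reply))
            return true;                 // got an answer
        if (!peerAlive())
            return false;                // peer crashed or disconnected
        std::this_thread::sleep_for(std::chrono::duration<double>(period_s));
    }
}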
bool PlannerThread::completePlannerState()
{
    string line, state_str, temp_str;
    vector<string> data, temp_vect, temp_vect2, avail_symb, temp_state;
    vector<vector<string> >  symbols;
    symbolFile.open(symbolFileName.c_str());
    stateFile.open(stateFileName.c_str());
    if (!stateFile.is_open())
    {
        yWarning("unable to open state file!");
        return false;
    }
    if (!symbolFile.is_open())
    {
        yWarning("unable to open symbols file!");
        return false;
    }
    /*objFile.open(objFileName.c_str());
    if (!objFile.is_open())
    {
        yWarning("unable to open objects file!");
        return false;
    }*/
    while (getline(symbolFile, line)){
        data.push_back(line);
    }
    symbolFile.close();
    for (int i = 0; i < data.size(); ++i){
        temp_vect = split(data[i],' ');
        temp_vect2.clear();
        temp_vect2.push_back(temp_vect[0]);
        temp_vect2.push_back(temp_vect[2]);
        symbols.push_back(temp_vect2);
    }
    if (stateFile.is_open()){
        getline(stateFile, line);
        while (true){
            if (line.find('-') != std::string::npos){
                line.replace(line.find('-'),1,"");
            }
            else if (line.find('(') != std::string::npos){
                line.replace(line.find('('),1,"");
            }
            else if (line.find(')') != std::string::npos){
                line.replace(line.find(')'),1,"");
            }
            else {
                break;
            }
        }
        temp_state = split(line,' ');
    }
    stateFile.close();
    for (int i = 0; i < symbols.size(); ++i){
        temp_str = "-" + symbols[i][0];
        avail_symb.push_back(symbols[i][0]);
        avail_symb.push_back(temp_str);
    }
    data.clear();
    for (int i = 0; i < temp_state.size(); ++i){
        if (find_element(avail_symb, temp_state[i]) == 1){
            data.push_back(temp_state[i]);
        }
    }
    for (int i = 0; i < data.size(); ++i){
        temp_str = data[i]+"()";
        if (i == 0){
            state_str = temp_str;
        }
        else {
            state_str = state_str + " " + temp_str;
        }
    }
    state_str = state_str + " ";
    for (int i = 0; i < symbols.size(); ++i){
        if ((find_element(data, symbols[i][0]) == 0) && (symbols[i][1] == "primitive")){
            state_str = state_str + "-" + symbols[i][0] + "() ";
        }
    }
    newstateFile.open(stateFileName.c_str());
    if (!newstateFile.is_open())
    {
        yWarning("unable to open state file!");
        return false;
    }
    state_str = state_str + '\n';
    newstateFile << state_str;
    newstateFile.close();
    return true;
}
SkinMeshThreadPort::SkinMeshThreadPort(Searchable& config,int period) : RateThread(period),mutex(1)
{
    yDebug("SkinMeshThreadPort running at %g ms.",getRate());
    mbSimpleDraw=config.check("light");

    sensorsNum=0;
    for (int t=0; t<MAX_SENSOR_NUM; ++t)
    {
        sensor[t]=NULL;
    }

    std::string part="/skinGui/";
    std::string part_virtual="";
    if (config.check("name"))
    {
        part ="/";
        part+=config.find("name").asString().c_str();
        part+="/";
    }
    part.append(config.find("robotPart").asString());
    part_virtual = part;
    part_virtual.append("_virtual");
    part.append(":i");
    part_virtual.append(":i");

    skin_port.open(part.c_str());

    // Ideally, we would use a --virtual flag; since this would make the skinmanager xml file inflexible,
    // let's keep the code structure without incurring any problems
    // if (config.check("virtual"))
    if (true)
    {
        skin_port_virtual.open(part_virtual.c_str());
    }

    int width =config.find("width" ).asInt();
    int height=config.find("height").asInt();

    bool useCalibration = config.check("useCalibration");
    if (useCalibration==true)   yInfo("Using calibrated skin values (0-255)");
    else                        yDebug("Using raw skin values (255-0)");
    
    Bottle *color = config.find("color").asList();
    unsigned char r=255, g=0, b=0;
    if(color)
    {
        if(color->size()<3 || !color->get(0).isInt() || !color->get(1).isInt() || !color->get(2).isInt())
        {
            yError("Error while reading the parameter color: three integer values should be specified (%s).", color->toString().c_str());
        }
        else
        {
            r = color->get(0).asInt();
            g = color->get(1).asInt();
            b = color->get(2).asInt();
            yInfo("Using specified color: %d %d %d", r, g, b);
        }
    }
    else
    {
        yDebug("Using red as default color.");
    }
    defaultColor.push_back(r);
    defaultColor.push_back(g);
    defaultColor.push_back(b);

    skinThreshold = config.check("skinThreshold")?config.find("skinThreshold").asDouble():SKIN_THRESHOLD;
    yDebug("Skin Threshold set to %g", skinThreshold);

    yarp::os::Bottle sensorSetConfig=config.findGroup("SENSORS").tail();

    for (int t=0; t<sensorSetConfig.size(); ++t)
    {       
        yarp::os::Bottle sensorConfig(sensorSetConfig.get(t).toString());

        std::string type(sensorConfig.get(0).asString());
      
        if (type=="triangle"       || type=="fingertip" || type=="fingertip2L" || type=="fingertip2R" ||
            type=="triangle_10pad" || type=="quad16"    || type=="palmR"       || type=="palmL")
        {
            int    id=sensorConfig.get(1).asInt();
            double xc=sensorConfig.get(2).asDouble();
            double yc=sensorConfig.get(3).asDouble();
            double th=sensorConfig.get(4).asDouble();
            double gain=sensorConfig.get(5).asDouble();
            int    lrMirror=sensorConfig.get(6).asInt();
            int    layoutNum=sensorConfig.get(7).asInt();

            yDebug("%s %d %f",type.c_str(),id,gain);

            if (id>=0 && id<MAX_SENSOR_NUM)
            {
                if (sensor[id])
                {
                    yError("Triangle %d already exists.",id);
                }
                else
                {
                    if (type=="triangle")
                    {
                        sensor[id]=new Triangle(xc,yc,th,gain,layoutNum,lrMirror);
                    }
                    else if (type=="triangle_10pad")
                    {
                        sensor[id]=new Triangle_10pad(xc,yc,th,gain,layoutNum,lrMirror);
                    }
                    else if (type=="fingertip")
                    {
                        sensor[id]=new Fingertip(xc,yc,th,gain,layoutNum,lrMirror);
                    }
                    else if (type=="fingertip2L")
                    {
                        sensor[id]=new Fingertip2L(xc,yc,th,gain,layoutNum,lrMirror);
                    }
                    else if (type=="fingertip2R")
                    {
                        sensor[id]=new Fingertip2R(xc,yc,th,gain,layoutNum,lrMirror);
                    }
                    else if (type=="quad16")
                    {
                        sensor[id]=new Quad16(xc,yc,th,gain,layoutNum,lrMirror);
                    }
                    else if (type=="palmR")
                    {
                        sensor[id]=new PalmR(xc,yc,th,gain,layoutNum,lrMirror);
                    }
                    else if (type=="palmL")
                    {
                        sensor[id]=new PalmL(xc,yc,th,gain,layoutNum,lrMirror);
                    }

                    sensor[id]->setCalibrationFlag(useCalibration);
                    ++sensorsNum;
                }
            }
            else
            {
                yWarning(" %d is invalid triangle Id [0:%d].",id, MAX_SENSOR_NUM-1);
            }
        }
        else
        {
            yWarning(" sensor type %s unknown, discarded.",type.c_str());
        }
    }

    int max_tax=0;
    for (int t=0; t<MAX_SENSOR_NUM; ++t)
    {
        
        if (sensor[t]) 
        {
            sensor[t]->min_tax=max_tax;
            max_tax = sensor[t]->min_tax+sensor[t]->get_nTaxels();
            sensor[t]->max_tax=max_tax-1;
            sensor[t]->setColor(r, g, b);
        } 
        else
        {
            //this deals with the fact that some triangles may not be present,
            //but they broadcast an array of zeros anyway...
            max_tax += 12; 
        }
    }

    resize(width,height);
}
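
The min_tax/max_tax loop at the end assigns each sensor a contiguous taxel range and still advances the offset by 12 for missing triangles, since the board broadcasts zeros for them anyway. Below is a compact sketch of that bookkeeping with hypothetical inputs (assignTaxelRanges is an invented helper, not part of skinGui).

#include <utility>
#include <vector>

// Sketch of the taxel-range bookkeeping above: each present sensor gets a
// contiguous [min_tax, max_tax] range; absent slots still consume 12 taxels.
static std::vector<std::pair<int,int>> assignTaxelRanges(
    const std::vector<int> &taxelsPerSensor)   // 0 means "sensor not present"
{
    std::vector<std::pair<int,int>> ranges;
    int offset = 0;
    for (int n : taxelsPerSensor)
    {
        if (n > 0)
        {
            ranges.push_back(std::make_pair(offset, offset + n - 1));
            offset += n;
        }
        else
        {
            ranges.push_back(std::make_pair(-1, -1));   // placeholder range
            offset += 12;
        }
    }
    return ranges;
}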
Example #15
0
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//Module implementation
bool SpeechRecognizerModule::configure(ResourceFinder &rf )
{
    setName( rf.check("name",Value("speechRecognizer")).asString().c_str() );
    m_timeout = rf.check("timeout",Value(10000)).asInt();
    USE_LEGACY = !rf.check("noLegacy");
    m_forwardSound = rf.check("forwardSound");
    m_tmpFileFolder = rf.getHomeContextPath().c_str();
    interruptRecognition = false;

    //Deal with speech recognition
    string grammarFile = rf.check("grammarFile",Value("defaultGrammar.grxml")).asString().c_str();
    grammarFile = rf.findFile(grammarFile).c_str();

    std::wstring tmp = s2ws(grammarFile);
    LPCWSTR cwgrammarfile = tmp.c_str();

    m_useTalkBack = rf.check("talkback");

    //Initialise the speech recognition engine
    bool everythingIsFine = true;
    HRESULT hr;
    everythingIsFine = SUCCEEDED( m_cpRecoEngine.CoCreateInstance(CLSID_SpInprocRecognizer));
    everythingIsFine &= SUCCEEDED( SpCreateDefaultObjectFromCategoryId(SPCAT_AUDIOIN, &m_cpAudio));
    everythingIsFine &= SUCCEEDED( m_cpRecoEngine->CreateRecoContext( &m_cpRecoCtxt ));

    // Here, all we are interested in is the beginning and ends of sounds, as well as
    // when the engine has recognized something
    const ULONGLONG ullInterest = SPFEI(SPEI_RECOGNITION);
    everythingIsFine &= SUCCEEDED(m_cpRecoCtxt->SetInterest(ullInterest, ullInterest));

    // set the input for the engine
    everythingIsFine &= SUCCEEDED( m_cpRecoEngine->SetInput(m_cpAudio, TRUE));
    everythingIsFine &= SUCCEEDED( m_cpRecoEngine->SetRecoState( SPRST_ACTIVE ));

    //Load grammar from file
    everythingIsFine &= SUCCEEDED( m_cpRecoCtxt->CreateGrammar( 1, &m_cpGrammarFromFile ));
    everythingIsFine &= SUCCEEDED( m_cpGrammarFromFile->SetGrammarState(SPGS_DISABLED));
    everythingIsFine &= SUCCEEDED( m_cpGrammarFromFile->LoadCmdFromFile(cwgrammarfile, SPLO_DYNAMIC));
//  everythingIsFine &= loadGrammarFromRf(rf);

    //Create a runtime grammar
    everythingIsFine &= SUCCEEDED( m_cpRecoCtxt->CreateGrammar( 2, &m_cpGrammarRuntime ));
    everythingIsFine &= SUCCEEDED( m_cpGrammarRuntime->SetGrammarState(SPGS_DISABLED));

    //Create a dictation grammar
    everythingIsFine &= SUCCEEDED(m_cpRecoCtxt->CreateGrammar( GID_DICTATION, &m_cpGrammarDictation ));
    everythingIsFine &= SUCCEEDED(m_cpGrammarDictation->LoadDictation(NULL, SPLO_STATIC));
    everythingIsFine &= SUCCEEDED(m_cpGrammarDictation->SetDictationState(SPRS_INACTIVE));
    
    //Setup thing for the raw audio processing
    everythingIsFine &= SUCCEEDED(m_cAudioFmt.AssignFormat(SPSF_22kHz16BitMono));
    hr = m_cpRecoCtxt->SetAudioOptions(SPAO_RETAIN_AUDIO, &m_cAudioFmt.FormatId(), m_cAudioFmt.WaveFormatExPtr());
    //everythingIsFine &= SUCCEEDED(hr = SPBindToFile((const WCHAR *)"C:\\temp.wav", SPFM_CREATE_ALWAYS, &m_streamFormat, &m_cAudioFmt.FormatId(), m_cAudioFmt.WaveFormatExPtr()));

    //CComPtr <ISpStream>     cpStream = NULL;
    //CSpStreamFormat         cAudioFmt;
    //hr = cAudioFmt.AssignFormat(SPSF_22kHz16BitMono);
    //hr = SPBindToFile((const WCHAR *)"c:\\ttstemp.wav", SPFM_CREATE_ALWAYS, &cpStream,  &cAudioFmt.FormatId(), cAudioFmt.WaveFormatExPtr());

    if( everythingIsFine )
    {
        string pName = "/";
        pName += getName().c_str();
        pName += "/recog/continuous:o";
        m_portContinuousRecognition.open( pName.c_str() );

        pName = "/";
        pName += getName().c_str();
        pName += "/recog/continuousGrammar:o";
        m_portContinuousRecognitionGrammar.open( pName.c_str() );

        pName = "/";
        pName += getName().c_str();
        pName += "/recog/sound:o";   
        m_portSound.open(pName.c_str());

        //iSpeak
        pName = "/";
        pName += getName().c_str();
        pName += "/tts/iSpeak:o"; 
        m_port2iSpeak.open( pName.c_str() );
                
        pName = "/";
        pName += getName().c_str();
        pName += "/tts/iSpeak/rpc"; 
        m_port2iSpeakRpc.open( pName.c_str() );
        if (Network::connect(m_port2iSpeak.getName().c_str(),"/iSpeak")&&Network::connect(m_port2iSpeakRpc.getName().c_str(),"/iSpeak/rpc"))
            yInfo() <<"Connection to iSpeak succesfull" ;
        else
            yWarning() <<"Unable to connect to iSpeak. Connect manually." ;

        pName = "/";
        pName += getName().c_str();
        pName += "/rpc";
        m_portRPC.open( pName.c_str() );
        attach(m_portRPC);

        //Start recognition
        //everythingIsFine &= SUCCEEDED(m_cpRecoEngine->SetRecoState(SPRST_ACTIVE_ALWAYS));
        everythingIsFine &= SUCCEEDED(m_cpGrammarFromFile->SetRuleState(NULL, NULL, SPRS_ACTIVE));
        everythingIsFine &= SUCCEEDED( m_cpGrammarFromFile->SetGrammarState(SPGS_ENABLED));
    }

    return (everythingIsFine);
}
void JointTorqueControl::computeOutputMotorTorques()
{
    //Compute joint level torque PID
    double dt = this->getRate() * 0.001;

    for(int j=0; j < this->axes; j++ )
    {
        JointTorqueLoopGains &gains = jointTorqueLoopGains[j];
        jointTorquesError[j]        = measuredJointTorques[j] - desiredJointTorques[j];
        integralState[j]            = saturation(integralState[j] + gains.ki*dt*jointTorquesError[j], gains.max_int, -gains.max_int);
        jointControlOutputBuffer[j] = desiredJointTorques[j] - gains.kp*jointTorquesError[j] - integralState[j];
    }

    toEigenVector(jointControlOutput) = couplingMatrices.fromJointTorquesToMotorTorques * toEigenVector(jointControlOutputBuffer);
    toEigenVector(measuredMotorVelocities) = couplingMatrices.fromJointVelocitiesToMotorVelocities * toEigenVector(measuredJointVelocities);

    // Evaluation of coulomb friction with smoothing close to zero velocity
    double coulombFriction;
    double coulombVelThre;
    for (int j = 0; j < this->axes; j++)
    {
        MotorParameters motorParam = motorParameters[j];
        coulombVelThre =  motorParam.coulombVelThr;
        //smooth the coulomb term close to zero velocity (the viscous term kv*velocity is added below)
        if (fabs(measuredMotorVelocities[j]) >= coulombVelThre)
        {
            coulombFriction = sign(measuredMotorVelocities[j]);
        }
        else
        {
            coulombFriction = pow(measuredMotorVelocities[j] / coulombVelThre, 3);
        }
        if (measuredMotorVelocities[j] > 0 )
        {
            coulombFriction = motorParam.kcp*coulombFriction;
        }
        else
        {
            coulombFriction = motorParam.kcn*coulombFriction;
        }
        jointControlOutput[j] = motorParam.kff*jointControlOutput[j] + motorParameters[j].frictionCompensation * (motorParam.kv*measuredMotorVelocities[j] + coulombFriction);
    }

    if (streamingOutput)
    {
        yarp::sig::Vector& output = portForStreamingPWM.prepare();
        output = jointControlOutput;
        portForStreamingPWM.write();
    }

    toEigenVector(jointControlOutput) = couplingMatricesFirmware.fromMotorTorquesToJointTorques * toEigenVector(jointControlOutput);

    bool isNaNOrInf = false;
    for(int j = 0; j < this->axes; j++)
    {

        jointControlOutput[j] = saturation(jointControlOutput[j], jointTorqueLoopGains[j].max_pwm, -jointTorqueLoopGains[j].max_pwm);
        if (isnan(jointControlOutput[j]) || isinf(jointControlOutput[j])) { //this is not std c++. Supported in C99 and C++11
            jointControlOutput[j] = 0;
            isNaNOrInf = true;
        }
    }
    if (isNaNOrInf) {
        yWarning("Inf or NaN found in control output");
    }

}
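
The friction term above uses sign(velocity) beyond coulombVelThr and a cubic ramp below it, scaled by kcp or kcn depending on the sign of the velocity. Below is a standalone sketch of just that model; the parameter values are left to the caller and are placeholders, not the gains configured on the robot.

#include <cmath>

// Standalone sketch of the Coulomb-friction smoothing used above: sign(v)
// outside the velocity threshold, a cubic ramp (v/vThr)^3 inside it, then
// scaled by the positive/negative Coulomb gain.
static double coulombFrictionTorque(double v, double vThr,
                                    double kcp, double kcn)
{
    double s = (v > 0.0) ? 1.0 : ((v < 0.0) ? -1.0 : 0.0);         // sign(v)
    double f = (std::fabs(v) >= vThr) ? s : std::pow(v / vThr, 3.0);
    return (v > 0.0) ? kcp * f : kcn * f;
}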
void Controller::run()
{
    LockGuard guard(mutexRun);
    
    mutexCtrl.lock();
    bool jointsHealthy=areJointsHealthyAndSet();
    mutexCtrl.unlock();

    if (!jointsHealthy)
    {
        stopControlHelper();
        port_xd->get_new()=false;
    }

    string event="none";

    // verify if any saccade is still underway
    mutexCtrl.lock();
    if (commData->get_isSaccadeUnderway() && (Time::now()-saccadeStartTime>=Ts))
    {
        bool done;
        posHead->checkMotionDone(eyesJoints.size(),eyesJoints.getFirst(),&done);
        commData->get_isSaccadeUnderway()=!done;

        if (!commData->get_isSaccadeUnderway())
            notifyEvent("saccade-done");
    }
    mutexCtrl.unlock();
    
    // get data
    double x_stamp;
    Vector xd=commData->get_xd();
    Vector x=commData->get_x(x_stamp);
    Vector new_qd=commData->get_qd();

    // read feedbacks
    q_stamp=Time::now();
    if (!getFeedback(fbTorso,fbHead,drvTorso,drvHead,commData,&q_stamp))
    {
        yWarning("Communication timeout detected!");
        notifyEvent("comm-timeout");
        suspend();
        return;
    }

    IntState->reset(fbHead);

    fbNeck=fbHead.subVector(0,2);
    fbEyes=fbHead.subVector(3,5);

    double errNeck=norm(new_qd.subVector(0,2)-fbNeck);
    double errEyes=norm(new_qd.subVector(3,new_qd.length()-1)-fbEyes);
    bool swOffCond=(Time::now()-ctrlActiveRisingEdgeTime<GAZECTRL_SWOFFCOND_DISABLETIME) ? false :
                   (!commData->get_isSaccadeUnderway() &&
                   (errNeck<GAZECTRL_MOTIONDONE_NECK_QTHRES*CTRL_DEG2RAD) &&
                   (errEyes<GAZECTRL_MOTIONDONE_EYES_QTHRES*CTRL_DEG2RAD));

    // verify control switching conditions
    if (commData->get_isCtrlActive())
    {
        // switch-off condition
        if (swOffCond)
        {
            event="motion-done";

            mutexCtrl.lock();
            stopLimb(false);
            mutexCtrl.unlock();
        }
        // manage new target while controller is active
        else if (port_xd->get_new())
        {
            event="motion-onset";

            mutexData.lock();
            motionOngoingEventsCurrent=motionOngoingEvents;
            mutexData.unlock();
        }

        port_xd->get_new()=false;
    }
    else if (jointsHealthy)
    {
        // inhibition is cleared upon new target arrival
        if (ctrlInhibited)
            ctrlInhibited=!port_xd->get_new();

        // switch-on condition
        commData->get_isCtrlActive()=port_xd->get_new() ||
                                     (!ctrlInhibited &&
                                      ((new_qd[0]!=qd[0]) || (new_qd[1]!=qd[1]) || (new_qd[2]!=qd[2]) ||
                                       (!commData->get_canCtrlBeDisabled() && (norm(port_xd->get_xd()-x)>GAZECTRL_MOTIONSTART_XTHRES))));

        // reset controllers
        if (commData->get_isCtrlActive())
        {
            ctrlActiveRisingEdgeTime=Time::now();
            port_xd->get_new()=false;

            Vector zeros(3,0.0);
            mjCtrlNeck->reset(zeros);
            mjCtrlEyes->reset(zeros);
            IntPlan->reset(fbNeck);

            event="motion-onset";

            mutexData.lock();
            motionOngoingEventsCurrent=motionOngoingEvents;
            mutexData.unlock();
        }
    }

    mutexCtrl.lock();
    if (event=="motion-onset")
    {
        setJointsCtrlMode();
        q0deg=CTRL_RAD2DEG*fbHead;
    }
    mutexCtrl.unlock();

    qd=new_qd;
    qdNeck=qd.subVector(0,2);
    qdEyes=qd.subVector(3,5);
    
    if (commData->get_isCtrlActive())
    {
        // control loop
        vNeck=mjCtrlNeck->computeCmd(neckTime,qdNeck-fbNeck);
        IntPlan->integrate(vNeck);

        if (unplugCtrlEyes)
        {
            if (Time::now()-saccadeStartTime>=Ts)
                vEyes=commData->get_counterv();
        }
        else
            vEyes=mjCtrlEyes->computeCmd(eyesTime,qdEyes-fbEyes)+commData->get_counterv();
    }
    else
    {
        vNeck=0.0;
        vEyes=0.0;
    }

    v.setSubvector(0,vNeck);
    v.setSubvector(3,vEyes);

    // apply bang-bang just in case to compensate
    // for unachievable low velocities
    for (size_t i=0; i<v.length(); i++)
        if ((v[i]!=0.0) && (v[i]>-minAbsVel) && (v[i]<minAbsVel))
            v[i]=yarp::math::sign(qd[i]-fbHead[i])*minAbsVel;

    // convert to degrees
    mutexData.lock();
    qddeg=CTRL_RAD2DEG*qd;
    qdeg =CTRL_RAD2DEG*fbHead;
    vdeg =CTRL_RAD2DEG*v;
    mutexData.unlock();

    // send commands to the robot
    if (commData->get_isCtrlActive())
    {
        mutexCtrl.lock();

        Vector posdeg;
        if (neckPosCtrlOn)
        {
            posdeg=(CTRL_RAD2DEG)*IntPlan->get();
            posNeck->setPositions(neckJoints.size(),neckJoints.getFirst(),posdeg.data());
            velHead->velocityMove(eyesJoints.size(),eyesJoints.getFirst(),vdeg.subVector(3,5).data());
        }
        else
            velHead->velocityMove(vdeg.data());

        if (commData->debugInfoEnabled && (port_debug.getOutputCount()>0))
        {
            Bottle info;
            int j=0;

            if (neckPosCtrlOn)
            {
                for (size_t i=0; i<posdeg.length(); i++)
                {
                    ostringstream ss;
                    ss<<"pos_"<<i;
                    info.addString(ss.str().c_str());
                    info.addDouble(posdeg[i]);
                }

                j=eyesJoints[0];
            }

            for (size_t i=j; i<vdeg.length(); i++)
            {
                ostringstream ss;
                ss<<"vel_"<<i;
                info.addString(ss.str().c_str());
                info.addDouble(vdeg[i]);
            }

            port_debug.prepare()=info;
            txInfo_debug.update(q_stamp);
            port_debug.setEnvelope(txInfo_debug);
            port_debug.writeStrict();
        }

        mutexCtrl.unlock();
    }

    // print info
    if (commData->verbose)
        printIter(xd,x,qddeg,qdeg,vdeg,1.0); 

    // send x,q through YARP ports
    Vector q(nJointsTorso+nJointsHead);
    int j;
    for (j=0; j<nJointsTorso; j++)
        q[j]=CTRL_RAD2DEG*fbTorso[j];
    for (; (size_t)j<q.length(); j++)
        q[j]=qdeg[j-nJointsTorso];

    txInfo_x.update(x_stamp);
    if (port_x.getOutputCount()>0)
    {
        port_x.prepare()=x;
        port_x.setEnvelope(txInfo_x);
        port_x.write();
    }

    txInfo_q.update(q_stamp);
    if (port_q.getOutputCount()>0)
    {
        port_q.prepare()=q;
        port_q.setEnvelope(txInfo_q);
        port_q.write();
    }

    // update pose information
    mutexChain.lock();

    for (int i=0; i<nJointsTorso; i++)
    {
        chainNeck->setAng(i,fbTorso[i]);
        chainEyeL->setAng(i,fbTorso[i]);
        chainEyeR->setAng(i,fbTorso[i]);
    }
    for (int i=0; i<3; i++)
    {
        chainNeck->setAng(nJointsTorso+i,fbHead[i]);
        chainEyeL->setAng(nJointsTorso+i,fbHead[i]);
        chainEyeR->setAng(nJointsTorso+i,fbHead[i]);
    }
    chainEyeL->setAng(nJointsTorso+3,fbHead[3]);               chainEyeR->setAng(nJointsTorso+3,fbHead[3]);
    chainEyeL->setAng(nJointsTorso+4,fbHead[4]+fbHead[5]/2.0); chainEyeR->setAng(nJointsTorso+4,fbHead[4]-fbHead[5]/2.0);

    txInfo_pose.update(q_stamp);

    mutexChain.unlock();

    if (event=="motion-onset")
        notifyEvent(event);

    motionOngoingEventsHandling();

    if (event=="motion-done")
    {
        motionOngoingEventsFlush();
        notifyEvent(event);
    }

    // update joints angles
    fbHead=IntState->integrate(v);
    commData->set_q(fbHead);
    commData->set_torso(fbTorso);
    commData->set_v(v);
}
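
The bang-bang correction above replaces any non-zero velocity command smaller in magnitude than minAbsVel with +/-minAbsVel, signed by the residual position error. As an isolated sketch (clampMinAbsVel is an invented name):

#include <cmath>

// Sketch of the bang-bang clamp above: velocities that are non-zero but
// smaller than minAbsVel are bumped to +/-minAbsVel, signed by (qd - q).
static double clampMinAbsVel(double v, double qd, double q, double minAbsVel)
{
    if (v != 0.0 && std::fabs(v) < minAbsVel)
        return (qd - q >= 0.0 ? 1.0 : -1.0) * minAbsVel;
    return v;
}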
Example #18
0
void vtWThread::run()
{
    optFlowPos.resize(3,0.0);
    pf3dTrackerPos.resize(3,0.0);
    doubleTouchPos.resize(3,0.0);
    fgtTrackerPos.resize(3,0.0);

    bool isEvent = false;
    events.clear();

    // process the genericObject port
    if (genericObjectsBottle = genericObjectsPort.read(false))
    {
        for (int i = 0; i < genericObjectsBottle->size(); i++)
        {
            Bottle b = *(genericObjectsBottle->get(i).asList());
            if (b.size()>=4)
            {
                Vector p(3,0.0);
                double r=0;

                p[0] = b.get(0).asDouble();
                p[1] = b.get(1).asDouble();
                p[2] = b.get(2).asDouble();
                r    = b.get(3).asDouble();

                events.push_back(IncomingEvent(p,Vector(3,0.0),r,"genericObjects"));
                isEvent=true;
            }
        }
    }

    // process the optFlow
    if (optFlowBottle = optFlowPort.read(false))
    {
        if (optFlowBottle->size()>=3)
        {
            yDebug("Computing data from the optFlowTracker %g\n",getEstUsed());
            optFlowPos.zero();
            
            optFlowPos[0]=optFlowBottle->get(0).asDouble();
            optFlowPos[1]=optFlowBottle->get(1).asDouble();
            optFlowPos[2]=optFlowBottle->get(2).asDouble();

            AWPolyElement el(optFlowPos,Time::now());
            optFlowVel=linEst_optFlow->estimate(el);

            events.push_back(IncomingEvent(optFlowPos,optFlowVel,0.05,"optFlow"));
            isEvent=true;
        }
    }

    // process the pf3dTracker
    if (pf3dTrackerBottle = pf3dTrackerPort.read(false))
    {
        if (pf3dTrackerBottle->size()>6)
        {
            if (pf3dTrackerBottle->get(6).asDouble()==1.0)
            {
                Vector fp(4);
                fp[0]=pf3dTrackerBottle->get(0).asDouble();
                fp[1]=pf3dTrackerBottle->get(1).asDouble();
                fp[2]=pf3dTrackerBottle->get(2).asDouble();
                fp[3]=1.0;

                if (!gsl_isnan(fp[0]) && !gsl_isnan(fp[1]) && !gsl_isnan(fp[2]))
                {
                    yDebug("Computing data from the pf3dTracker %g\n",getEstUsed());
                    Vector x,o;
                    igaze->getLeftEyePose(x,o);
                    
                    Matrix T=axis2dcm(o);
                    T(0,3)=x[0];
                    T(1,3)=x[1];
                    T(2,3)=x[2];

                    pf3dTrackerPos=T*fp;
                    pf3dTrackerPos.pop_back();
                    
                    AWPolyElement el(pf3dTrackerPos,Time::now());
                    pf3dTrackerVel=linEst_pf3dTracker->estimate(el);
                    
                    events.push_back(IncomingEvent(pf3dTrackerPos,pf3dTrackerVel,0.05,"pf3dTracker"));
                    isEvent=true;
                }
            }
        }
    }
    
    // process the SensationManager port
    if (sensManagerBottle = sensManagerPort.read(false))
    {
        //yDebug("vtWThread::run(): reading from sensManagerPort, bottle with %d elements: %s\n", sensManagerBottle->size(),sensManagerBottle->toString().c_str());
        for (int i = 0; i < sensManagerBottle->size(); i++)
        {
            Bottle bl = *(sensManagerBottle->get(i).asList());
            //yDebug("vtWThread::run(): bl has %d elements: %s\n", bl.size(),bl.toString().c_str());
            for(int j=0;j<bl.size();j++){
                Bottle b = *(bl.get(j).asList());
                if (b.size()>=5)
                {
                    Vector p(3,0.0);
                    double r=0.0;
                    double threat = 0.0;

                    p[0] = b.get(0).asDouble();
                    p[1] = b.get(1).asDouble();
                    p[2] = b.get(2).asDouble();
                    r = b.get(3).asDouble();
                    threat = b.get(4).asDouble();
                    //yDebug("Pushing event: %s, 0 0 0, %f, %f \n",p.toString().c_str(),r,threat);
                    events.push_back(IncomingEvent(p,Vector(3,0.0),r,threat,"sensManager"));
                    isEvent=true;
                }
                else{
                    yWarning("vtWThread::run(): reading from sensManagerPort, but bottle contains < 5 elements: %s\n", b.toString().c_str());
                }
            }
        }
    }
   
    if (!rf->check("noDoubleTouch"))
    {
        // processes the fingertipTracker
        if(fgtTrackerBottle = fgtTrackerPort.read(false))
        {
            if (doubleTouchBottle = doubleTouchPort.read(false))
            {           
                if(fgtTrackerBottle != NULL && doubleTouchBottle != NULL)
                {
                    if (doubleTouchBottle->get(3).asString() != "" && fgtTrackerBottle->get(0).asInt() != 0)
                    {
                        yDebug("Computing data from the fingertipTracker %g\n",getEstUsed());
                        doubleTouchStep = doubleTouchBottle->get(0).asInt();
                        fgtTrackerPos[0] = fgtTrackerBottle->get(1).asDouble();
                        fgtTrackerPos[1] = fgtTrackerBottle->get(2).asDouble();
                        fgtTrackerPos[2] = fgtTrackerBottle->get(3).asDouble();
                        AWPolyElement el2(fgtTrackerPos,Time::now());
                        fgtTrackerVel=linEst_fgtTracker->estimate(el2);

                        if(doubleTouchStep<=1)
                        {
                            Vector ang(3,0.0);
                            igaze -> lookAtAbsAngles(ang);
                        }
                        else if(doubleTouchStep>1 && doubleTouchStep<8)
                        {
                            events.clear();
                            events.push_back(IncomingEvent(fgtTrackerPos,fgtTrackerVel,-1.0,"fingertipTracker"));
                            isEvent=true;
                        }
                    }
                }
            }
        }

        // processes the doubleTouch port (only reached if !rf->check("noDoubleTouch"))
        if(doubleTouchBottle = doubleTouchPort.read(false))
        {
            if(doubleTouchBottle != NULL)
            {
                if (doubleTouchBottle->get(3).asString() != "")
                {
                    Matrix T = eye(4);
                    Vector fingertipPos(4,0.0);
                    doubleTouchPos.resize(4,0.0);
                    
                    currentTask = doubleTouchBottle->get(3).asString();
                    doubleTouchStep = doubleTouchBottle->get(0).asInt();
                    fingertipPos = iCub::skinDynLib::matrixFromBottle(*doubleTouchBottle,20,4,4).subcol(0,3,3); // fixed translation from the palm
                    fingertipPos.push_back(1.0);

                    if(doubleTouchStep<=1)
                    {
                        Vector ang(3,0.0);
                        igaze -> lookAtAbsAngles(ang);
                    }
                    else if(doubleTouchStep>1 && doubleTouchStep<8)
                    {
                        if(currentTask=="LtoR" || currentTask=="LHtoR") //right to left -> the right index finger will be generating events
                        { 
                            iencsR->getEncoders(encsR->data());
                            Vector qR=encsR->subVector(0,6);
                            armR -> setAng(qR*CTRL_DEG2RAD);                        
                            T = armR -> getH(3+6, true);  // torso + up to wrist
                            doubleTouchPos = T * fingertipPos; 
                            //optionally, get the finger encoders and compute the fingertip position with iKin's iCubFinger from the current joint values (a hedged sketch follows this example)
                            //http://wiki.icub.org/iCub/main/dox/html/icub_cartesian_interface.html#sec_cart_tipframe
                            doubleTouchPos.pop_back(); //take out the last dummy value from homogenous form
                        }
                        else if(currentTask=="RtoL" || currentTask=="RHtoL") //left to right -> the left index finger will be generating events
                        {   
                            iencsL->getEncoders(encsL->data());
                            Vector qL=encsL->subVector(0,6);
                            armL -> setAng(qL*CTRL_DEG2RAD);                        
                            T = armL -> getH(3+6, true);  // torso + up to wrist
                            doubleTouchPos = T * fingertipPos; 
                            //optionally, get the finger encoders and get the fingertip position using iKin Finger based on the current joint values 
                            //http://wiki.icub.org/iCub/main/dox/html/icub_cartesian_interface.html#sec_cart_tipframe
                            doubleTouchPos.pop_back(); //take out the last dummy value from homogenous form
                        } 
                        else
                        {
                            yError(" [vtWThread] Unknown task received from the double touch thread!");
                        }
                        
                        yDebug("Computing data from the doubleTouch %g\n",getEstUsed());
                        AWPolyElement el2(doubleTouchPos,Time::now());
                        doubleTouchVel=linEst_doubleTouch->estimate(el2);
                        events.push_back(IncomingEvent(doubleTouchPos,doubleTouchVel,-1.0,"doubleTouch"));
                        isEvent=true;
                    }
                }
            }
        }
    }
    
    if (pf3dTrackerPos[0]!=0.0 && pf3dTrackerPos[1]!=0.0 && pf3dTrackerPos[2]!=0.0)
        igaze -> lookAtFixationPoint(pf3dTrackerPos);
    else if (doubleTouchPos[0]!=0.0 && doubleTouchPos[1]!=0.0 && doubleTouchPos[2]!=0.0)
        igaze -> lookAtFixationPoint(doubleTouchPos);
    else if (optFlowPos[0]!=0.0 && optFlowPos[1]!=0.0 && optFlowPos[2]!=0.0)
        igaze -> lookAtFixationPoint(optFlowPos);
    
    if (isEvent)
    {        
        Bottle& eventsBottle = eventsPort.prepare();
        eventsBottle.clear();
        for (size_t i = 0; i < events.size(); i++)
        {
            eventsBottle.addList()= events[i].toBottle();
        }
        eventsPort.write();
        timeNow = yarp::os::Time::now();
        sendGuiEvents();
    }
    else if (yarp::os::Time::now() - timeNow > 1.0)
    {
        yDebug("No significant event in the last second. Resetting the velocity estimators..");
        timeNow = yarp::os::Time::now();

        linEst_optFlow     -> reset();
        linEst_pf3dTracker -> reset();
        linEst_doubleTouch -> reset();
        linEst_fgtTracker  -> reset();
        deleteGuiEvents();
    }
}
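
The double-touch branches above place the fingertip at a fixed translation from the palm; the in-code comments point to the iKin-based refinement. Below is a minimal, hedged sketch of that refinement, not taken from this module: the helper name fingertipInRoot is mine, armR is assumed to be the same iCubArm used above with its angles already set, and armEncs16 is assumed to hold the 16 right-arm motor encoders.

// Hedged sketch: fingertip position from the finger encoders via iKin's iCubFinger,
// instead of the fixed palm-to-fingertip translation used above.
#include <cmath>
#include <iCub/iKin/iKinFwd.h>
#include <yarp/sig/all.h>
#include <yarp/math/Math.h>

yarp::sig::Vector fingertipInRoot(iCub::iKin::iCubArm &armR,
                                  const yarp::sig::Vector &armEncs16)
{
    using namespace yarp::math;

    iCub::iKin::iCubFinger finger("right_index");

    yarp::sig::Vector fingerJoints;
    finger.getChainJoints(armEncs16, fingerJoints);          // finger DOFs in degrees

    // fingertip frame expressed in the hand (end-effector) frame
    yarp::sig::Matrix tipFrame = finger.getH((M_PI/180.0)*fingerJoints);

    // hand frame in the root frame, then chain the two transforms
    yarp::sig::Vector tip = armR.getH()*tipFrame.getCol(3);  // homogeneous fingertip position
    tip.pop_back();                                          // drop the trailing 1
    return tip;
}
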
Example #19
0
    virtual bool configure(ResourceFinder &rf)
    {
        Time::turboBoost();

        if (rf.check("name"))
            name=string("/")+rf.find("name").asString().c_str();
        else
            name="/trajectoryPlayer";

        rpcPort.open((name+"/rpc").c_str());
        attach(rpcPort);

        Property portProp;
        portProp.put("robot", rf.find("robot"));
        portProp.put("part", rf.find("part"));

        //*** start the robot driver
        if (!robot.configure(portProp))
        {
            yError() << "Error configuring position controller, check parameters";
            return false;
        }
        
        if (!robot.init())
        {
            yError() << "Error cannot connect to remote ports" ;
            return false;
        }

        //*** attach the robot driver to the thread and start it
        w_thread.attachRobotDriver(&robot);
        b_thread.attachRobotDriver(&robot);
        if (!w_thread.start())
        {
            yError() << "Working thread did not start, queue will not work";
        }
        else
        {
            yInfo() << "Working thread started";
        }

        if (rf.check("execute")==true)
        {
            yInfo() << "Enablig iPid->setReference() controller";
            w_thread.enable_execute_joint_command = true;
        }
        else
        {
            yInfo() << "Not using iPid->setReference() controller";
            w_thread.enable_execute_joint_command = false;
        }

        if (rf.check("period")==true)
        {
            int period = rf.find("period").asInt();
            yInfo() << "Thread period set to " << period << "ms";
            w_thread.setRate(period);
        }
        yInfo() << "Using parameters:"  << rf.toString();

        //*** open the position file
        yInfo() << "opening file...";
        if (rf.check("filename")==true)
        {
            string filename = rf.find("filename").asString().c_str();
            int req_joints = 0; //default value
            if (rf.check("joints")) 
            {
                req_joints = rf.find("joints").asInt();}
            else
            {
                yError() << "Missing parameter 'joints' (number of joints to control)";
                return false;
            }

            if (req_joints < robot.n_joints) 
            {
                yWarning () << "changing n joints from" << robot.n_joints << "to" << req_joints;
                robot.n_joints = req_joints;
            }
            else if (req_joints > robot.n_joints)
            {
                yError() << "Requested number of joints exceeds the number of available joints on the robot!";
                return false;
            }

            if (!w_thread.actions.openFile(filename,robot.n_joints))
            {
                yError() << "Unable to parse file " << filename;
                return false;
            }
        }
        else
        {
            yWarning() << "no sequence files load.";
        }

        yInfo() << "module succesffully configured. ready.";
        return true;
    }
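
For context, a module written this way is normally started through the standard RFModule runner; the sketch below is an assumption about what the surrounding main() could look like (the class name TrajectoryPlayerModule is hypothetical), showing where the --name, --robot, --part, --filename, --joints, --execute and --period options read by configure() come from.

// Hypothetical launcher for the module above; the class name is an assumption.
#include <yarp/os/Network.h>
#include <yarp/os/ResourceFinder.h>

int main(int argc, char *argv[])
{
    yarp::os::Network yarp;                    // initialise the YARP network
    if (!yarp.checkNetwork())
        return 1;

    yarp::os::ResourceFinder rf;
    rf.configure(argc, argv);                  // collects the command-line options listed above

    TrajectoryPlayerModule mod;                // the RFModule subclass that owns configure()
    return mod.runModule(rf);                  // calls configure(rf), then the update loop
}
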
Example #20
0
bool BehaviorManager::configure(yarp::os::ResourceFinder &rf)
{
    moduleName = rf.check("name",Value("BehaviorManager")).asString();
    setName(moduleName.c_str());
    yInfo() << moduleName << ": finding configuration files...";
    period = rf.check("period",Value(1.0)).asDouble();

    Bottle grp = rf.findGroup("BEHAVIORS");
    behaviorList = *grp.find("behaviors").asList();  

    rpc_in_port.open("/" + moduleName + "/trigger:i");
    yInfo() << "RPC_IN : " << rpc_in_port.getName();

    for (int i = 0; i<behaviorList.size(); i++)
    {
        behavior_name = behaviorList.get(i).asString();
        if (behavior_name == "tagging") {
            behaviors.push_back(new Tagging(&mut, rf, "tagging"));
        } else if (behavior_name == "pointing") {
            behaviors.push_back(new Pointing(&mut, rf, "pointing"));
        } else if (behavior_name == "dummy") {
            behaviors.push_back(new Dummy(&mut, rf, "dummy"));
        } else if (behavior_name == "dummy2") {
            behaviors.push_back(new Dummy(&mut, rf, "dummy2"));
        }  else if (behavior_name == "reactions") {
            behaviors.push_back(new Reactions(&mut, rf, "reactions"));
        }  else if (behavior_name == "followingOrder") {
            behaviors.push_back(new FollowingOrder(&mut, rf, "followingOrder"));
        }  else if (behavior_name == "narrate") {
            behaviors.push_back(new Narrate(&mut, rf, "narrate"));
        }  else if (behavior_name == "recognitionOrder") {
            behaviors.push_back(new recognitionOrder(&mut, rf, "recognitionOrder"));
            // other behaviors here
        }  else {
            yDebug() << "Behavior " + behavior_name + " not implemented";
            return false;
        }
    }

    //Create an iCub Client and check that all dependencies are here before starting
    bool isRFVerbose = false;
    iCub = new ICubClient(moduleName, "behaviorManager","client.ini",isRFVerbose);

    if (!iCub->connect())
    {
        yInfo() << "iCubClient : Some dependencies are not running...";
        Time::delay(1.0);
    }

    while (!Network::connect("/ears/behavior:o", rpc_in_port.getName())) {
        yWarning() << "Ears is not reachable";
        yarp::os::Time::delay(0.5);
    }

    // id = 0;
    for(auto& beh : behaviors) {
        beh->configure();
        beh->openPorts(moduleName);
        beh->iCub = iCub;

        if (beh->from_sensation_port_name != "None") {
            while (!Network::connect(beh->from_sensation_port_name, beh->sensation_port_in.getName())) {
                yInfo()<<"Connecting "<< beh->from_sensation_port_name << " to " <<  beh->sensation_port_in.getName();// <<endl;
                yarp::os::Time::delay(0.5);
            }
        }
        if (beh->external_port_name != "None") {
            while (!Network::connect(beh->rpc_out_port.getName(), beh->external_port_name)) {
                yInfo()<<"Connecting "<< beh->rpc_out_port.getName() << " to " <<  beh->external_port_name;// <<endl;
                yarp::os::Time::delay(0.5);
            }   
        }
    }

    attach(rpc_in_port);
    yInfo("Init done");

    return true;
}
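
The name-to-class dispatch above is an if/else chain that has to be edited for every new behavior. Below is a hedged sketch of a table-driven alternative, written as it could appear in place of that loop; it assumes Behavior is the common public base class of Tagging, Pointing, etc., and reuses the mut, rf, behaviorList and behaviors members from above (needs <map>, <functional>, <string>).

// Hedged sketch, not the module's code: table-driven behavior construction.
// Assumes every behavior keeps the (&mut, rf, name) constructor used above.
std::map<std::string, std::function<Behavior*(const std::string&)>> factories = {
    {"tagging",          [&](const std::string &n){ return new Tagging(&mut, rf, n);          }},
    {"pointing",         [&](const std::string &n){ return new Pointing(&mut, rf, n);         }},
    {"dummy",            [&](const std::string &n){ return new Dummy(&mut, rf, n);            }},
    {"dummy2",           [&](const std::string &n){ return new Dummy(&mut, rf, n);            }},
    {"reactions",        [&](const std::string &n){ return new Reactions(&mut, rf, n);        }},
    {"followingOrder",   [&](const std::string &n){ return new FollowingOrder(&mut, rf, n);   }},
    {"narrate",          [&](const std::string &n){ return new Narrate(&mut, rf, n);          }},
    {"recognitionOrder", [&](const std::string &n){ return new recognitionOrder(&mut, rf, n); }},
};

for (int i = 0; i < behaviorList.size(); i++)
{
    std::string name = behaviorList.get(i).asString();
    auto it = factories.find(name);
    if (it == factories.end())
    {
        yError() << "Behavior" << name << "not implemented";
        return false;
    }
    behaviors.push_back(it->second(name));
}
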
Example #21
0
bool AgentDetector::updateModule()
{
    LockGuard lg(m);

    bool isRefreshed = client.getDepthAndPlayers(depth,players);
    client.getRgb(rgb);

    bool tracked;

    if (handleMultiplePlayers)
        tracked=client.getJoints(joints);
    else
        tracked=client.getJoints(joint, EFAA_KINECT_CLOSEST_PLAYER);
    //cout<<"Tracking value = "<<tracked<<endl;

    if (tracked)
    {
        if (handleMultiplePlayers)
            client.getSkeletonImage(joints,skeletonImage);
        else
        {
            client.getSkeletonImage(joint,skeletonImage);
            joints.clear();
            joints.push_back(joint);
        }
    } 

    client.getPlayersImage(players,playersImage);
    client.getDepthImage(depth,depthToDisplay);

    if (depthPort.getOutputCount()>0)
    {
        depthPort.prepare()=depthToDisplay;
        depthPort.write();
    }

    if (imagePort.getOutputCount()>0)
    {
        imagePort.prepare()=rgb;
        imagePort.write();
    }

    if (playersPort.getOutputCount()>0)
    {
        playersPort.prepare()=playersImage;
        playersPort.write();
    }

    if (skeletonPort.getOutputCount()>0)
    {
        skeletonPort.prepare()=skeletonImage;
        skeletonPort.write();
    }

    if (showImages)
    {        
        cvConvertScale((IplImage*)depthToDisplay.getIplImage(),depthTmp,1.0/255);
        cvCvtColor((IplImage*)rgb.getIplImage(),rgbTmp,CV_BGR2RGB);

        string mode=showMode;
        string submode;
        while (!mode.empty())
        {
            if (showImageParser(mode,submode))
            {            
                if (submode=="rgb")
                    cvShowImage("rgb",rgbTmp);
                else if (submode=="depth")
                    cvShowImage("depth",depthTmp);
                else if (submode=="skeleton")
                    cvShowImage("skeleton",(IplImage*)skeletonImage.getIplImage());
                else if (submode=="players")
                    cvShowImage("players",(IplImage*)playersImage.getIplImage());
                else
                    yError("unrecognized show mode!");
            }
        }

        cvWaitKey(1);
    }

    //Send the players information to the OPC
    //Allow click calibration
    if (!checkCalibration())
    {
        if (AgentDetector::clicked==clicked_left)
        {
            AgentDetector::clicked=idle;

            //Get the clicked point coordinate in Kinect space
            Vector clickedPoint(3);
            cout<<"Processing a click on ("<<AgentDetector::clickX<<" "<<AgentDetector::clickY<<") --> ";
            client.get3DPoint((int)AgentDetector::clickX,(int)AgentDetector::clickY,clickedPoint);
            cout<<clickedPoint.toString(3,3)<<endl;

            Bottle bCond;
            Bottle bObject;
            Bottle bRTObject;

            bObject.addString(EFAA_OPC_ENTITY_TAG);
            bObject.addString("==");
            bObject.addString(EFAA_OPC_ENTITY_OBJECT);

            bRTObject.addString(EFAA_OPC_ENTITY_TAG);
            bRTObject.addString("==");
            bRTObject.addString(EFAA_OPC_ENTITY_RTOBJECT);

            Bottle bPresent;
            bPresent.addString(EFAA_OPC_OBJECT_PRESENT_TAG);
            bPresent.addString("==");
            bPresent.addDouble(1.0);

            bCond.addList()=bObject;
            bCond.addString("&&");
            bCond.addList()=bPresent;
            bCond.addString("||");
            bCond.addList()=bRTObject;
            bCond.addString("&&");
            bCond.addList()=bPresent;
            opc->checkout();
            opc->isVerbose=true;
            list<Entity*> presentObjects=opc->Entities(bCond);
            opc->isVerbose=false;
            
            Object *o=nullptr;
            if (presentObjects.size()==1) {
                o=dynamic_cast<Object*>(presentObjects.front());
            } else {
                for(auto& presentObject : presentObjects) {
                    if(presentObject->name() == "target") {
                        o=dynamic_cast<Object*>(presentObject);
                        break;
                    }
                }
            }
            if(o) {
                Bottle botRPH, botRPHRep;
                botRPH.addString("add");
                botRPH.addString("kinect");
                Bottle &cooKinect=botRPH.addList();
                cooKinect.addDouble(clickedPoint[0]);
                cooKinect.addDouble(clickedPoint[1]);
                cooKinect.addDouble(clickedPoint[2]);

                Bottle &cooiCub=botRPH.addList();
                cooiCub.addDouble(o->m_ego_position[0]);
                cooiCub.addDouble(o->m_ego_position[1]);
                cooiCub.addDouble(o->m_ego_position[2]);
                rfh.write(botRPH,botRPHRep);
                cout<<"Sent to RFH: "<<botRPH.toString().c_str()<<endl;
                cout<<"Got from RFH: "<<botRPHRep.toString().c_str()<<endl;

                pointsCnt++;
            } else {
                yWarning("There should be 1 and only 1 object on the table");
                yWarning("If there is more than one object, the object you want");
                yWarning("to calibrate must be called \"target\"");
            }
        }
        else if (AgentDetector::clicked==clicked_right)
        {
            AgentDetector::clicked=idle;
            if (pointsCnt>=3)
            {
                Bottle calibBottle,calibReply; 
                calibBottle.addString("cal");
                calibBottle.addString("kinect");
                rfh.write(calibBottle,calibReply);
                cout<<"Calibrated ! "<<calibReply.toString().c_str()<<endl;

                calibBottle.clear();
                calibBottle.addString("save");
                rfh.write(calibBottle,calibReply);
                cout<<"Saved to file ! "<<calibReply.toString().c_str()<<endl;
                checkCalibration();
            }
            else
                yWarning("Unable to calibrate with less than 3 points pairs collected");
        }
    }

    if (isRefreshed)
    {
//        yInfo() << " refreshed";
        //////////////////////////////////////////////////////////////////
        //Clear the previous agents
        //for(map<int, Agent*>::iterator pA=identities.begin(); pA!=identities.end() ; pA++)
        //{
        //    pA->second->m_present = 0.0;
        //}  
        //partner->m_present = 0.0;

        // check whether the last apparition was more than dThresholdDisparition seconds ago

        if (tracked)
        {
            //Go through all skeletons
            for(deque<Player>::iterator p=joints.begin(); p!=joints.end(); p++)
            {
                //check if this skeleton is really tracked
                bool reallyTracked = false;
                for(map<string,Joint>::iterator jnt = p->skeleton.begin() ; jnt != p->skeleton.end() ; jnt++)
                {
                    if (jnt->second.x != 0 && jnt->second.y != 0 && jnt->second.z != 0)
                    {
                        reallyTracked = true; break;
                    }
                }
                if (reallyTracked)
                {
                    dSince = (clock() - dTimingLastApparition) / (double) CLOCKS_PER_SEC;
                    //yInfo() << " is REALLY tracked";
                    string playerName = partner_default_name;

                    //If this skeleton already has an identity, reuse it; otherwise identify it
                    if (identities.find(p->ID) != identities.end())
                    {
                        playerName = identities[p->ID];
                    }
                    else
                    {   
                        //Check if we should learn this face
                        if (currentTrainingFace != "")
                        {
                            setIdentity(*p,currentTrainingFace);
                            currentTrainingFace = "";
                        }

                        //if (useFaceRecognition)
                        playerName = getIdentity(*p);
                    }

                    //We interact with OPC only if the calibration is done
                    if (isCalibrated)
                    {
                        //main bottle to be streamed with loc of all agent body part
                        Bottle& bAgentLoc = agentLocOutPort.prepare();
                        bAgentLoc.clear();

                        //Retrieve this player in OPC or create if does not exist
                        opc->checkout();
                        partner = opc->addOrRetrieveEntity<Agent>(partner_default_name);
                        partner->m_present = 1.0;

                        // reset the timing.
                        dTimingLastApparition = clock();
                        
                        if (identities.find(p->ID) == identities.end())
                        {
                            cout<<"Assigning name "<<playerName<<" to skeleton "<<p->ID<<endl;

                            //Agent* specificAgent = opc->addEntity<Agent>(playerName);
                            Agent* specificAgent = opc->addOrRetrieveEntity<Agent>(playerName);
                            if(specificAgent == nullptr) {
                                yError() << "SHIT specificAgent";
                            } else {
                                identities[p->ID] = specificAgent->name();
                                specificAgent->m_present = 1.0;
                                yInfo() << " specific agent is commited";
                                opc->commit(specificAgent);
                                yInfo() << " specific agent is commited done";
                            }
                        }

//                        Relation r(partner->name(),"named",playerName);
//                        opc->addRelation(r,1.0);

//                        cout<<"Commiting : "<<r.toString()<<endl;
                        yarp::os::Bottle &skeleton = outputSkeletonPort.prepare();
                        skeleton.clear();
                        //Convert the skeleton into efaaHelpers body. We lose orientation in the process...
                        for(map<string,Joint>::iterator jnt = p->skeleton.begin() ; jnt != p->skeleton.end() ; jnt++)
                        {
                            Bottle bBodyPartLoc;

                            Vector kPosition(4);
                            kPosition[0] = jnt->second.x;
                            kPosition[1] = jnt->second.y;
                            kPosition[2] = jnt->second.z;
                            kPosition[3] = 1;
                            Vector icubPos = kinect2icub * kPosition;
                            Vector irPos = icubPos.subVector(0,2);

                            if (isMounted)
                            {
                                irPos = transform2IR(irPos);
                                Bottle jntBtl;
                                jntBtl.clear();
                                jntBtl.addString(jnt->first);
                                jntBtl.addDouble(jnt->second.x);
                                jntBtl.addDouble(jnt->second.y);
                                jntBtl.addDouble(jnt->second.z);
                                skeleton.addList() = jntBtl;
                            }

                            if (jnt->first == EFAA_OPC_BODY_PART_TYPE_HEAD)
                            {
                                partner->m_ego_position = irPos;
                            }
                            partner->m_body.m_parts[jnt->first] = irPos;

                            bBodyPartLoc.addString(jnt->first);
                            bBodyPartLoc.addString(irPos.toString());

                            bAgentLoc.addList() = bBodyPartLoc;
                        }

                        agentLocOutPort.write();

                        opc->commit(partner);
//                        cout << skeleton.toString()<< endl;
                        outputSkeletonPort.write();
                        //opc->commit(agent);
                    }
//                    cout<<'1'<<endl;
                }
            }
        }
        else
        {
            if (dSince > dThresholdDisparition)
            {
                opc->checkout();
                partner = opc->addOrRetrieveEntity<Agent>(partner_default_name);
                partner->m_present = 0.0;
                opc->commit(partner);
            }
            else
            {
                //yInfo() << " clock is: " << clock() << "\t last apparition: " << dTimingLastApparition  << "\t dSince: " << dSince;
                //yInfo() << " agent dissapeared but not for too long.";
            }
        }
    }
    return true;
}
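
The left-click branch above collects Kinect/iCub point pairs and the right-click branch finalizes the calibration, so the whole exchange with the reference-frame handler reduces to three commands: "add kinect (kinect point) (icub point)" for at least three pairs, then "cal kinect", then "save". A hedged, stand-alone sketch of that client side (port names and the helper name are assumptions; the command strings mirror the code above):

// Hedged sketch of the calibration protocol used above, as a stand-alone client.
#include <vector>
#include <yarp/os/all.h>
#include <yarp/sig/Vector.h>

bool calibrateKinectFrame(const std::vector<yarp::sig::Vector> &kinectPts,
                          const std::vector<yarp::sig::Vector> &icubPts)
{
    yarp::os::RpcClient rfh;
    rfh.open("/calibClient/rfh:o");                                               // assumed local name
    if (!yarp::os::Network::connect(rfh.getName(), "/referenceFrameHandler/rpc")) // assumed remote name
        return false;

    for (size_t i = 0; i < kinectPts.size() && i < icubPts.size(); i++)
    {
        yarp::os::Bottle cmd, rep;
        cmd.addString("add");
        cmd.addString("kinect");
        yarp::os::Bottle &k = cmd.addList();
        for (size_t j = 0; j < 3; j++) k.addDouble(kinectPts[i][j]);
        yarp::os::Bottle &c = cmd.addList();
        for (size_t j = 0; j < 3; j++) c.addDouble(icubPts[i][j]);
        rfh.write(cmd, rep);                                                      // one point pair per "add"
    }

    yarp::os::Bottle calib, rep;
    calib.addString("cal");
    calib.addString("kinect");
    rfh.write(calib, rep);                                                        // estimate kinect -> icub

    calib.clear();
    calib.addString("save");
    rfh.write(calib, rep);                                                        // persist, as the module does
    return true;
}
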
Example #22
0
/* Called periodically every getPeriod() seconds */
bool ears::updateModule() {
    LockGuard lg(mutex);
    if (bShouldListen)
    {
        yDebug() << "bListen";
        Bottle bRecognized, //received FROM speech recog with transfer information (1/0 (bAnswer))
        bAnswer, //response from speech recog without transfer information, including raw sentence
        bSemantic; // semantic information of the content of the recognition
        bRecognized = iCub->getRecogClient()->recogFromGrammarLoop(grammarToString(MainGrammar), 1, true);
        //bShouldListen=true;

        if (bRecognized.get(0).asInt() == 0)
        {
            yWarning() << " error in ears::updateModule | Error in speechRecog";
            return true;
        }

        bAnswer = *bRecognized.get(1).asList();

        if (bAnswer.get(0).asString() == "stop")
        {
            yInfo() << " in abmHandler::node1 | stop called";
            return true;
        }
        // bAnswer is the result of the recognition system (the first element is the raw sentence, the 2nd is the list of semantic elements)

        bSemantic = *(*bAnswer.get(1).asList()).get(1).asList();
        cout << bSemantic.toString() << endl;
        string sObject, sAction;
        string sQuestionKind = bAnswer.get(1).asList()->get(0).toString();
        //string sPredicate = bSemantic.check("predicate", Value("none")).asString();

        string sObjectType, sCommand;
        if(sQuestionKind == "SENTENCEOBJECT") {
            sObject = bSemantic.check("object", Value("none")).asString();
            sAction = bSemantic.check("predicateObject", Value("none")).asString();
            sCommand = "followingOrder";
            sObjectType = "object";
        } else if(sQuestionKind == "SENTENCEBODYPART") {
            sObject = bSemantic.check("bodypart", Value("none")).asString();
            sCommand = "touchingOrder";
            sObjectType = "bodypart";
        } else if(sQuestionKind == "SENTENCENARRATIVE") {
            sCommand = "followingOrder";
            sAction = "narrate";
            sObjectType = "";
            sObject = "";
        } else {
            yError() << "[ears] Unknown question kind:" << sQuestionKind;
        }

        Bottle &bToTarget = portTarget.prepare();
        bToTarget.clear();
        bToTarget.addString(sAction);
        bToTarget.addString(sObjectType);
        bToTarget.addString(sObject);
        portTarget.write();

        Bottle bCondition;
        bCondition.addString(sCommand);
        //bCondition.addString(sAction);
        bCondition.addString(sObjectType);
        bCondition.addString(sObject);

        portToBehavior.write(bCondition);
 
        yDebug() << "Sending " + bCondition.toString();
    } else {
        yDebug() << "Not bListen";
    }

    return true;
}
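
The nested asList()/check() calls above are easier to follow with the expected reply shape in view. A hedged reconstruction, not produced by the real recognizer, that builds a bottle with the same nesting the code above dereferences (the words are illustrative only):

// Hedged sketch: a reply with the same shape the parsing code above expects.
#include <yarp/os/Bottle.h>

yarp::os::Bottle makeExampleReply()
{
    yarp::os::Bottle bRecognized;
    bRecognized.addInt(1);                                   // get(0): success flag checked above

    yarp::os::Bottle &bAnswer = bRecognized.addList();       // get(1): bAnswer
    bAnswer.addString("take the ball");                      // raw sentence

    yarp::os::Bottle &bKind = bAnswer.addList();             // bAnswer.get(1)
    bKind.addString("SENTENCEOBJECT");                       // sQuestionKind

    yarp::os::Bottle &bSemantic = bKind.addList();           // bAnswer.get(1)->get(1): bSemantic
    yarp::os::Bottle &bObj = bSemantic.addList();
    bObj.addString("object");
    bObj.addString("ball");                                  // read via check("object", ...)
    yarp::os::Bottle &bPred = bSemantic.addList();
    bPred.addString("predicateObject");
    bPred.addString("take");                                 // read via check("predicateObject", ...)

    return bRecognized;
}
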
Example #23
0
void feat_PrintWarning(char *string)
{
    yWarning("%s", string);
}
void vPreProcess::run()
{
    Stamp zynq_stamp;
    Stamp local_stamp;

    resolution resmod = res;
    resmod.height -= 1;
    resmod.width -= 1;
    int nm0 = 0, nm1 = 0, nm2 = 0, nm3 = 0, nm4 = 0;
    AE v;
    SkinEvent se;
    SkinSample ss;
    bool received_half_sample = false;
    int32_t salvage_sample[2] = {-1, 0};

    while(true) {

        double pyt = zynq_stamp.getTime();

        std::deque<AE> qleft, qright;
        std::deque<int32_t> qskin;
        std::deque<int32_t> qskinsamples;
        const std::vector<int32_t> *q = inPort.read(zynq_stamp);
        if(!q) break;

        delays.push_back((Time::now() - zynq_stamp.getTime()));
        if(pyt) intervals.push_back(zynq_stamp.getTime() - pyt);

        if(precheck) {
            nm0 = zynq_stamp.getCount();
            if(nm3 && nm0 - nm1 == 1 && nm1 - nm2 > 1 && nm1 - nm3 > 2) {
                yWarning() << "LOST" << nm1-nm2-1 << "PACKETS ["
                           << nm4 << nm3 << nm2 << nm1 << nm0 << "]"
                           << q->size() << "packet size";
            }
            nm4 = nm3;
            nm3 = nm2;
            nm2 = nm1;
            nm1 = nm0;
        }

        //unsigned int events_in_packet = 0;
        const int32_t *qi = q->data();

        while ((size_t)(qi - q->data()) < q->size()) {

            if(IS_SKIN(*(qi+1))) {
                if(IS_SAMPLE(*(qi+1))) {
                    qskinsamples.push_back(*(qi++)); //TS
                    qskinsamples.push_back(*(qi++)); //VALUE/TAXEL
                } else {
                    qskin.push_back(*(qi++)); //TS
                    qskin.push_back(*(qi++)); //TAXEL
                }
            } else { // IS_VISION

                v.decode(qi);

                //precheck
                if(precheck && (v.x < 0 || v.x > resmod.width || v.y < 0 || v.y > resmod.height)) {
                    yWarning() << "Event Corruption:" << v.getContent().toString();
                    continue;
                }

                //flipx and flipy
                if(flipx) v.x = resmod.width - v.x;
                if(flipy) v.y = resmod.height - v.y;

                //salt and pepper filter
                if(pepper && !thefilter.check(v.x, v.y, v.polarity, v.channel, v.stamp)) {
                    v_dropped++;
                    continue;
                }

                //undistortion (including rectification)
                if(undistort) {
                    cv::Vec2i mapPix;
                    if(v.getChannel() == 0)
                        mapPix = leftMap.at<cv::Vec2i>(v.y, v.x);
                    else
                        mapPix = rightMap.at<cv::Vec2i>(v.y, v.x);

                    //truncate to sensor bounds after mapping?
                    if(truncate && (mapPix[0] < 0 ||
                                    mapPix[0] > resmod.width ||
                                    mapPix[1] < 0 ||
                                    mapPix[1] > resmod.height)) {
                        continue;
                    }

                    v.x = mapPix[0];
                    v.y = mapPix[1];
                    //std::cout.precision(30);
                    //std::cout<<v.channel<<mapPix<<"timestamp:"<<pyt<<std::endl;

                }

                if(split && v.channel)
                {
                    qright.push_back(v);
                } else {
                    qleft.push_back(v);
                }
            }

        }

        if(qskinsamples.size() > 2) { //if we have skin samples
            //check if we need to fix the ordering
            if(IS_SSV(qskinsamples[1])) { // missing address
                if(received_half_sample) { // but we have it from last bottle
                    qskinsamples.push_front(salvage_sample[1]);
                    qskinsamples.push_front(salvage_sample[0]);
                } else { // otherwise we are misaligned due to missing data
                    qskinsamples.pop_front();
                    qskinsamples.pop_front();
                }
            }
            received_half_sample = false; //either case the half sample is no longer valid

            //check if we now have a cut event
            int samples_overrun = qskinsamples.size() % packetSize(SkinSample::tag);
            if(samples_overrun == 2) {
                salvage_sample[1] = qskinsamples.back();
                qskinsamples.pop_back();
                salvage_sample[0] = qskinsamples.back();
                qskinsamples.pop_back();
                received_half_sample = true;
            } else if(samples_overrun) {
                yError() << "samples cut by " << samples_overrun;
            }
        }

        v_total += qleft.size() + qright.size();

        if(use_local_stamp) {
            local_stamp.update();
            zynq_stamp = local_stamp;
        }

        if(qleft.size()) {
            outPortCamLeft.write(qleft, zynq_stamp);
        }
        if(qright.size()) {
            outPortCamRight.write(qright, zynq_stamp);
        }
        if(qskin.size()) {
            outPortSkin.write(qskin, zynq_stamp);
        }
        if(qskinsamples.size()) {
            outPortSkinSamples.write(qskinsamples, zynq_stamp);
        }
    }

}
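
The undistortion step above only indexes precomputed per-pixel tables (leftMap, rightMap) of cv::Vec2i. A hedged sketch of how such a forward lookup could be built with OpenCV, assuming camera intrinsics K, distortion D, rectification R and projection P that are not part of this example:

// Hedged sketch: building a per-pixel lookup like leftMap/rightMap above.
#include <vector>
#include <opencv2/opencv.hpp>

cv::Mat buildForwardPixelMap(const cv::Mat &K, const cv::Mat &D,
                             const cv::Mat &R, const cv::Mat &P,
                             const cv::Size &size)
{
    std::vector<cv::Point2f> raw, rectified;
    raw.reserve(size.area());
    for (int y = 0; y < size.height; y++)
        for (int x = 0; x < size.width; x++)
            raw.emplace_back((float)x, (float)y);

    // maps each raw (distorted) pixel to its rectified pixel coordinates
    cv::undistortPoints(raw, rectified, K, D, R, P);

    cv::Mat map(size, CV_32SC2);                             // one cv::Vec2i per raw pixel
    int k = 0;
    for (int y = 0; y < size.height; y++)
        for (int x = 0; x < size.width; x++, k++)
            map.at<cv::Vec2i>(y, x) = cv::Vec2i(cvRound(rectified[k].x),
                                                cvRound(rectified[k].y));
    return map;                                              // then: mapPix = map.at<cv::Vec2i>(v.y, v.x)
}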