Code Example #1
void NaoMarkServiceDetection::callback(const std::string &key, const AL::ALValue &value, const AL::ALValue &msg) 
{
	// LandmarkDetected layout: marks[0] = timestamp [sec, usec], marks[1] = list of detected marks,
	// marks[1][i][0] = shape info [shapeID, alpha, beta, sizeX, sizeY, heading], marks[1][i][1][0] = mark ID.
	AL::ALValue marks = fMemoryProxy.getData("LandmarkDetected");

	if(marks.getSize() > 0 && !_isMarkFound)
	{
		int TimeStampField = marks[0][1]; // timestamp microseconds (unused here)
		if((int)marks[1][0][1][0] == _markToFind)
		{
			// Angular size below 0.2 rad means the mark is still far away: keep approaching.
			if((float)marks[1][0][0][3] < 0.2f)
			{
				if(_isAllowedToMove)
					motionProxy->moveToward(0.5f, 0, marks[1][0][0][1]); // walk forward, turning toward the mark's alpha angle
				_naoMarkDetected = true;
			}
			else // Close enough: stop and remember the mark was found.
			{
				motionProxy->moveToward(0, 0, 0);
				_isMarkFound = true;
			}
		}
		else
			motionProxy->moveToward(0, 0, 0);
	}
}
Code Example #2
void SensorsModule::initializeSonarValues()
{
    // Get a proxy to the DCM.
    boost::shared_ptr<AL::DCMProxy> dcmProxy = broker_->getDcmProxy();
    if(dcmProxy != 0)
    {
        try
        {
            // For DCM::set() see:
            // http://www.aldebaran-robotics.com/documentation/naoqi/sensors/dcm-api.html#DCMProxy::set__AL::ALValueCR
            AL::ALValue dcmSonarCommand;

            dcmSonarCommand.arraySetSize(3);
            dcmSonarCommand[0] = std::string("Device/SubDeviceList/US/Actuator/Value"); // Device name.
            dcmSonarCommand[1] = std::string("ClearAll"); // Delete all timed commands before adding this one.

            dcmSonarCommand[2].arraySetSize(1); // A list of (1) timed-commands.
            dcmSonarCommand[2][0].arraySetSize(2);
            dcmSonarCommand[2][0][0] = 68.0; // The command itself: 68.0 is the usual value to start periodic capture on both sonars.
            dcmSonarCommand[2][0][1] = dcmProxy->getTime(0); // The DCM time for the command to be applied.

            // Send the timed command to the sonars.
            dcmProxy->set(dcmSonarCommand);
        }
        catch(AL::ALError& e)
        {
            std::cout << "SensorsModule : Failed to initialize sonars, "
                      << e.toString() << std::endl;
        }
    }
}
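
Once the sonars have been started this way, the measured distances can be read back from ALMemory. A minimal readback sketch, assuming the standard NAO sonar memory keys (the key names and the extra proxy are assumptions, not taken from this module):

// Read the left/right sonar distances (in meters) from ALMemory.
AL::ALMemoryProxy memoryProxy(broker_); // broker_ as in the module above (assumed)
float left  = memoryProxy.getData("Device/SubDeviceList/US/Left/Sensor/Value");
float right = memoryProxy.getData("Device/SubDeviceList/US/Right/Sensor/Value");
std::cout << "sonar left: " << left << " right: " << right << std::endl;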
Code Example #3
File: main.cpp, Project: dmerejkowsky/nao-gm
    void ParseWordRecognizedArray(std::vector<WordConfidencePair>& results, const AL::ALValue& value, float recognitionThreshold)
    {
        // Unexpected data: the result should be an array
        if (value.getType() != AL::ALValue::TypeArray)
            MODULE_ERROR("invalid array type");

        // Unexpected data: the array holds (word, confidence) pairs, so its size must be even
        if (value.getSize() % 2 != 0)
            MODULE_ERROR("invalid array size");

        for (int i = 0; i < (int)value.getSize(); i += 2)
        {
            AL::ALValue wordValue = value[i];
            AL::ALValue confidenceValue = value[i + 1];

            float confidence = (float)confidenceValue;
            if (confidence >= recognitionThreshold)
            {
                WordConfidencePair pair = { wordValue.toString(), confidence };
                results.push_back(pair);
            }
        }

        std::sort(results.begin(), results.end(), WordConfidencePairComparison());
    }
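
The pair type and comparator used above are not part of this excerpt. A minimal sketch of what they presumably look like (the field names and the descending sort order are assumptions):

struct WordConfidencePair
{
    std::string word;
    float confidence;
};

// Sort so that the highest-confidence word comes first.
struct WordConfidencePairComparison
{
    bool operator()(const WordConfidencePair &a, const WordConfidencePair &b) const
    {
        return a.confidence > b.confidence;
    }
};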
Code Example #4
/**
  * \brief: Create the aliases for controlling the joint stiffnesses.
  **/
void hal_experimental::create_actuator_stiffness_aliases()
{
    LOG("[create_actuator_stiffness]", "create actuator stiffness initializing...");

    AL::ALValue jointAliases;
    try
    {
        jointAliases.clear();
        jointAliases.arraySetSize(2);
        jointAliases[0] = std::string("jointStiffness");
        jointAliases[1].arraySetSize(25);
        int j = 0;
        for(int i = NumOfPositionActuatorIds; i <= rAnkleRollStiffnessActuator; ++i)
        {
            jointAliases[1][j] = actuatorNames[i];
            ++j;
        }
        // jointAliases[1][HEAD_PITCH]        = std::string("Device/SubDeviceList/HeadPitch/Hardness/Actuator/Value");
        // jointAliases[1][HEAD_YAW]          = std::string("Device/SubDeviceList/HeadYaw/Hardness/Actuator/Value");
        // jointAliases[1][L_ANKLE_PITCH]     = std::string("Device/SubDeviceList/LAnklePitch/Hardness/Actuator/Value");
        // jointAliases[1][L_ANKLE_ROLL]      = std::string("Device/SubDeviceList/LAnkleRoll/Hardness/Actuator/Value");
        // jointAliases[1][L_ELBOW_ROLL]      = std::string("Device/SubDeviceList/LElbowRoll/Hardness/Actuator/Value");
        // jointAliases[1][L_ELBOW_YAW]       = std::string("Device/SubDeviceList/LElbowYaw/Hardness/Actuator/Value");
        // jointAliases[1][L_HAND]            = std::string("Device/SubDeviceList/LHand/Hardness/Actuator/Value");
        // jointAliases[1][L_HIP_PITCH]       = std::string("Device/SubDeviceList/LHipPitch/Hardness/Actuator/Value");
        // jointAliases[1][L_HIP_ROLL]        = std::string("Device/SubDeviceList/LHipRoll/Hardness/Actuator/Value");
        // jointAliases[1][L_HIP_YAW_PITCH]   = std::string("Device/SubDeviceList/LHipYawPitch/Hardness/Actuator/Value");
        // jointAliases[1][L_KNEE_PITCH]      = std::string("Device/SubDeviceList/LKneePitch/Hardness/Actuator/Value");
        // jointAliases[1][L_SHOULDER_PITCH]  = std::string("Device/SubDeviceList/LShoulderPitch/Hardness/Actuator/Value");
        // jointAliases[1][L_SHOULDER_ROLL]   = std::string("Device/SubDeviceList/LShoulderRoll/Hardness/Actuator/Value");
        // jointAliases[1][L_WRIST_YAW]       = std::string("Device/SubDeviceList/LWristYaw/Hardness/Actuator/Value");
        // jointAliases[1][R_ANKLE_PITCH]     = std::string("Device/SubDeviceList/RAnklePitch/Hardness/Actuator/Value");
        // jointAliases[1][R_ANKLE_ROLL]      = std::string("Device/SubDeviceList/RAnkleRoll/Hardness/Actuator/Value");
        // jointAliases[1][R_ELBOW_ROLL]      = std::string("Device/SubDeviceList/RElbowRoll/Hardness/Actuator/Value");
        // jointAliases[1][R_ELBOW_YAW]       = std::string("Device/SubDeviceList/RElbowYaw/Hardness/Actuator/Value");
        // jointAliases[1][R_HAND]            = std::string("Device/SubDeviceList/RHand/Hardness/Actuator/Value");
        // jointAliases[1][R_HIP_PITCH]       = std::string("Device/SubDeviceList/RHipPitch/Hardness/Actuator/Value");
        // jointAliases[1][R_HIP_ROLL]        = std::string("Device/SubDeviceList/RHipRoll/Hardness/Actuator/Value");
        // jointAliases[1][R_KNEE_PITCH]      = std::string("Device/SubDeviceList/RKneePitch/Hardness/Actuator/Value");
        // jointAliases[1][R_SHOULDER_PITCH]  = std::string("Device/SubDeviceList/RShoulderPitch/Hardness/Actuator/Value");
        // jointAliases[1][R_SHOULDER_ROLL]   = std::string("Device/SubDeviceList/RShoulderRoll/Hardness/Actuator/Value");
        // jointAliases[1][R_WRIST_YAW]       = std::string("Device/SubDeviceList/RWristYaw/Hardness/Actuator/Value");

    }
    catch(const AL::ALError &e)
    {
        std::cout << "[create_actuator_stiffness][ERROR]: Error setting up the aliase: " << e.toString() << std::endl;
    }

    try
    {
        dcm_proxy->createAlias(jointAliases);
        log_file << "stiffness alias added to DCM!\n";
    }
    catch(const AL::ALError &e)
    {
        LOG("[create_actuator_stiffness]","[ERROR]: An error occured while creating stiffness actuator aliases: " + e.toString());
    }
    LOG("[create_actuator_stiffness]","create actuator stiffness initialized");
}
Code Example #5
int main(int argc, char **argv)
{
  std::string robotIp = "127.0.0.1";

  if (argc < 2) {
    std::cerr << "Usage: almotion_setBreathConfig robotIp "
              << "(optional default \"127.0.0.1\")."<< std::endl;
  }
  else {
    robotIp = argv[1];
  }

  AL::ALMotionProxy motion(robotIp);

  // Example showing how to change the breathing configuration
  // Setting a relaxed configuration: 5 breaths per minute at max amplitude
  AL::ALValue breathConfig;
  breathConfig.arraySetSize(2);

  AL::ALValue tmp;
  tmp.arraySetSize(2);
  tmp[0] = "Bpm";
  tmp[1] = 5.0f;
  breathConfig[0] = tmp;

  tmp[0] = "Amplitude";
  tmp[1] = 1.0f;
  breathConfig[1] = tmp;
  motion.setBreathConfig(breathConfig);

  return 0;
}
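
For comparison, the same nested structure can be built more compactly with the static ALValue::array() helpers (used elsewhere on this page, e.g. in the commented-out code of Example #20); a sketch, assuming a NAOqi SDK recent enough to provide them:

AL::ALValue breathConfig = AL::ALValue::array(
    AL::ALValue::array("Bpm", 5.0f),
    AL::ALValue::array("Amplitude", 1.0f));
motion.setBreathConfig(breathConfig);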
Code Example #6
File: gmalproxy.cpp, Project: kalineh/nao-gm
gmVariable GMALProxy::CallReturnVariable(const char* function, gmVariable arg)
{
    const std::string function_string = std::string(function);

    AL::ALValue result = AL::ALValue();

    switch (arg.m_type)
    {
    case GM_INT: result = _proxy.call<AL::ALValue>(function_string, arg.GetInt()); break;
    case GM_FLOAT: result = _proxy.call<AL::ALValue>(function_string, arg.GetFloat()); break;
    case GM_VEC2: result = _proxy.call<AL::ALValue>(function_string, arg.GetVec2().x, arg.GetVec2().y); break;
    case GM_STRING: result = _proxy.call<AL::ALValue>(function_string, std::string(arg.GetCStringSafe())); break;
    default: result = _proxy.call<AL::ALValue>(function_string); break;
    }

    gmVariable result_variable;
    result_variable.Nullify();

    switch (result.getType())
    {
    case AL::ALValue::TypeBool: result_variable = gmVariable(bool(result) ? 1 : 0); break;
    case AL::ALValue::TypeInt: result_variable = gmVariable(int(result)); break;
    case AL::ALValue::TypeFloat: result_variable = gmVariable(float(result)); break;
    case AL::ALValue::TypeString: result_variable = gmVariable(VirtualMachine::Get()->GetVM().AllocStringObject(result.toString().c_str())); break;
    default:
        break;
    }

    return result_variable;
}
Code Example #7
Variant ALProxy::call(const std::string &method, const Variant &val)
{
    AL::ALValue param;
    AL::ALValue result;
    Variant vResult(0);
    param.arrayPush(val.toALValue());
    fProxy->genericCall(method, param,result);
    vResult.fromALValue(result);
    return vResult;
}
Code Example #8
/**
  * \brief: Creates an alias for all the position actuators.
  **/
void hal_experimental::create_actuator_position_aliases()
{
    LOG("[create_actuator_pos]","create actuator position initializing...");

    AL::ALValue jointAliases;

    jointAliases.arraySetSize(2);
    jointAliases[0] = std::string("jointActuators");
    jointAliases[1].arraySetSize(NumOfPositionActuatorIds);
    for(int i = 0; i < NumOfPositionActuatorIds; ++i)
    {
        jointAliases[1][i] = std::string(actuatorNames[i]);
    }


    // Joints actuator list
    //  jointAliases[1].arraySetSize(25);
    // jointAliases[1][HEAD_PITCH]       = std::string("Device/SubDeviceList/HeadPitch/Position/Actuator/Value");
    // jointAliases[1][HEAD_YAW]         = std::string("Device/SubDeviceList/HeadYaw/Position/Actuator/Value");
    // jointAliases[1][L_ANKLE_PITCH]    = std::string("Device/SubDeviceList/LAnklePitch/Position/Actuator/Value");
    // jointAliases[1][L_ANKLE_ROLL]     = std::string("Device/SubDeviceList/LAnkleRoll/Position/Actuator/Value");
    // jointAliases[1][L_ELBOW_ROLL]     = std::string("Device/SubDeviceList/LElbowRoll/Position/Actuator/Value");
    // jointAliases[1][L_ELBOW_YAW]      = std::string("Device/SubDeviceList/LElbowYaw/Position/Actuator/Value");
    // jointAliases[1][L_HAND]           = std::string("Device/SubDeviceList/LHand/Position/Actuator/Value");
    // jointAliases[1][L_HIP_PITCH]      = std::string("Device/SubDeviceList/LHipPitch/Position/Actuator/Value");
    // jointAliases[1][L_HIP_ROLL]       = std::string("Device/SubDeviceList/LHipRoll/Position/Actuator/Value");
    // jointAliases[1][L_HIP_YAW_PITCH]  = std::string("Device/SubDeviceList/LHipYawPitch/Position/Actuator/Value");
    // jointAliases[1][L_KNEE_PITCH]     = std::string("Device/SubDeviceList/LKneePitch/Position/Actuator/Value");
    // jointAliases[1][L_SHOULDER_PITCH] = std::string("Device/SubDeviceList/LShoulderPitch/Position/Actuator/Value");
    // jointAliases[1][L_SHOULDER_ROLL]  = std::string("Device/SubDeviceList/LShoulderRoll/Position/Actuator/Value");
    // jointAliases[1][L_WRIST_YAW]      = std::string("Device/SubDeviceList/LWristYaw/Position/Actuator/Value");
    // jointAliases[1][R_ANKLE_PITCH]    = std::string("Device/SubDeviceList/RAnklePitch/Position/Actuator/Value");
    // jointAliases[1][R_ANKLE_ROLL]     = std::string("Device/SubDeviceList/RAnkleRoll/Position/Actuator/Value");
    // jointAliases[1][R_ELBOW_ROLL]     = std::string("Device/SubDeviceList/RElbowRoll/Position/Actuator/Value");
    // jointAliases[1][R_ELBOW_YAW]      = std::string("Device/SubDeviceList/RElbowYaw/Position/Actuator/Value");
    // jointAliases[1][R_HAND]           = std::string("Device/SubDeviceList/RHand/Position/Actuator/Value");
    // jointAliases[1][R_HIP_PITCH]      = std::string("Device/SubDeviceList/RHipPitch/Position/Actuator/Value");
    // jointAliases[1][R_HIP_ROLL]       = std::string("Device/SubDeviceList/RHipRoll/Position/Actuator/Value");
    // jointAliases[1][R_KNEE_PITCH]     = std::string("Device/SubDeviceList/RKneePitch/Position/Actuator/Value");
    // jointAliases[1][R_SHOULDER_PITCH] = std::string("Device/SubDeviceList/RShoulderPitch/Position/Actuator/Value");
    // jointAliases[1][R_SHOULDER_ROLL]  = std::string("Device/SubDeviceList/RShoulderRoll/Position/Actuator/Value");
    // jointAliases[1][R_WRIST_YAW]      = std::string("Device/SubDeviceList/RWristYaw/Position/Actuator/Value");
    try
    {
        dcm_proxy->createAlias(jointAliases);
        log_file << "position alias added to DCM!\n";

    }
    catch(const AL::ALError &e)
    {
        LOG("[create_actuaor_pos]","[ERROR]: Error creating actuator position aliases: " + e.toString());
    }

    LOG("[create_actuator_pos]","create actuator position initialized");
}
Code Example #9
Variant ALProxy::call(const std::string &method, const Variant &val1, const Variant &val2)
{
    AL::ALValue param;
    AL::ALValue result;
    Variant resJava;
    param.arrayPush(val1.toALValue());
    param.arrayPush(val2.toALValue());
    fProxy->genericCall(method, param,result);
    resJava.fromALValue(result);
    return resJava;
}
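
Examples #7 and #9 show the one- and two-argument forms of this wrapper, and the pattern extends mechanically: every argument is pushed onto a single ALValue array before the generic call. A sketch of a hypothetical three-argument overload (not in the original source), using the same Variant and fProxy members:

Variant ALProxy::call(const std::string &method, const Variant &val1,
                      const Variant &val2, const Variant &val3)
{
    AL::ALValue param;
    AL::ALValue result;
    Variant vResult(0);
    param.arrayPush(val1.toALValue());
    param.arrayPush(val2.toALValue());
    param.arrayPush(val3.toALValue());
    fProxy->genericCall(method, param, result);
    vResult.fromALValue(result);
    return vResult;
}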
Code Example #10
File: naosim.cpp, Project: kaihumuc/nao_dhri_tum
void NaoSim::ControlSteps(const int &num)
{
    for(int i=0; i<num; i++)
    {
        step();


        fastMotionCommands->GetValues(commandValues);

        //set angles
        //rearrange the command
        AL::ALValue dummyCommand;
        dummyCommand.arraySetSize(26);
        dummyCommand[HeadYaw] = commandValues[HEAD_YAW];
        dummyCommand[HeadPitch] = commandValues[HEAD_PITCH];

        dummyCommand[LShoulderPitch] = commandValues[L_SHOULDER_PITCH];
        dummyCommand[LShoulderRoll] = commandValues[L_SHOULDER_ROLL];
        dummyCommand[LElbowYaw] = commandValues[L_ELBOW_YAW];
        dummyCommand[LElbowRoll] = commandValues[L_ELBOW_ROLL];
        dummyCommand[LWristYaw] = commandValues[L_WRIST_YAW];
        dummyCommand[LHand] = 0.0f;

        dummyCommand[LHipYawPitch] = commandValues[HIP_YAW_PITCH];
        dummyCommand[LHipRoll] = commandValues[L_HIP_ROLL];
        dummyCommand[LHipPitch] = commandValues[L_HIP_PITCH];
        dummyCommand[LKneePitch] = commandValues[L_KNEE_PITCH];
        dummyCommand[LAnklePitch] = commandValues[L_ANKLE_PITCH];
        dummyCommand[LAnkleRoll] = commandValues[L_ANKLE_ROLL];

        dummyCommand[RHipYawPitch] = commandValues[HIP_YAW_PITCH];
        dummyCommand[RHipRoll] = commandValues[R_HIP_ROLL];
        dummyCommand[RHipPitch] = commandValues[R_HIP_PITCH];
        dummyCommand[RKneePitch] = commandValues[R_KNEE_PITCH];
        dummyCommand[RAnklePitch] = commandValues[R_ANKLE_PITCH];
        dummyCommand[RAnkleRoll] = commandValues[R_ANKLE_ROLL];

        dummyCommand[RShoulderPitch] = commandValues[R_SHOULDER_PITCH];
        dummyCommand[RShoulderRoll] = commandValues[R_SHOULDER_ROLL];
        dummyCommand[RElbowYaw] = commandValues[R_ELBOW_YAW];
        dummyCommand[RElbowRoll] = commandValues[R_ELBOW_ROLL];
        dummyCommand[RWristYaw] = commandValues[R_WRIST_YAW];
        dummyCommand[RHand] = 0.0f;

        motionProxy->setAngles("Body",dummyCommand, 1.0f);


    }
}
Code Example #11
File: NaoEnactor.cpp, Project: AmeenaK/nbites
/**
 * Creates the appropriate aliases with the DCM
 */
void NaoEnactor::initDCMAliases(){
    AL::ALValue positionCommandsAlias;
    positionCommandsAlias.arraySetSize(3);
    positionCommandsAlias[0] = string("AllActuatorPosition");
    positionCommandsAlias[1].arraySetSize(Kinematics::NUM_JOINTS);

    AL::ALValue hardCommandsAlias;
    hardCommandsAlias.arraySetSize(3);
    hardCommandsAlias[0] = string("AllActuatorHardness");
    hardCommandsAlias[1].arraySetSize(Kinematics::NUM_JOINTS);

    for (unsigned int i = 0; i<Kinematics::NUM_JOINTS; i++){
        positionCommandsAlias[1][i] = jointsP[i];
        hardCommandsAlias[1][i] = jointsH[i];
    }

    dcmProxy->createAlias(positionCommandsAlias);
    dcmProxy->createAlias(hardCommandsAlias);
}
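
A createAlias() call like the one above only registers the alias; driving it requires a separate DCM command. A minimal sketch of a matching setAlias() request for the "AllActuatorPosition" alias, using the same six-element "time-separate" layout seen in Examples #12, #17 and #21 (targetAngles is a hypothetical array of joint targets in radians):

AL::ALValue command;
command.arraySetSize(6);
command[0] = string("AllActuatorPosition");
command[1] = string("ClearAll");       // drop any queued commands for this alias
command[2] = string("time-separate");
command[3] = 0;                        // importance level
command[4].arraySetSize(1);
command[4][0] = dcmProxy->getTime(20); // apply 20 ms from now
command[5].arraySetSize(Kinematics::NUM_JOINTS);
for (unsigned int i = 0; i < Kinematics::NUM_JOINTS; i++) {
    command[5][i].arraySetSize(1);
    command[5][i][0] = targetAngles[i]; // hypothetical target angles (radians)
}
dcmProxy->setAlias(command);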
Code Example #12
File: KmeAction.cpp, Project: kouretes/Monas
void KmeAction::DcmInit()
{
	AL::ALValue jointAliasses;
	vector<std::string> PosActuatorStrings = KDeviceLists::getPositionActuatorKeys();
	jointAliasses.arraySetSize(2);
	jointAliasses[0] = std::string("BodyWithoutHead"); // Alias for Body joint actuators without Head joins
	jointAliasses[1].arraySetSize(20);

	// Joints actuator list
	for (int i = KDeviceLists::L_ARM; i < KDeviceLists::NUMOFJOINTS; i++)
	{
		jointAliasses[1][i - KDeviceLists::L_ARM] = PosActuatorStrings[i];
	}

	// Create alias
	try
	{
		dcm->createAlias(jointAliasses);
	}
	catch (const AL::ALError &e)
	{
		throw ALERROR("KmeAction", "createKmeActuatorAlias()", "Error when creating Alias : " + e.toString());
	}

	// Create Commands (DCM "time-separate" layout:
	// [alias, update type, "time-separate", importance, time list, per-joint commands])
	commands.arraySetSize(6);
	commands[0] = string("BodyWithoutHead");
	commands[1] = string("ClearAll"); // Erase all previous commands
	commands[2] = string("time-separate");
	commands[3] = 0;
	// commands[4], the list of target times, is presumably filled in elsewhere from actionTimes.
	commands[5].arraySetSize(20); // For all joints except head

	for (unsigned int i = 0; i < commands[5].getSize(); i++)
	{
		commands[5][i].arraySetSize(actionTimes[0].getSize()); 		//num of poses

		for (unsigned int j = 0; j < commands[5][i].getSize(); j++)
		{
			commands[5][i][j] = actionAngles[i + 2][j];					// actionAngles[joints][poses], commands[5][joints][poses]
			// Logger::Instance().WriteMsg("KmeACTION", "commands " + _toString(commands[5][i][j]) , Logger::ExtraInfo);
		}
	}
}
Code Example #13
/**
  * \brief: Set the stiffness value for every joint on the robot to a single value
  **/
void hal_experimental::set_all_actuator_stiffnesses(const float &stiffnessVal)
{
    log_file << "[set_all_stiffness]: << Setting All stiffness values to: " << stiffnessVal << std::endl;

    AL::ALValue stiffnessCommands;
    int DCMTime = 0; // 0 lies in the past, so the command is applied immediately if getTime() fails below

    try
    {
        DCMTime = dcm_proxy->getTime(1000); //Time NAOQi has been running + 1000ms
    }
    catch(const AL::ALError &e)
    {
        LOG("[set_all_stiffness]","[ERROR] An error occured while setting stiffness value: " + e.toString());
    }

    stiffnessCommands.arraySetSize(3);
    stiffnessCommands[0] = std::string("jointStiffness");
    stiffnessCommands[1] = std::string("Merge");
    stiffnessCommands[2].arraySetSize(1);
    stiffnessCommands[2][0].arraySetSize(2);
    stiffnessCommands[2][0][0] = stiffnessVal;
    stiffnessCommands[2][0][1] = DCMTime;

    log_file << "[set_all_stiffness] Alias set up..trying to set it now." << std::endl;
    std::cout << "[set_all_stiffness] Alias set up..trying to set it now." << std::endl;
    try
    {
        dcm_proxy->set(stiffnessCommands);
        LOG("[set_all_stiffness]","Stiffness Values Set!");
    }
    catch(const AL::ALError &e)
    {
        LOG("[set_all_stiffness]","[ERROR] An error occured while setting stiffness value: " + e.toString());
    }

    log_file << "[set_all_stiffness]: Done Setting all stiffness values to: " << stiffnessVal << std::endl;
}
Code Example #14
File: NaoLights.cpp, Project: WangHanbin/nbites
void NaoLights::sendLightCommand(AL::ALValue & command){
#ifdef DEBUG_NAOLIGHTS_COMMAND
    std::cout << "  NaoLights::sendCommand() " <<command.serializeToText()<< std::endl;
#endif

    try{
        command[4][0] = dcmProxy->getTime(0);

#ifdef LEDS_ENABLED
        dcmProxy->setAlias(command);
#endif

    } catch(AL::ALError& e) {
        std::cout << "dcm value set error in sendLightCommand:"
                  << e.toString() << std::endl;
    }
}
Code Example #15
void Variant::fromALValue(const AL::ALValue &val)
{

  if (val.getType() == AL::ALValue::TypeInvalid)
  {
    // nothing to do
  }
  else if (val.getType() == AL::ALValue::TypeArray)
  {
    fType = VARRAY;
    fValue = val;
  }
  else if (val.getType()==AL::ALValue::TypeInt)
  {
    fType = VINT;
    fValue = val;
  }
  else if (val.getType()== AL::ALValue::TypeString)
  {
    fType = VSTRING;
    fValue = val;
  }
  else if (val.getType()== AL::ALValue::TypeBinary)
  {
    fType = VCHARARRAY;
    fValue = val;
  }
  else if (val.getType()== AL::ALValue::TypeBool)
  {
    fType = VBOOL;
    fValue = val;
  }
  else if (val.getType()== AL::ALValue::TypeFloat)
  {
    fType = VFLOAT;
    fValue = val;
  }
  else
  {
    printf("unknown type %d\n", (int)val.getType());
  }
}
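
The inverse conversion used by the call() wrappers in Examples #7 and #9 is not shown here. Since fromALValue() stores the raw ALValue in fValue for every recognized type, a plausible minimal sketch is simply:

AL::ALValue Variant::toALValue() const
{
    // fValue already holds the underlying ALValue, so no per-type handling is needed.
    return fValue;
}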
Code Example #16
File: interactions.cpp, Project: dsapandora/nao_hri
/** This method tries to match a localized sound to an identified face and updates
 * the corresponding 'human' accordingly.
 *
 * If none is found, a 'virtual' human called 'unknown_speaker' is created, with an
 * approximate position.
 */
void InteractionMonitor::identifySpeaker(const AL::ALValue &sounds, map<string, Human>& humans) {

    // according to the doc
    // http://www.aldebaran-robotics.com/documentation/naoqi/audio/alaudiosourcelocalization.html
    // only one sound may be localized at a given step.
    if (sounds.getSize() == 0) return;

    map<string, Human>::iterator it;

    Human h = makeHuman("unknown_speaker", sounds[0][1][0], sounds[0][1][1]);

    for (it=humans.begin(); it!=humans.end(); ++it)
    {
        if (distance(h, it->second) < MAX_DISTANCE_HUMAN_SOUND) {
            it->second.speaking = true;
            return;
        }
    }
    
    humans[h.id] = h;

}
Code Example #17
File: GameCtrl.cpp, Project: bhuman/GameController
  /**
   * The constructor sets up the structures required to communicate with NAOqi.
   * @param pBroker A NAOqi broker that allows accessing other NAOqi modules.
   */
  GameCtrl(boost::shared_ptr<AL::ALBroker> pBroker)
    : ALModule(pBroker, "GameCtrl"),
      proxy(0),
      memory(0),
      udp(0),
      teamNumber(0)
  {
    setModuleDescription("A module that provides packets from the GameController.");

    assert(numOfButtons == sizeof(buttonNames) / sizeof(*buttonNames));
    assert(numOfLEDs == sizeof(ledNames) / sizeof(*ledNames));

    init();

    try
    {
      memory = new AL::ALMemoryProxy(pBroker);
      proxy = new AL::DCMProxy(pBroker);

      AL::ALValue params;
      AL::ALValue result;
      params.arraySetSize(2); // [alias name, list of LED keys]

      params[0] = std::string("leds");
      params[1].arraySetSize(numOfLEDs);
      for(int i = 0; i < numOfLEDs; ++i)
        params[1][i] = std::string(ledNames[i]);
      result = proxy->createAlias(params);
      assert(result == params);

      ledRequest.arraySetSize(6);
      ledRequest[0] = std::string("leds");
      ledRequest[1] = std::string("ClearAll");
      ledRequest[2] = std::string("time-separate");
      ledRequest[3] = 0;
      ledRequest[4].arraySetSize(1);
      ledRequest[5].arraySetSize(numOfLEDs);
      for(int i = 0; i < numOfLEDs; ++i)
        ledRequest[5][i].arraySetSize(1);

      for(int i = 0; i < numOfButtons; ++i)
        buttons[i] = (float*) memory->getDataPtr(buttonNames[i]);

      // If no color was set, set it to black (no LED).
      // This actually has a race condition.
      if(memory->getDataList("GameCtrl/teamColour").empty())
        memory->insertData("GameCtrl/teamColour", TEAM_BLACK);

      playerNumber = (int*) memory->getDataPtr("GameCtrl/playerNumber");
      teamNumberPtr = (int*) memory->getDataPtr("GameCtrl/teamNumber");
      defaultTeamColour = (int*) memory->getDataPtr("GameCtrl/teamColour");

      // register "onPreProcess" and "onPostProcess" callbacks
      theInstance = this;
      proxy->getGenericProxy()->getModule()->atPreProcess(&onPreProcess);
      proxy->getGenericProxy()->getModule()->atPostProcess(&onPostProcess);

      udp = new UdpComm();
      if(!udp->setBlocking(false) ||
         !udp->setBroadcast(true) ||
         !udp->bind("0.0.0.0", GAMECONTROLLER_DATA_PORT) ||
         !udp->setLoopback(false))
      {
        fprintf(stderr, "libgamectrl: Could not open UDP port\n");
        delete udp;
        udp = 0;
        // continue, because button interface will still work
      }

      publish();
    }
    catch(AL::ALError& e)
    {
      fprintf(stderr, "libgamectrl: %s\n", e.what());
      close();
    }
  }
Code Example #18
void NaoqiJointStates::run()
{
   ros::Rate r(m_rate);
   ros::Time stamp1;
   ros::Time stamp2;
   ros::Time stamp;

   std::vector<float> odomData;
   float odomX, odomY, odomZ, odomWX, odomWY, odomWZ;

   std::vector<float> memData;

   std::vector<float> positionData;

   ROS_INFO("Staring main loop. ros::ok() is %d nh.ok() is %d",ros::ok(),m_nh.ok());
   while(ros::ok() )
   {
      r.sleep();
      ros::spinOnce();
      stamp1 = ros::Time::now();
      try
      {
         memData = m_memoryProxy->getListData(m_dataNamesList);
         // {SPACE_TORSO = 0, SPACE_WORLD = 1, SPACE_NAO = 2}. (second argument)
         odomData = m_motionProxy->getPosition("Torso", 1, true);
         positionData = m_motionProxy->getAngles("Body", true);
      }
      catch (const AL::ALError & e)
      {
         ROS_ERROR( "Error accessing ALMemory, exiting...");
         ROS_ERROR( "%s", e.what() );
         //ros.signal_shutdown("No NaoQI available anymore");
      }
      stamp2 = ros::Time::now();
      //ROS_DEBUG("dt is %f",(stamp2-stamp1).toSec()); % dt is typically around 1/1000 sec
      // TODO: Something smarter than this..
      stamp = stamp1 + ros::Duration((stamp2-stamp1).toSec()/2.0);

      /******************************************************************
       *                              IMU
       *****************************************************************/
      if (memData.size() != m_dataNamesList.getSize())
      {
         ROS_ERROR("memData length %zu does not match expected length %u",memData.size(),m_dataNamesList.getSize() );
         continue;
      }
      // IMU data:
      m_torsoIMU.header.stamp = stamp;

      float angleX = memData[1];
      float angleY = memData[2];
      float angleZ = memData[3];
      float gyroX = memData[4];
      float gyroY = memData[5];
      float gyroZ = memData[6];
      float accX = memData[7];
      float accY = memData[8];
      float accZ = memData[9];

      m_torsoIMU.orientation = tf::createQuaternionMsgFromRollPitchYaw(
                                angleX,
                                angleY,
                                angleZ); // yaw currently always 0

      m_torsoIMU.angular_velocity.x = gyroX;
      m_torsoIMU.angular_velocity.y = gyroY;
      m_torsoIMU.angular_velocity.z = gyroZ; // currently always 0

      m_torsoIMU.linear_acceleration.x = accX;
      m_torsoIMU.linear_acceleration.y = accY;
      m_torsoIMU.linear_acceleration.z = accZ;

      // covariances unknown
      // cf http://www.ros.org/doc/api/sensor_msgs/html/msg/Imu.html
      m_torsoIMU.orientation_covariance[0] = -1;
      m_torsoIMU.angular_velocity_covariance[0] = -1;
      m_torsoIMU.linear_acceleration_covariance[0] = -1;

      m_torsoIMUPub.publish(m_torsoIMU);


      /******************************************************************
       *                            Joint state
       *****************************************************************/
      m_jointState.header.stamp = stamp;
      m_jointState.header.frame_id = m_baseFrameId;
      m_jointState.position.resize(positionData.size());
      for(unsigned i = 0; i<positionData.size(); ++i)
      {
         m_jointState.position[i] = positionData[i];
      }

      // simulated model misses some joints, we need to fill:
      if (m_jointState.position.size() == 22)
      {
         m_jointState.position.insert(m_jointState.position.begin()+6,0.0);
         m_jointState.position.insert(m_jointState.position.begin()+7,0.0);
         m_jointState.position.push_back(0.0);
         m_jointState.position.push_back(0.0);
      }

      m_jointStatePub.publish(m_jointState);


        /******************************************************************
        *                            Odometry
        *****************************************************************/
        if (!m_paused) {

        // apply offset transformation:
        tf::Pose transformedPose;


        if (odomData.size()!=6)
        {
            ROS_ERROR( "Error getting odom data. length is %zu",odomData.size() );
            continue;
        }

        double dt = (stamp.toSec() - m_lastOdomTime);

        odomX = odomData[0];
        odomY = odomData[1];
        odomZ = odomData[2];
        odomWX = odomData[3];
        odomWY = odomData[4];
        odomWZ = odomData[5];

        tf::Quaternion q;
        // roll and pitch from IMU, yaw from odometry:
        if (m_useIMUAngles)
            q.setRPY(angleX, angleY, odomWZ);
        else
            q.setRPY(odomWX, odomWY, odomWZ);

        m_odomPose.setOrigin(tf::Vector3(odomX, odomY, odomZ));
        m_odomPose.setRotation(q);

        if(m_mustUpdateOffset) {
            if(!m_isInitialized) {
                if(m_initializeFromIMU) {
                    // Initialization from IMU: Take x, y, z, yaw from odometry, roll and pitch from IMU
                    m_targetPose.setOrigin(m_odomPose.getOrigin());
                    m_targetPose.setRotation(tf::createQuaternionFromRPY(angleX, angleY, odomWZ));
                } else if(m_initializeFromOdometry) {
                    m_targetPose.setOrigin(m_odomPose.getOrigin());
                    m_targetPose.setRotation(tf::createQuaternionFromRPY(odomWX, odomWY, odomWZ));
                }
                m_isInitialized = true;
            } else {
                // Overwrite target pitch and roll angles with IMU data
                const double target_yaw = tf::getYaw(m_targetPose.getRotation());
                if(m_initializeFromIMU) {
                    m_targetPose.setRotation(tf::createQuaternionFromRPY(angleX, angleY, target_yaw));
                } else if(m_initializeFromOdometry){
                    m_targetPose.setRotation(tf::createQuaternionFromRPY(odomWX, odomWY, target_yaw));
                }
            }
            m_odomOffset = m_targetPose * m_odomPose.inverse();
            transformedPose = m_targetPose;
            m_mustUpdateOffset = false;
        } else {
            transformedPose = m_odomOffset * m_odomPose;
        }

        //
        // publish the transform over tf first
        //
        m_odomTransformMsg.header.stamp = stamp;
        tf::transformTFToMsg(transformedPose, m_odomTransformMsg.transform);
        m_transformBroadcaster.sendTransform(m_odomTransformMsg);


        //
        // Fill the Odometry msg
        //
        m_odom.header.stamp = stamp;
        //set the velocity first (old values still valid)
        m_odom.twist.twist.linear.x = (odomX - m_odom.pose.pose.position.x) / dt;
        m_odom.twist.twist.linear.y = (odomY - m_odom.pose.pose.position.y) / dt;
        m_odom.twist.twist.linear.z = (odomZ - m_odom.pose.pose.position.z) / dt;

        // TODO: calc angular velocity!
        //	m_odom.twist.twist.angular.z = vth; ??

        //set the position from the above calculated transform
        m_odom.pose.pose.orientation = m_odomTransformMsg.transform.rotation;
        m_odom.pose.pose.position.x = m_odomTransformMsg.transform.translation.x;
        m_odom.pose.pose.position.y = m_odomTransformMsg.transform.translation.y;
        m_odom.pose.pose.position.z = m_odomTransformMsg.transform.translation.z;


        m_odomPub.publish(m_odom);


        m_lastOdomTime = stamp.toSec();

        }
    }
    ROS_INFO("naoqi_sensors stopped.");

}
Code Example #19
void WifiManager::UpdateList()
{
  // reload config
  ParamEntry::Reload();

  std::string serviceOff;
  std::string serviceOn;
#if !WIFI_LOCAL_TEST
  GetConnectionProxy().scan();
  AL::ALValue serviceList = GetConnectionProxy().services();
  // Log() << serviceList << std::endl;
#else // !WIFI_LOCAL_TEST
  AL::ALValue serviceList;
#endif // !WIFI_LOCAL_TEST
  _services.clear();
  for (int i = 0; i < serviceList.getSize(); i++)
  {
    // Log() << "item[" << i << "]: " << serviceList[i] << std::endl;
    std::map<std::string, std::string> details;
    for (int j = 0; j < serviceList[i].getSize(); j++)
    {
      //Log() << " -- " << serviceList[i][j] << std::endl;
      std::string name = serviceList[i][j][0];
      std::string val;
      if (serviceList[i][j][1].isString())
        val = (std::string) serviceList[i][j][1];
      else
        val = serviceList[i][j][1].toString();
      details[name] = val;
    }
    /*for (auto& item : details)
    {
      Log() << item.first << ": " << item.second << std::endl;
    }*/

    WifiService service;
    service._id = details["ServiceId"];
    service._name = details["Name"];
    service._state = WifiStateUtils::StringToWifiState(details["State"]);
    service.FindConfig();
    _services.push_back(service);

    if (service._knownConfig && service._knownConfig->_default && _selectedSSID.empty())
      _selectedSSID = service._name;
  }

#if WIFI_LOCAL_TEST


  WifiService service;
  service._id = "whatever";
  service._name = "YETTI";
  service._state = WifiStateUtils::StringToWifiState("Idle");
  service.FindConfig();
  _services.push_back(service);

  if (service._knownConfig && service._knownConfig->_default && _selectedSSID.empty())
    _selectedSSID = service._name;

#endif // WIFI_LOCAL_TEST
}
Code Example #20
int main(int argc, const char* argv[])
{
  std::string opt_ip = "131.254.10.126";
  bool opt_language_english = true;
  bool opt_debug = false;

  for (int i = 1; i < argc; i++) {
    if (std::string(argv[i]) == "--ip")
      opt_ip = argv[i+1];
    else if (std::string(argv[i]) == "--fr")
      opt_language_english = false;
    else if (std::string(argv[i]) == "--debug")
      opt_debug = true;
    else if (std::string(argv[i]) == "--help") {
      std::cout << "Usage: " << argv[0] << "[--ip <robot address>] [--fr]" << std::endl;
      return 0;
    }
  }

  std::string camera_name = "CameraTopPepper";

  // Open the grabber for the acquisition of the images from the robot
  vpNaoqiGrabber g;
  if (! opt_ip.empty())
    g.setRobotIp(opt_ip);
  g.setFramerate(30);
  g.setCamera(0);
  g.open();


  vpCameraParameters cam = vpNaoqiGrabber::getIntrinsicCameraParameters(AL::kQVGA,camera_name, vpCameraParameters::perspectiveProjWithDistortion);
  vpHomogeneousMatrix eMc = vpNaoqiGrabber::getExtrinsicCameraParameters(camera_name,vpCameraParameters::perspectiveProjWithDistortion);

  std::cout << "eMc:" << std::endl << eMc << std::endl;
  std::cout << "cam:" << std::endl << cam << std::endl;
  vpNaoqiRobot robot;

  // Connect to the robot
  if (! opt_ip.empty())
    robot.setRobotIp(opt_ip);
  robot.open();



  if (robot.getRobotType() != vpNaoqiRobot::Pepper)
  {
    std::cout << "ERROR: You are not connected to Pepper, but to a different Robot. Check the IP. " << std::endl;
    return 0;
  }

  std::vector<std::string> jointNames_head = robot.getBodyNames("Head");

  // Open Proxy for the speech
  AL::ALTextToSpeechProxy tts(opt_ip, 9559);
  std::string phraseToSay;
  if (opt_language_english)
  {
    tts.setLanguage("English");
    phraseToSay = " \\emph=2\\ Hi,\\pau=200\\ How are you ?";
  }
  else
  {
    tts.setLanguage("French");
    phraseToSay = " \\emph=2\\ Bonjour,\\pau=200\\ comment vas  tu ?";
  }

  // Initialize PeoplePerception
  AL::ALPeoplePerceptionProxy people_proxy(opt_ip, 9559);
  AL::ALMemoryProxy m_memProxy(opt_ip, 9559);
  people_proxy.subscribe("People", 30, 0.0);
  std::cout << "period: " << people_proxy.getCurrentPeriod() << std::endl;

  // Open Proxy for the recognition speech
  AL::ALSpeechRecognitionProxy asr(opt_ip, 9559);
  //  asr.unsubscribe("Test_ASR");
  //   return 0 ;

  asr.setVisualExpression(false);
  asr.setLanguage("English");
  std::vector<std::string> vocabulary;
  vocabulary.push_back("follow me");
  vocabulary.push_back("stop");

  // Set the vocabulary
  asr.setVocabulary(vocabulary,false);

  // Start the speech recognition engine with user Test_ASR
  asr.subscribe("Test_ASR");
  std::cout << "Speech recognition engine started" << std::endl;

  // Proxy to control the leds
  AL::ALLedsProxy led_proxy(opt_ip, 9559);

  //Declare plots
  vpPlot * plotter_diff_vel; vpPlot * plotter_vel;
  vpPlot * plotter_error;  vpPlot * plotter_distance;

  if (opt_debug)
  {
    // Plotting
    plotter_diff_vel = new vpPlot (2);
    plotter_diff_vel->initGraph(0, 2);
    plotter_diff_vel->initGraph(1, 2);
    plotter_diff_vel->setTitle(0,  "HeadYaw");
    plotter_diff_vel->setTitle(1,  "HeadPitch");

    plotter_vel= new vpPlot (1);
    plotter_vel->initGraph(0, 5);
    plotter_vel->setLegend(0, 0, "vx");
    plotter_vel->setLegend(0, 1, "vy");
    plotter_vel->setLegend(0, 2, "wz");
    plotter_vel->setLegend(0, 3, "q_yaw");
    plotter_vel->setLegend(0, 4, "q_pitch");

    plotter_error = new vpPlot(1);
    plotter_error->initGraph(0, 3);
    plotter_error->setLegend(0, 0, "x");
    plotter_error->setLegend(0, 1, "y");
    plotter_error->setLegend(0, 2, "Z");

    plotter_distance = new vpPlot (1);
    plotter_distance->initGraph(0, 1);
    plotter_distance->setLegend(0, 0, "dist");
  }

  try {
    vpImage<unsigned char> I(g.getHeight(), g.getWidth());
    vpDisplayX d(I);
    vpDisplay::setTitle(I, "ViSP viewer");

    vpFaceTrackerOkao face_tracker(opt_ip,9559);

    double dist = 0.0; // Distance between person detected from peoplePerception and faceDetection

    // Set Visual Servoing:
    vpServo task;
    task.setServo(vpServo::EYEINHAND_L_cVe_eJe) ;
    task.setInteractionMatrixType(vpServo::CURRENT, vpServo::PSEUDO_INVERSE);
    //    vpAdaptiveGain lambda_adapt;
    //    lambda_adapt.initStandard(1.6, 1.8, 15);
    vpAdaptiveGain lambda_base(1.2, 1.0, 10); // 2.3, 0.7, 15
    vpAdaptiveGain lambda_nobase(5, 2.9, 15); // 4, 0.5, 15
    task.setLambda(lambda_base) ;

    double Z = 0.9;
    double Zd = 0.9;
    bool stop_vxy = false;
    bool move_base = true;
    bool move_base_prev = true;

    // Create the desired  visual feature
    vpFeaturePoint s;
    vpFeaturePoint sd;
    vpImagePoint ip(I.getHeight()/2, I.getWidth()/2);
    // Create the current x visual feature
    vpFeatureBuilder::create(s, cam, ip);
    vpFeatureBuilder::create(sd, cam, ip);

    //   sd.buildFrom( I.getWidth()/2, I.getHeight()/2, Zd);

    AL::ALValue limit_yaw = robot.getProxy()->getLimits("HeadYaw");
    std::cout << limit_yaw[0][0] << " " << limit_yaw[0][1] << std::endl;

    // Add the feature
    task.addFeature(s, sd) ;

    vpFeatureDepth s_Z, s_Zd;
    s_Z.buildFrom(s.get_x(), s.get_y(), Z , 0); // log(Z/Z*) = 0 that's why the last parameter is 0
    s_Zd.buildFrom(sd.get_x(), sd.get_y(), Zd , 0); // log(Z/Z*) = 0 that's why the last parameter is 0

    // Add the feature
    task.addFeature(s_Z, s_Zd);

    // Jacobian 6x5 (vx,vy,wz,q_yaq,q_pitch)
    vpMatrix tJe(6,5);
    tJe[0][0]= 1;
    tJe[1][1]= 1;
    tJe[5][2]= 1;
    vpMatrix eJe(6,5);

    double servo_time_init = 0;

    vpImagePoint head_cog_cur;
    vpImagePoint head_cog_des(I.getHeight()/2, I.getWidth()/2);
    vpColVector q_dot;

    bool reinit_servo = true;
    unsigned long loop_iter = 0;

    std::vector<std::string> recognized_names;
    std::map<std::string,unsigned int> detected_face_map;
    bool detection_phase = true;
    unsigned int f_count = 0;
    //    AL::ALValue leg_names  = AL::ALValue::array("HipRoll","HipPitch", "KneePitch" );
    //    AL::ALValue values  = AL::ALValue::array(0.0, 0.0, 0.0 );

    // robot.getProxy()->setMoveArmsEnabled(false,false);
    std::vector<std::string> arm_names = robot.getBodyNames("LArm");
    std::vector<std::string> rarm_names = robot.getBodyNames("RArm");
    std::vector<std::string> leg_names(3);
    leg_names[0]= "HipRoll";
    leg_names[1]= "HipPitch";
    leg_names[2]= "KneePitch";

    arm_names.insert( arm_names.end(), rarm_names.begin(), rarm_names.end() );
    arm_names.insert( arm_names.end(), leg_names.begin(), leg_names.end() );

    std::vector<float> arm_values(arm_names.size(),0.0);

    for (unsigned int i = 0; i < arm_names.size(); i++ )
      std::cout << arm_names[i]<< std::endl;

    robot.getPosition(arm_names, arm_values,false);

    vpImagePoint best_cog_face_peoplep;
    bool person_found = false;

    double t_prev = vpTime::measureTimeSecond();

    while(1) {
      if (reinit_servo) {
        servo_time_init = vpTime::measureTimeSecond();
        t_prev = vpTime::measureTimeSecond();
        reinit_servo = false;
        led_proxy.fadeRGB("FaceLeds","white",0.1);
      }

      double t = vpTime::measureTimeMs();
      if (0) // (opt_debug)
      {
        g.acquire(I);
      }
      vpDisplay::display(I);
      // Detect face
      bool face_found = face_tracker.detect();
      stop_vxy = false;

      //std::cout << "Loop time face_tracker: " << vpTime::measureTimeMs() - t << " ms" << std::endl;

      // Check speech recognition result

      AL::ALValue result_speech = m_memProxy.getData("WordRecognized");

      if ( ((result_speech[0]) == vocabulary[0]) && (double (result_speech[1]) > 0.4 )) //move
      {
        //std::cout << "Recognized: " << result_speech[0] << "with confidence of " << result_speech[1] << std::endl;
        task.setLambda(lambda_base) ;

        move_base = true;
      }
      else if ( (result_speech[0] == vocabulary[1]) && (double(result_speech[1]) > 0.4 )) //stop
      {
        //std::cout << "Recognized: " << result_speech[0] << "with confidence of " << result_speech[1] << std::endl;
        task.setLambda(lambda_nobase) ;
        move_base = false;
      }

      if (move_base != move_base_prev)
      {
        if (move_base)
        {
          phraseToSay = "Ok, I will follow you.";
          tts.post.say(phraseToSay);
        }
        else
        {
          phraseToSay = "Ok, I will stop.";
          tts.post.say(phraseToSay);
        }

      }

      //robot.setStiffness(arm_names,0.0);
      //robot.getProxy()->setAngles(arm_names,arm_values,1.0);
      //robot.getProxy()->setAngles("HipRoll",0.0,1.0);

      //std::cout << "Loop time check_speech: " << vpTime::measureTimeMs() - t << " ms" << std::endl;

      move_base_prev = move_base;

      if (face_found) {
        std::ostringstream text;
        text << "Found " << face_tracker.getNbObjects() << " face(s)";
        vpDisplay::displayText(I, 10, 10, text.str(), vpColor::red);
        for(size_t i=0; i < face_tracker.getNbObjects(); i++) {
          vpRect bbox = face_tracker.getBBox(i);
          if (i == 0)
            vpDisplay::displayRectangle(I, bbox, vpColor::red, false, 2);
          else
            vpDisplay::displayRectangle(I, bbox, vpColor::green, false, 1);
          vpDisplay::displayText(I, (int)bbox.getTop()-10, (int)bbox.getLeft(), face_tracker.getMessage(i) , vpColor::red);
        }

        led_proxy.post.fadeRGB("FaceLeds","blue",0.1);

        double u = face_tracker.getCog(0).get_u();
        double v = face_tracker.getCog(0).get_v();
        if (u<= g.getWidth() && v <= g.getHeight())
          head_cog_cur.set_uv(u,v);

        vpRect bbox = face_tracker.getBBox(0);
        std::string name = face_tracker.getMessage(0);

        //std::cout << "Loop time face print " << vpTime::measureTimeMs() - t << " ms" << std::endl;
      }

      AL::ALValue result = m_memProxy.getData("PeoplePerception/VisiblePeopleList");

      //std::cout << "Loop time get Data PeoplePerception " << vpTime::measureTimeMs() - t << " ms" << std::endl;

      person_found = false;
      if (result.getSize() > 0)
      {
        AL::ALValue info = m_memProxy.getData("PeoplePerception/PeopleDetected");
        int num_people = info[1].getSize();
        std::ostringstream text;
        text << "Found " << num_people << " person(s)";
        vpDisplay::displayText(I, 10, 10, text.str(), vpColor::red);

        person_found = true;

        if (face_found) // Try to find the match between two detection
        {
          vpImagePoint cog_face;
          double dist_min = 1000;
          unsigned int index_person = 0;

          for (unsigned int i = 0; i < num_people; i++)
          {

            float alpha =  info[1][i][2];
            float beta =  info[1][i][3];
            //Z = Zd;
            // Centre of face into the image
            float x =  g.getWidth()/2 -  g.getWidth() * beta;
            float y =  g.getHeight()/2  + g.getHeight() * alpha;
            cog_face.set_uv(x,y);
            dist = vpImagePoint::distance(cog_face, head_cog_cur);

            if (dist < dist_min)
            {
              dist_min = dist;
              best_cog_face_peoplep = cog_face;
              index_person  = i;
            }
          }

          vpDisplay::displayCross(I, best_cog_face_peoplep, 10, vpColor::white);

          if (dist_min < 55.)
          {
            Z = info[1][index_person][1]; // Current distance
          }

        }
        else // Take the first one on the list
        {
          float alpha =  info[1][0][2];
          float beta =  info[1][0][3];
          //Z = Zd;
          // Centre of face into the image
          float x =  g.getWidth()/2 -  g.getWidth() * beta;
          float y =  g.getHeight()/2  + g.getHeight() * alpha;
          head_cog_cur.set_uv(x,y);
          Z = info[1][0][1]; // Current distance

        }

      }
      else
      {
        std::cout << "No distance computed " << std::endl;
        stop_vxy = true;
        robot.getProxy()->setAngles("HipRoll",0.0,1.0);
        //Z = Zd;
      }
      //          float alpha =  info[1][0][2];
      //          float beta =  info[1][0][3];
      //          //Z = Zd;
      //          // Centre of face into the image
      //          float x =  g.getWidth()/2 -  g.getWidth() * beta;
      //          float y =  g.getHeight()/2  + g.getHeight() * alpha;

      //          vpImagePoint cog_face(y,x);
      //          dist = vpImagePoint::distance(cog_face,head_cog_cur);
      //          if (dist < 55.)
      //            Z = info[1][0][1]; // Current distance
      //          else
      //            stop_vxy = true;
      //        }
      //        else
      //        {
      //          std::cout << "No distance computed " << std::endl;
      //          stop_vxy = true;
      //          //Z = Zd;
      //        }


      //std::cout << "Loop time before VS: " << vpTime::measureTimeMs() - t << " ms" << std::endl;
      if (face_found || person_found )
      {
        // Get Head Jacobian (6x2)
        vpMatrix torso_eJe_head;
        robot.get_eJe("Head",torso_eJe_head);

        // Add column relative to the base rotation (Wz)
        //        vpColVector col_wz(6);
        //        col_wz[5] = 1;
        for (unsigned int i = 0; i < 6; i++)
          for (unsigned int j = 0; j < torso_eJe_head.getCols(); j++)
            tJe[i][j+3] = torso_eJe_head[i][j];

        // std::cout << "tJe" << std::endl << tJe << std::endl;

        //        vpHomogeneousMatrix torsoMHeadPith( robot.getProxy()->getTransform(jointNames_head[jointNames_head.size()-1], 0, true));// get transformation  matrix between torso and HeadRoll
        vpHomogeneousMatrix torsoMHeadPith( robot.getProxy()->getTransform("HeadPitch", 0, true)); // get the transformation matrix between torso and HeadPitch

        vpVelocityTwistMatrix HeadPitchVLtorso(torsoMHeadPith.inverse());

        for(unsigned int i=0; i< 3; i++)
          for(unsigned int j=0; j< 3; j++)
            HeadPitchVLtorso[i][j+3] = 0;

        //std::cout << "HeadPitchVLtorso: " << std::endl << HeadPitchVLtorso << std::endl;
        // Transform the matrix
        eJe = HeadPitchVLtorso *tJe;

        // std::cout << "eJe" << std::endl << eJe << std::endl;

        task.set_eJe( eJe );
        task.set_cVe( vpVelocityTwistMatrix(eMc.inverse()) );

        vpDisplay::displayCross(I, head_cog_des, 10, vpColor::blue);
        vpDisplay::displayCross(I, head_cog_cur, 10, vpColor::green);
        //  std::cout << "head_cog_des:" << std::endl << head_cog_des << std::endl;
        //  std::cout << "head_cog_cur:" << std::endl << head_cog_cur << std::endl;

        // Update the current x feature
        double x,y;
        vpPixelMeterConversion::convertPoint(cam, head_cog_cur, x, y);
        s.buildFrom(x, y, Z);
        //s.set_xyZ(head_cog_cur.get_u(), head_cog_cur.get_v(), Z);

        // Update the log(Z/Z*) feature. Since the depth Z changes, we need to update the interaction matrix
        s_Z.buildFrom(s.get_x(), s.get_y(), Z, log(Z/Zd)) ;

        q_dot = task.computeControlLaw(vpTime::measureTimeSecond() - servo_time_init);

        //std::cout << "Loop time compute VS: " << vpTime::measureTimeMs() - t << " ms" << std::endl;

        vpMatrix P = task.getI_WpW();
        double alpha = -3.3;
        double min = limit_yaw[0][0];
        double max = limit_yaw[0][1];

        vpColVector z_q2 (q_dot.size());
        vpColVector q_yaw = robot.getPosition(jointNames_head[0]);

        z_q2[3] = 2 * alpha * q_yaw[0]/ pow((max - min),2);

        vpColVector q3 = P * z_q2;
        //if (q3.euclideanNorm()<10.0)
        q_dot =  q_dot + q3;

        std::vector<float> vel(jointNames_head.size());
        vel[0] = q_dot[3];
        vel[1] = q_dot[4];

        // Compute the distance in pixel between the target and the center of the image
        double distance = vpImagePoint::distance(head_cog_cur, head_cog_des);
        //if (distance > 0.03*I.getWidth())
         std::cout << "q:" << std::endl << q_dot << std::endl;
        //  std::cout << "vel" << std::endl << q_dot << std::endl;
        //std::cout << "distance" << std::endl << distance <<" -- " << 0.03*I.getWidth() << " ++ " << I.getWidth() << std::endl;

        //        if (distance > 0.1*I.getWidth())
        robot.setVelocity(jointNames_head,vel);
        //        else
        //        {
        //          std::cout << "Setting hipRoll to zero" << std::endl;
        //          robot.getProxy()->setAngles("HipRoll",0.0,1.0);
        //        }
        // std::cout << "errorZ: " << task.getError()[2] << std::endl;
        // std::cout << "stop_vxy: " << stop_vxy << std::endl;

        if (std::fabs(Z -Zd) < 0.05 || stop_vxy || !move_base)
          robot.setBaseVelocity(0.0, 0.0, q_dot[2]);
        else
          robot.setBaseVelocity(q_dot[0], q_dot[1], q_dot[2]);

        if (opt_debug)
        {
          vpColVector vel_head = robot.getJointVelocity(jointNames_head);
          for (unsigned int i=0 ; i < jointNames_head.size() ; i++) {
            plotter_diff_vel->plot(i, 1, loop_iter, q_dot[i+3]);
            plotter_diff_vel->plot(i, 0, loop_iter, vel_head[i]);
          }
          plotter_error->plot(0,loop_iter,task.getError());
          plotter_vel->plot(0,loop_iter, q3);
          plotter_distance->plot(0,0,loop_iter,Z);
        }

        //        if (detection_phase)
        //        {

        //          //if (score >= 0.4 && distance < 0.06*I.getWidth() && bbox.getSize() > 3000)
        //          if (distance < 0.06*I.getWidth() && bbox.getSize() > 3000)
        //          {
        //            if (opt_debug)
        //            {
        //              vpDisplay::displayRectangle(I, bbox, vpColor::red, false, 1);
        //              vpDisplay::displayText(I, (int)bbox.getTop()-10, (int)bbox.getLeft(), name, vpColor::red);
        //            }
        //            detected_face_map[name]++;
        //            f_count++;
        //          }
        //          else
        //          {
        //            if (opt_debug)
        //            {
        //              vpDisplay::displayRectangle(I, bbox, vpColor::green, false, 1);
        //              vpDisplay::displayText(I, (int)bbox.getTop()-10, (int)bbox.getLeft(), name, vpColor::green);
        //            }
        //          }
        //          if (f_count>10)
        //          {
        //            detection_phase = false;
        //            f_count = 0;
        //          }
        //        }
        //        else
        //        {
        //          std::string recognized_person_name = std::max_element(detected_face_map.begin(), detected_face_map.end(), pred)->first;
        //          unsigned int times = std::max_element(detected_face_map.begin(), detected_face_map.end(), pred)->second;

        //          if (!in_array(recognized_person_name, recognized_names) && recognized_person_name != "Unknown") {

        //            if (opt_language_english)
        //            {
        //              phraseToSay = "\\emph=2\\ Hi \\wait=200\\ \\emph=2\\" + recognized_person_name + "\\pau=200\\ How are you ?";
        //            }
        //            else
        //            {
        //              phraseToSay = "\\emph=2\\ Salut \\wait=200\\ \\emph=2\\" + recognized_person_name + "\\pau=200\\ comment vas  tu ?";;
        //            }

        //            std::cout << phraseToSay << std::endl;
        //            tts.post.say(phraseToSay);
        //            recognized_names.push_back(recognized_person_name);
        //          }
        //          if (!in_array(recognized_person_name, recognized_names) && recognized_person_name == "Unknown"
        //              && times > 15)
        //          {

        //            if (opt_language_english)
        //            {
        //              phraseToSay = "\\emph=2\\ Hi \\wait=200\\ \\emph=2\\. I don't know you! \\emph=2\\ What's your name?";
        //            }
        //            else
        //            {
        //              phraseToSay = " \\emph=2\\ Salut \\wait=200\\ \\emph=2\\. Je ne te connais pas! \\emph=2\\  Comment t'appelles-tu ?";
        //            }

        //            std::cout << phraseToSay << std::endl;
        //            tts.post.say(phraseToSay);
        //            recognized_names.push_back(recognized_person_name);
        //          }

        //          detection_phase = true;
        //          detected_face_map.clear();
        //        }
      }
      else {
        robot.stop(jointNames_head);
        robot.stopBase();
        std::cout << "Stop!" << std::endl;
        reinit_servo = true;
      }

      //if (opt_debug)
      vpDisplay::flush(I);
      if (vpDisplay::getClick(I, false))
        break;
      loop_iter ++;
      std::cout << "Loop time: " << vpTime::measureTimeMs() - t << " ms" << std::endl;
    }
    robot.stop(jointNames_head);
    robot.stopBase();

    tts.setLanguage("French");
  //  tts.exit();

    people_proxy.unsubscribe("People");
   // people_proxy.exit();

    asr.unsubscribe("Test_ASR");
    asr.setVisualExpression(true);

  //  asr.exit();

    tts.setLanguage("French");
 //   tts.exit();

    led_proxy.fadeRGB("FaceLeds","white",0.1);
  //  led_proxy.exit();

    vpDisplay::getClick(I, true);

  }
  catch(vpException &e) {
    std::cout << e.getMessage() << std::endl;
  }
  catch (const AL::ALError& e)
  {
    std::cerr << "Caught exception " << e.what() << std::endl;
  }

  std::cout << "The end: stop the robot..." << std::endl;

  //tts.setLanguage("French");
  //  tts.exit();


  robot.stop(jointNames_head);
  robot.stopBase();

  return 0;
}
Code Example #21
File: bhuman.cpp, Project: Bigshan/BHuman2013
  /**
   * The constructor initializes the shared memory for communicating with bhuman.
   * It also establishes a communication with NaoQi and prepares all data structures
   * required for this communication.
   * @param pBroker A NaoQi broker that allows accessing other NaoQi modules.
   */
  BHuman(boost::shared_ptr<AL::ALBroker> pBroker) :
    ALModule(pBroker, "BHuman"),
    data((LBHData*) MAP_FAILED),
    sem(SEM_FAILED),
    proxy(0),
    memory(0),
    dcmTime(0),
    lastReadingActuators(-1),
    actuatorDrops(0),
    frameDrops(allowedFrameDrops + 1),
    state(sitting),
    phase(0.f),
    ledIndex(0),
    rightEarLEDsChangedTime(0),
    startPressedTime(0),
    lastBHumanStartTime(0)
  {
    setModuleDescription("A module that provides basic ipc NaoQi DCM access using shared memory.");
    fprintf(stderr, "libbhuman: Starting.\n");

    assert(lbhNumOfSensorIds == sizeof(sensorNames) / sizeof(*sensorNames));
    assert(lbhNumOfActuatorIds == sizeof(actuatorNames) / sizeof(*actuatorNames));
    assert(lbhNumOfTeamInfoIds == sizeof(teamInfoNames) / sizeof(*teamInfoNames));

    // create shared memory
    memoryHandle = shm_open(LBH_MEM_NAME, O_CREAT | O_RDWR, S_IRUSR | S_IWUSR);
    if(memoryHandle == -1)
      perror("libbhuman: shm_open");
    else if(ftruncate(memoryHandle, sizeof(LBHData)) == -1)
      perror("libbhuman: ftruncate");
    else
    {
      // map the shared memory
      data = (LBHData*) mmap(NULL, sizeof(LBHData), PROT_READ | PROT_WRITE, MAP_SHARED, memoryHandle, 0);
      if(data == MAP_FAILED)
        perror("libbhuman: mmap");
      else
      {
        memset(data, 0, sizeof(LBHData));

        // open semaphore
        sem = sem_open(LBH_SEM_NAME, O_CREAT | O_RDWR, S_IRUSR | S_IWUSR, 0);
        if(sem == SEM_FAILED)
          perror("libbhuman: sem_open");
        else
          try
          {
            // get the robot name
            memory = new AL::ALMemoryProxy(pBroker);

            std::string robotName = (std::string) memory->getData("Device/DeviceList/ChestBoard/BodyNickName", 0);
            strncpy(data->robotName, robotName.c_str(), sizeof(data->robotName));

            // create "positionRequest" and "hardnessRequest" alias
            proxy = new AL::DCMProxy(pBroker);

            AL::ALValue params;
            AL::ALValue result;
            params.arraySetSize(2);

            params[0] = std::string("positionActuators");
            params[1].arraySetSize(lbhNumOfPositionActuatorIds);
            for(int i = 0; i < lbhNumOfPositionActuatorIds; ++i)
              params[1][i] = std::string(actuatorNames[i]);
            result = proxy->createAlias(params);

            params[0] = std::string("hardnessActuators");
            params[1].arraySetSize(lbhNumOfHardnessActuatorIds);
            for(int i = 0; i < lbhNumOfHardnessActuatorIds; ++i)
              params[1][i] = std::string(actuatorNames[headYawHardnessActuator + i]);
            result = proxy->createAlias(params);

            params[0] = std::string("usRequest");
            params[1].arraySetSize(1);
            params[1][0] = std::string(actuatorNames[usActuator]);
            result = proxy->createAlias(params);

            // prepare positionRequest
            positionRequest.arraySetSize(6);
            positionRequest[0] = std::string("positionActuators");
            positionRequest[1] = std::string("ClearAll");
            positionRequest[2] = std::string("time-separate");
            positionRequest[3] = 0;
            positionRequest[4].arraySetSize(1);
            positionRequest[5].arraySetSize(lbhNumOfPositionActuatorIds);
            for(int i = 0; i < lbhNumOfPositionActuatorIds; ++i)
              positionRequest[5][i].arraySetSize(1);

            // prepare hardnessRequest
            hardnessRequest.arraySetSize(6);
            hardnessRequest[0] = std::string("hardnessActuators");
            hardnessRequest[1] = std::string("ClearAll");
            hardnessRequest[2] = std::string("time-separate");
            hardnessRequest[3] = 0;
            hardnessRequest[4].arraySetSize(1);
            hardnessRequest[5].arraySetSize(lbhNumOfHardnessActuatorIds);
            for(int i = 0; i < lbhNumOfHardnessActuatorIds; ++i)
              hardnessRequest[5][i].arraySetSize(1);

            // prepare usRequest
            usRequest.arraySetSize(6);
            usRequest[0] = std::string("usRequest");
            usRequest[1] = std::string("Merge"); // doesn't work with "ClearAll"
            usRequest[2] = std::string("time-separate");
            usRequest[3] = 0;
            usRequest[4].arraySetSize(1);
            usRequest[5].arraySetSize(1);
            usRequest[5][0].arraySetSize(1);

            // prepare ledRequest
            ledRequest.arraySetSize(3);
            ledRequest[1] = std::string("ClearAll");
            ledRequest[2].arraySetSize(1);
            ledRequest[2][0].arraySetSize(2);
            ledRequest[2][0][1] = 0;

            // prepare sensor pointers
            for(int i = 0; i < lbhNumOfSensorIds; ++i)
              sensorPtrs[i] = (float*) memory->getDataPtr(sensorNames[i]);
            resetUsMeasurements();

            // initialize requested actuators
            memset(requestedActuators, 0, sizeof(requestedActuators));
            for(int i = faceLedRedLeft0DegActuator; i < chestBoardLedRedActuator; ++i)
              requestedActuators[i] = -1.f;

            // register "onPreProcess" and "onPostProcess" callbacks
            theInstance = this;
            proxy->getGenericProxy()->getModule()->atPreProcess(&onPreProcess);
            proxy->getGenericProxy()->getModule()->atPostProcess(&onPostProcess);

            fprintf(stderr, "libbhuman: Started!\n");
            return; // success
          }
          catch(AL::ALError& e)
          {
            fprintf(stderr, "libbhuman: %s\n", e.toString().c_str());
          }
      }
    }
    close(); // error
  }
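With the aliases and request skeletons prepared, each DCM cycle only needs to fill in a target time and the joint values before handing the request back to the DCM. A minimal sketch of that send step: the angles array and the 10 ms look-ahead are assumptions, while setAlias() is the standard DCM call for alias commands.

// Hypothetical send step for the "time-separate" positionRequest
// prepared above; angles[] and the +10 ms offset are illustrative.
void sendPositions(AL::DCMProxy* proxy, AL::ALValue& positionRequest,
                   const float* angles, int numActuators)
{
  positionRequest[4][0] = proxy->getTime(10);  // apply ~10 ms from now
  for(int i = 0; i < numActuators; ++i)
    positionRequest[5][i][0] = angles[i];      // one target per actuator
  proxy->setAlias(positionRequest);            // send the timed command
}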
コード例 #22
0
ファイル: bhuman.cpp プロジェクト: Bigshan/BHuman2013
  /**
   * The method reads all sensors. It also detects if the chest button was pressed
   * for at least three seconds. In that case, it shuts down the robot.
   */
  void readSensors()
  {
    // get new sensor values and copy them to the shared memory block
    try
    {
      // copy sensor values into the shared memory block
      int writingSensors = 0;
      if(writingSensors == data->newestSensors)
        ++writingSensors;
      if(writingSensors == data->readingSensors)
        if(++writingSensors == data->newestSensors)
          ++writingSensors;
      assert(writingSensors != data->newestSensors);
      assert(writingSensors != data->readingSensors);

      float* sensors = data->sensors[writingSensors];
      for(int i = 0; i < lbhNumOfSensorIds; ++i)
        sensors[i] = *sensorPtrs[i];

      AL::ALValue value = memory->getData("GameCtrl/RoboCupGameControlData");
      if(value.isBinary() && value.getSize() == sizeof(RoboCup::RoboCupGameControlData))
        memcpy(&data->gameControlData[writingSensors], value, sizeof(RoboCup::RoboCupGameControlData));

      data->newestSensors = writingSensors;

      // detect shutdown request via chest-button
      if(*sensorPtrs[chestButtonSensor] == 0.f)
        startPressedTime = dcmTime;
      else if(state != shuttingDown && startPressedTime && dcmTime - startPressedTime > 3000)
      {
        if(*sensorPtrs[rBumperRightSensor] != 0.f || *sensorPtrs[rBumperLeftSensor] != 0.f ||
           *sensorPtrs[lBumperRightSensor] != 0.f || *sensorPtrs[lBumperLeftSensor] != 0.f)
          (void) !system("( /home/nao/bin/bhumand stop && sudo shutdown -r now ) &");
        else
          (void) !system("( /home/nao/bin/bhumand stop && sudo shutdown -h now ) &");
        state = preShuttingDown;
      }
    }
    catch(AL::ALError& e)
    {
      fprintf(stderr, "libbhuman: %s\n", e.toString().c_str());
    }

    // raise the semaphore
    if(sem != SEM_FAILED)
    {
      int sval;
      if(sem_getvalue(sem, &sval) == 0)
      {
        if(sval < 1)
        {
          sem_post(sem);
          frameDrops = 0;
        }
        else
        {
          if(frameDrops == 0)
            fprintf(stderr, "libbhuman: dropped sensor data.\n");
          ++frameDrops;
        }
      }
    }
  }
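readSensors() is the producer side of the shared-memory exchange: it writes into one of three buffers and posts the semaphore at most once, so a slow consumer shows up as "dropped sensor data" rather than as a growing queue. A minimal sketch of the matching consumer loop in the bhuman process, assuming sem and data were obtained via sem_open(LBH_SEM_NAME, ...) and an mmap of the shared LBHData block as in the constructor:

#include <semaphore.h>

// Hypothetical consumer side of the triple buffer.
void consumeSensors(sem_t* sem, LBHData* data)
{
  while(sem_wait(sem) == 0)  // block until readSensors() posts
  {
    // claim the newest completed buffer; the writer never reuses the
    // buffer currently marked as readingSensors
    data->readingSensors = data->newestSensors;
    const float* sensors = data->sensors[data->readingSensors];
    // ... interpret sensors[0 .. lbhNumOfSensorIds - 1] here ...
    (void) sensors;
  }
}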
コード例 #23
0
ファイル: interactions.cpp プロジェクト: dsapandora/nao_hri
void InteractionMonitor::processFaces(const AL::ALValue &faces, map<string, Human>& humans) {

    try {
        /** Check that there are faces effectively detected. */
        if (faces.getSize() < 2 ) {
            if (facesCount != 0) {
                ROS_INFO("No face detected");
                humanBuffer = NB_VIEWS_BEFORE_LEARNING;
                facesCount = 0;
            }
            return;
        }
        /** Check the number of faces from the FaceInfo field, and check that it has
        * changed from the last event.*/
        if (faces[1].getSize() - 1 != facesCount) {
            ROS_INFO("%d face(s) detected.", faces[1].getSize() - 1);
            /** Update the current number of detected faces. */
            facesCount = faces[1].getSize() - 1;
        }
    }
    catch (const AL::ALError& e) {
        ROS_ERROR("%s", e.what());
    }

    for (int i = 0; i < facesCount; ++i) {
       
        int naoqiFaceId = faces[1][i][1][0];
        float confidence = faces[1][i][1][1];

        // Face > [FaceInfo] > FaceInfo > ShapeInfo > yaw
        float yaw = faces[1][i][0][1];
        // Face > [FaceInfo] > FaceInfo > ShapeInfo > pitch
        float pitch = faces[1][i][0][2];

        ROS_INFO("Face (id: %d) confidence:%.2f yaw: %.2f, pitch: %.2f", naoqiFaceId, confidence, yaw, pitch);
        //ROS_INFO("Face (id: %d) reco confidence: %.2f", naoqiFaceId, confidence);
        //ROS_INFO("Time_Filtered_Reco_Info: %s", faces[1][i].toString().c_str());

        int candidateId = facesCloseTo(yaw, pitch);

        if (naoqiFaceId == -1 && candidateId == -1)  // seen unknown new face
        {
            if (humanBuffer > 0) {
                humanBuffer--;
            }
            else {
                humanBuffer = NB_VIEWS_BEFORE_LEARNING;
                char faceId[6];
                gen_random(faceId, 5);
                ROS_INFO("Unknown face (confidence: %.2f). Learning it as <%s>", confidence, faceId);
                if(!m_faceProxy->learnFace(faceId))
                {
                    ROS_ERROR("Could not learn the new face!");
                }
            }
        }
        else { // face recognized!

            humanBuffer = NB_VIEWS_BEFORE_LEARNING;
            // too close to an existing face? re-use the existing face!
            if (candidateId != -1) {
                naoqiFaceId = candidateId;
            }
            else {
                assert(naoqiFaceId != -1);
                string label = faces[1][i][1][2];
                id2label[naoqiFaceId] = label;
            }

            string label = id2label[naoqiFaceId];
            // save the yaw,pitch pose of the recognized face, to
            // prevent jitter at next round of detections
            m_lastSeenHumans[naoqiFaceId] = make_pair(yaw, pitch);


            humans[label] = makeHuman(label, yaw, pitch);
        }
    }

}
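The facesCloseTo() helper used above is not part of this excerpt. A plausible minimal sketch, assuming m_lastSeenHumans maps NAOqi face ids to the (yaw, pitch) pairs saved above and that anything within a small angular radius counts as the same face; the threshold value is an assumption:

#include <cmath>
#include <map>
#include <utility>

// Hypothetical implementation of the jitter filter used above; the
// angular threshold is an illustrative assumption.
int InteractionMonitor::facesCloseTo(float yaw, float pitch)
{
    const float ANGULAR_THRESHOLD = 0.1f; // rad, assumed
    std::map<int, std::pair<float, float> >::const_iterator it;
    for (it = m_lastSeenHumans.begin(); it != m_lastSeenHumans.end(); ++it)
    {
        float dyaw = yaw - it->second.first;
        float dpitch = pitch - it->second.second;
        if (std::sqrt(dyaw * dyaw + dpitch * dpitch) < ANGULAR_THRESHOLD)
            return it->first; // close enough: reuse this face id
    }
    return -1; // no previously seen face nearby
}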
コード例 #24
0
/*!

  Connect to the Romeo robot, grab and display images using ViSP, and start face detection with the Okao library running on the robot.
  By default, this example connects to a robot with IP address 198.18.0.1.

 */
int main(int argc, const char* argv[])
{
  try
  {
    std::string opt_ip = "198.18.0.1";

    if (argc == 3) {
      if (std::string(argv[1]) == "--ip")
        opt_ip = argv[2];
    }

    vpNaoqiGrabber g;
    if (! opt_ip.empty())
      g.setRobotIp(opt_ip);
    g.setCamera(0);
    g.open();

    vpImage<unsigned char> I(g.getHeight(), g.getWidth());
    vpDisplayX d(I);
    vpDisplay::setTitle(I, "ViSP viewer");


    // Open Proxy for the speech
    AL::ALTextToSpeechProxy tts(opt_ip, 9559);
    tts.setLanguage("English");

    std::string phraseToSay = "Hi!";
    //    int id = tts.post.say(phraseToSay);
    //    tts.wait(id,2000);
    // Open Face detection proxy

    AL::ALFaceDetectionProxy df(opt_ip, 9559);


    // Start the face recognition engine
    const int period = 1;

    df.subscribe("Face", period, 0.0);


    AL::ALMemoryProxy memProxy(opt_ip, 9559);

    float  mImageHeight = g.getHeight();
    float  mImageWidth = g.getWidth();
    std::vector<std::string> names;


    while (1)
    {

      g.acquire(I);
      vpDisplay::display(I);


      AL::ALValue result = memProxy.getData("FaceDetected");



      if (result.getSize() >=2)
      {
        //std::cout << "face" << std::endl;
        AL::ALValue info_face_array = result[1];

        for (unsigned int i = 0; i < info_face_array.getSize()-1; i++ )
        {
          // AL::ALValue info_face = info_face_array[i];

          //Extract face info

          //AL::ALValue shape_face =info_face[0];

          //std::cout << "alpha "<< shape_face[1] <<". Beta:" << shape_face[1] << std::endl;
          // Face Detected [1]/ First face [0]/ Shape Info [0]/ Alpha [1]
          float alpha = result[1][i][0][1];
          float beta = result[1][i][0][2];
          float sx = result[1][i][0][3];
          float sy = result[1][i][0][4];

          std::string name = result[1][i][1][2];
          float score = result[1][i][1][1];

          // sizeX / sizeY are the face size in relation to the image

          float sizeX = mImageHeight * sx;
          float sizeY = mImageWidth * sy;

          // Center of the face in the image
          float x = mImageWidth / 2 - mImageWidth * alpha;
          float y = mImageHeight / 2 + mImageHeight * beta;

          vpDisplay::displayCross(I, y, x, 10, vpColor::red);
          vpDisplay::displayRectangle(I,y,x,0.0,sizeX,sizeY,vpColor::cyan,1);

          std::cout << i << "- size: " << sizeX*sizeY << std::endl;

          if (score >= 0.4)
          {
            std::cout << i << "- Name: " << name <<" score "  << score << std::endl;

            //            std::cout << "NAMES: " << names << std::endl;
            //            std::cout << "Esite: " << in_array(name, names) << std::endl;
            //            std::cout << "==================================== " << std::endl;

            if (!in_array(name, names) && sizeX*sizeY > 4000) {
              std::string phraseToSay = "\\emph=2\\ Hi \\wait=200\\ \\emph=2\\" + name;
              std::cout << phraseToSay << std::endl;
              tts.post.say(phraseToSay);
              names.push_back(name);

            }
          }

          else
            std::cout << i << "- Face not recognized. " << std::endl;

          //          cv::Point p1(x - (sizeX / 2), y - (sizeY / 2));
          //          cv::Point p2(x + (sizeX / 2), y + (sizeY / 2));
          //          cv::rectangle(I, p1, p2, cv::Scalar(255,255,255));

        }

      }

      //vpTime::sleepMs(60);

      vpDisplay::flush(I);
      if (vpDisplay::getClick(I, false))
        break;

    }


    df.unsubscribe("Face");



  }
  catch (const vpException &e)
  {
    std::cerr << "Caught exception: " << e.what() << std::endl;
  }
  catch (const AL::ALError &e)
  {
    std::cerr << "Caught exception: " << e.what() << std::endl;
  }



  return 0;
}
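The greeting logic above relies on an in_array() helper that is not part of this excerpt. A minimal sketch of what it presumably does, i.e. a linear search over the names already greeted:

#include <algorithm>
#include <string>
#include <vector>

// Hypothetical helper used by the example above: returns true if value
// is already present in the vector of greeted names.
bool in_array(const std::string &value, const std::vector<std::string> &array)
{
  return std::find(array.begin(), array.end(), value) != array.end();
}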
コード例 #25
0
bool vpFaceTrackerOkao::detect()
{
  m_message.clear();
  m_polygon.clear();
  m_nb_objects = 0;
  m_faces.clear();
  m_scores.clear();

  bool target_found = false;
  AL::ALValue result = m_mem_proxy.getData("FaceDetected");
  //-- Detect faces


  if (result.getSize() >=2)
  {
    //std::cout << "face" << std::endl;
    AL::ALValue info_face_array = result[1];
    target_found = true;
    double min_dist = m_image_width*m_image_height;
    unsigned int index_closest_cog = 0;
    vpImagePoint closest_cog;
    for (unsigned int i = 0; i < info_face_array.getSize()-1; i++ )
    {
      //Extract face info
      // Face Detected [1]/ First face [0]/ Shape Info [0]/ Alpha [1]
      float alpha = result[1][i][0][1];
      float beta = result[1][i][0][2];
      float sx = result[1][i][0][3];
      float sy = result[1][i][0][4];

      std::string name = result[1][i][1][2];
      float score = result[1][i][1][1];

      std::ostringstream message;
      if (score > 0.6)
        message << name;
      else
        message << "Unknown";

      m_message.push_back( message.str() );
      m_scores.push_back(score);
      // sizeX / sizeY are the face size in relation to the image
      float h = m_image_height * sx;
      float w = m_image_width * sy;

      // Center of the face in the image
      float x = m_image_width / 2 - m_image_width * alpha;
      float y = m_image_height / 2 + m_image_height * beta;

      vpImagePoint cog(x,y);
      double dist = vpImagePoint::distance(m_previuos_cog,cog);

      if (dist< min_dist)
      {
        closest_cog = cog;
        index_closest_cog = i;
        min_dist = dist;
      }

      std::vector<vpImagePoint> polygon;
      double x_corner = x - h/2;
      double y_corner = y - w/2;

      polygon.push_back(vpImagePoint(y_corner  , x_corner  ));
      polygon.push_back(vpImagePoint(y_corner+w, x_corner  ));
      polygon.push_back(vpImagePoint(y_corner+w, x_corner+h));
      polygon.push_back(vpImagePoint(y_corner  , x_corner+h));

      m_polygon.push_back(polygon);
      m_nb_objects ++;

    }

    if (index_closest_cog !=0)
      std::swap(m_polygon[0], m_polygon[index_closest_cog]);
    m_previuos_cog = closest_cog;
  }

  return target_found;
}
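A minimal usage sketch for this detector: the constructor signature is an assumption, while getNbObjects(), getPolygon() and getMessage() come from ViSP's vpDetectorBase, which this class presumably extends.

// Hypothetical usage; detect() swaps the face closest to the previous
// center of gravity to index 0, so index 0 can be tracked directly.
vpFaceTrackerOkao tracker("198.18.0.1", 9559); // constructor is assumed
for (int iter = 0; iter < 1000; ++iter)
{
  if (tracker.detect())
  {
    std::vector<vpImagePoint> roi = tracker.getPolygon(0); // closest face
    std::cout << "Closest face: " << tracker.getMessage(0)
              << " (" << tracker.getNbObjects() << " detected)" << std::endl;
  }
}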
コード例 #26
0
ファイル: dcm_test.cpp プロジェクト: amagasso/romeo_tk
/*!

   Connect to the Romeo robot and apply some motion.
   By default, this example connects to a robot with IP address 198.18.0.1.
   If you want to connect to another robot, run:

   ./motion --ip <robot ip address>

   Example:

   ./motion --ip 169.254.168.230
 */
int main(int argc, const char* argv[])
{
  try
  {
    std::string opt_ip = "198.18.0.1";

    if (argc == 3) {
      if (std::string(argv[1]) == "--ip")
        opt_ip = argv[2];
    }

    // Create a general proxy to motion to use new functions not implemented in the specialized proxy

    std::string myIP = "";  // Laptop IP (see /etc/hosts)
    int myPort = 0;         // Default broker port

    //    AL::ALProxy *m_proxy;
    boost::shared_ptr<AL::ALBroker> broker = AL::ALBroker::createBroker("Broker", myIP, myPort, opt_ip, 9559);
    //    m_proxy = new AL::ALProxy(broker, "ALVideoDevice");

    //    m_proxy->callVoid("setCameraGroup",1 ,true);

    //AL::ALProxy* dcm = new AL::ALProxy(broker, "DCM_video");



    boost::shared_ptr<AL::ALProxy> proxy = boost::shared_ptr<AL::ALProxy>(new AL::ALProxy(broker, "DCM_video"));
    AL::DCMProxy* dcm = new AL::DCMProxy(proxy);

    //boost::shared_ptr<AL::ALProxy> dcm = boost::shared_ptr<AL::ALProxy>(new AL::ALProxy(broker, "DCM_video"));



    AL::ALValue commands;
    commands.arraySetSize(3);
    commands[0] = std::string("FaceBoard/CameraSwitch/Value");
    commands[1] = std::string("Merge");
    commands[2].arraySetSize(1);
    commands[2][0].arraySetSize(2);
    commands[2][0][0] = 1;
    //commands[2][0][1] = dcm->call<int>("getTime",0);// dcm->getTime(0);
    commands[2][0][1] = dcm->getTime(0);
    //dcm->callVoid("set",commands);
    dcm->set(commands);


    //int time = dcm->call<int>("getTime",0);
    int time = dcm->getTime(10);
    std::cout <<"Time " << time << std::endl;

    //dcm = naoqitools.myGetProxy( "DCM_video" );
    //dcm.set( ["FaceBoard/CameraSwitch/Value", "Merge",  [[rGroupNumber, dcm.getTime( 0 ) ]] ] );
    //dcm.set(["ChestBoard/Led/Red/Actuator/Value", "Merge", [[1.0, dcm.getTime(10000)]] ])
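
    // A hedged C++ equivalent of the commented Python LED line above:
    // drive the red chest LED to full brightness 10 s from now. This
    // reuses the dcm proxy and the same timed-command layout as the
    // camera-switch command; the 10 s delay comes from the comment.
    AL::ALValue ledCommand;
    ledCommand.arraySetSize(3);
    ledCommand[0] = std::string("ChestBoard/Led/Red/Actuator/Value");
    ledCommand[1] = std::string("Merge");       // merge with pending commands
    ledCommand[2].arraySetSize(1);              // a list of one timed command
    ledCommand[2][0].arraySetSize(2);
    ledCommand[2][0][0] = 1.0f;                 // target value
    ledCommand[2][0][1] = dcm->getTime(10000);  // DCM time 10 s ahead
    //dcm->set(ledCommand);                     // uncomment to actually send it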




  }
  catch (const vpException &e)
  {
    std::cerr << "Caught exception: " << e.what() << std::endl;
  }
  catch (const AL::ALError &e)
  {
    std::cerr << "Caught exception: " << e.what() << std::endl;
  }



  return 0;
}