Ejemplo n.º 1
0
// Dispatches one incoming client packet for this player.
// Undersized packets are dropped; if the owning agent is gone, the player
// deletes itself instead of processing anything.
void Player::HandlePacket( PacketPtr packet ) {
	// Need at least the 2-byte type field plus payload header.
	if( packet->getDataSize() < 4 ) {
		LogConsole("Player sent packet size < 4");
		return;
	}

	// Promote the weak agent reference; a dead agent means this player
	// is orphaned and must go away too.
	ObjectPtr agent( m_agent.lock() );
	if( !agent ) {
		LogConsole( "Player agent deleted, deleting player." );
		Delete();
		return;
	}

	sf::Uint16 messageType;
	(*packet) >> messageType;

	if( messageType == ClientToServer::CLIENT_COMMAND ) {
		// Commands are forwarded to the agent for actual handling.
		agent->HandlePacket( packet );
	} else {
		// Unknown type: log and ignore.
		LogConsole( "Player sent packet with type0=" + string_cast( messageType ) );
	}
}
Ejemplo n.º 2
0
// Spawns one AI-controlled player into this maze and drives it until the
// agent reports completion or the hosting thread is interrupted.
void Maze::processAI()
{
	std::cout << "DEBUG: Starting Maze::processAI" << std::endl;

	// AI agent with id 0, bound to this maze instance.
	AIAgent agent(0, *this);

	{
		// players_ is shared with other threads; guard the insertion.
		boost::mutex::scoped_lock lock(players_mutex_);
		players_[agent.getPlayerId()] = agent.getPlayer();
	}

	// Announce the new player to all connected clients.
	GameMessage msg(GameMessage::GC_UPDATE_NOTIFY, agent.getPlayer().get());
	broadcast(msg);

	// Tick the agent at ~10 Hz until handleTick() signals it is done.
	while (true)
    {
        try
        {
			if (agent.handleTick())
				break;

            // sleep() is a boost interruption point, so an interrupt on this
            // thread surfaces as thread_interrupted below.
            boost::this_thread::sleep(boost::posix_time::milliseconds(100));
        }
        catch(boost::thread_interrupted &)
        {
			// Cooperative shutdown: exit without clearing sessions, since
			// the owner interrupted us mid-game.
			std::cout << "DEBUG: Ending Maze::processAI" << std::endl;
            return;
        }
    }

	// Normal completion path: tear down the game sessions.
	clearSessions();
}
Ejemplo n.º 3
0
void test_Agent(const char *filename)
{
	Maze field;
	Maze mazeInRobot;
	field.loadFromFile(filename);
	//field.loadFromArray(mazeData_66test);

	Agent agent(mazeInRobot);

	IndexVec cur(0,0);
	while(1) {
		bool pos[MAZE_SIZE][MAZE_SIZE] = {false};
		pos[cur.y][cur.x] = true;
		mazeInRobot.printWall(pos);

		agent.update(cur, field.getWall(cur));
		if (agent.getState() == Agent::FINISHED) break;

		Direction dir = agent.getNextDirection();
		for (int i=0;i<4;i++) {
			if (dir[i]) cur += IndexVec::vecDir[i];
		}
		usleep(1000000/10);
	}

	agent.caclRunSequence(true);
	bool route[MAZE_SIZE][MAZE_SIZE] = {false};
	for (auto &index : agent.getShortestPath()) {
		route[index.y][index.x] = true;
	}
	mazeInRobot.printWall(route);
}
Ejemplo n.º 4
0
	// Builds a SAT system from a boost::any-wrapped parameter list.
	//
	// Expected param_list layout (by index):
	//   [0] scenario parameters
	//   [1] ship_config::Type
	//   [2] agent type (evolvable_agent::Type or i_sat_agent::Type)
	//   [3] agent parameters
	//   [4] agent_objective::Type   (evolvable only)
	//   [5] objective parameters    (evolvable only)
	//
	// Returns (system, genome mapping, agent factory, observer); when
	// `evolvable` is false only the system slot is filled and a fixed agent
	// is registered on it directly.
	// NOTE(review): the tuple holds raw owning pointers from `new`/factories —
	// presumably the caller takes ownership; confirm against call sites.
	std::tuple< i_system*, i_genome_mapping*, i_agent_factory*, i_observer* > sat_system< dim >::create_instance(rtp_param param, bool evolvable)
	{
		std::tuple< i_system*, i_genome_mapping*, i_agent_factory*, i_observer* > result;

		// boost::any_cast throws boost::bad_any_cast on a type mismatch.
		rtp_param_list param_list = boost::any_cast<rtp_param_list>(param);
		
		sat_scenario< dim >* scenario = sat_scenario< dim >::create_instance(param_list[0]);
		
		ship_config::Type cfg_type = boost::any_cast<ship_config::Type>(param_list[1]);
		thruster_config< dim > ship_cfg = ship_config::create_instance(cfg_type, rtp_param());

		std::get< 0 >(result) = new sat_system< dim >(scenario, ship_cfg);
		if(evolvable)
		{
			evolvable_agent::Type ea_type = boost::any_cast<evolvable_agent::Type>(param_list[2]);
			// TODO: Some easy way to access sub-tuple? (eg. result.firstN)
			// Fills the mapping and factory slots in one call.
			std::tie(
				std::get< 1 >(result),
				std::get< 2 >(result)
				) = evolvable_agent::create_instance(ea_type, param_list[3], ship_cfg);

			agent_objective::Type obj_type = boost::any_cast<agent_objective::Type>(param_list[4]);
			std::get< 3 >(result) = agent_objective::create_instance(obj_type, param_list[5]);
		}
		else
		{
			// Non-evolvable: create a concrete agent and hand shared
			// ownership to the freshly built system.
			i_sat_agent::Type a_type = boost::any_cast<i_sat_agent::Type>(param_list[2]);
			boost::shared_ptr< i_sat_agent > agent(i_sat_agent::create_instance(a_type, param_list[3]));
			std::get< 0 >(result)->register_agent(agent);
		}
		return result;
	}
Ejemplo n.º 5
0
bool WebDevToolsAgent::dispatchMessageFromFrontendOnIOThread(const WebDevToolsMessageData& data)
{
    IORPCDelegate transport;
    ProfilerAgentDelegateStub stub(&transport);
    ProfilerAgentImpl agent(&stub);
    return ProfilerAgentDispatch::dispatch(&agent, data);
}
Ejemplo n.º 6
0
// Runs the full battery of cuckoo-sandbox detection routines in sequence.
// Each routine signals findings/failures by throwing an int; the scan is
// best-effort, so every check is individually wrapped and its exception
// swallowed so one failing routine cannot abort the remaining ones.
// (Fix: the catch clauses previously named an unused variable `e`,
// producing unused-variable warnings.)
void cuckooDetect()
{
	try{
		getDLLList();
	}
	catch (int)
	{
		// Ignored: continue with the remaining checks.
	}
	
	try{
		agent();
	}
	catch (int)
	{
		// Ignored.
	}
	try{
		portScanner();
	}
	catch (int)
	{
		// Ignored.
	}
	try{
		filesAndFolderCheck();
	}
	catch (int)
	{
		// Ignored.
	}
	try{
		cuckoomonDetect();
	}
	catch (int)
	{
		// Ignored.
	}
	try{
		cuckoo();
	}
	catch (int)
	{
		// Ignored.
	}
	try{
		pipe();
	}
	catch (int)
	{
		// Ignored.
	}
	try{
		functionHookedByCuckoo();
	}
	catch (int)
	{
		// Ignored.
	}
}
	// Per-frame update: lets every AI actor "notice" any active observable
	// event it can currently see, delivering each event's signal to a given
	// actor at most once.
	void EnvironmentDisturbanceManager::Update()
	{
		// Fast out: no events means no need to iterate all AI actors.
		if( m_observableEvents.empty() )
			return;

		RemoveExpiredEvents();

		// Walk every actor registered with the AI system.
		AutoAIObjectIter aiActorsIterator( gEnv->pAISystem->GetAIObjectManager()->GetFirstAIObject( OBJFILTER_TYPE, AIOBJECT_ACTOR ) );
		while ( IAIObject* aiObject = aiActorsIterator->GetObject() )
		{
			Agent agent( aiObject );

			ObservableEvents::iterator observableEventIterator = m_observableEvents.begin();
			while ( observableEventIterator != m_observableEvents.end() )
			{
				ObservableEvent& observableEvent = *observableEventIterator;
				// Deliver only once per (event, agent) pair, and only when the
				// agent's vision can actually see the event.
				if ( !observableEvent.HasBeenObservedBy( agent.GetEntityID() ) && agent.CanSee( observableEvent.GetVisionId() ) )
				{
					observableEvent.SetObservedBy( agent.GetEntityID() );

					// NOTE(review): ownership of `data` is presumably taken
					// over by SetSignal — confirm to rule out a leak.
					IAISignalExtraData* data = gEnv->pAISystem->CreateSignalExtraData();
					data->point = observableEvent.GetPosition();
					agent.SetSignal( SIGNALFILTER_SENDER, observableEvent.GetSignal(), data );
				}

				++observableEventIterator;
			}

			aiActorsIterator->Next();
		}
	}
Ejemplo n.º 8
0
//unsigned char buffer[STATIC_ALLOCATOR_SIZE];
//StaticAllocator myAlloc(buffer, STATIC_ALLOCATOR_SIZE);
//unsigned char buffer[STATIC_ALLOCATOR_SIZE];
//StaticAllocator myAlloc(buffer, STATIC_ALLOCATOR_SIZE);
// Entry point: wires a Q-learning agent (epsilon-greedy policy over a small
// neural network) to the LibMapper environment and steps the RL loop forever.
int main() {
  //Alloc::init(&myAlloc);
//  DummyAgent agent;
  QLearningEGreedyPolicy egreedy(0.1f); // 10% exploration
  // Network input = observation + action dims, one output (the Q-value).
  NeuralNetwork net(DIM_OBSERVATIONS + DIM_ACTIONS, N_HIDDEN, 1, 0.1f);
  QLearningAgent agent(&net, DIM_OBSERVATIONS, DIM_ACTIONS, N_ACTIONS,
                       1.0f, 0.1f, &egreedy, false); // lambda = 1.0 => no history
  LibMapperEnvironment env;
  RLQualia qualia(&agent, &env);

  qualia.init();
  qualia.start();

  // Runs until externally terminated; there is no stop condition.
  for (;;) {
//  for (int i=0; i<10; i++) {
    qualia.step();
#if is_computer()
    printf("Current agent action: %d\n", agent.currentAction.conflated());
    printf("Current environment observation: %f %f\n", (double)env.currentObservation.observations[0], (double)env.currentObservation.observations[1]);
#endif
  }

//  if (myAlloc.nLeaks)
//    printf("WARNING: Static Allocator has leaks: %d\n", myAlloc.nLeaks);

  return 0;
}
Ejemplo n.º 9
0
// Tears the player down: if the owning agent still exists, delete it as
// well, then log the destruction.
Player::~Player() {
	if( ObjectPtr agent = m_agent.lock() ) {
		agent->Delete();
	}

	LogConsole( "Player " + string_cast( m_id ) + " destroyed" );
}
Ejemplo n.º 10
0
// ######################################################################
int main (int argc, char* argv[]) {
	std::cout << "HawkNavigator: starting..." << std::endl;
	
	HawkNavigator agent("HawkNavigator", argc, argv);
	agent.start();
	
	std::cout << "HawkNavigator: all done!" << std::endl;
}
// Exercises the two CSdpAgent construction idioms (NewL + manual PushL vs
// NewLC) against a fixed Bluetooth address, issuing a filtered record
// request in each case. Leaves (Symbian exceptions) propagate to the caller.
void CTestStepBtRomConfigSdpAgent001::TestSdpAgentL(TInt aTestSubCase)
	{

	const TBTDevAddr Kaddr(0x000e07966a4);
	const TUUID KdummyUUID(1);

	//Create a search pattern so we only see SDP records for our application
	CSdpSearchPattern *pattern(CSdpSearchPattern::NewL());
	CleanupStack::PushL(pattern);
	User::LeaveIfError(pattern->AddL(KdummyUUID));

	switch (aTestSubCase)
		{
	case 1:
		{
		// NewL does not push onto the cleanup stack, so push explicitly.
		CSdpAgent* agent(CSdpAgent::NewL(*this, Kaddr));
		CleanupStack::PushL(agent);

		agent->SetRecordFilterL(*pattern);
		agent->NextRecordRequestL();		

		// Pop/destroy in reverse order of pushing: agent, then pattern.
		CleanupStack::PopAndDestroy(agent);
		CleanupStack::PopAndDestroy(pattern);
		}
		break;
		
	case 2:
		{
		// NewLC pushes the agent onto the cleanup stack itself.
		CSdpAgent* agent(CSdpAgent::NewLC(*this, Kaddr));
		
		agent->SetRecordFilterL(*pattern);
		agent->NextRecordRequestL();				

		CleanupStack::PopAndDestroy(agent);
		CleanupStack::PopAndDestroy(pattern);
		}
		break;
		
	default:
		// Unknown sub-case is a test-harness programming error.
		SetTestStepResult(EFail);
		User::Panic(KInvalidTestPanicString, aTestSubCase);
		break;
		}
	}
Ejemplo n.º 12
0
// Flags the search spot currently assigned to `entityID` as unreachable in
// that entity's search group, so it is not handed out again. No-op when the
// entity has no valid agent or no group.
void SearchModule::MarkAssignedSearchSpotAsUnreachable(EntityId entityID)
{
	Agent agent(entityID);
	if (!agent)
		return;

	if (SearchGroup* group = GetGroup(agent.GetGroupID()))
		group->MarkAssignedSearchSpotAsUnreachable(entityID);
}
Ejemplo n.º 13
0
// Enumerates all libvirt domains visible through QMF and reports each one
// to `callback` as (name, uuid, state, arg). Returns 0 on success, 1 when
// the console session could not be established.
static int
lq_hostlist(hostlist_callback callback, void *arg, void *priv)
{
	VALIDATE(priv);

	printf("[libvirt-qmf] HOSTLIST operation\n");
	
	qmf::ConsoleSession session(lq_open_session((struct lq_info *)priv));
	if (!session.isValid()) {
		return 1;
	}

	// Agents may take a moment to appear after the session opens: poll up
	// to ~10 seconds until some agent reports at least one domain.
	unsigned tries = 0;
	qmf::ConsoleEvent event;
	uint32_t numDomains = 0;
	while (++tries < 10 && !numDomains) {
		sleep(1);
		uint32_t numAgents = session.getAgentCount();
		for (unsigned a = 0; a < numAgents; a++) {
			qmf::Agent agent(session.getAgent(a));
			event = queryDomain(agent);

			numDomains = event.getDataCount();
			if (numDomains >= 1) {
				break;
			}
		}
	}

	for (unsigned d = 0; d < numDomains; d++) {
		qmf::Data domain = event.getData(d);

		std::string vm_name, vm_uuid, vm_state_str;
		try {
			vm_name = domain.getProperty("name").asString();
			vm_uuid = domain.getProperty("uuid").asString();
			vm_state_str = domain.getProperty("state").asString();
		} catch (const qmf::KeyNotFound& e) {
			// Fix: catch by const reference (was by value, which copies
			// and can slice the exception object).
			std::cout << e.what() << " - skipping" << std::endl;
			continue;
		}

		// Collapse the textual state to the binary flag the callback
		// expects: 0 = shut off, 1 = anything else.
		int vm_state;
		if (!strcasecmp(vm_state_str.c_str(), "shutoff")) {
			vm_state = 0;
		} else {
			vm_state = 1;
		}

		callback(vm_name.c_str(), vm_uuid.c_str(), vm_state, arg);
	}

	session.close();
	return 0;
}
Ejemplo n.º 14
0
// Registers `entityID` with its search group when the entity enters the
// search. Invalid agents are asserted against and ignored in release.
void SearchModule::EntityEnter(EntityId entityID)
{
	Agent agent(entityID);
	assert(agent.IsValid());
	if (!agent.IsValid())
		return;

	SearchGroup* group = GetGroup(agent.GetGroupID());
	if (group)
		group->AddEnteredEntity(entityID);
}
Ejemplo n.º 15
0
// Asks the entity's search group for its next search point, writing the
// result through `query`. Returns false when the entity has no valid agent
// or no group.
bool SearchModule::GetNextSearchPoint(EntityId entityID, SearchSpotQuery* query)
{
	Agent agent(entityID);
	if (!agent)
		return false;

	SearchGroup* group = GetGroup(agent.GetGroupID());
	return group ? group->GetNextSearchPoint(entityID, query) : false;
}
// ROS callback for named pose messages: keeps agent_catalog_ up to date with
// every robot's latest pose and its distance from this agent, then re-sorts
// the catalog by distance. Poses for this robot itself update this_agent_.
void SimpleDeploymentDummy::positionsCallback(const turtlebot_deployment::PoseWithName::ConstPtr& posePtr)
{
    ROS_DEBUG("SimpleDeployment: Positions received, finding robot in database");
    // Search for agent in the catalog using functor that compares the name to an input string
    std::vector<Agent>::iterator it = std::find_if(agent_catalog_.begin(), agent_catalog_.end(), MatchString(posePtr->name) );

    // If the agent is already in the catalog, update the position and recalculate the distance.
    if ( it != agent_catalog_.end() ) {
        ROS_DEBUG("SimpleDeployment: Robot found, updating pose and distance");

        it->setPose(posePtr->pose);
        it->setDistance(distance(this_agent_.getPose(),posePtr->pose));
    }

    // else (the agent is not yet in the catalog), create an Agent object and push it into the catalog vector
    else {
        ROS_DEBUG("SimpleDeployment: Robot not found in database");

        if ( posePtr->name != this_agent_.getName() ) {
            ROS_DEBUG("SimpleDeployment: Robot is not me. Adding it to database");

            // This initializes an object called "agent" with id = 1, the pose of the incoming message, and the distance from this agent to the agent that published the message
            Agent agent( 1, *posePtr, distance(this_agent_.getPose(), posePtr->pose) );
            agent_catalog_.push_back( agent );
        }
        else {
            // NOTE(review): logged at ERROR level although this looks like a
            // normal first-self-pose event — confirm the intended severity.
            ROS_ERROR("SimpleDeployment: Robot is me! Updating position and adding to database");
            this_agent_.setPose(posePtr->pose);
            got_me_ = true; // first pose for this robot has been received

            // This initializes an object called "agent" with id = 0, the pose of the incoming message, and a distance of 0.0;
            Agent agent( 0, *posePtr, 0.0 );
            agent_catalog_.push_back( agent );
        }
    }

    // Sort agent list on distance (using functor)
    std::sort( agent_catalog_.begin(), agent_catalog_.end(), SortAgentsOnDistance() );
    ROS_DEBUG("SimpleDeployment: Positions processed");
}
Ejemplo n.º 17
0
void Player::Update() {
	m_inventory->SendUpdate( shared_from_this() );

	FlushBuffer();
	HandleSocketData();

	ObjectPtr agent( m_agent.lock() );
	if( !agent ) {
		LogConsole( "Player agent deleted, deleting player." );
		Delete();
		return;
	}
}
Ejemplo n.º 18
0
// Smoke test: wires a Q-learning agent (epsilon-greedy policy over a small
// neural-network Q-function) to the dummy environment and runs the shared
// testQLearning driver.
void testQLearningDummy() {
  randomSeed(RANDOM_SEED); // deterministic run
  NeuralNetwork qNetwork(DUMMY_ENVIRONMENT_OBSERVATIONS_DIM + DUMMY_AGENT_ACTIONS_DIM, N_HIDDEN, 1, 0.1f);
  ActionProperties actionProps(DUMMY_AGENT_ACTIONS_DIM, DUMMY_AGENT_N_ACTIONS);
  QLearningEGreedyPolicy policy(0.1f); // 10% exploration
  QFunction qFunc(&qNetwork, DUMMY_ENVIRONMENT_OBSERVATIONS_DIM, &actionProps);
  QLearningAgent agent(&qFunc, &policy,
                       DUMMY_ENVIRONMENT_OBSERVATIONS_DIM,
                       &actionProps,
                       0.0f, 0.01f, false); // lambda = 0.0  => no history, gamma = 0.01 => opportunistic agent
  DummyEnvironment env;
  testQLearning(env, agent);
}
Ejemplo n.º 19
0
// Marks this spot as searched by `participant`, notifying either the
// searcher (spot seen by its assignee) or the assignee (spot seen by
// someone else), then clears the assignment and sets the searched status —
// temporarily if `timeout` > 0, permanently otherwise.
void SearchSpot::MarkAsSearchedBy(SearchActor& participant, float timeout)
{
	if(m_assigneeID)
	{
		if (IsAssignedTo(participant.entityID))
		{
			Agent agent(participant.entityID);
			if (agent)
			{
				if(m_isTargetSearchSpot)
					agent.SetSignal(0, "OnTargetSearchSpotSeen");

				// NOTE(review): for target spots BOTH signals are sent —
				// if only one was intended, an `else` is missing above.
				agent.SetSignal(0, "OnAssignedSearchSpotSeen");
			}
		}
		else
		{
			// Someone other than the assignee searched it: tell the assignee.
			Agent agent(m_assigneeID);
			if (agent)
			{
				agent.SetSignal(0, "OnAssignedSearchSpotSeenBySomeoneElse");
			}
		}
		m_assigneeID = 0;
	}

	if(timeout > 0.0f)
	{
		// Spot becomes searchable again after the timeout elapses.
		m_searchTimeoutLeft = timeout;
		m_status = SearchedTimingOut;
	}
	else
		m_status = Searched;

	m_lastTimeObserved = gEnv->pTimer->GetFrameStartTime();
}
Ejemplo n.º 20
0
// Removes `entityID` from its search group when the entity leaves the
// search; dissolves the group entirely once its last member is gone.
void SearchModule::EntityLeave(EntityId entityID)
{
	Agent agent(entityID);
	if (!agent.IsValid())
		return;

	const int groupID = agent.GetGroupID();
	SearchGroup* group = GetGroup(groupID);
	if (!group)
		return;

	group->RemoveEnteredEntity(entityID);

	if (group->IsEmpty())
		GroupLeave(groupID);
}
Ejemplo n.º 21
0
// Records a printf-style comment against an entity in the game AI recorder.
// Compiles to a no-op when INCLUDE_GAME_AI_RECORDER is not defined; invalid
// entities are silently ignored.
void RecordAIComment(EntityId entityId, const char* szComment, ...)
{
#ifdef INCLUDE_GAME_AI_RECORDER

	Agent agent(entityId);
	if (agent.IsValid())
	{
		// va_start/va_end must bracket the call that consumes the args.
		va_list args;
		va_start(args, szComment);

		g_pGame->GetGameAISystem()->GetGameAIRecorder().RecordLuaComment(agent, szComment, args);
		
		va_end(args);
	}

#endif //INCLUDE_GAME_AI_RECORDER
}
Ejemplo n.º 22
0
// Benchmarks one pole-balancing configuration over `runs` independent runs
// of a NeuroEvolutionAgent, aggregating episode counts into Results
// (mean/median/min/max/stddev), failure count and average time.
// NOTE(review): assumes Results default-initializes failures/time/mean to 0
// — confirm the struct's constructor.
// NOTE(review): `env` is a raw owning pointer; it leaks if anything below
// throws before the final delete.
Results benchmarkConfiguration(bool doublePole, bool fullyObservable,
                               bool alphaBetaFilter, bool doubleExponentialSmoothing, int parameters,
                               int runs, double sigma0)
{
  OpenANN::Environment* env;
  if(doublePole)
    env = new DoublePoleBalancing(fullyObservable);
  else
    env = new SinglePoleBalancing(fullyObservable);

  Results results;
  results.runs = runs;
  std::vector<double> episodes;

  OpenANN::Logger progressLogger(Logger::CONSOLE);
  for(int run = 0; run < runs; run++)
  {
    // Fresh agent per run so runs are independent.
    NeuroEvolutionAgent agent(0, false, "linear", parameters > 0, parameters,
                              fullyObservable, alphaBetaFilter, doubleExponentialSmoothing);
    agent.setSigma0(sigma0);
    Result result = benchmarkSingleRun(*env, agent);
    if(run % 10 == 0)
      progressLogger << ".";
    if(!result.success)
      results.failures++;
    episodes.push_back(result.episodes);
    results.time += result.time;
    results.mean += result.episodes;
  }
  progressLogger << "\n";
  results.mean /= (double) runs;
  results.time /= (double) runs;
  results.min = (int) * std::min_element(episodes.begin(), episodes.end());
  results.max = (int) * std::max_element(episodes.begin(), episodes.end());
  std::sort(episodes.begin(), episodes.end());
  // Upper median for even run counts.
  results.median = (int) episodes[episodes.size() / 2];
  // Convert each entry to its squared deviation from the mean...
  for(int run = 0; run < runs; run++)
  {
    episodes[run] -= results.mean;
    episodes[run] *= episodes[run];
  }
  // ...and take the population (divide-by-n) standard deviation.
  results.stdDev = std::sqrt(std::accumulate(episodes.begin(), episodes.end(), 0.0) / (double) runs);

  delete env;
  return results;
}
Ejemplo n.º 23
0
	// Verifies LLPermissions::setOwnerAndGroup(): an ownership change is
	// allowed only when the requesting agent is the current owner.
	void permission_object_t::test<9>()
	{
		LLPermissions perm;
		LLUUID agent("abf0d56b-82e5-47a2-a8ad-74741bb2c29e");	
		LLUUID owner("68edcf47-ccd7-45b8-9f90-1649d7f12806"); 
		LLUUID group("9c8eca51-53d5-42a7-bb58-cef070395db8");
		bool is_atomic = TRUE;
		// Initial assignment succeeds (no prior owner to conflict with).
		ensure("setOwnerAndGroup():failed ", (TRUE == perm.setOwnerAndGroup(agent,owner,group,is_atomic)));
		
		LLUUID owner2("68edcf47-ccd7-45b8-9f90-1649d7f12807"); 
		LLUUID group2("9c8eca51-53d5-42a7-bb58-cef070395db9");
		
		// cant change - agent need to be current owner
		ensure("setOwnerAndGroup():failed ", (FALSE == perm.setOwnerAndGroup(agent,owner2,group2,is_atomic)));
		
		// should be able to change - agent and owner same as current owner
		ensure("setOwnerAndGroup():failed ", (TRUE == perm.setOwnerAndGroup(owner,owner,group2,is_atomic)));
	}
Ejemplo n.º 24
0
    // Adds `waiter` to this buffer's waiter list and blocks it on a fresh
    // Win32 event until it is signalled or `timeout` (ms) elapses.
    // Returns true when the event was signalled, false on timeout.
    // NOTE(review): the lock is released around the wait and re-acquired
    // after — callers presumably hold csForAccess on entry; confirm.
    // NOTE(review): the event handle created here is not closed in this
    // function — presumably the notifier/removal path owns cleanup; verify.
    bool WaiterList::AddAndSuspendWaiter(DWORD_PTR waiter, uint32 timeout)
    {
#ifdef _WIN32
        Assert(m_waiters != nullptr);
        Assert(waiter != NULL);
        Assert(!Contains(waiter));

        // Manual-reset event, initially unsignalled.
        AgentOfBuffer agent(waiter, CreateEvent(NULL, TRUE, FALSE, NULL));
        m_waiters->Add(agent);

        // Don't hold the critical section while blocked, or no other
        // thread could ever signal us.
        csForAccess.Leave();
        DWORD result = WaitForSingleObject(agent.event, timeout);
        csForAccess.Enter();
        return result == WAIT_OBJECT_0;
#else
        // TODO for xplat
        return false;
#endif
    }
Ejemplo n.º 25
0
int main(int argc, char** argv) {
  srand(RANDOM_SEED);

  NeuralNetwork net(GLOW_ENVIRONMENT_OBSERVATIONS_DIM + GLOW_AGENT_ACTIONS_DIM, N_HIDDEN, 1, LEARNING_RATE);
  //QLearningEGreedyPolicy policy(EPSILON);
  QLearningSoftmaxPolicy policy(0.5f, EPSILON);

  QLearningAgent agent(&net,
                       GLOW_ENVIRONMENT_OBSERVATIONS_DIM, GLOW_AGENT_ACTIONS_DIM, GLOW_AGENT_N_ACTIONS,
                       1.0f, 0.1f, &policy, false); // lambda = 1.0 => no history

  SimpleGlowEnvironment env(argv[1], LED_OUT, PHOTO_AIN);

//  BigDummyReward rew;
//  DummyRewardEnvironment env(DUMMY_ENVIRONMENT_OBSERVATIONS_DIM, &rew);

  RLQualia qualia(&agent, &env);

  printf("Starting...\n");

  qualia.init();
  for (int i=0; i<10; i++) {
    printf("# %d ", qualia.nEpisodes);
    qualia.episode(1000);
#if is_computer()
    printf("Mean reward: %f (%f / %d)\n", (double) qualia.totalReward / qualia.nSteps, qualia.totalReward, qualia.nSteps);
//    printf("Current agent action: [%d %d] = %d\n", agent.currentAction[0], agent.currentAction[1], agent.currentAction.conflated());
//    printf("Current environment observation: [%f %f] => %f\n", env.currentObservation[0], env.currentObservation[1], env.currentObservation.reward);
#endif
  }

  // Put epsilon on ice.
  printf("Final episode (without random moves)\n");
  ((QLearningEGreedyPolicy *)agent.policy)->epsilon = 0;
  qualia.episode(1000);
#if is_computer()
  printf("Mean reward: %f (%f/%d)\n", (double) qualia.totalReward / qualia.nSteps, qualia.totalReward, qualia.nSteps);
  printf("Current agent action: [%d] = %d\n", agent.currentAction[0], agent.currentAction.conflated());
//  printf("Current environment observation: [%f] => %f\n", env.currentObservation[0], env.currentObservation.reward);
#endif

  return 0;
}
Ejemplo n.º 26
0
// Picks the next search spot for `entityID`: the highest-scoring spot not
// yet searched, or — when no unsearched spot scores — the reachable,
// currently-unclaimed spot that was observed least recently.
SearchSpot* SearchGroup::FindBestSearchSpot(EntityId entityID, SearchSpotQuery* query)
{
	SearchSpotIter it = m_searchSpots.begin();
	SearchSpotIter end = m_searchSpots.end();

	SearchSpot* bestScoredSearchSpot = NULL;
	SearchSpot* lastSeenSearchSpot = NULL;

	// Bug fix: this was FLT_MIN, which is the smallest *positive* float —
	// spots scoring zero or negative could never be selected. Start from
	// the lowest representable value instead.
	float bestScore = -FLT_MAX;

	// Latest known position of the search target (if any), used in scoring.
	Vec3 targetCurrentPos(ZERO);
	if(m_targetID)
	{
		Agent agent(m_targetID);
		if(agent.IsValid())
			targetCurrentPos = agent.GetPos();
	}

	for ( ; it != end; ++it)
	{
		SearchSpot& searchSpot = (*it);

		if (searchSpot.GetStatus() == NotSearchedYet)
		{
			float score = CalculateScore(searchSpot, entityID, query, targetCurrentPos);
			if (score > bestScore)
			{
				bestScoredSearchSpot = &searchSpot;
				bestScore = score;
			}
		}

		// Fallback candidate: any reachable, unclaimed spot, preferring the
		// one observed longest ago.
		if(searchSpot.GetStatus() != Unreachable && searchSpot.GetStatus() != BeingSearchedRightAboutNow)
		{
			if(!lastSeenSearchSpot || searchSpot.m_lastTimeObserved.GetValue() < lastSeenSearchSpot->m_lastTimeObserved.GetValue())
			{
				lastSeenSearchSpot = &searchSpot;
			}
		}
	}

	return bestScoredSearchSpot ? bestScoredSearchSpot : lastSeenSearchSpot;
}
Ejemplo n.º 27
0
// Sends this task to the agent running on the task's node over Thrift.
// Returns true when the RPC completed, false on any Thrift failure.
// NOTE(review): the log labels " task task_name"/" task task_id" are
// missing ':'/space separators in the emitted line — confirm before
// changing, as log parsers may rely on the current format.
bool CTask::start()
{
    boost::trim(_task_info.nodes);
    LOG(INFO) << "Task name: " << _task_info.name << " Task description: " << _task_info.description
        << " Task nodes: " << _task_info.nodes << " Task command: " << _task_info.cmd
        << " task task_name" << _task_info.task_name << " task task_id" << _task_info.task_id << " over";
    try
    {
        // One-shot client bound to the task's node on the agent port.
        CAgentClient agent((_task_info.nodes).c_str(),openflow::OPENFLOW_AGENT_HANDLER_PORT);
        int32_t ret = agent.execute_task(_task_info);
        LOG(INFO) << "send task to agent: " << ret;
    }
    catch(apache::thrift::TException &e)
    {
        LOG(ERROR) << e.what();
        return false;
    }
    LOG(INFO)<<"for execute end";
    return true;
}
Ejemplo n.º 28
0
// Computes the squared distances from this entity to its attention target
// and to its live target. Returns true only when BOTH targets exist; note
// that distToAttentionTargetSq is still written when only the attention
// target exists (distToLiveTargetSq is left untouched in that case).
bool RangeContainer::GetTargetDistances(float& distToAttentionTargetSq, float& distToLiveTargetSq) const
{
	Agent agent(m_entityID);
	assert(agent.IsValid());
	IF_UNLIKELY (!agent.IsValid())
		return false;

	IAIObject* attentionTarget = agent.GetAttentionTarget();
	if (!attentionTarget)
		return false;

	distToAttentionTargetSq = agent.GetEntityPos().GetSquaredDistance(attentionTarget->GetPos());

	IAIObject* liveTarget = agent.GetLiveTarget();
	if (!liveTarget)
		return false;

	distToLiveTargetSq = agent.GetEntityPos().GetSquaredDistance(liveTarget->GetPos());
	return true;
}
Ejemplo n.º 29
0
// Queues an agent-presence request: registers `cb` in the callback queue,
// encodes the request and sends it. Returns 0 when the packet was sent
// (the callback stays queued until the response arrives), -1 on any
// failure (the callback is then removed again).
int CSClient::Request_Agent_Presence(
	uint32 nQueueid,
	uint64 nAgentUid,
	uint64 nAgentMultpt,
	uint32 nCapacity,
	AGENT_PRESENCE_CALLBACK cb,	// completion callback
	CS_CONTEXT_TYPE dwContext)
{

	if (!m_pManager)
	{
		//ND_LOG_ERROR("[OMS] UserScoreMsg: not connected!");
		return -1;
	}

	// The returned index doubles as the packet sequence number so the
	// response can be matched back to this callback.
	uint32 index = CS_CB_QUEUE::instance().AddCallback(AGENT_PRESENCE_CALLBACK_TYPE, (void*)cb, dwContext, 0);

	UMID agent(nAgentUid,nAgentMultpt);
	
	CSMsg_Request_Agent_Presence req(nQueueid,agent,nCapacity);
	base::packet::OutPacket out;
	base::packet::Header header(WY_CS_AGENT_PRESENCE, 0, index);


	try
	{
		req.encode(out);
		header.body_size(out.get_size());
		if (m_cshandler->send_packet(header,out) == 0)
			return 0;
		//ND_LOG_ERROR("[OMS] UserScoreMsg: send fail!");
	}
	catch (...)
	{
		//ND_LOG_ERROR("[OMS] UserScoreMsg: encode body fail!");
	}
	// Bug fix: the send-failure branch previously returned -1 directly,
	// skipping this removal and leaking the queued callback entry. All
	// failure paths now fall through here.
	CS_CB_QUEUE::instance().RemoveCallback(index);
	return -1;

}
Ejemplo n.º 30
0
		// Starts one benchmark iteration: resets the per-iteration counters,
		// then registers a child cooperation containing m_max_agents child
		// agents (with reg/dereg notifications routed back to this agent).
		// Stops the SObjectizer environment once all iterations are done.
		void
		try_start_new_iteration()
		{
			if( m_iterations_left <= 0 )
			{
				std::cout << "COMPLETED!" << std::endl;

				so_environment().stop();
				return;
			}

			// '\r' keeps the progress display on a single console line.
			std::cout << m_iterations_left << " iterations left...\r"
				<< std::flush;

			m_state = state_t::awaiting_creation;
			m_acks_received = 0;
			m_destroy_received = 0;

			// Drop any mboxes from the previous iteration and preallocate.
			m_child_mboxes = std::vector< so_5::rt::mbox_t >();
			m_child_mboxes.reserve( m_max_agents );

			auto coop = so_environment().create_coop( "child" );
			coop->set_parent_coop_name( so_coop_name() );
			// Notify this agent's mbox when the coop is (de)registered.
			coop->add_reg_notificator(
					so_5::rt::make_coop_reg_notificator( so_direct_mbox() ) );
			coop->add_dereg_notificator(
					so_5::rt::make_coop_dereg_notificator( so_direct_mbox() ) );

			for( std::size_t i = 0; i != m_max_agents; ++i )
			{
				std::unique_ptr< so_5::rt::agent_t > agent(
						new a_child_t(
								so_environment(),
								so_direct_mbox() ) );
				// Record the child's mbox before ownership moves to the coop.
				m_child_mboxes.push_back( agent->so_direct_mbox() );

				coop->add_agent( std::move( agent ) );
			}

			so_environment().register_coop( std::move( coop ) );
		}