Example #1
void CCameraData::GetPixelLengthVectors( const CVector3& Pos, const CVector2& vp, CVector3& OutX, CVector3& OutY ) const
{
	/////////////////////////////////////////////////////////////////
	int ivpw = int(vp.GetX());
	int ivph = int(vp.GetY());
	/////////////////////////////////////////////////////////////////
	CVector4 va = Pos;
	CVector4 va_xf = va.Transform(mVPMatrix);
	va_xf.PerspectiveDivide();
	va_xf = va_xf*CVector4(vp.GetX(),vp.GetY(),0.0f);
	/////////////////////////////////////////////////////////////////
	CVector4 vdx = Pos+mCamXNormal;
	CVector4 vdx_xf = vdx.Transform(mVPMatrix);
	vdx_xf.PerspectiveDivide();
	vdx_xf = vdx_xf*CVector4(vp.GetX(),vp.GetY(),0.0f);
	float MagX = (vdx_xf-va_xf).Mag(); // magnitude in pixels of mBillboardRight
	/////////////////////////////////////////////////////////////////
	CVector4 vdy = Pos+mCamYNormal;
	CVector4 vdy_xf = vdy.Transform(mVPMatrix);
	vdy_xf.PerspectiveDivide();
	vdy_xf = vdy_xf*CVector4(vp.GetX(),vp.GetY(),0.0f);
	float MagY = (vdy_xf-va_xf).Mag(); // magnitude in pixels of mBillboardUp
	/////////////////////////////////////////////////////////////////
	OutX = mCamXNormal*(2.0f/MagX);
	OutY = mCamYNormal*(2.0f/MagY);
	/////////////////////////////////////////////////////////////////
}
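
A hedged usage sketch: the two output vectors appear to be world-space steps that project to roughly one pixel each, so they can be used to size geometry that should keep a constant on-screen extent. The quad builder below is illustrative only and not part of CCameraData; CVector3 operator+, operator- and scalar operator* are assumed to exist, as the snippet above suggests.

/* Illustration only: build a quad around `center` whose projected size is
 * approximately `pixels` x `pixels` pixels. */
static void BuildScreenSizedQuad(const CCameraData& cam,
                                 const CVector3& center,
                                 const CVector2& viewport,
                                 float pixels,
                                 CVector3 corners[4])
{
	CVector3 dx, dy;
	cam.GetPixelLengthVectors(center, viewport, dx, dy); // each spans ~1 pixel on screen
	CVector3 ex = dx * (pixels * 0.5f);                  // half-extent along the camera X axis
	CVector3 ey = dy * (pixels * 0.5f);                  // half-extent along the camera Y axis
	corners[0] = center - ex - ey;
	corners[1] = center + ex - ey;
	corners[2] = center + ex + ey;
	corners[3] = center - ex + ey;
}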
/**
 * Returns true if the given vector is near the arena end.
 */
bool CCombinedLoopFunctions::CloseToArenaEnd(const CVector2& pos){

	if(pos.GetX() > arenaSize.GetX()/2.0f - CLOSE_TO_ARENA_END_PARAMETER ||
			pos.GetX() < -arenaSize.GetX()/2.0f + CLOSE_TO_ARENA_END_PARAMETER ||
			pos.GetY() > arenaSize.GetY()/2.0f - CLOSE_TO_ARENA_END_PARAMETER ||
			pos.GetY() < -arenaSize.GetY()/2.0f + CLOSE_TO_ARENA_END_PARAMETER) {
		return true;
	}
	return false;
}
/**
 * Returns true if the given position lies within the nest area expanded by
 * CLOSE_TO_NEST_PARAMETER on each side.
 */
bool CCombinedLoopFunctions::CloseToNest(const CVector2& pos){
	/*if(pos.GetX() > -(nestSize/2.0f)-foodPatchSize/1 && pos.GetX() < (nestSize/2.0f) + foodPatchSize/1 && pos.GetY() > -(nestSize/2.0f) - foodPatchSize/1 && pos.GetY() < (nestSize/2.0f) + foodPatchSize/1) {
		return true;
	}
	return false;*/

	if(pos.GetX() > -(nestSize/2.0f) - CLOSE_TO_NEST_PARAMETER &&
			pos.GetX() < (nestSize/2.0f) + CLOSE_TO_NEST_PARAMETER &&
			pos.GetY() > -(nestSize/2.0f) - CLOSE_TO_NEST_PARAMETER &&
			pos.GetY() < (nestSize/2.0f) + CLOSE_TO_NEST_PARAMETER) {
		return true;
	}
	return false;
}
   void CFootBotMotorGroundSensor::Update() {
      /* We make the assumption that the foot-bot is rotated only around Z */
      CFloorEntity& cFloorEntity = m_cSpace.GetFloorEntity();
      const CVector3& cEntityPos = GetEntity().GetEmbodiedEntity().GetPosition();
      const CQuaternion& cEntityRot = GetEntity().GetEmbodiedEntity().GetOrientation();
      CRadians cRotZ, cRotY, cRotX;
      cEntityRot.ToEulerAngles( cRotZ, cRotY, cRotX );
      CVector2 cCenterPos(cEntityPos.GetX(), cEntityPos.GetY());
      CVector2 cSensorPos;
      for(UInt32 i = 0; i < CCI_FootBotMotorGroundSensor::NUM_READINGS; ++i) {
         cSensorPos = m_tReadings[i].Offset;
         cSensorPos.Rotate(cRotZ);
         /* The offsets appear to be given in cm; scale them to meters */
         cSensorPos *= 0.01;
         cSensorPos += cCenterPos;
         const CColor& cColor = cFloorEntity.GetColorAtPoint(cSensorPos.GetX(), cSensorPos.GetY());
         /* Floating-point division keeps the proportional grayscale value */
         m_tReadings[i].Value = cColor.ToGrayScale() / 255.0f * FOOTBOT_MOTOR_GROUND_SENSOR_READING_RANGE.GetSpan();

         if(m_fNoiseLevel > 0.0f) {
            AddNoise(i);
         }

         /* Normalize reading between 0 and 1, only if calibration has been performed */
         if(m_bCalibrated) {
            m_tReadings[i].Value = FOOTBOT_MOTOR_GROUND_SENSOR_READING_RANGE.NormalizeValue(m_tReadings[i].Value);
         }
      }
   }
 void CFootBotBaseGroundRotZOnlySensor::Update() {
    /*
     * We make the assumption that the robot is rotated only around Z
     */
    /* Get robot position and orientation */
    const CVector3& cEntityPos = m_pcEmbodiedEntity->GetPosition();
    const CQuaternion& cEntityRot = m_pcEmbodiedEntity->GetOrientation();
    CRadians cRotZ, cRotY, cRotX;
    cEntityRot.ToEulerAngles(cRotZ, cRotY, cRotX);
    /* Set robot center */
    CVector2 cCenterPos(cEntityPos.GetX(), cEntityPos.GetY());
    /* Position of sensor on the ground after rototranslation */
    CVector2 cSensorPos;
    /* Go through the sensors */
    for(UInt32 i = 0; i < m_tReadings.size(); ++i) {
       /* Calculate sensor position on the ground; the +4 skips the four
          motor ground sensors that come first in the ground sensor equipped entity */
       cSensorPos = m_pcGroundSensorEntity->GetSensor(i+4).Offset;
       cSensorPos.Rotate(cRotZ);
       cSensorPos += cCenterPos;
       /* Get the color */
       const CColor& cColor = m_pcFloorEntity->GetColorAtPoint(cSensorPos.GetX(),
                                                               cSensorPos.GetY());
       /* Set the reading */
       m_tReadings[i].Value = cColor.ToGrayScale() / 255.0f;
       /* Apply noise to the sensor */
       if(m_bAddNoise) {
          m_tReadings[i].Value += m_pcRNG->Uniform(m_cNoiseRange);
       }
       /* Set the final reading */
       m_tReadings[i].Value = m_tReadings[i].Value < 0.5f ? 0.0f : 1.0f;
    }
 }
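
On the controller side these binarized readings are typically consumed as a vector of 0/1 values. A minimal sketch, assuming the values produced by Update() above are exposed as a std::vector<Real> (the exact accessor depends on the control interface in use):

#include <argos3/core/utility/datatypes/datatypes.h>
#include <cstddef>
#include <vector>

/* Returns true if every ground sensor reports black floor (0.0f), e.g. when
 * the robot sits entirely inside a black region. */
static bool AllSensorsOnBlack(const std::vector<argos::Real>& vec_readings) {
   for(size_t i = 0; i < vec_readings.size(); ++i) {
      if(vec_readings[i] > 0.5f) {
         return false;
      }
   }
   return true;
}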
CColor CFloorPowerFunctions::GetFloorColor(const CVector2& c_position_on_plane) {
   if(c_position_on_plane.GetX() < -1.0f) {
      return CColor::GRAY50;
   }
   return CColor::GRAY20;
}
void iAnt_controller::SetTargetInBounds(CVector2 t) {
    /* Bound the X value based on the forage range. */
    if(t.GetX() > data->ForageRangeX.GetMax())
        t = CVector2(data->ForageRangeX.GetMax(), t.GetY());
    
    if(t.GetX() < data->ForageRangeX.GetMin())
        t = CVector2(data->ForageRangeX.GetMin(), t.GetY());
    
    /* Bound the Y value based on the forage range. */
    if(t.GetY() > data->ForageRangeY.GetMax())
        t = CVector2(t.GetX(), data->ForageRangeY.GetMax());
    
    if(t.GetY() < data->ForageRangeY.GetMin())
        t = CVector2(t.GetX(), data->ForageRangeY.GetMin());
    
    /* Set the robot's target to the bounded t position. */
    target = t;
}
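
The same bounding can be written as a scalar clamp; a minimal sketch under the assumption that the ARGoS Real typedef and the data->ForageRangeX/Y members behave as in the snippet above (Clamp() is a local helper, not an existing iAnt function):

/* Hypothetical helper: clamp f_value into [f_min, f_max]. */
static Real Clamp(Real f_value, Real f_min, Real f_max) {
    if(f_value < f_min) return f_min;
    if(f_value > f_max) return f_max;
    return f_value;
}

/* Possible usage inside SetTargetInBounds():
 * target = CVector2(Clamp(t.GetX(), data->ForageRangeX.GetMin(), data->ForageRangeX.GetMax()),
 *                   Clamp(t.GetY(), data->ForageRangeY.GetMin(), data->ForageRangeY.GetMax())); */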
Example #8
void HRVOAgent::addObstacleAtPoint(CVector2 p,CVector2 v,Real r)
{
    HRVO::Agent *a=new HRVO::Agent();
    a->velocity_=HRVO::Vector2((float)v.GetX(),(float)v.GetY());
    a->position_=HRVO::Vector2((float)p.GetX(),(float)p.GetY());

    Real distance;
    CVector2 relativePosition=relativePositionOfObstacleAt(p,r,distance);
    //a->radius_=r+marginForObstacleAtDistance(distance,r,safetyMargin,socialMargin);
    a->radius_ = r + fmin(distance - r - _HRVOAgent->radius_ - 0.001,
                          marginForObstacleAtDistance(distance, r, safetyMargin, socialMargin));
    // printf("Obstacle radius %.3f\n",a->radius_);

  //a->radius_=r+marginForObstacleAtDistance(p.Length(),r,safetyMargin,socialMargin);
    a->prefVelocity_=a->velocity_;
    _HRVOAgent->agents_.push_back(a);
    _HRVOAgent->insertAgentNeighbor(agentIndex, rangeSq);
    agentIndex++;
}
Example #9
  bool CKinematics2DEngine::CheckCollisions( const CKinematics2DCollisionCircle* pc_circle, const CKinematics2DCollisionRectangle* pc_rectangle ) {
    /* Rototranslate the plane so that the rectangle is axis aligned and centered in O */
    CVector2 center = pc_circle->GetPosition() - pc_rectangle->GetPosition();
    center.Rotate(pc_rectangle->GetOrientation() );
    center.Absolute();
    
    /* Find the Voronoi Region that the circle is in, exploiting the symmetries */
    CVector2 c_half_size   = pc_rectangle->GetHalfSize();
    Real     f_radius      = pc_circle->GetRadius();
    if( center.GetX() <= c_half_size.GetX() ) {
      /* The circle is in the top or bottom region */
      return (center.GetY() <= c_half_size.GetY() + f_radius);
    }
    
    if( center.GetY() <= c_half_size.GetY() ) {
      /* The circle is in the left or right region */
      return (center.GetX() <= c_half_size.GetX() + f_radius);
    }

    /* The circle is in one of the four corner regions */
    return (SquareDistance( c_half_size, center ) <= f_radius*f_radius);
  }
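
The same Voronoi-region test, stripped down to plain scalars, may make the three cases easier to follow. This is an illustrative sketch, not part of CKinematics2DEngine:

#include <cmath>

/* Circle of radius r centred at (cx, cy) vs. an axis-aligned rectangle centred
 * at the origin with half-extents (hx, hy). As above: fold the circle centre
 * into the first quadrant, then test the edge regions and the corner region. */
static bool CircleOverlapsRectangle(double cx, double cy, double r,
                                    double hx, double hy) {
  cx = std::fabs(cx);
  cy = std::fabs(cy);
  if(cx <= hx) {
    return cy <= hy + r;                /* top/bottom region */
  }
  if(cy <= hy) {
    return cx <= hx + r;                /* left/right region */
  }
  double dx = cx - hx;                  /* corner region */
  double dy = cy - hy;
  return dx * dx + dy * dy <= r * r;
}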
bool iAnt_controller::checkDistanceY() {
    cout << "checkDistanceY\n";
    CVector2 curPosition = GetPosition();
    cout << "curPositionY" << curPosition << '\n';
    bool stop = false;
    //cout << "curPosition" << curPosition.GetY() << '\n';
    float distanceY = curPosition.GetY() - data->NestPosition.GetY();
    cout << "distance: " << distanceY << "\n";
    if (distanceY > 0.3)
    {
        cout << "line 115";
        stop = true;
    }
    return stop;
}
Example #11
CColor CLandmarks::GetFloorColor(const CVector2& c_position_on_plane) {
   if((c_position_on_plane.GetX() >= -2.0f &&
       c_position_on_plane.GetX() <= 2.0f) &&
      (c_position_on_plane.GetY() >= -5.0f &&
       c_position_on_plane.GetY() <= -1.0f)) {
      return CColor::GRAY90;
   }
   for(size_t i = 0; i < TARGETS; ++i) {
      if(SquareDistance(c_position_on_plane, m_vecTargets[i]) < TARGET_RADIUS2) {
         return CColor::BLACK;
      }
   }
   return CColor::WHITE;
}
Example #12
void Texture::SetTexel( const CColor4 & color, const CVector2 & ST )
{
	CReal Mul(255.0f);
	U8* pu8data = (U8*) mpImageData;

	u8 ur = (u8) (color.GetX()*Mul);
	u8 ug = (u8) (color.GetY()*Mul);
	u8 ub = (u8) (color.GetZ()*Mul);
	u8 ua = (u8) (color.GetW()*Mul);
	int ix = (int) (miWidth * fmod( (float) ST.GetY(), 1.0f ));
	int iy = (int) (miHeight * fmod( (float) ST.GetX(), 1.0f ));
	int idx = (int) (4 * (ix*miHeight + iy));
	pu8data[idx+0] = ub;
	pu8data[idx+1] = ug;
	pu8data[idx+2] = ur;
	pu8data[idx+3] = ua;
	SetDirty(true);
}
void CProprioceptiveFeatureVector::ComputeFeatureValues()
{
    CProprioceptiveFeatureVector::FEATURE_RANGE = 30.0f;


    unsigned  unCloseRangeNbrCount = CountNeighbors(FEATURE_RANGE/2.0f);
    unsigned  unFarRangeNbrCount   = CountNeighbors(FEATURE_RANGE) - unCloseRangeNbrCount;

    bool neighbours_present = (unCloseRangeNbrCount + unFarRangeNbrCount) > 0;


    int CurrentStepNumber = m_sSensoryData.m_rTime;

    // Features (from LS to MS bits in the FV)
    // Sensors
    // 1st: set if the robot has at least one neighbor in range 0-30cm in the majority of the past X time-steps
    // 2nd: set if the robot has at least one neighbor in range 30-60cm in the majority of the past X time-steps
    if(CurrentStepNumber >= m_iEventSelectionTimeWindow)
    {
        // decision based on the last X time-steps
        if(m_unSumTimeStepsNbrsRange0to30 > (unsigned)(0.5*(double)m_iEventSelectionTimeWindow))
            m_pfAllFeatureValues[0] = 1.0;
        else
            m_pfAllFeatureValues[0] = 0.0;

        if(m_unSumTimeStepsNbrsRange30to60 > (unsigned)(0.5*(double)m_iEventSelectionTimeWindow))
            m_pfAllFeatureValues[1] = 1.0;
        else
            m_pfAllFeatureValues[1] = 0.0;

        // removing the first entry of the moving time window from the sum
        m_unSumTimeStepsNbrsRange0to30  -=  m_punNbrsRange0to30AtTimeStep[m_unNbrsCurrQueueIndex];
        m_unSumTimeStepsNbrsRange30to60 -=  m_punNbrsRange30to60AtTimeStep[m_unNbrsCurrQueueIndex];
    }

    // adding new values into the queue
    if (unCloseRangeNbrCount > 0)
    {
        m_punNbrsRange0to30AtTimeStep[m_unNbrsCurrQueueIndex] = 1;
        m_unSumTimeStepsNbrsRange0to30++;
    }
    else
        m_punNbrsRange0to30AtTimeStep[m_unNbrsCurrQueueIndex] = 0;

    if (unFarRangeNbrCount > 0)
    {
        m_punNbrsRange30to60AtTimeStep[m_unNbrsCurrQueueIndex] = 1;
        m_unSumTimeStepsNbrsRange30to60++;
    }
    else
        m_punNbrsRange30to60AtTimeStep[m_unNbrsCurrQueueIndex] = 0;


    m_unNbrsCurrQueueIndex = (m_unNbrsCurrQueueIndex + 1) % m_iEventSelectionTimeWindow;



    // Sensor-motor interactions
    // Set if the following event occurred at least once in time window X
    // 3rd: distance to nbrs 0-6 && change in angular acceleration
    // 4th: no neighbors detected && change in angular acceleration

    if(neighbours_present &&
            (m_sSensoryData.AngularAcceleration > m_tAngularAccelerationThreshold ||
             m_sSensoryData.AngularAcceleration < -m_tAngularAccelerationThreshold))
    {
        m_piLastOccuranceEvent[2] = CurrentStepNumber;
    }

    if(!neighbours_present &&
            (m_sSensoryData.AngularAcceleration > m_tAngularAccelerationThreshold ||
             m_sSensoryData.AngularAcceleration < -m_tAngularAccelerationThreshold))
    {
        m_piLastOccuranceEvent[3] = CurrentStepNumber;
    }

    for(unsigned int featureindex = 2; featureindex <=3; featureindex++)
    {
        if ((CurrentStepNumber - m_piLastOccuranceEvent[featureindex]) <= m_iEventSelectionTimeWindow)
        {
            m_pfAllFeatureValues[featureindex] = 1.0;
        }
        else
        {
            m_pfAllFeatureValues[featureindex] = 0.0;
        }
    }


    // Motors
    // 5th: distance travelled by the robot in the past 100 time-steps; more than 5% of the maximum possible distance counts as feature=1.
    CVector2 vecAgentPos = m_sSensoryData.pos;

    m_fCumulativeDistTravelled += m_sSensoryData.dist;

    if(CurrentStepNumber >= m_iDistTravelledTimeWindow)
    {
        // distance travelled in last 100 time-steps
        m_fSquaredDistTravelled = (vecAgentPos.GetX() - m_pvecCoordAtTimeStep[m_unCoordCurrQueueIndex].GetX()) *
                (vecAgentPos.GetX() - m_pvecCoordAtTimeStep[m_unCoordCurrQueueIndex].GetX())  +
                (vecAgentPos.GetY() - m_pvecCoordAtTimeStep[m_unCoordCurrQueueIndex].GetY()) *
                (vecAgentPos.GetY() - m_pvecCoordAtTimeStep[m_unCoordCurrQueueIndex].GetY());


        //pos_ref = m_pvecCoordAtTimeStep[m_unCoordCurrQueueIndex];


        // removing the distance travelled in the first time-step of the moving time-window from the queue
        m_fCumulativeDistTravelled -= m_pfDistAtTimeStep[m_unCoordCurrQueueIndex];


        // decision based on distance travelled in the last 100 time-steps
        /*if(m_fCumulativeDistTravelled >= m_fCumulativeDistThreshold)
            m_pfAllFeatureValues[4] = 1.0;
        else
            m_pfAllFeatureValues[4] = 0.0;*/


        /*if(m_fSquaredDistTravelled >= m_fSquaredDistThreshold)
            m_pfAllFeatureValues[4] = 1.0;
        else
            m_pfAllFeatureValues[4] = 0.0;*/
    }

    m_fEstimated_Dist_LongTimeWindow   = TrackRobotDisplacement(CurrentStepNumber, vec_RobPos_LongRangeTimeWindow);
    m_pfAllFeatureValues[4] = (m_fEstimated_Dist_LongTimeWindow   >  sqrt(m_fSquaredDistThreshold)) ? 1.0f: 0.0f;



    //std::cout << " cumulative distance " << m_fCumulativeDistTravelled << std::endl;
    //std::cout << " LinearSpeed " <<  m_sSensoryData.LinearSpeed << " angular speed " << m_sSensoryData.AngularSpeed << std::endl;
    //std::cout << " LinearAcceleration " <<  m_sSensoryData.LinearAcceleration << " angular acceleration " << m_sSensoryData.AngularAcceleration << std::endl;

    // adding new coordinate values into the queue
    m_pvecCoordAtTimeStep[m_unCoordCurrQueueIndex] = vecAgentPos;

    // adding distance travelled at last time-step into queue
    m_pfDistAtTimeStep[m_unCoordCurrQueueIndex] = m_sSensoryData.dist;

    m_unCoordCurrQueueIndex = (m_unCoordCurrQueueIndex + 1) % m_iDistTravelledTimeWindow;


    if((m_sSensoryData.AngularAcceleration > m_tAngularAccelerationThreshold ||
        m_sSensoryData.AngularAcceleration < -m_tAngularAccelerationThreshold))
    {
         m_piLastOccuranceEvent[5] = CurrentStepNumber;
    }


    // 6th: set if the robot changed its angular speed (per control cycle) at least once in the past time-window.
    if ((CurrentStepNumber - m_piLastOccuranceEvent[5]) <= m_iEventSelectionTimeWindow)
    {
        m_pfAllFeatureValues[5] = 1.0;
    }
    else
    {
        m_pfAllFeatureValues[5] = 0.0;
    }

    //6th: linear speed, higher than 5% of max. speed is accepted as feature=1 OR angular speed higher than 5% of max. angular speed is accepted as feature=1
    /*m_pfAllFeatureValues[5] = (m_sSensoryData.LinearSpeed  >= m_fVelocityThreshold) ? 1.0:0.0;*/


    // adding the selected features into the feature vector
    for(size_t i = 0; i <  NUMBER_OF_FEATURES; ++i)
        m_pfFeatureValues[i] = m_pfAllFeatureValues[i];
}
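
The six binary features computed above are presumably packed into a single feature-vector index elsewhere in the class; a minimal sketch of such a packing, with the helper name invented for illustration:

#include <cstddef>

/* Hypothetical helper: pack un_count binary feature values (0.0 or 1.0,
 * least-significant feature first, matching the bit order described in the
 * comments above) into one integer in [0, 2^un_count). */
static unsigned PackFeatureVector(const float* pf_features, size_t un_count) {
    unsigned un_fv = 0;
    for(size_t i = 0; i < un_count; ++i) {
        if(pf_features[i] > 0.5f) {
            un_fv |= (1u << i);
        }
    }
    return un_fv;
}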
/*****
 * Sets target West of the robot's current target.
 *****/
void iAnt_controller::SetTargetW(char x) {
    CVector2 position = GetTarget();
    target = CVector2(position.GetX(),position.GetY()+stepSize);
}
void CRecruitmentLoopFunctions::PreStep() {
   /* Logic to pick and drop food items */
   /*
    * If a robot is in the nest, drop the food item
    * If a robot is on a food item, pick it
    * Each robot can carry only one food item per time
    */

   /* Check whether a robot is on a food item */
   //CSpace::TMapPerType& m_cFootbots = m_cSpace.GetEntitiesByType("foot-bot");
	CSpace::TMapPerType& m_cFootbots = GetSpace().GetEntitiesByType("foot-bot");

   for(CSpace::TMapPerType::iterator it = m_cFootbots.begin();
       it != m_cFootbots.end();
       ++it) {
      /* Get handle to foot-bot entity and controller */
      CFootBotEntity& cFootBot = *any_cast<CFootBotEntity*>(it->second);
      CBTFootbotRecruitmentController& cController = dynamic_cast<CBTFootbotRecruitmentController&>(cFootBot.GetControllableEntity().GetController());
      /* Get the position of the foot-bot on the ground as a CVector2 */
      CVector2 cPos;
      cPos.Set(cFootBot.GetEmbodiedEntity().GetPosition().GetX(),
               cFootBot.GetEmbodiedEntity().GetPosition().GetY());
      /* Get state data */
      CBTFootbotRecruitmentController::SStateData* sStateData = &cController.GetStateData();
      sStateData->CurrentPosition = cPos;

      /* Get food data */
      CBTFootbotRecruitmentController::SFoodData* sFoodData = &cController.GetFoodData();
      /* The foot-bot has a food item */
      if(sFoodData->HasFoodItem) {
         /* Check whether the foot-bot is in the nest */
         if(InNest(cPos)) {
            /* Place a new food item on the ground */
        	 CVector2 tmp = sFoodData->LastFoodPosition;
        	 m_cFoodPos[sFoodData->FoodItemIdx].Set(tmp.GetX(), tmp.GetY());
            //m_cFoodPos[sFoodData->FoodItemIdx].Set(m_pcRNG->Uniform(m_cForagingArenaSideX), m_pcRNG->Uniform(m_cForagingArenaSideY));
            /* Drop the food item */
            sFoodData->HasFoodItem = false;
            sFoodData->FoodItemIdx = 0;
            ++sFoodData->TotalFoodItems;
            m_nbCollectedFood++;
            /* The floor texture must be updated */
            m_pcFloor->SetChanged();
         }
      }
      else {
         /* The foot-bot has no food item */
         /* Check whether the foot-bot is out of the nest */
         if(!InNest(cPos)) {
            /* Check whether the foot-bot is on a food item */
            bool bDone = false;
            for(size_t i = 0; i < m_cFoodPos.size() && !bDone; ++i) {
               if((cPos - m_cFoodPos[i]).SquareLength() < m_fFoodSquareRadius) {
                  /* If so, we move that item out of sight */
                  m_cFoodPos[i].Set(100.0f, 100.f);
                  /* The foot-bot is now carrying an item */
                  sFoodData->HasFoodItem = true;
                  sFoodData->FoodItemIdx = i;
                  sFoodData->LastFoodPosition = cPos;
                  /* The floor texture must be updated */
                  m_pcFloor->SetChanged();
                  /* We are done */
                  bDone = true;
               }
            }
         }
      }
   }
   if(GetSpace().GetSimulationClock() % 1000 == 0){
	   m_cOutput << GetSpace().GetSimulationClock() << "\t"
                << m_nbCollectedFood << "\t" << m_nbCollectedFood / (GetSpace().GetSimulationClock()/1000)<< "\n";
   }
}
bool CRecruitmentLoopFunctions::InNest(const CVector2& pos){
    if(pos.GetX() > -1.0f && pos.GetX() < 1.0f && pos.GetY() > -5.0f && pos.GetY() < -3.0f) {
      return true;
   }
   return false;
}
Example #17
/*****
 * Required by ARGoS. This function initializes global variables using
 * values from the XML configuration file supplied when ARGoS is run.
 *****/
void DSA_loop_functions::Init(TConfigurationNode& node) {
    /* Temporary variables. */
    CSimulator     *simulator     = &GetSimulator();
    CPhysicsEngine *physicsEngine = &simulator->GetPhysicsEngine("default");
    CVector3        ArenaSize     = GetSpace().GetArenaSize();
    CVector2        rangeX        = CVector2(-ArenaSize.GetX()/2.0, ArenaSize.GetX()/2.0);
    CVector2        rangeY        = CVector2(-ArenaSize.GetY()/2.0, ArenaSize.GetY()/2.0);
    CDegrees        USV_InDegrees;

    /* Get each tag in the loop functions section of the XML file. */
    TConfigurationNode simNode  = GetNode(node, "simulation");
    TConfigurationNode random   = GetNode(node, "_0_FoodDistribution_Random");
    TConfigurationNode cluster  = GetNode(node, "_1_FoodDistribution_Cluster");
    TConfigurationNode powerLaw = GetNode(node, "_2_FoodDistribution_PowerLaw");

    GetNodeAttribute(simNode,  "MaxSimCounter",    MaxSimCounter);
    GetNodeAttribute(simNode,  "VariableSeed",     VariableSeed);
    GetNodeAttribute(simNode,  "OutputData",       OutputData);
    GetNodeAttribute(simNode,  "NestPosition",     NestPosition);
    GetNodeAttribute(simNode,  "NestRadius",       NestRadius);
    GetNodeAttribute(simNode,  "FoodRadius",       FoodRadius);
    GetNodeAttribute(simNode,  "FoodDistribution", FoodDistribution);
    GetNodeAttribute(random,   "FoodItemCount",    FoodItemCount);
    GetNodeAttribute(cluster,  "NumberOfClusters", NumberOfClusters);
    GetNodeAttribute(cluster,  "ClusterWidthX",    ClusterWidthX);
    GetNodeAttribute(cluster,  "ClusterLengthY",   ClusterLengthY);
    GetNodeAttribute(powerLaw, "PowerRank",        PowerRank);

    /* Convert and calculate additional values. */
    NestRadiusSquared         = (NestRadius) * (NestRadius);
    FoodRadiusSquared         = (FoodRadius + 0.04) * (FoodRadius + 0.04);

    ForageRangeX.Set(rangeX.GetX() + (2.0 * FoodRadius),
                          rangeX.GetY() - (2.0 * FoodRadius));
    ForageRangeY.Set(rangeY.GetX() + (2.0 * FoodRadius),
                          rangeY.GetY() - (2.0 * FoodRadius));

    RNG = CRandom::CreateRNG("argos");

    /* Store the iAnts in a more friendly, human-readable structure. */
    CSpace::TMapPerType& footbots = GetSpace().GetEntitiesByType("foot-bot");
    CSpace::TMapPerType::iterator it;

    ReadFile();

    size_t STOP = 0;
    size_t robots = 0;

    for(it = footbots.begin(); it != footbots.end(); it++) 
    {
        CFootBotEntity& footBot = *any_cast<CFootBotEntity*>(it->second);
        DSA_controller& c = dynamic_cast<DSA_controller&>(footBot.GetControllableEntity().GetController());
        DSAnts.push_back(&c);
        c.SetData(&data); 
        c.SetStop(STOP);
        STOP += 10; /* adds 10 seconds extra pause for each robot */                
      
        if(robots <= N_robots)
        {
            c.GetPattern(robotPattern[robots]);
            robots++;
            //attempt to add mutiple colors for each robot
            // if(robots==2)
            // {
            //     DrawTargetRays = 2;
            // }
            // else if(robots ==3)
            // {
            //     DrawTargetRays = 3;
            // }
        }    
    }

    /* Set up the food distribution based on the XML file. */
    SetFoodDistribution();
}
Example #18
void CCameraData::ProjectDepthRay( const CVector2& v2d, CVector3& vdir, CVector3& vori ) const
{
	const Frustum& camfrus = mFrustum;
	CVector3 near_xt_lerp; near_xt_lerp.Lerp( camfrus.mNearCorners[0], camfrus.mNearCorners[1], v2d.GetX() );
	CVector3 near_xb_lerp; near_xb_lerp.Lerp( camfrus.mNearCorners[3], camfrus.mNearCorners[2], v2d.GetX() );
	CVector3 near_lerp; near_lerp.Lerp( near_xt_lerp, near_xb_lerp, v2d.GetY() );
	CVector3 far_xt_lerp; far_xt_lerp.Lerp( camfrus.mFarCorners[0], camfrus.mFarCorners[1], v2d.GetX() );
	CVector3 far_xb_lerp; far_xb_lerp.Lerp( camfrus.mFarCorners[3], camfrus.mFarCorners[2], v2d.GetX() );
	CVector3 far_lerp; far_lerp.Lerp( far_xt_lerp, far_xb_lerp, v2d.GetY() );
	vdir=(far_lerp-near_lerp).Normal();
	vori=near_lerp;
}
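
A hedged usage sketch: turning a normalized screen position into a point on the ground plane z = 0. Only the CVector3 accessors and operators already used in the snippets above are assumed; the picking function itself is illustrative, not part of CCameraData.

/* Illustration only: intersect the view ray through v2d with the plane z = 0.
 * Returns false if the ray is (near-)parallel to the plane or points away from it. */
static bool PickGroundPoint( const CCameraData& cam, const CVector2& v2d, CVector3& out )
{
	CVector3 vdir, vori;
	cam.ProjectDepthRay( v2d, vdir, vori );
	const float fdz = vdir.GetZ();
	if( fdz > -1.0e-6f && fdz < 1.0e-6f )
		return false;                       // ray parallel to the ground plane
	const float ft = -vori.GetZ() / fdz;    // solve vori.z + ft*vdir.z = 0
	if( ft < 0.0f )
		return false;                       // intersection lies behind the ray origin
	out = vori + vdir*ft;                   // CVector3 operator+ and scalar operator* assumed
	return true;
}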
Example #19
/*****
 * Sets target East of the robot's current target.
 *****/
void DSA_controller::SetTargetE(char x){
   CVector2 position = GetTarget();
   target = CVector2(position.GetX(),position.GetY()-stepSize);
}
Example #20
 void CCameraDefaultSensor::Init(TConfigurationNode& t_tree) {
    try {
       /* Parent class init */
       CCI_CameraSensor::Init(t_tree);
       /* Show the frustums */
       GetNodeAttributeOrDefault(t_tree, "show_frustum", m_bShowFrustum, m_bShowFrustum);
       /* For each camera */
       TConfigurationNodeIterator itCamera("camera");
       for(itCamera = itCamera.begin(&t_tree);
           itCamera != itCamera.end();
           ++itCamera) {
          /* Get camera identifier */
          std::string strId;
          GetNodeAttribute(*itCamera, "id", strId);
          /* Parse and look up the anchor */
          std::string strAnchorId;
          GetNodeAttribute(*itCamera, "anchor", strAnchorId);
          SAnchor& sAnchor = m_pcEmbodiedEntity->GetAnchor(strAnchorId);
          /* parse the offset */
          CVector3 cOffsetPosition;
          CQuaternion cOffsetOrientation;
          GetNodeAttribute(*itCamera, "position", cOffsetPosition);
          GetNodeAttribute(*itCamera, "orientation", cOffsetOrientation);
          CTransformationMatrix3 cOffset(cOffsetOrientation, cOffsetPosition);
          /* parse the range */
          CRange<Real> cRange;
          GetNodeAttribute(*itCamera, "range", cRange);
          /* create the projection matrix */
          CSquareMatrix<3> cProjectionMatrix;
          cProjectionMatrix.SetIdentityMatrix();
          /* set the focal length */
          CVector2 cFocalLength;
          GetNodeAttribute(*itCamera, "focal_length", cFocalLength);
          cProjectionMatrix(0,0) = cFocalLength.GetX(); // Fx
          cProjectionMatrix(1,1) = cFocalLength.GetY(); // Fy
          /* set the principal point */
          CVector2 cPrincipalPoint;
          GetNodeAttribute(*itCamera, "principal_point", cPrincipalPoint);
          cProjectionMatrix(0,2) = cPrincipalPoint.GetX(); // Px
          cProjectionMatrix(1,2) = cPrincipalPoint.GetY(); // Py
          /* set the distortion parameters */
          /*
          CMatrix<1,5> cDistortionParameters;
          std::string strDistortionParameters;
          Real pfDistortionParameters[3];
          GetNodeAttribute(*itCamera, "distortion_parameters", strDistortionParameters);
          ParseValues<Real>(strDistortionParameters, 3, pfDistortionParameters, ',');
          cDistortionParameters(0,0) = pfDistortionParameters[0]; // K1
          cDistortionParameters(0,1) = pfDistortionParameters[1]; // K2
          cDistortionParameters(0,4) = pfDistortionParameters[2]; // K3
          */
          /* parse the resolution */
          CVector2 cResolution;
          GetNodeAttribute(*itCamera, "resolution", cResolution);
          /* create and initialise the algorithms */
          std::vector<CCameraSensorSimulatedAlgorithm*> vecSimulatedAlgorithms;
          std::vector<CCI_CameraSensorAlgorithm*> vecAlgorithms;
          TConfigurationNodeIterator itAlgorithm;
          for(itAlgorithm = itAlgorithm.begin(&(*itCamera));
              itAlgorithm != itAlgorithm.end();
              ++itAlgorithm) {
             /* create the algorithm */
             CCameraSensorSimulatedAlgorithm* pcAlgorithm = 
                CFactory<CCameraSensorSimulatedAlgorithm>::New(itAlgorithm->Value());
             /* check that algorithm inherits from a control interface */
             CCI_CameraSensorAlgorithm* pcCIAlgorithm = 
                dynamic_cast<CCI_CameraSensorAlgorithm*>(pcAlgorithm);
             if(pcCIAlgorithm == nullptr) {
                THROW_ARGOSEXCEPTION("Algorithm \"" << itAlgorithm->Value() << 
                                     "\" does not inherit from CCI_CameraSensorAlgorithm");
             }
             /* initialize the algorithm's control interface */
             pcCIAlgorithm->Init(*itAlgorithm);
             /* store pointers to the algorithms */
             vecSimulatedAlgorithms.push_back(pcAlgorithm);
             vecAlgorithms.push_back(pcCIAlgorithm);
          }
          /* create the simulated sensor */
          m_vecSensors.emplace_back(sAnchor, cOffset, cRange, cProjectionMatrix,
                                    cResolution, vecSimulatedAlgorithms);
          /* create the sensor's control interface */
          m_vecInterfaces.emplace_back(strId, vecAlgorithms);
       }
    }
    catch(CARGoSException& ex) {
       THROW_ARGOSEXCEPTION_NESTED("Error initializing camera sensor", ex);
    }
    Update();
 }
Example #21
 void CDynamics2DVelocityControl::SetLinearVelocity(const CVector2& c_velocity) {
    m_ptControlBody->v.x = c_velocity.GetX();
    m_ptControlBody->v.y = c_velocity.GetY();
 }
bool CCombinedLoopFunctions::InNest(const CVector2& pos){
	if(pos.GetX() > -(nestSize/2.0f) && pos.GetX() < (nestSize/2.0f) &&
			pos.GetY() > -(nestSize/2.0f) && pos.GetY() < (nestSize/2.0f)) {
		return true;
	}
	return false;
}