SPLStandardMessage(const int &playerNum, const int &teamNum, const int &fallen, const AbsCoord &robotPos, const AbsCoord &walking, const AbsCoord &shooting, const int &ballAge, const AbsCoord &ballPosition, const AbsCoord &ballVelocity, const int8_t intention, const BroadcastData &broadcast) : playerNum(playerNum), teamNum(teamNum), fallen(fallen), ballAge(ballAge), intention(intention), averageWalkSpeed(200), maxKickDistance(8000), currentPositionConfidence(50), currentSideConfidence(50) { const char* init = SPL_STANDARD_MESSAGE_STRUCT_HEADER; for(unsigned int i = 0; i < sizeof(header); ++i) header[i] = init[i]; version = SPL_STANDARD_MESSAGE_STRUCT_VERSION; pose[0] = robotPos.x(); pose[1] = robotPos.y(); pose[2] = robotPos.theta(); walkingTo[0] = walking.x(); walkingTo[1] = walking.y(); shootingTo[0] = shooting.x(); shootingTo[1] = shooting.y(); AbsCoord ballPosRR = ballPosition.convertToRobotRelativeCartesian(robotPos); ball[0] = ballPosRR.x(); ball[1] = ballPosRR.y(); ballVel[0] = ballVelocity.x(); ballVel[1] = ballVelocity.y(); for(int i = 0; i < SPL_STANDARD_MESSAGE_MAX_NUM_OF_PLAYERS; ++i) suggestion[i] = 0; // Everything else we need goes into the "data" section numOfDataBytes = sizeof(broadcast); memcpy(data, &broadcast, numOfDataBytes); }
void LocalisationAdapter::writeResultToBlackboard(void) { AbsCoord robotPos = L->getRobotPose(); AbsCoord ballPosition = L->getBallPosition(); RRCoord ballPosRR = ballPosition.convertToRobotRelative(robotPos); AbsCoord ballPosRRC = ballPosition.convertToRobotRelativeCartesian(robotPos); AbsCoord ballVelocity = L->getBallVelocity(); double robotPosUncertainty = L->getRobotPosUncertainty(); double robotHeadingUncertainty = L->getRobotHeadingUncertainty(); double ballPosUncertainty = L->getBallPosUncertainty(); double ballVelEigenvalue = L->getBallVelocityUncertainty(); AbsCoord nextBall(ballPosition.x() + ballVelocity.x(), ballPosition.y() + ballVelocity.y(), 0.0f); AbsCoord nextBallRRC = nextBall.convertToRobotRelativeCartesian(robotPos); AbsCoord ballVelRRC(nextBallRRC.x() - ballPosRRC.x(), nextBallRRC.y() - ballPosRRC.y(), 0.0f); Pose pose = readFrom(motion, pose); XYZ_Coord ballNeckRelative = pose.robotRelativeToNeckCoord(ballPosRR, BALL_RADIUS); uint32_t ballLostCount = L->getBallLostCount(); SharedLocalisationUpdateBundle sharedLocalisationBundle = L->getSharedUpdateData(); acquireLock(serialization); writeTo(localisation, robotPos, robotPos); writeTo(localisation, ballLostCount, ballLostCount); writeTo(localisation, ballPos, ballPosition); writeTo(localisation, ballPosRR, ballPosRR); writeTo(localisation, ballPosRRC, ballPosRRC); writeTo(localisation, ballVelRRC, ballVelRRC); writeTo(localisation, ballVel, ballVelocity); writeTo(localisation, robotPosUncertainty, robotPosUncertainty); writeTo(localisation, robotHeadingUncertainty, robotHeadingUncertainty); writeTo(localisation, ballPosUncertainty, ballPosUncertainty); writeTo(localisation, ballVelEigenvalue, ballVelEigenvalue); writeTo(localisation, teamBall, TeamBallInfo()); // TODO: make this a different value? 
writeTo(localisation, ballNeckRelative, ballNeckRelative); writeTo(localisation, sharedLocalisationBundle, sharedLocalisationBundle); writeTo(localisation, havePendingOutgoingSharedBundle, true); writeTo(localisation, havePendingIncomingSharedBundle, std::vector<bool>(5, false)); writeTo(localisation, robotObstacles, robotFilter->filteredRobots); releaseLock(serialization); }
/*-----------------------------------------------------------------------------
 * Motion thread tick function
 *---------------------------------------------------------------------------*/
// One iteration of the motion thread: read the behaviour request, sample the
// sensors, update kinematics and the body model, run the active generator to
// produce joint targets, publish results to the blackboard, and actuate.
void MotionAdapter::tick() {
   Timer t;

   // Get the motion request from behaviours
   int behaviourReadBuf = readFrom(behaviour, readBuf);
   ActionCommand::All request = readFrom(behaviour, request[behaviourReadBuf]).actions;

   // Get sensor information from kinematics
   SensorValues sensors;
   if(request.body.actionType == Body::MOTION_CALIBRATE){
      // raw sensor values are sent to offnao for calibration
      // these values are straight forward copy paste into pos files
      sensors = nakedTouch->getSensors(kinematics);
      // sign-flip and degree conversion so the values paste directly into pos files
      sensors.sensors[Sensors::InertialSensor_AngleX] = -RAD2DEG(sensors.sensors[Sensors::InertialSensor_AngleX]);
      sensors.sensors[Sensors::InertialSensor_AngleY] = -RAD2DEG(sensors.sensors[Sensors::InertialSensor_AngleY]);
      sensors.sensors[Sensors::InertialSensor_GyrX] = -sensors.sensors[Sensors::InertialSensor_GyrX];
      sensors.sensors[Sensors::InertialSensor_GyrY] = -sensors.sensors[Sensors::InertialSensor_GyrY];
   } else {
      sensors = touch->getSensors(kinematics);
   }

   // For kinematics, give it the lagged sensorValues with the most recent lean angles (because they already
   // have a lag in them) unless it's the very first one otherwise it will propagate nans everywhere
   SensorValues sensorsLagged;
   if (sensorBuffer.size() > 0) {
      sensorsLagged = sensorBuffer.back();
   } else {
      // First tick: the lag buffer is empty, so fall back to the live reading.
      sensorsLagged = sensors;
   }
   // Splice the freshest lean angles into the otherwise-lagged sample.
   sensorsLagged.sensors[Sensors::InertialSensor_AngleX] = sensors.sensors[Sensors::InertialSensor_AngleX];
   sensorsLagged.sensors[Sensors::InertialSensor_AngleY] = sensors.sensors[Sensors::InertialSensor_AngleY];
   kinematics.setSensorValues(sensorsLagged);
   kinematics.parameters = readFrom(kinematics, parameters);

   // Calculate the Denavit-Hartenberg chain
   kinematics.updateDHChain();
   writeTo(motion, pose, kinematics.getPose());

   bool standing = touch->getStanding();
   ButtonPresses buttons = touch->getButtons();
   llog(VERBOSE) << "touch->getSensors took " << t.elapsed_ms() << "ms" << std::endl;
   t.restart();

   // Keep a running time for standing
   if (standing) {
      uptime = 0.0f;
   } else {
      uptime += 0.01f;  // increments per tick — presumably a 10 ms tick period; confirm against thread rate
   }
   writeTo(motion, uptime, uptime);

   // Sensors are lagged so it correctly synchronises with vision
   sensorBuffer.insert(sensorBuffer.begin(), sensors);
   writeTo(motion, sensors, sensors); //Lagged);
   writeTo(kinematics, sensorsLagged, sensorsLagged);
   // std::cout << "live = " << sensors.joints.angles[Joints::HeadYaw]
   //           << " delayed = " << sensorsLagged.joints.angles[Joints::HeadYaw]
   //           << std::endl;
   // Cap the lag buffer at SENSOR_LAG samples (newest at front, oldest popped).
   if (sensorBuffer.size() > SENSOR_LAG) {
      sensorBuffer.pop_back();
   }

   // sonar recorder gets an update and returns the next sonar request
   request.sonar = sonarRecorder.update(sensors.sonar);
   writeTo(motion, sonarWindow, sonarRecorder.sonarWindow);

   if (isIncapacitated(request.body.actionType)) {
      uptime = 0.0f;
   }

   // Accumulate button presses across ticks until consumed elsewhere.
   buttons |= readFrom(motion, buttons);
   writeTo(motion, buttons, buttons);
   llog(VERBOSE) << "writeTo / readFrom took " << t.elapsed_ms() << "ms" << std::endl;
   t.restart();

   // When picked up / standing, reset the generator and odometry.
   if (standing) {
      generator->reset();
      request.body = ActionCommand::Body::INITIAL;
      odometry.clear();
   }

   // Get the position of the ball in robot relative cartesian coordinates
   AbsCoord robotPose = readFrom(localisation, robotPos);
   AbsCoord ballAbs = readFrom(localisation, ballPos);
   AbsCoord ball = ballAbs.convertToRobotRelativeCartesian(robotPose);

   // Update the body model
   bodyModel.kinematics = &kinematics;
   bodyModel.update(&odometry, sensors);

   // Get the joints requested by whichever generator we're using
   JointValues joints = generator->makeJoints(&request, &odometry, sensors, bodyModel, ball.x(), ball.y());

   // Save the body model Center of Mass
   writeTo(motion, com, bodyModel.getCoM());
   writeTo(motion, active, request);
   // Odometry is lagged by walk's estimations, and it also correctly synchronises with vision
   writeTo(motion, odometry, Odometry(odometry));
   // Save the pendulum model
   writeTo(motion, pendulumModel, bodyModel.getPendulumModel());
   llog(VERBOSE) << "generator->makeJoints took " << t.elapsed_ms() << "ms" << std::endl;
   t.restart();

   // Actuate joints as requested.
   effector->actuate(joints, request.leds, request.sonar);
   llog(VERBOSE) << "effector->actuate took " << t.elapsed_ms() << "ms" << std::endl;
}