void VariableView::redraw(NaoData *naoData) {
   Frame frame = naoData->getCurrentFrame();
   Blackboard *blackboard = (frame.blackboard);
   if (!blackboard) return;

   // struct RobotPos wrapperPos[4];
   // readArray(localisation, robot, wrapperPos);
   AbsCoord pos = readFrom(localisation, robotPos);
   stringstream spos;

   uint32_t ballLostCount = readFrom(localisation, ballLostCount);
   AbsCoord ballPos = readFrom(localisation, ballPos);
   AbsCoord ballVelRRC = readFrom(localisation, ballVelRRC);

   spos << " Robot @ (" << pos.x() << ", " << pos.y() << ")" << endl;
   spos << "Heading: " << pos.theta() << " (" << RAD2DEG(pos.theta()) << " deg)" << endl;
   spos << "Variance: " << pos.var(0,0) << ", " << pos.var(1,1) << ", " << pos.var(2,2) << endl;
   spos << "ballLostCount: " << ballLostCount << endl;
   spos << "ball pos; x:" << ballPos.x() << ", y:" << ballPos.y() << endl;
   spos << "ball var; xx:" << ballPos.var(0,0) << ", xy:" << ballPos.var(0,1) << endl;
   spos << " yx:" << ballPos.var(1,0) << ", yy:" << ballPos.var(1,1) << endl;
   spos << "ball vel rrc; x':" << ballVelRRC.x() << ", y':" << ballVelRRC.y() << endl;
   spos << "ball vel rrc var; x'x':" << ballVelRRC.var(0,0) << ", x'y':" << ballVelRRC.var(0,1) << endl;
   spos << " y'x':" << ballVelRRC.var(1,0) << ", y'y':" << ballVelRRC.var(1,1) << endl;

   localisationRobotPos->setText(0, spos.str().c_str());

   updateVision(naoData);
   updateBehaviour(naoData);

   stringstream steam;
   steam << "gamecontroller.team_red = " << readFrom(gameController, team_red) << endl;
   gameControllerTeam->setText(0, steam.str().c_str());
}
void MultiGaussianDistribution::addSymmetricMode(std::vector<SimpleGaussian*> &distribution) {
   AbsCoord ballPos = distribution.front()->getBallPosition();
   double fromCentre = sqrt(ballPos.x()*ballPos.x() + ballPos.y()*ballPos.y());

   // If the ball is close to the centre, a symmetric mode would lie too close to the original
   // mode and could therefore produce spurious matches.
   if (fromCentre >= 1300.0) { // TODO: make this a constant in the ConstantsProvider
      SimpleGaussian *symmetric = distribution.front()->createSymmetricGaussian();
      distribution.push_back(symmetric);
   }
}
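// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the codebase: the reflection that
// createSymmetricGaussian is assumed to apply. The SPL field looks identical
// after a 180-degree rotation about its centre, so a hypothesis (x, y, theta)
// has an indistinguishable twin at (-x, -y, theta + pi). A ball near the
// centre circle maps almost onto itself, which is why modes closer than
// ~1300 mm to the centre are not mirrored above.
// ---------------------------------------------------------------------------
#include <cmath>

struct Pose2D { double x, y, theta; };

static Pose2D flipAboutFieldCentre(const Pose2D &p) {
   Pose2D flipped = { -p.x, -p.y, p.theta + M_PI };
   // Keep the heading in (-pi, pi]
   if (flipped.theta > M_PI) flipped.theta -= 2.0 * M_PI;
   return flipped;
}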
SPLStandardMessage(const int &playerNum,
                   const int &teamNum,
                   const int &fallen,
                   const AbsCoord &robotPos,
                   const AbsCoord &walking,
                   const AbsCoord &shooting,
                   const int &ballAge,
                   const AbsCoord &ballPosition,
                   const AbsCoord &ballVelocity,
                   const int8_t intention,
                   const BroadcastData &broadcast)
   : playerNum(playerNum),
     teamNum(teamNum),
     fallen(fallen),
     ballAge(ballAge),
     intention(intention),
     averageWalkSpeed(200),
     maxKickDistance(8000),
     currentPositionConfidence(50),
     currentSideConfidence(50) {
   const char* init = SPL_STANDARD_MESSAGE_STRUCT_HEADER;
   for (unsigned int i = 0; i < sizeof(header); ++i)
      header[i] = init[i];
   version = SPL_STANDARD_MESSAGE_STRUCT_VERSION;

   pose[0] = robotPos.x();
   pose[1] = robotPos.y();
   pose[2] = robotPos.theta();

   walkingTo[0] = walking.x();
   walkingTo[1] = walking.y();

   shootingTo[0] = shooting.x();
   shootingTo[1] = shooting.y();

   AbsCoord ballPosRR = ballPosition.convertToRobotRelativeCartesian(robotPos);
   ball[0] = ballPosRR.x();
   ball[1] = ballPosRR.y();

   ballVel[0] = ballVelocity.x();
   ballVel[1] = ballVelocity.y();

   for (int i = 0; i < SPL_STANDARD_MESSAGE_MAX_NUM_OF_PLAYERS; ++i)
      suggestion[i] = 0;

   // Everything else we need goes into the "data" section
   numOfDataBytes = sizeof(broadcast);
   memcpy(data, &broadcast, numOfDataBytes);
}
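// ---------------------------------------------------------------------------
// Illustrative sketch of the transform convertToRobotRelativeCartesian is
// assumed to perform when filling ball[] above: express a field-frame point
// in the robot's frame by translating to the robot and rotating by -theta.
// Names here are illustrative, not the codebase's API.
// ---------------------------------------------------------------------------
#include <cmath>

struct Vec2 { double x, y; };

static Vec2 fieldToRobotRelative(const Vec2 &point, double robotX, double robotY, double robotTheta) {
   const double dx = point.x - robotX;
   const double dy = point.y - robotY;
   const double c = std::cos(-robotTheta);
   const double s = std::sin(-robotTheta);
   return Vec2{ c * dx - s * dy, s * dx + c * dy };
}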
void FieldPainter::drawBallPosRRC(const AbsCoord &ballPos, const AbsCoord &ballVel, bool moving, const AbsCoord &robot) {
   save();
   translate(robot.x(), robot.y());
   rotate(RAD2DEG(robot.theta()));
   QPoint newBall = QPoint(ballPos.x(), ballPos.y());
   QPoint nextBall = QPoint(ballPos.x() + ballVel.x(), ballPos.y() + ballVel.y());
   setPen(QPen(QColor(255, 0, 0)));
   setBrush(QBrush(QColor(255, 0, 0)));
   drawEllipse(newBall, 40, 40);
   if (moving) drawLine(newBall, nextBall);
   restore();
   drawRRCovEllipse(ballPos, robot);
}
void FieldPainter::drawPostAbs(const AbsCoord &post, QColor colour) {
   save();
   translate(post.x(), post.y());
   setPen("white");
   setBrush(colour);
   drawEllipse(QPoint(0, 0), 80, 80);
   restore();
}
void LocalisationAdapter::writeResultToBlackboard(void) {
   AbsCoord robotPos = L->getRobotPose();
   AbsCoord ballPosition = L->getBallPosition();
   RRCoord ballPosRR = ballPosition.convertToRobotRelative(robotPos);
   AbsCoord ballPosRRC = ballPosition.convertToRobotRelativeCartesian(robotPos);
   AbsCoord ballVelocity = L->getBallVelocity();

   double robotPosUncertainty = L->getRobotPosUncertainty();
   double robotHeadingUncertainty = L->getRobotHeadingUncertainty();
   double ballPosUncertainty = L->getBallPosUncertainty();
   double ballVelEigenvalue = L->getBallVelocityUncertainty();

   AbsCoord nextBall(ballPosition.x() + ballVelocity.x(), ballPosition.y() + ballVelocity.y(), 0.0f);
   AbsCoord nextBallRRC = nextBall.convertToRobotRelativeCartesian(robotPos);
   AbsCoord ballVelRRC(nextBallRRC.x() - ballPosRRC.x(), nextBallRRC.y() - ballPosRRC.y(), 0.0f);

   Pose pose = readFrom(motion, pose);
   XYZ_Coord ballNeckRelative = pose.robotRelativeToNeckCoord(ballPosRR, BALL_RADIUS);

   uint32_t ballLostCount = L->getBallLostCount();
   SharedLocalisationUpdateBundle sharedLocalisationBundle = L->getSharedUpdateData();

   acquireLock(serialization);

   writeTo(localisation, robotPos, robotPos);
   writeTo(localisation, ballLostCount, ballLostCount);
   writeTo(localisation, ballPos, ballPosition);
   writeTo(localisation, ballPosRR, ballPosRR);
   writeTo(localisation, ballPosRRC, ballPosRRC);
   writeTo(localisation, ballVelRRC, ballVelRRC);
   writeTo(localisation, ballVel, ballVelocity);
   writeTo(localisation, robotPosUncertainty, robotPosUncertainty);
   writeTo(localisation, robotHeadingUncertainty, robotHeadingUncertainty);
   writeTo(localisation, ballPosUncertainty, ballPosUncertainty);
   writeTo(localisation, ballVelEigenvalue, ballVelEigenvalue);
   writeTo(localisation, teamBall, TeamBallInfo()); // TODO: make this a different value?
   writeTo(localisation, ballNeckRelative, ballNeckRelative);
   writeTo(localisation, sharedLocalisationBundle, sharedLocalisationBundle);
   writeTo(localisation, havePendingOutgoingSharedBundle, true);
   writeTo(localisation, havePendingIncomingSharedBundle, std::vector<bool>(5, false));
   writeTo(localisation, robotObstacles, robotFilter->filteredRobots);

   releaseLock(serialization);
}
void FieldPainter::drawRobotAbs(const AbsCoord &pos, QColor colour, bool ellipse, QColor varColour) {
   static const int robot_radius = 120;
   QRect roboRect(-robot_radius, -robot_radius, robot_radius*2, robot_radius*2);
   QPen variancePen(varColour);
   variancePen.setWidth(20);

   // draw pacman
   save();
   translate(pos.x(), pos.y());
   setPen("black");
   int fov;
   if (pos.var(2,2) > 0) {
      fov = MIN(160, RAD2DEG(sqrt(pos.var(2,2))));
   } else {
      fov = 0;
   }
   setBrush(QBrush(colour));
   rotate((RAD2DEG(pos.theta()) - (fov/2)));
   drawPie(roboRect, 0, (360-fov)*16);
   restore();

   /* Converting the covariance matrix to an ellipse representation:
    * the square roots of the eigenvalues give the major and minor semi-axis lengths,
    * and the eigenvectors give the directions of those axes (perpendicular for a
    * symmetric matrix).
    */
   if (ellipse) {
      Eigen::VectorXf eigenvalues = pos.var.block<2, 2>(0,0).marked<Eigen::SelfAdjoint>().eigenvalues();
      int r_major_axis = MAX(sqrt(MAX(eigenvalues[0], eigenvalues[1])), robot_radius);
      int r_minor_axis = MAX(sqrt(MIN(eigenvalues[0], eigenvalues[1])), robot_radius);
      float theta = atan2(2*pos.var(0,1), pos.var(0,0)-pos.var(1,1)) / 2.0;

      // draw position covariance ellipse
      save();
      translate(pos.x(), pos.y());
      rotate(RAD2DEG(theta));
      QRect varRect(-r_major_axis, -r_minor_axis, r_major_axis*2, r_minor_axis*2);
      setPen(variancePen);
      setBrush(QBrush(Qt::NoBrush));
      drawEllipse(varRect);
      restore();
   }
}
// draw robot relative position covariance ellipse
void FieldPainter::drawRRCovEllipse(const AbsCoord &pos, const AbsCoord &robotPos) {
   QPen variancePen("red");
   variancePen.setWidth(20);

   Eigen::VectorXf eigenvalues = pos.var.block<2, 2>(0,0).marked<Eigen::SelfAdjoint>().eigenvalues();
   int r_major_axis = sqrt(MAX(eigenvalues[0], eigenvalues[1]));
   int r_minor_axis = sqrt(MIN(eigenvalues[0], eigenvalues[1]));
   float theta = atan2(2*pos.var(0,1), pos.var(0,0)-pos.var(1,1)) / 2.0;

   save();
   translate(robotPos.x(), robotPos.y());
   rotate(RAD2DEG(robotPos.theta()));
   translate(pos.x(), pos.y());
   rotate(RAD2DEG(theta));
   QRect varRect(-r_major_axis, -r_minor_axis, r_major_axis*2, r_minor_axis*2);
   setPen(variancePen);
   setBrush(QBrush(Qt::NoBrush));
   drawEllipse(varRect);
   restore();
}
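// ---------------------------------------------------------------------------
// Illustrative sketch of the covariance-to-ellipse conversion used by the two
// painters above, written against the current Eigen API (the code above uses
// the older marked<SelfAdjoint>() call). The eigenvalues of the 2x2 position
// covariance are the squared semi-axis lengths, and the orientation of the
// major axis is 0.5 * atan2(2*sxy, sxx - syy).
// ---------------------------------------------------------------------------
#include <Eigen/Dense>
#include <algorithm>
#include <cmath>

struct EllipseParams { double semiMajor, semiMinor, angleRad; };

static EllipseParams covarianceToEllipse(const Eigen::Matrix2d &cov) {
   Eigen::SelfAdjointEigenSolver<Eigen::Matrix2d> solver(cov);
   const Eigen::Vector2d ev = solver.eigenvalues();   // ascending order
   EllipseParams e;
   e.semiMajor = std::sqrt(std::max(ev(1), 0.0));
   e.semiMinor = std::sqrt(std::max(ev(0), 0.0));
   e.angleRad  = 0.5 * std::atan2(2.0 * cov(0, 1), cov(0, 0) - cov(1, 1));
   return e;
}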
void FieldPainter::drawBallPosAbs(const AbsCoord &ball, QColor colour) {
   save();
   QPoint newBall = QPoint(ball.x(), ball.y());
   QPen absPen(colour);
   setPen(absPen);
   setBrush(QBrush(colour));
   drawEllipse(newBall, 40, 40);
   restore();
   drawAbsCovEllipse(ball, absPen);
}
void FieldPainter::drawPlayerNumber(const AbsCoord &pos, int num) {
   save();
   translate(pos.x() - 50, pos.y() - 400);
   scale(1, -1);
   setPen("white");
   setBrush(QColor("white"));
   QFont font;
   font.setPixelSize(200);
   setFont(font);
   drawText(0, 0, QString::number(num));
   restore();
}
void FieldPainter::translateRR(const RRCoord &rr, const AbsCoord &robot) {
   translate(robot.x(), robot.y());
   rotate((const float)RAD2DEG(robot.theta()));
   rotate((const float)RAD2DEG(rr.heading()));
   translate(rr.distance(), 0);
   /*
   rotate((const float)RAD2DEG(-robot.theta()));
   rotate((const float)RAD2DEG(-rr.heading()));
   */
   if (!isnan(rr.orientation())) {
      rotate(RAD2DEG(-rr.orientation()));
   }
}
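// ---------------------------------------------------------------------------
// Illustrative sketch (names are not the codebase's API) of the field-frame
// point the translate/rotate chain in translateRR ends up at: a robot-relative
// observation at (distance, heading) maps to
//    x = xr + d * cos(theta_r + heading)
//    y = yr + d * sin(theta_r + heading)
// ---------------------------------------------------------------------------
#include <cmath>

struct Vec2 { double x, y; };

static Vec2 robotRelativeToField(double distance, double heading,
                                 double robotX, double robotY, double robotTheta) {
   const double angle = robotTheta + heading;
   return Vec2{ robotX + distance * std::cos(angle),
                robotY + distance * std::sin(angle) };
}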
void MultiGaussianDistribution::startOfPlayReset(void) {
   // Zero out the weight of every mode, other than the best one, that lies in the opponent half.
   for (unsigned i = 1; i < modes.size(); i++) {
      if (modes[i]->getRobotPose().x() > 0.0) {
         modes[i]->setWeight(0.0);
      }
   }

   AbsCoord robotPose = modes.front()->getRobotPose();

   // If we are well inside the opponent half, or just inside it while apparently facing our own
   // goal, then we are symmetrically flipped. NOTE: we don't want to blindly zero out all modes
   // in the opposing half, because we may actually be in our own half while noise in the state
   // estimation places us just over the halfway line. This is quite likely for robots standing
   // near the halfway line.
   if (robotPose.x() > 1000.0 || (robotPose.x() > 0.0 && fabs(robotPose.theta()) > M_PI/2.0)) {
      SimpleGaussian *flippedMode = modes.front()->createSymmetricGaussian();
      modes.front()->setWeight(0.0);
      modes.push_back(flippedMode);
   }

   fixupDistribution();
}
void FieldPainter::drawFeatureAbs(const AbsCoord &pos, const FieldFeatureInfo::Type &type) {
   save();
   translate(pos.x(), pos.y());
   rotate(RAD2DEG(pos.theta()));
   setPen(Qt::NoPen);
   setBrush(QBrush("red"));
   QPen p;
   switch (type) {
      case FieldFeatureInfo::fCorner:
         save();
         rotate(RAD2DEG(M_PI_4));
         drawRect(-25, -25, 300, 50);
         restore();
         save();
         rotate(RAD2DEG(-M_PI_4));
         drawRect(-25, -25, 300, 50);
         restore();
         break;
      case FieldFeatureInfo::fTJunction:
         drawRect(-25, -300, 50, 600);
         drawRect(0, -25, 300, 50);
         // fall through
      case FieldFeatureInfo::fPenaltySpot:
         break;
      case FieldFeatureInfo::fCentreCircle:
         save();
         setPen("red");
         p = pen();
         p.setWidth(50);
         setPen(p);
         setBrush(Qt::NoBrush);
         drawEllipse(QPoint(0, 0), 600, 600);
         restore();
         break;
      case FieldFeatureInfo::fLine:
         break;
      default:
         break;
   }
   restore();
}
/*-----------------------------------------------------------------------------
 * Motion thread tick function
 *---------------------------------------------------------------------------*/
void MotionAdapter::tick() {
   Timer t;

   // Get the motion request from behaviours
   int behaviourReadBuf = readFrom(behaviour, readBuf);
   ActionCommand::All request = readFrom(behaviour, request[behaviourReadBuf]).actions;

   // Get sensor information from kinematics
   SensorValues sensors;
   if (request.body.actionType == Body::MOTION_CALIBRATE) {
      // Raw sensor values are sent to offnao for calibration.
      // These values can be copied straight into pos files.
      sensors = nakedTouch->getSensors(kinematics);
      sensors.sensors[Sensors::InertialSensor_AngleX] = -RAD2DEG(sensors.sensors[Sensors::InertialSensor_AngleX]);
      sensors.sensors[Sensors::InertialSensor_AngleY] = -RAD2DEG(sensors.sensors[Sensors::InertialSensor_AngleY]);
      sensors.sensors[Sensors::InertialSensor_GyrX] = -sensors.sensors[Sensors::InertialSensor_GyrX];
      sensors.sensors[Sensors::InertialSensor_GyrY] = -sensors.sensors[Sensors::InertialSensor_GyrY];
   } else {
      sensors = touch->getSensors(kinematics);
   }

   // For kinematics, give it the lagged sensor values with the most recent lean angles (because
   // they already have a lag in them), unless this is the very first tick, otherwise NaNs would
   // propagate everywhere.
   SensorValues sensorsLagged;
   if (sensorBuffer.size() > 0) {
      sensorsLagged = sensorBuffer.back();
   } else {
      sensorsLagged = sensors;
   }
   sensorsLagged.sensors[Sensors::InertialSensor_AngleX] = sensors.sensors[Sensors::InertialSensor_AngleX];
   sensorsLagged.sensors[Sensors::InertialSensor_AngleY] = sensors.sensors[Sensors::InertialSensor_AngleY];

   kinematics.setSensorValues(sensorsLagged);
   kinematics.parameters = readFrom(kinematics, parameters);

   // Calculate the Denavit-Hartenberg chain
   kinematics.updateDHChain();
   writeTo(motion, pose, kinematics.getPose());

   bool standing = touch->getStanding();
   ButtonPresses buttons = touch->getButtons();
   llog(VERBOSE) << "touch->getSensors took " << t.elapsed_ms() << "ms" << std::endl;
   t.restart();

   // Keep a running time for standing
   if (standing) {
      uptime = 0.0f;
   } else {
      uptime += 0.01f;
   }
   writeTo(motion, uptime, uptime);

   // Sensors are lagged so they correctly synchronise with vision
   sensorBuffer.insert(sensorBuffer.begin(), sensors);
   writeTo(motion, sensors, sensors); //Lagged);
   writeTo(kinematics, sensorsLagged, sensorsLagged);
   // std::cout << "live = " << sensors.joints.angles[Joints::HeadYaw]
   //           << " delayed = " << sensorsLagged.joints.angles[Joints::HeadYaw]
   //           << std::endl;
   if (sensorBuffer.size() > SENSOR_LAG) {
      sensorBuffer.pop_back();
   }

   // The sonar recorder gets an update and returns the next sonar request
   request.sonar = sonarRecorder.update(sensors.sonar);
   writeTo(motion, sonarWindow, sonarRecorder.sonarWindow);

   if (isIncapacitated(request.body.actionType)) {
      uptime = 0.0f;
   }

   buttons |= readFrom(motion, buttons);
   writeTo(motion, buttons, buttons);
   llog(VERBOSE) << "writeTo / readFrom took " << t.elapsed_ms() << "ms" << std::endl;
   t.restart();

   if (standing) {
      generator->reset();
      request.body = ActionCommand::Body::INITIAL;
      odometry.clear();
   }

   // Get the position of the ball in robot relative cartesian coordinates
   AbsCoord robotPose = readFrom(localisation, robotPos);
   AbsCoord ballAbs = readFrom(localisation, ballPos);
   AbsCoord ball = ballAbs.convertToRobotRelativeCartesian(robotPose);

   // Update the body model
   bodyModel.kinematics = &kinematics;
   bodyModel.update(&odometry, sensors);

   // Get the joints requested by whichever generator we're using
   JointValues joints = generator->makeJoints(&request, &odometry, sensors, bodyModel, ball.x(), ball.y());

   // Save the body model centre of mass
   writeTo(motion, com, bodyModel.getCoM());
   writeTo(motion, active, request);

   // Odometry is lagged by walk's estimations, and it also correctly synchronises with vision
   writeTo(motion, odometry, Odometry(odometry));

   // Save the pendulum model
   writeTo(motion, pendulumModel, bodyModel.getPendulumModel());
   llog(VERBOSE) << "generator->makeJoints took " << t.elapsed_ms() << "ms" << std::endl;
   t.restart();

   // Actuate joints as requested
   effector->actuate(joints, request.leds, request.sonar);
   llog(VERBOSE) << "effector->actuate took " << t.elapsed_ms() << "ms" << std::endl;
}
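// ---------------------------------------------------------------------------
// Illustrative sketch (names are hypothetical) of the fixed-delay buffering
// used in tick() above for sensor lag: new readings go in at the front, the
// lagged reading is taken from the back, and the buffer is capped at a fixed
// number of entries so the delay stays constant once the buffer has filled.
// ---------------------------------------------------------------------------
#include <cstddef>
#include <deque>

template <typename T>
class LagBuffer {
public:
   explicit LagBuffer(std::size_t lag) : lag_(lag) {}

   // Push the newest sample and return the lagged one (the oldest retained
   // sample; this is the newest sample itself until the buffer has filled).
   T push(const T &sample) {
      buffer_.push_front(sample);
      if (buffer_.size() > lag_) buffer_.pop_back();
      return buffer_.back();
   }

private:
   std::size_t lag_;
   std::deque<T> buffer_;
};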
void ICPFieldView::redraw(Blackboard *blackboard,
                          const AbsCoord &robotPos,
                          int icpResult,
                          const AbsCoord &icpObs,
                          const AbsCoord &ballRRC,
                          const AbsCoord &teamBall) {
   *renderPixmap = imagePixmap;
   FieldPainter painter(renderPixmap);

   AbsCoord anchor;
   if (icpResult >= ICP_LOCALISED) {
      anchor = icpObs;
   } else {
      anchor = robotPos;
   }

   /* Draw features with absolute coordinates */
   if (!isnan(robotPos.theta()) && !isnan(robotPos.x()) && !isnan(robotPos.y())) {
      /* Draw the team ball */
      if (!isnan(teamBall.x()) && !isnan(teamBall.y())) {
         painter.drawBallPosAbs(teamBall, QColor(255, 0, 255));
      }

      /* Draw our position */
      painter.drawRobotAbs(robotPos, "#ffee00", true, "black"); // pacman yellow, black variance

      if (icpResult >= ICP_LOCALISED) {
         /* Draw icp's chosen/closest observation on top of the others */
         if (!isnan(icpObs.theta()) && !isnan(icpObs.x()) && !isnan(icpObs.y())) {
            painter.drawRobotAbs(icpObs, "blue", true, "blue");
         }
      }
   }

   /* Draw features with robot-relative coordinates */
   if (!isnan(anchor.theta()) && !isnan(anchor.x()) && !isnan(anchor.y())) {
      /* Posts */
      std::vector<PostInfo> posts = readFrom(vision, posts);
      std::vector<PostInfo>::iterator post_i;
      for (post_i = posts.begin(); post_i < posts.end(); ++post_i) {
         painter.drawPostRR(*post_i, anchor);
      }

      /* Ball */
      AbsCoord bVel;
      if (!isnan(ballRRC.x()) && !isnan(ballRRC.y())) {
         painter.drawBallPosRRC(ballRRC, bVel, false, anchor);
      }

      /* Field Lines RR */
      std::vector<FieldFeatureInfo> features = readFrom(vision, fieldFeatures);
      std::vector<FieldFeatureInfo>::iterator feature_i;
      for (feature_i = features.begin(); feature_i < features.end(); ++feature_i) {
         painter.drawFeatureRR(*feature_i, anchor);
      }

      /*
      RRCoord fieldPoints[MAX_FIELD_LINE_POINTS];
      readArray(vision, fieldLinePoints, fieldPoints);
      for (uint16_t i = 0; i < MAX_FIELD_LINE_POINTS; i++) {
         std::cout << "point with d = " << fieldPoints[i].distance()
                   << " and h = " << fieldPoints[i].heading() << std::endl;
         CentreCircleInfo c = CentreCircleInfo();
         FieldFeatureInfo f = FieldFeatureInfo(fieldPoints[i], c);
         painter.drawFeatureRR(f, anchor);
      }
      */
   }

   /* Draw ICP lines */
   // static std::vector<LineInfo> allFieldLines; // in decreasing order of length
   std::vector<LineInfo>::const_iterator line = ICP::getAllFieldLines().begin();
   for (; line != ICP::getAllFieldLines().end(); ++line) {
      painter.drawFieldLine(*line);
   }

   setPixmap(*renderPixmap);
   return;
}