/**
 * @brief Construct a merge manipulation for a pair of ways.
 *
 * @param leftId        Element id of the first ("left") way to merge.
 * @param rightId       Element id of the second ("right") way to merge.
 * @param map           Map used to compute the initial score estimate.
 * @param minSplitSize  Minimum length (meters) allowed when splitting ways.
 */
WayMergeManipulation::WayMergeManipulation(long leftId, long rightId, ConstOsmMapPtr map,
                                           Meters minSplitSize)
  // Prefer the member-initializer list over assignment in the body: members
  // are initialized exactly once instead of default-constructed then assigned.
  : _left(leftId),
    _right(rightId),
    _minSplitSize(minSplitSize)
{
  // The estimate depends on the current map state, so compute it up front.
  updateEstimate(map);
}
void SingleObjectModel::updateFromOdometry() { jpdaf->correctFromOdometry(); // Update the elapsed time since the object was seen (ms.) elapsedTimeSinceLastObs = (getCurrentTime() - lastMeasurement) / 1000.0f; updateEstimate(); }
void SingleObjectModel::predict() { // Update the elapsed time since the object was seen (ms.) elapsedTimeSinceLastObs = (getCurrentTime() - lastMeasurement) / 1000.0f; jpdaf->predict(); updateEstimate(); }
/// Correct the filter with a fresh set of observed features.
/// @param features  Samples detected for this object in the current frame.
void SingleObjectModel::updateFromObservation(list<AbstractSample> features)
{
  // Timestamp bookkeeping: record the gap since the previous measurement.
  const long now = getCurrentTime();
  elapsed = now - lastMeasurement;
  lastMeasurement = now;
  // The object was just detected, so the "time unseen" counter resets.
  elapsedTimeSinceLastObs = 0.0f;
  jpdaf->correct(features);
  updateEstimate();
}
/** ====================================================
 * @class SingleObjectModel
 *
 * Contains all current knowledge about the object.
 * =====================================================*/
SingleObjectModel::SingleObjectModel(int numObjects = 1, int maxNFeatures = 3)
{
  // NOTE(review): raw new — presumably released in the destructor; verify
  // there is a matching delete (or migrate to a smart pointer).
  jpdaf = new JPDAF(JPDAF::DYNAMIC_OBJ, numObjects, maxNFeatures);
  // The object starts out as "never observed".
  elapsedTimeSinceLastObs = ObjectState::NEVER;
  updateEstimate();

  // ToDo: multiple objects (nets)

  // Velocity-estimation state starts at the origin with zero motion.
  lastPos.x = 0.0f;
  lastPos.y = 0.0f;
  lastMeasurement = getCurrentTime();
  dx = 0.0f;
  dy = 0.0f;
}
void SenderBandwidthEstimationHandler::read(Context *ctx, std::shared_ptr<dataPacket> packet) { RtcpHeader *chead = reinterpret_cast<RtcpHeader*>(packet->data); if (chead->isFeedback() && chead->getSourceSSRC() == connection_->getVideoSinkSSRC()) { char* packet_pointer = packet->data; int rtcp_length = 0; int total_length = 0; int current_block = 0; do { packet_pointer+=rtcp_length; chead = reinterpret_cast<RtcpHeader*>(packet_pointer); rtcp_length = (ntohs(chead->length) + 1) * 4; total_length += rtcp_length; ELOG_DEBUG("%s ssrc %u, sourceSSRC %u, PacketType %u", connection_->toLog(), chead->getSSRC(), chead->getSourceSSRC(), chead->getPacketType()); switch (chead->packettype) { case RTCP_Receiver_PT: { ELOG_DEBUG("%s, Analyzing Video RR: PacketLost %u, Ratio %u, current_block %d, blocks %d" ", sourceSSRC %u, ssrc %u", connection_->toLog(), chead->getLostPackets(), chead->getFractionLost(), current_block, chead->getBlockCount(), chead->getSourceSSRC(), chead->getSSRC()); // calculate RTT + Update receiver block uint32_t delay_since_last_ms = (chead->getDelaySinceLastSr() * 1000) / 65536; int64_t now_ms = ClockUtils::timePointToMs(clock::now()); uint32_t last_sr = chead->getLastSr(); auto value = std::find_if(sr_delay_data_.begin(), sr_delay_data_.end(), [last_sr](const std::shared_ptr<SrDelayData> sr_info) { return sr_info->sr_ntp == last_sr; }); if (value != sr_delay_data_.end()) { uint32_t delay = now_ms - (*value)->sr_send_time - delay_since_last_ms; sender_bwe_->UpdateReceiverBlock(chead->getFractionLost(), delay, period_packets_sent_, now_ms); period_packets_sent_ = 0; updateEstimate(); } } break; case RTCP_PS_Feedback_PT: { if (chead->getBlockCount() == RTCP_AFB) { char *uniqueId = reinterpret_cast<char*>(&chead->report.rembPacket.uniqueid); if (!strncmp(uniqueId, "REMB", 4)) { int64_t now_ms = ClockUtils::timePointToMs(clock::now()); uint64_t bitrate = chead->getBrMantis() << chead->getBrExp(); ELOG_DEBUG("%s message: Updating Estimate with REMB, bitrate 
%llu", connection_->toLog(), bitrate); sender_bwe_->UpdateReceiverEstimate(now_ms, bitrate); sender_bwe_->UpdateEstimate(now_ms); updateEstimate(); } else { ELOG_DEBUG("%s message: Unsupported AFB Packet not REMB", connection_->toLog()); } } } break; default: break; } current_block++; } while (total_length < packet->length); } ctx->fireRead(packet); }
/**
 * @brief One localization step: apply odometry to the particles, correct
 *        against vision, resample if vision contributed, and refresh the
 *        pose estimate.
 *
 * @param odometryInput  Robot odometry since the last update.
 * @param visionInput    Field observations from the vision system.
 */
void ParticleFilter::update(const messages::RobotLocation& odometryInput,
                            const messages::VisionField& visionInput)
{
    motionSystem->update(particles, odometryInput);

    // Update the Vision Model
    // set updated vision to determine if resampling necessary
    updatedVision = visionSystem->update(particles, visionInput);

    // Resample if vision update
    if(updatedVision)
    {
        resample();
        updatedVision = false;

        // If the swarm scores poorly against vision, treat the robot as lost
        // and widen the search (threshold 40 is a tuned magic number —
        // TODO confirm its units/meaning against VisionSystem).
        lost = (visionSystem->getLowestError() > 40);
    }

    // Lost: inject large motion noise to spread particles; otherwise use the
    // configured odometry noise parameters.
    if (lost)
        motionSystem->resetNoise(4.f, .2f);
    else {
        motionSystem->resetNoise(parameters.odometryXYNoise,
                                 parameters.odometryHNoise);
    }

    // Update filters estimate
    updateEstimate();

    // std::cout<<"Time:\t" << motionInput.timestamp() <<"\n";
    // std::cout<<"(x,y,h)\t("<< getXEst() << " , " << getYEst()
    //          <<" , " <<getHEst()<< " )\n\n";

    /**
     * The following is to stop particles going off field
     * Commented out until known if necessary (K.I.S.S.)
     */
    // // Check if the mean has gone out of bounds. If so,
    // // reset to the cloesst point in bounds with appropriate
    // // uncertainty.
    // bool resetInBounds = false;

    // if(poseEstimate.x() < 0)
    // {
    //     resetInBounds = true;
    //     poseEstimate.set_x(0);
    // }
    // else if(poseEstimate.x() > parameters.fieldWidth)
    // {
    //     resetInBounds = true;
    //     poseEstimate.set_x(parameters.fieldWidth);
    // }

    // if(poseEstimate.y() < 0)
    // {
    //     resetInBounds = true;
    //     poseEstimate.set_y(0);
    // }
    // else if(poseEstimate.y() > parameters.fieldHeight)
    // {
    //     resetInBounds = true;
    //     poseEstimate.set_y(parameters.fieldHeight);
    // }

    // // Only reset if one of the location coordinates is
    // // out of bounds; avoids unnecessary resets.
    // if(resetInBounds)
    // {
    //     /*
    //      * @TODO Actually reset to a location here
    //      */
    //     std::cout << "Resetting to (" << poseEstimate.x()
    //               << ", " << poseEstimate.y() << ", "
    //               << poseEstimate.h() << ")." << std::endl;
    // }
}