// This callback is invoked by the physics simulation at the end of each simulation step... // iff the corresponding RigidBody is DYNAMIC and has moved. void EntityMotionState::setWorldTransform(const btTransform& worldTrans) { if (!_entity) { return; } assert(entityTreeIsLocked()); measureBodyAcceleration(); _entity->setPosition(bulletToGLM(worldTrans.getOrigin()) + ObjectMotionState::getWorldOffset()); _entity->setRotation(bulletToGLM(worldTrans.getRotation())); _entity->setVelocity(getBodyLinearVelocity()); _entity->setAngularVelocity(getBodyAngularVelocity()); _entity->setLastSimulated(usecTimestampNow()); if (_entity->getSimulatorID().isNull()) { _loopsWithoutOwner++; if (_loopsWithoutOwner > LOOPS_FOR_SIMULATION_ORPHAN && usecTimestampNow() > _nextOwnershipBid) { //qDebug() << "Warning -- claiming something I saw moving." << getName(); setOutgoingPriority(VOLUNTEER_SIMULATION_PRIORITY); } } #ifdef WANT_DEBUG quint64 now = usecTimestampNow(); qCDebug(physics) << "EntityMotionState::setWorldTransform()... changed entity:" << _entity->getEntityItemID(); qCDebug(physics) << " last edited:" << _entity->getLastEdited() << formatUsecTime(now - _entity->getLastEdited()) << "ago"; qCDebug(physics) << " last simulated:" << _entity->getLastSimulated() << formatUsecTime(now - _entity->getLastSimulated()) << "ago"; qCDebug(physics) << " last updated:" << _entity->getLastUpdated() << formatUsecTime(now - _entity->getLastUpdated()) << "ago"; #endif }
// This callback is invoked by the physics simulation at the end of each simulation step... // iff the corresponding RigidBody is DYNAMIC and has moved. void EntityMotionState::setWorldTransform(const btTransform& worldTrans) { if (!_entity) { return; } measureBodyAcceleration(); _entity->setPosition(bulletToGLM(worldTrans.getOrigin()) + ObjectMotionState::getWorldOffset()); _entity->setRotation(bulletToGLM(worldTrans.getRotation())); _entity->setVelocity(getBodyLinearVelocity()); _entity->setAngularVelocity(getBodyAngularVelocity()); _entity->setLastSimulated(usecTimestampNow()); if (_entity->getSimulatorID().isNull()) { _loopsWithoutOwner++; const uint32_t OWNERSHIP_BID_DELAY = 50; if (_loopsWithoutOwner > OWNERSHIP_BID_DELAY) { //qDebug() << "Warning -- claiming something I saw moving." << getName(); _candidateForOwnership = true; } } else { _loopsWithoutOwner = 0; } #ifdef WANT_DEBUG quint64 now = usecTimestampNow(); qCDebug(physics) << "EntityMotionState::setWorldTransform()... changed entity:" << _entity->getEntityItemID(); qCDebug(physics) << " last edited:" << _entity->getLastEdited() << formatUsecTime(now - _entity->getLastEdited()) << "ago"; qCDebug(physics) << " last simulated:" << _entity->getLastSimulated() << formatUsecTime(now - _entity->getLastSimulated()) << "ago"; qCDebug(physics) << " last updated:" << _entity->getLastUpdated() << formatUsecTime(now - _entity->getLastUpdated()) << "ago"; #endif }
// This callback is invoked by the physics simulation at the end of each simulation step... // iff the corresponding RigidBody is DYNAMIC and has moved. void EntityMotionState::setWorldTransform(const btTransform& worldTrans) { if (!_entity) { return; } assert(entityTreeIsLocked()); measureBodyAcceleration(); bool positionSuccess; _entity->setPosition(bulletToGLM(worldTrans.getOrigin()) + ObjectMotionState::getWorldOffset(), positionSuccess, false); if (!positionSuccess) { static QString repeatedMessage = LogHandler::getInstance().addRepeatedMessageRegex("EntityMotionState::setWorldTransform " "setPosition failed.*"); qCDebug(physics) << "EntityMotionState::setWorldTransform setPosition failed" << _entity->getID(); } bool orientationSuccess; _entity->setOrientation(bulletToGLM(worldTrans.getRotation()), orientationSuccess, false); if (!orientationSuccess) { static QString repeatedMessage = LogHandler::getInstance().addRepeatedMessageRegex("EntityMotionState::setWorldTransform " "setOrientation failed.*"); qCDebug(physics) << "EntityMotionState::setWorldTransform setOrientation failed" << _entity->getID(); } _entity->setVelocity(getBodyLinearVelocity()); _entity->setAngularVelocity(getBodyAngularVelocity()); _entity->setLastSimulated(usecTimestampNow()); if (_entity->getSimulatorID().isNull()) { _loopsWithoutOwner++; if (_loopsWithoutOwner > LOOPS_FOR_SIMULATION_ORPHAN && usecTimestampNow() > _nextOwnershipBid) { upgradeOutgoingPriority(VOLUNTEER_SIMULATION_PRIORITY); } } #ifdef WANT_DEBUG quint64 now = usecTimestampNow(); qCDebug(physics) << "EntityMotionState::setWorldTransform()... 
changed entity:" << _entity->getEntityItemID(); qCDebug(physics) << " last edited:" << _entity->getLastEdited() << formatUsecTime(now - _entity->getLastEdited()) << "ago"; qCDebug(physics) << " last simulated:" << _entity->getLastSimulated() << formatUsecTime(now - _entity->getLastSimulated()) << "ago"; qCDebug(physics) << " last updated:" << _entity->getLastUpdated() << formatUsecTime(now - _entity->getLastUpdated()) << "ago"; #endif }
void AudioMixerClientData::printAudioStreamStats(const AudioStreamStats& streamStats) const { printf(" Packet loss | overall: %5.2f%% (%d lost), last_30s: %5.2f%% (%d lost)\n", (double)(streamStats._packetStreamStats.getLostRate() * 100.0f), streamStats._packetStreamStats._lost, (double)(streamStats._packetStreamWindowStats.getLostRate() * 100.0f), streamStats._packetStreamWindowStats._lost); printf(" Ringbuffer frames | desired: %u, avg_available(10s): %u, available: %u\n", streamStats._desiredJitterBufferFrames, streamStats._framesAvailableAverage, streamStats._framesAvailable); printf(" Ringbuffer stats | starves: %u, prev_starve_lasted: %u, frames_dropped: %u, overflows: %u\n", streamStats._starveCount, streamStats._consecutiveNotMixedCount, streamStats._framesDropped, streamStats._overflowCount); printf(" Inter-packet timegaps (overall) | min: %9s, max: %9s, avg: %9s\n", formatUsecTime(streamStats._timeGapMin).toLatin1().data(), formatUsecTime(streamStats._timeGapMax).toLatin1().data(), formatUsecTime(streamStats._timeGapAverage).toLatin1().data()); printf(" Inter-packet timegaps (last 30s) | min: %9s, max: %9s, avg: %9s\n", formatUsecTime(streamStats._timeGapWindowMin).toLatin1().data(), formatUsecTime(streamStats._timeGapWindowMax).toLatin1().data(), formatUsecTime(streamStats._timeGapWindowAverage).toLatin1().data()); }
void OctreeSceneStats::trackIncomingOctreePacket(ReceivedMessage& message, bool wasStatsPacket, qint64 nodeClockSkewUsec) { const bool wantExtraDebugging = false; // skip past the flags message.seek(sizeof(OCTREE_PACKET_FLAGS)); OCTREE_PACKET_SEQUENCE sequence; message.readPrimitive(&sequence); OCTREE_PACKET_SENT_TIME sentAt; message.readPrimitive(&sentAt); //bool packetIsColored = oneAtBit(flags, PACKET_IS_COLOR_BIT); //bool packetIsCompressed = oneAtBit(flags, PACKET_IS_COMPRESSED_BIT); OCTREE_PACKET_SENT_TIME arrivedAt = usecTimestampNow(); qint64 flightTime = arrivedAt - sentAt + nodeClockSkewUsec; if (wantExtraDebugging) { qCDebug(octree) << "sentAt:" << sentAt << " usecs"; qCDebug(octree) << "arrivedAt:" << arrivedAt << " usecs"; qCDebug(octree) << "nodeClockSkewUsec:" << nodeClockSkewUsec << " usecs"; qCDebug(octree) << "node Clock Skew:" << formatUsecTime(nodeClockSkewUsec); qCDebug(octree) << "flightTime:" << flightTime << " usecs"; } // Guard against possible corrupted packets... with bad timestamps const qint64 MAX_RESONABLE_FLIGHT_TIME = 200 * USECS_PER_SECOND; // 200 seconds is more than enough time for a packet to arrive const qint64 MIN_RESONABLE_FLIGHT_TIME = -1 * (qint64)USECS_PER_SECOND; // more than 1 second of "reverse flight time" would be unreasonable if (flightTime > MAX_RESONABLE_FLIGHT_TIME || flightTime < MIN_RESONABLE_FLIGHT_TIME) { static QString repeatedMessage = LogHandler::getInstance().addRepeatedMessageRegex( "ignoring unreasonable packet... flightTime: -?\\d+ nodeClockSkewUsec: -?\\d+ usecs"); qCDebug(octree) << "ignoring unreasonable packet... flightTime:" << flightTime << "nodeClockSkewUsec:" << nodeClockSkewUsec << "usecs";; return; // ignore any packets that are unreasonable } _incomingOctreeSequenceNumberStats.sequenceNumberReceived(sequence); // track packets here... _incomingPacket++; _incomingBytes += message.getSize(); if (!wasStatsPacket) { _incomingWastedBytes += (udt::MAX_PACKET_SIZE - message.getSize()); } }
// Parses the "backups" array from the persist settings, building one BackupRule per
// entry. Each rule records its interval, max version count, format, and the timestamp
// of the most recent existing backup.
// NOTE(review): this excerpt is truncated — the trailing `else` branch continues
// beyond the visible chunk.
void OctreePersistThread::parseSettings(const QJsonObject& settings) {
    if (settings["backups"].isArray()) {
        const QJsonArray& backupRules = settings["backups"].toArray();
        qCDebug(octree) << "BACKUP RULES:";

        foreach (const QJsonValue& value, backupRules) {
            QJsonObject obj = value.toObject();

            int interval = 0;
            int count = 0;

            // Settings values may arrive as strings or numbers; accept both forms.
            QJsonValue intervalVal = obj["backupInterval"];
            if (intervalVal.isString()) {
                interval = intervalVal.toString().toInt();
            } else {
                interval = intervalVal.toInt();
            }

            QJsonValue countVal = obj["maxBackupVersions"];
            if (countVal.isString()) {
                count = countVal.toString().toInt();
            } else {
                count = countVal.toInt();
            }

            qCDebug(octree) << " Name:" << obj["Name"].toString();
            qCDebug(octree) << " format:" << obj["format"].toString();
            qCDebug(octree) << " interval:" << interval;
            qCDebug(octree) << " count:" << count;

            // lastBackup starts at 0 (never) and is refined from disk below.
            BackupRule newRule = { obj["Name"].toString(), interval, obj["format"].toString(), count, 0};
            newRule.lastBackup = getMostRecentBackupTimeInUsecs(obj["format"].toString());

            if (newRule.lastBackup > 0) {
                quint64 now = usecTimestampNow();
                quint64 sinceLastBackup = now - newRule.lastBackup;
                qCDebug(octree) << " lastBackup:" << qPrintable(formatUsecTime(sinceLastBackup)) << "ago";
            } else {
                qCDebug(octree) << " lastBackup: NEVER";
            }

            _backupRules << newRule;
        }
    } else {
// Builds a single-line, human-readable summary of the downstream stream, the upstream
// mic stream (if present), and every injected stream's jitter-buffer statistics.
QString AudioMixerClientData::getAudioStreamStatsString() const {
    // Formats the packet-loss and inter-packet-gap fields shared by every stream type.
    auto lossAndGapString = [](const AudioStreamStats& stats) {
        return QString(" lost%:") + QString::number(stats._packetStreamStats.getLostRate() * 100.0f, 'f', 2)
            + " lost%_30s:" + QString::number(stats._packetStreamWindowStats.getLostRate() * 100.0f, 'f', 2)
            + " min_gap:" + formatUsecTime(stats._timeGapMin)
            + " max_gap:" + formatUsecTime(stats._timeGapMax)
            + " avg_gap:" + formatUsecTime(stats._timeGapAverage)
            + " min_gap_30s:" + formatUsecTime(stats._timeGapWindowMin)
            + " max_gap_30s:" + formatUsecTime(stats._timeGapWindowMax)
            + " avg_gap_30s:" + formatUsecTime(stats._timeGapWindowAverage);
    };

    // Formats the ring-buffer fields shared by the upstream mic and injected streams.
    auto upstreamString = [&lossAndGapString](const QString& label, const AudioStreamStats& stats) {
        return label + QString::number(stats._ringBufferDesiredJitterBufferFrames)
            + " current:" + QString::number(stats._ringBufferCurrentJitterBufferFrames)
            + " available:" + QString::number(stats._ringBufferFramesAvailable)
            + " starves:" + QString::number(stats._ringBufferStarveCount)
            + " not_mixed:" + QString::number(stats._ringBufferConsecutiveNotMixedCount)
            + " overflows:" + QString::number(stats._ringBufferOverflowCount)
            + " silents_dropped:" + QString::number(stats._ringBufferSilentFramesDropped)
            + lossAndGapString(stats);
    };

    QString result;

    // The downstream stream has no "current" or "silents_dropped" counters to report.
    // (FIX: renamed from a second `streamStats` that was shadowed twice below.)
    const AudioStreamStats& downstream = _downstreamAudioStreamStats;
    result += "DOWNSTREAM.desired:" + QString::number(downstream._ringBufferDesiredJitterBufferFrames)
        + " current: ?"
        + " available:" + QString::number(downstream._ringBufferFramesAvailable)
        + " starves:" + QString::number(downstream._ringBufferStarveCount)
        + " not_mixed:" + QString::number(downstream._ringBufferConsecutiveNotMixedCount)
        + " overflows:" + QString::number(downstream._ringBufferOverflowCount)
        + " silents_dropped: ?"
        + lossAndGapString(downstream);

    AvatarAudioRingBuffer* avatarRingBuffer = getAvatarAudioRingBuffer();
    if (avatarRingBuffer) {
        result += upstreamString(" UPSTREAM.mic.desired:", getAudioStreamStatsOfStream(avatarRingBuffer));
    } else {
        // FIX: append instead of assigning — the old `result = "mic unknown";`
        // silently discarded the DOWNSTREAM stats built above.
        result += " mic unknown";
    }

    for (int i = 0; i < _ringBuffers.size(); i++) {
        if (_ringBuffers[i]->getType() == PositionalAudioRingBuffer::Injector) {
            result += upstreamString(" UPSTREAM.inj.desired:", getAudioStreamStatsOfStream(_ringBuffers[i]));
        }
    }
    return result;
}
// Builds a JSON report of the downstream stream, the upstream mic stream (if present),
// and every injected stream's jitter-buffer statistics.
// Note: QJsonObject key insertion order does not affect the serialized object, so the
// shared fields are filled by one helper for all three stream kinds.
QJsonObject AudioMixerClientData::getAudioStreamStats() {
    // Fills the stats fields common to every audio stream kind.
    auto fillCommonStats = [](QJsonObject& obj, const AudioStreamStats& stats) {
        obj["available_avg_10s"] = stats._framesAvailableAverage;
        obj["available"] = (double) stats._framesAvailable;
        obj["unplayed"] = (double) stats._unplayedMs;
        obj["starves"] = (double) stats._starveCount;
        obj["not_mixed"] = (double) stats._consecutiveNotMixedCount;
        obj["overflows"] = (double) stats._overflowCount;
        obj["lost%"] = stats._packetStreamStats.getLostRate() * 100.0f;
        obj["lost%_30s"] = stats._packetStreamWindowStats.getLostRate() * 100.0f;
        obj["min_gap"] = formatUsecTime(stats._timeGapMin);
        obj["max_gap"] = formatUsecTime(stats._timeGapMax);
        obj["avg_gap"] = formatUsecTime(stats._timeGapAverage);
        obj["min_gap_30s"] = formatUsecTime(stats._timeGapWindowMin);
        obj["max_gap_30s"] = formatUsecTime(stats._timeGapWindowMax);
        obj["avg_gap_30s"] = formatUsecTime(stats._timeGapWindowAverage);
    };

    QJsonObject result;

    {
        // Downstream: no calculated-desired or silent-frames-dropped counters.
        // (FIX: scoped/renamed to stop the inner `streamStats` declarations below
        // from shadowing this one.)
        QJsonObject downstreamStats;
        const AudioStreamStats& downstream = _downstreamAudioStreamStats;
        downstreamStats["desired"] = downstream._desiredJitterBufferFrames;
        fillCommonStats(downstreamStats, downstream);
        result["downstream"] = downstreamStats;
    }

    AvatarAudioStream* avatarAudioStream = getAvatarAudioStream();
    if (avatarAudioStream) {
        QJsonObject upstreamStats;
        AudioStreamStats micStats = avatarAudioStream->getAudioStreamStats();
        upstreamStats["mic.desired"] = micStats._desiredJitterBufferFrames;
        upstreamStats["desired_calc"] = avatarAudioStream->getCalculatedJitterBufferFrames();
        fillCommonStats(upstreamStats, micStats);
        upstreamStats["silents_dropped"] = (double) micStats._framesDropped;
        result["upstream"] = upstreamStats;
    } else {
        result["upstream"] = "mic unknown";
    }

    QJsonArray injectorArray;
    auto streamsCopy = getAudioStreams();
    for (auto& injectorPair : streamsCopy) {
        if (injectorPair.second->getType() == PositionalAudioStream::Injector) {
            QJsonObject injectorStats;
            AudioStreamStats injStats = injectorPair.second->getAudioStreamStats();
            injectorStats["inj.desired"] = injStats._desiredJitterBufferFrames;
            injectorStats["desired_calc"] = injectorPair.second->getCalculatedJitterBufferFrames();
            fillCommonStats(injectorStats, injStats);
            injectorStats["silents_dropped"] = (double) injStats._framesDropped;
            injectorArray.push_back(injectorStats);
        }
    }
    result["injectors"] = injectorArray;

    return result;
}