// called on the other nodes - assigns it to my views of the others
// Deserializes an octree query from `message` into this object's camera/view
// members and query tuning parameters, then an optional length-prefixed
// binary-JSON filter. Returns the number of bytes consumed from the start of
// the raw message payload.
// NOTE: the field order here is the wire format — it must exactly mirror the
// packing side; do not reorder reads.
int OctreeQuery::parseData(ReceivedMessage& message) {
    const unsigned char* startPosition = reinterpret_cast<const unsigned char*>(message.getRawMessage());
    const unsigned char* sourceBuffer = startPosition;

    // check if this query uses a view frustum
    memcpy(&_usesFrustum, sourceBuffer, sizeof(_usesFrustum));
    sourceBuffer += sizeof(_usesFrustum);

    if (_usesFrustum) {
        // unpack camera details: raw position vector, packed quaternion
        // orientation, then FOV/aspect/near/far as two-byte packed floats,
        // and finally the raw eye-offset vector
        memcpy(&_cameraPosition, sourceBuffer, sizeof(_cameraPosition));
        sourceBuffer += sizeof(_cameraPosition);
        sourceBuffer += unpackOrientationQuatFromBytes(sourceBuffer, _cameraOrientation);
        sourceBuffer += unpackFloatAngleFromTwoByte((uint16_t*) sourceBuffer, &_cameraFov);
        sourceBuffer += unpackFloatRatioFromTwoByte(sourceBuffer,_cameraAspectRatio);
        sourceBuffer += unpackClipValueFromTwoByte(sourceBuffer,_cameraNearClip);
        sourceBuffer += unpackClipValueFromTwoByte(sourceBuffer,_cameraFarClip);
        memcpy(&_cameraEyeOffsetPosition, sourceBuffer, sizeof(_cameraEyeOffsetPosition));
        sourceBuffer += sizeof(_cameraEyeOffsetPosition);
    }

    // desired Max Octree PPS
    memcpy(&_maxQueryPPS, sourceBuffer, sizeof(_maxQueryPPS));
    sourceBuffer += sizeof(_maxQueryPPS);

    // desired _octreeElementSizeScale
    memcpy(&_octreeElementSizeScale, sourceBuffer, sizeof(_octreeElementSizeScale));
    sourceBuffer += sizeof(_octreeElementSizeScale);

    // desired boundaryLevelAdjust
    memcpy(&_boundaryLevelAdjust, sourceBuffer, sizeof(_boundaryLevelAdjust));
    sourceBuffer += sizeof(_boundaryLevelAdjust);

    // camera center radius — unconditionally present in this protocol version
    memcpy(&_cameraCenterRadius, sourceBuffer, sizeof(_cameraCenterRadius));
    sourceBuffer += sizeof(_cameraCenterRadius);

    // check if we have a packed JSON filter (uint16 byte-count prefix)
    uint16_t binaryParametersBytes;
    memcpy(&binaryParametersBytes, sourceBuffer, sizeof(binaryParametersBytes));
    sourceBuffer += sizeof(binaryParametersBytes);

    if (binaryParametersBytes > 0) {
        // unpack the binary JSON parameters
        // NOTE(review): no check that binaryParametersBytes fits within the
        // remaining message bytes — assumes a well-formed packet; confirm the
        // sender clamps this length.
        QByteArray binaryJSONParameters { binaryParametersBytes, 0 };
        memcpy(binaryJSONParameters.data(), sourceBuffer, binaryParametersBytes);
        sourceBuffer += binaryParametersBytes;

        // grab the parameter object from the packed binary representation of JSON
        auto newJsonDocument = QJsonDocument::fromBinaryData(binaryJSONParameters);

        // take the write lock while replacing _jsonParameters, guarding
        // against concurrent readers of the parameters
        QWriteLocker jsonParameterLocker { &_jsonParametersLock };
        _jsonParameters = newJsonDocument.object();
    }

    return sourceBuffer - startPosition;
}
int Referential::unpack(const unsigned char* sourceBuffer) { const unsigned char* startPosition = sourceBuffer; _type = (Type)*sourceBuffer++; if (_type < 0 || _type >= NUM_TYPES) { _type = UNKNOWN; } memcpy(&_version, sourceBuffer, sizeof(_version)); sourceBuffer += sizeof(_version); sourceBuffer += unpackFloatVec3FromSignedTwoByteFixed(sourceBuffer, _translation, 0); sourceBuffer += unpackOrientationQuatFromBytes(sourceBuffer, _rotation); sourceBuffer += unpackFloatScalarFromSignedTwoByteFixed((const int16_t*) sourceBuffer, &_scale, 0); return sourceBuffer - startPosition; }
// called on the other nodes - assigns it to my views of the others int OctreeQuery::parseData(ReceivedMessage& message) { const unsigned char* startPosition = reinterpret_cast<const unsigned char*>(message.getRawMessage()); const unsigned char* sourceBuffer = startPosition; // camera details memcpy(&_cameraPosition, sourceBuffer, sizeof(_cameraPosition)); sourceBuffer += sizeof(_cameraPosition); sourceBuffer += unpackOrientationQuatFromBytes(sourceBuffer, _cameraOrientation); sourceBuffer += unpackFloatAngleFromTwoByte((uint16_t*) sourceBuffer, &_cameraFov); sourceBuffer += unpackFloatRatioFromTwoByte(sourceBuffer,_cameraAspectRatio); sourceBuffer += unpackClipValueFromTwoByte(sourceBuffer,_cameraNearClip); sourceBuffer += unpackClipValueFromTwoByte(sourceBuffer,_cameraFarClip); memcpy(&_cameraEyeOffsetPosition, sourceBuffer, sizeof(_cameraEyeOffsetPosition)); sourceBuffer += sizeof(_cameraEyeOffsetPosition); // optional feature flags unsigned char bitItems = 0; bitItems = (unsigned char)*sourceBuffer++; // NOTE: we used to use these bits to set feature request items if we need to extend the protocol with optional features // do it here with... wantFeature= oneAtBit(bitItems, WANT_FEATURE_BIT); Q_UNUSED(bitItems); // desired Max Octree PPS memcpy(&_maxQueryPPS, sourceBuffer, sizeof(_maxQueryPPS)); sourceBuffer += sizeof(_maxQueryPPS); // desired _octreeElementSizeScale memcpy(&_octreeElementSizeScale, sourceBuffer, sizeof(_octreeElementSizeScale)); sourceBuffer += sizeof(_octreeElementSizeScale); // desired boundaryLevelAdjust memcpy(&_boundaryLevelAdjust, sourceBuffer, sizeof(_boundaryLevelAdjust)); sourceBuffer += sizeof(_boundaryLevelAdjust); auto bytesRead = sourceBuffer - startPosition; auto bytesLeft = message.getSize() - bytesRead; if (bytesLeft >= (int)sizeof(_cameraCenterRadius)) { memcpy(&_cameraCenterRadius, sourceBuffer, sizeof(_cameraCenterRadius)); sourceBuffer += sizeof(_cameraCenterRadius); } return sourceBuffer - startPosition; }
int OctreePacketData::unpackDataFromBytes(const unsigned char *dataBytes, QVector<glm::quat>& result) { uint16_t length; memcpy(&length, dataBytes, sizeof(uint16_t)); dataBytes += sizeof(length); // FIXME - this size check is wrong if we allow larger packets if (length * sizeof(glm::quat) > MAX_OCTREE_UNCOMRESSED_PACKET_SIZE) { result.resize(0); return sizeof(uint16_t); } result.resize(length); const unsigned char *start = dataBytes; for (int i = 0; i < length; i++) { dataBytes += unpackOrientationQuatFromBytes(dataBytes, result[i]); } return (dataBytes - start) + (int)sizeof(uint16_t); }
// called on the other nodes - assigns it to my views of the others int OctreeQuery::parseData(const QByteArray& packet) { // increment to push past the packet header int numBytesPacketHeader = numBytesForPacketHeader(packet); const unsigned char* startPosition = reinterpret_cast<const unsigned char*>(packet.data()); const unsigned char* sourceBuffer = startPosition + numBytesPacketHeader; // camera details memcpy(&_cameraPosition, sourceBuffer, sizeof(_cameraPosition)); sourceBuffer += sizeof(_cameraPosition); sourceBuffer += unpackOrientationQuatFromBytes(sourceBuffer, _cameraOrientation); sourceBuffer += unpackFloatAngleFromTwoByte((uint16_t*) sourceBuffer, &_cameraFov); sourceBuffer += unpackFloatRatioFromTwoByte(sourceBuffer,_cameraAspectRatio); sourceBuffer += unpackClipValueFromTwoByte(sourceBuffer,_cameraNearClip); sourceBuffer += unpackClipValueFromTwoByte(sourceBuffer,_cameraFarClip); memcpy(&_cameraEyeOffsetPosition, sourceBuffer, sizeof(_cameraEyeOffsetPosition)); sourceBuffer += sizeof(_cameraEyeOffsetPosition); // voxel sending features... unsigned char bitItems = 0; bitItems = (unsigned char)*sourceBuffer++; _wantLowResMoving = oneAtBit(bitItems, WANT_LOW_RES_MOVING_BIT); _wantColor = oneAtBit(bitItems, WANT_COLOR_AT_BIT); _wantDelta = oneAtBit(bitItems, WANT_DELTA_AT_BIT); _wantOcclusionCulling = oneAtBit(bitItems, WANT_OCCLUSION_CULLING_BIT); _wantCompression = oneAtBit(bitItems, WANT_COMPRESSION); // desired Max Octree PPS memcpy(&_maxOctreePPS, sourceBuffer, sizeof(_maxOctreePPS)); sourceBuffer += sizeof(_maxOctreePPS); // desired _octreeElementSizeScale memcpy(&_octreeElementSizeScale, sourceBuffer, sizeof(_octreeElementSizeScale)); sourceBuffer += sizeof(_octreeElementSizeScale); // desired boundaryLevelAdjust memcpy(&_boundaryLevelAdjust, sourceBuffer, sizeof(_boundaryLevelAdjust)); sourceBuffer += sizeof(_boundaryLevelAdjust); return sourceBuffer - startPosition; }
// Builds a ModelItem from an incoming edit packet. The packet starts with an
// octcode (skipped), then an edit ID. editID == NEW_MODEL means a brand-new
// item (followed by a creator token); otherwise the existing item is looked up
// in `tree` and its properties are copied before the packet's fields overwrite
// them. For existing items a bit mask selects which optional fields follow;
// for new items every field is present. `processedBytes` is set to the total
// bytes consumed; `valid` is set false when the editID is unknown (the fields
// are still consumed so the buffer stays aligned for further processing).
// NOTE(review): the `length` parameter is never used — no bounds checking is
// performed against it; assumes a well-formed packet. Confirm upstream
// validation.
ModelItem ModelItem::fromEditPacket(const unsigned char* data, int length, int& processedBytes, ModelTree* tree, bool& valid) {
    ModelItem newModelItem; // id and _lastUpdated will get set here...
    const unsigned char* dataAt = data;
    processedBytes = 0;

    // the first part of the data is our octcode...
    int octets = numberOfThreeBitSectionsInCode(data);
    int lengthOfOctcode = bytesRequiredForCodeLength(octets);

    // we don't actually do anything with this octcode...
    dataAt += lengthOfOctcode;
    processedBytes += lengthOfOctcode;

    // id
    uint32_t editID;
    memcpy(&editID, dataAt, sizeof(editID));
    dataAt += sizeof(editID);
    processedBytes += sizeof(editID);

    bool isNewModelItem = (editID == NEW_MODEL);

    // special case for handling "new" modelItems
    if (isNewModelItem) {
        // If this is a NEW_MODEL, then we assume that there's an additional uint32_t creatorToken, that
        // we want to send back to the creator as an map to the actual id
        uint32_t creatorTokenID;
        memcpy(&creatorTokenID, dataAt, sizeof(creatorTokenID));
        dataAt += sizeof(creatorTokenID);
        processedBytes += sizeof(creatorTokenID);
        newModelItem.setCreatorTokenID(creatorTokenID);
        newModelItem._newlyCreated = true;
        valid = true;
    } else {
        // look up the existing modelItem
        const ModelItem* existingModelItem = tree->findModelByID(editID, true);

        // copy existing properties before over-writing with new properties
        if (existingModelItem) {
            newModelItem = *existingModelItem;
            valid = true;
        } else {
            // the user attempted to edit a modelItem that doesn't exist
            qDebug() << "user attempted to edit a modelItem that doesn't exist... editID=" << editID;

            // NOTE: even though this is a bad editID, we have to consume the edit details, so that
            // the buffer doesn't get corrupted for further processing...
            valid = false;
        }
        newModelItem._id = editID;
        newModelItem._newlyCreated = false;
    }

    // lastEdited
    memcpy(&newModelItem._lastEdited, dataAt, sizeof(newModelItem._lastEdited));
    dataAt += sizeof(newModelItem._lastEdited);
    processedBytes += sizeof(newModelItem._lastEdited);

    // All of the remaining items are optional, and may or may not be included based on their included values in the
    // properties included bits
    uint16_t packetContainsBits = 0;
    if (!isNewModelItem) {
        memcpy(&packetContainsBits, dataAt, sizeof(packetContainsBits));
        dataAt += sizeof(packetContainsBits);
        processedBytes += sizeof(packetContainsBits);
    }

    // radius
    if (isNewModelItem || ((packetContainsBits & MODEL_PACKET_CONTAINS_RADIUS) == MODEL_PACKET_CONTAINS_RADIUS)) {
        memcpy(&newModelItem._radius, dataAt, sizeof(newModelItem._radius));
        dataAt += sizeof(newModelItem._radius);
        processedBytes += sizeof(newModelItem._radius);
    }

    // position
    if (isNewModelItem || ((packetContainsBits & MODEL_PACKET_CONTAINS_POSITION) == MODEL_PACKET_CONTAINS_POSITION)) {
        memcpy(&newModelItem._position, dataAt, sizeof(newModelItem._position));
        dataAt += sizeof(newModelItem._position);
        processedBytes += sizeof(newModelItem._position);
    }

    // color (note: _color is an array, so no & on the destination)
    if (isNewModelItem || ((packetContainsBits & MODEL_PACKET_CONTAINS_COLOR) == MODEL_PACKET_CONTAINS_COLOR)) {
        memcpy(newModelItem._color, dataAt, sizeof(newModelItem._color));
        dataAt += sizeof(newModelItem._color);
        processedBytes += sizeof(newModelItem._color);
    }

    // shouldDie
    if (isNewModelItem || ((packetContainsBits & MODEL_PACKET_CONTAINS_SHOULDDIE) == MODEL_PACKET_CONTAINS_SHOULDDIE)) {
        memcpy(&newModelItem._shouldDie, dataAt, sizeof(newModelItem._shouldDie));
        dataAt += sizeof(newModelItem._shouldDie);
        processedBytes += sizeof(newModelItem._shouldDie);
    }

    // modelURL — a uint16 length, then the string bytes
    // NOTE(review): QString((const char*)dataAt) reads up to the first NUL,
    // which assumes the packed string is null-terminated and the length field
    // covers the terminator — confirm against the packing side.
    if (isNewModelItem || ((packetContainsBits & MODEL_PACKET_CONTAINS_MODEL_URL) == MODEL_PACKET_CONTAINS_MODEL_URL)) {
        uint16_t modelURLLength;
        memcpy(&modelURLLength, dataAt, sizeof(modelURLLength));
        dataAt += sizeof(modelURLLength);
        processedBytes += sizeof(modelURLLength);
        QString tempString((const char*)dataAt);
        newModelItem._modelURL = tempString;
        dataAt += modelURLLength;
        processedBytes += modelURLLength;
    }

    // modelRotation — packed quaternion
    if (isNewModelItem || ((packetContainsBits & MODEL_PACKET_CONTAINS_MODEL_ROTATION) == MODEL_PACKET_CONTAINS_MODEL_ROTATION)) {
        int bytes = unpackOrientationQuatFromBytes(dataAt, newModelItem._modelRotation);
        dataAt += bytes;
        processedBytes += bytes;
    }

    // animationURL — same length-prefixed string layout as modelURL
    if (isNewModelItem || ((packetContainsBits & MODEL_PACKET_CONTAINS_ANIMATION_URL) == MODEL_PACKET_CONTAINS_ANIMATION_URL)) {
        uint16_t animationURLLength;
        memcpy(&animationURLLength, dataAt, sizeof(animationURLLength));
        dataAt += sizeof(animationURLLength);
        processedBytes += sizeof(animationURLLength);
        QString tempString((const char*)dataAt);
        newModelItem._animationURL = tempString;
        dataAt += animationURLLength;
        processedBytes += animationURLLength;
    }

    // animationIsPlaying
    if (isNewModelItem || ((packetContainsBits & MODEL_PACKET_CONTAINS_ANIMATION_PLAYING) == MODEL_PACKET_CONTAINS_ANIMATION_PLAYING)) {
        memcpy(&newModelItem._animationIsPlaying, dataAt, sizeof(newModelItem._animationIsPlaying));
        dataAt += sizeof(newModelItem._animationIsPlaying);
        processedBytes += sizeof(newModelItem._animationIsPlaying);
    }

    // animationFrameIndex
    if (isNewModelItem || ((packetContainsBits & MODEL_PACKET_CONTAINS_ANIMATION_FRAME) == MODEL_PACKET_CONTAINS_ANIMATION_FRAME)) {
        memcpy(&newModelItem._animationFrameIndex, dataAt, sizeof(newModelItem._animationFrameIndex));
        dataAt += sizeof(newModelItem._animationFrameIndex);
        processedBytes += sizeof(newModelItem._animationFrameIndex);
    }

    // animationFPS
    if (isNewModelItem || ((packetContainsBits & MODEL_PACKET_CONTAINS_ANIMATION_FPS) == MODEL_PACKET_CONTAINS_ANIMATION_FPS)) {
        memcpy(&newModelItem._animationFPS, dataAt, sizeof(newModelItem._animationFPS));
        dataAt += sizeof(newModelItem._animationFPS);
        processedBytes += sizeof(newModelItem._animationFPS);
    }

    // compile-time-disabled debug dump of the parsed item
    const bool wantDebugging = false;
    if (wantDebugging) {
        qDebug("ModelItem::fromEditPacket()...");
        qDebug() << "   ModelItem id in packet:" << editID;
        newModelItem.debugDump();
    }

    return newModelItem;
}
// Reads this model's serialized state from a bitstream buffer, adjusting
// timestamps by the source node's clock skew. Animation fields are only
// present from bitstream version VERSION_MODELS_HAVE_ANIMATION onward.
// Returns bytes consumed, or 0 when fewer than expectedBytes() remain.
// NOTE(review): expectedBytes() is checked once up front, but the URL fields
// are variable-length, so the actual bytes consumed can exceed that estimate
// — confirm callers size the buffer accordingly.
int ModelItem::readModelDataFromBuffer(const unsigned char* data, int bytesLeftToRead, ReadBitstreamToTreeParams& args) {
    int bytesRead = 0;
    if (bytesLeftToRead >= expectedBytes()) {
        // skew adjustment so remote timestamps line up with our local clock
        int clockSkew = args.sourceNode ? args.sourceNode->getClockSkewUsec() : 0;

        const unsigned char* dataAt = data;

        // id
        memcpy(&_id, dataAt, sizeof(_id));
        dataAt += sizeof(_id);
        bytesRead += sizeof(_id);

        // _lastUpdated
        memcpy(&_lastUpdated, dataAt, sizeof(_lastUpdated));
        dataAt += sizeof(_lastUpdated);
        bytesRead += sizeof(_lastUpdated);
        _lastUpdated -= clockSkew;

        // _lastEdited
        memcpy(&_lastEdited, dataAt, sizeof(_lastEdited));
        dataAt += sizeof(_lastEdited);
        bytesRead += sizeof(_lastEdited);
        _lastEdited -= clockSkew;

        // radius
        memcpy(&_radius, dataAt, sizeof(_radius));
        dataAt += sizeof(_radius);
        bytesRead += sizeof(_radius);

        // position
        memcpy(&_position, dataAt, sizeof(_position));
        dataAt += sizeof(_position);
        bytesRead += sizeof(_position);

        // color (array member — no & on the destination)
        memcpy(_color, dataAt, sizeof(_color));
        dataAt += sizeof(_color);
        bytesRead += sizeof(_color);

        // shouldDie
        memcpy(&_shouldDie, dataAt, sizeof(_shouldDie));
        dataAt += sizeof(_shouldDie);
        bytesRead += sizeof(_shouldDie);

        // modelURL — uint16 length prefix, then the string bytes
        // NOTE(review): QString((const char*)dataAt) reads to the first NUL;
        // assumes the serialized string is null-terminated — confirm against
        // the writer.
        uint16_t modelURLLength;
        memcpy(&modelURLLength, dataAt, sizeof(modelURLLength));
        dataAt += sizeof(modelURLLength);
        bytesRead += sizeof(modelURLLength);
        QString modelURLString((const char*)dataAt);
        setModelURL(modelURLString);
        dataAt += modelURLLength;
        bytesRead += modelURLLength;

        // modelRotation — packed quaternion
        int bytes = unpackOrientationQuatFromBytes(dataAt, _modelRotation);
        dataAt += bytes;
        bytesRead += bytes;

        if (args.bitstreamVersion >= VERSION_MODELS_HAVE_ANIMATION) {
            // animationURL — same length-prefixed layout as modelURL
            uint16_t animationURLLength;
            memcpy(&animationURLLength, dataAt, sizeof(animationURLLength));
            dataAt += sizeof(animationURLLength);
            bytesRead += sizeof(animationURLLength);
            QString animationURLString((const char*)dataAt);
            setAnimationURL(animationURLString);
            dataAt += animationURLLength;
            bytesRead += animationURLLength;

            // animationIsPlaying
            memcpy(&_animationIsPlaying, dataAt, sizeof(_animationIsPlaying));
            dataAt += sizeof(_animationIsPlaying);
            bytesRead += sizeof(_animationIsPlaying);

            // animationFrameIndex
            memcpy(&_animationFrameIndex, dataAt, sizeof(_animationFrameIndex));
            dataAt += sizeof(_animationFrameIndex);
            bytesRead += sizeof(_animationFrameIndex);

            // animationFPS
            memcpy(&_animationFPS, dataAt, sizeof(_animationFPS));
            dataAt += sizeof(_animationFPS);
            bytesRead += sizeof(_animationFPS);
        }
    }
    return bytesRead;
}
// Older variant of the bitstream reader: same fixed-field layout as the
// animation-aware version but stops after modelRotation (no animation
// fields), and assigns _modelURL directly rather than via setModelURL().
// Returns bytes consumed, or 0 when fewer than expectedBytes() remain.
int ModelItem::readModelDataFromBuffer(const unsigned char* data, int bytesLeftToRead, ReadBitstreamToTreeParams& args) {
    int bytesRead = 0;
    if (bytesLeftToRead >= expectedBytes()) {
        // skew adjustment so remote timestamps line up with our local clock
        int clockSkew = args.sourceNode ? args.sourceNode->getClockSkewUsec() : 0;

        const unsigned char* dataAt = data;

        // id
        memcpy(&_id, dataAt, sizeof(_id));
        dataAt += sizeof(_id);
        bytesRead += sizeof(_id);

        // _lastUpdated
        memcpy(&_lastUpdated, dataAt, sizeof(_lastUpdated));
        dataAt += sizeof(_lastUpdated);
        bytesRead += sizeof(_lastUpdated);
        _lastUpdated -= clockSkew;

        // _lastEdited
        memcpy(&_lastEdited, dataAt, sizeof(_lastEdited));
        dataAt += sizeof(_lastEdited);
        bytesRead += sizeof(_lastEdited);
        _lastEdited -= clockSkew;

        // radius
        memcpy(&_radius, dataAt, sizeof(_radius));
        dataAt += sizeof(_radius);
        bytesRead += sizeof(_radius);

        // position
        memcpy(&_position, dataAt, sizeof(_position));
        dataAt += sizeof(_position);
        bytesRead += sizeof(_position);

        // color (array member — no & on the destination)
        memcpy(_color, dataAt, sizeof(_color));
        dataAt += sizeof(_color);
        bytesRead += sizeof(_color);

        // shouldDie
        memcpy(&_shouldDie, dataAt, sizeof(_shouldDie));
        dataAt += sizeof(_shouldDie);
        bytesRead += sizeof(_shouldDie);

        // modelURL — uint16 length prefix, then the string bytes
        // NOTE(review): QString((const char*)dataAt) reads to the first NUL;
        // assumes the serialized string is null-terminated — confirm.
        uint16_t modelURLLength;
        memcpy(&modelURLLength, dataAt, sizeof(modelURLLength));
        dataAt += sizeof(modelURLLength);
        bytesRead += sizeof(modelURLLength);
        QString modelURLString((const char*)dataAt);
        _modelURL = modelURLString;
        dataAt += modelURLLength;
        bytesRead += modelURLLength;

        // modelRotation — packed quaternion
        int bytes = unpackOrientationQuatFromBytes(dataAt, _modelRotation);
        dataAt += bytes;
        bytesRead += bytes;

        //printf("ModelItem::readModelDataFromBuffer()... "); debugDump();
    }
    return bytesRead;
}
// Legacy loader for the pre-UUID entity bitstream format. The old 32-bit ID
// is read and discarded (a fresh QUuid is generated instead), timestamps are
// skew-adjusted, and _created is back-filled from _lastEdited since old files
// carried no creation time. Animation fields are only present from bitstream
// version VERSION_ENTITIES_HAVE_ANIMATION onward. Returns bytes consumed, or
// 0 when fewer than expectedBytes() remain.
int ModelEntityItem::oldVersionReadEntityDataFromBuffer(const unsigned char* data, int bytesLeftToRead, ReadBitstreamToTreeParams& args) {
    int bytesRead = 0;
    if (bytesLeftToRead >= expectedBytes()) {
        // skew adjustment so remote timestamps line up with our local clock
        int clockSkew = args.sourceNode ? args.sourceNode->getClockSkewUsec() : 0;

        const unsigned char* dataAt = data;

        // id
        // this old bitstream format had 32bit IDs. They are obsolete and need to be replaced with our new UUID
        // format. We can simply read and ignore the old ID since they should not be repeated. This code should only
        // run on loading from an old file.
        quint32 oldID;
        memcpy(&oldID, dataAt, sizeof(oldID));
        dataAt += sizeof(oldID);
        bytesRead += sizeof(oldID);
        _id = QUuid::createUuid();

        // _lastUpdated
        memcpy(&_lastUpdated, dataAt, sizeof(_lastUpdated));
        dataAt += sizeof(_lastUpdated);
        bytesRead += sizeof(_lastUpdated);
        _lastUpdated -= clockSkew;

        // _lastEdited
        memcpy(&_lastEdited, dataAt, sizeof(_lastEdited));
        dataAt += sizeof(_lastEdited);
        bytesRead += sizeof(_lastEdited);
        _lastEdited -= clockSkew;
        _created = _lastEdited; // NOTE: old models didn't have age or created time, assume their last edit was a create

        QString ageAsString = formatSecondsElapsed(getAge());
        qDebug() << "Loading old model file, _created = _lastEdited =" << _created
                        << " age=" << getAge() << "seconds - " << ageAsString
                        << "old ID=" << oldID << "new ID=" << _id;

        // radius
        float radius;
        memcpy(&radius, dataAt, sizeof(radius));
        dataAt += sizeof(radius);
        bytesRead += sizeof(radius);
        setRadius(radius);

        // position
        memcpy(&_position, dataAt, sizeof(_position));
        dataAt += sizeof(_position);
        bytesRead += sizeof(_position);

        // color
        memcpy(&_color, dataAt, sizeof(_color));
        dataAt += sizeof(_color);
        bytesRead += sizeof(_color);

        // TODO: how to handle this? Presumable, this would only ever be true if the model file was saved with
        // a model being in a shouldBeDeleted state. Which seems unlikely. But if it happens, maybe we should delete the entity after loading?
        // shouldBeDeleted
        bool shouldBeDeleted = false;
        memcpy(&shouldBeDeleted, dataAt, sizeof(shouldBeDeleted));
        dataAt += sizeof(shouldBeDeleted);
        bytesRead += sizeof(shouldBeDeleted);
        if (shouldBeDeleted) {
            qDebug() << "UNEXPECTED - read shouldBeDeleted=TRUE from an old format file";
        }

        // modelURL — uint16 length prefix, then the string bytes
        // NOTE(review): QString((const char*)dataAt) reads to the first NUL;
        // assumes the serialized string is null-terminated — confirm.
        uint16_t modelURLLength;
        memcpy(&modelURLLength, dataAt, sizeof(modelURLLength));
        dataAt += sizeof(modelURLLength);
        bytesRead += sizeof(modelURLLength);
        QString modelURLString((const char*)dataAt);
        setModelURL(modelURLString);
        dataAt += modelURLLength;
        bytesRead += modelURLLength;

        // rotation — packed quaternion
        int bytes = unpackOrientationQuatFromBytes(dataAt, _rotation);
        dataAt += bytes;
        bytesRead += bytes;

        if (args.bitstreamVersion >= VERSION_ENTITIES_HAVE_ANIMATION) {
            // animationURL — same length-prefixed layout as modelURL
            uint16_t animationURLLength;
            memcpy(&animationURLLength, dataAt, sizeof(animationURLLength));
            dataAt += sizeof(animationURLLength);
            bytesRead += sizeof(animationURLLength);
            QString animationURLString((const char*)dataAt);
            setAnimationURL(animationURLString);
            dataAt += animationURLLength;
            bytesRead += animationURLLength;

            // animationIsPlaying
            bool animationIsPlaying;
            memcpy(&animationIsPlaying, dataAt, sizeof(animationIsPlaying));
            dataAt += sizeof(animationIsPlaying);
            bytesRead += sizeof(animationIsPlaying);
            setAnimationIsPlaying(animationIsPlaying);

            // animationFrameIndex
            float animationFrameIndex;
            memcpy(&animationFrameIndex, dataAt, sizeof(animationFrameIndex));
            dataAt += sizeof(animationFrameIndex);
            bytesRead += sizeof(animationFrameIndex);
            setAnimationFrameIndex(animationFrameIndex);

            // animationFPS
            float animationFPS;
            memcpy(&animationFPS, dataAt, sizeof(animationFPS));
            dataAt += sizeof(animationFPS);
            bytesRead += sizeof(animationFPS);
            setAnimationFPS(animationFPS);
        }
    }
    return bytesRead;
}
// called on the other nodes - assigns it to my views of the others int OctreeQuery::parseData(ReceivedMessage& message) { const unsigned char* startPosition = reinterpret_cast<const unsigned char*>(message.getRawMessage()); const unsigned char* sourceBuffer = startPosition; // unpack the connection ID uint16_t newConnectionID; memcpy(&newConnectionID, sourceBuffer, sizeof(newConnectionID)); sourceBuffer += sizeof(newConnectionID); if (!_hasReceivedFirstQuery) { // set our flag to indicate that we've parsed for this query at least once _hasReceivedFirstQuery = true; // set the incoming connection ID as the current _connectionID = newConnectionID; } else { if (newConnectionID != _connectionID) { // the connection ID has changed - emit our signal so the server // knows that the client is starting a new session _connectionID = newConnectionID; emit incomingConnectionIDChanged(); } } // check if this query uses a view frustum memcpy(&_usesFrustum, sourceBuffer, sizeof(_usesFrustum)); sourceBuffer += sizeof(_usesFrustum); if (_usesFrustum) { // unpack camera details memcpy(&_cameraPosition, sourceBuffer, sizeof(_cameraPosition)); sourceBuffer += sizeof(_cameraPosition); sourceBuffer += unpackOrientationQuatFromBytes(sourceBuffer, _cameraOrientation); sourceBuffer += unpackFloatAngleFromTwoByte((uint16_t*) sourceBuffer, &_cameraFov); sourceBuffer += unpackFloatRatioFromTwoByte(sourceBuffer,_cameraAspectRatio); sourceBuffer += unpackClipValueFromTwoByte(sourceBuffer,_cameraNearClip); sourceBuffer += unpackClipValueFromTwoByte(sourceBuffer,_cameraFarClip); memcpy(&_cameraEyeOffsetPosition, sourceBuffer, sizeof(_cameraEyeOffsetPosition)); sourceBuffer += sizeof(_cameraEyeOffsetPosition); } // desired Max Octree PPS memcpy(&_maxQueryPPS, sourceBuffer, sizeof(_maxQueryPPS)); sourceBuffer += sizeof(_maxQueryPPS); // desired _octreeElementSizeScale memcpy(&_octreeElementSizeScale, sourceBuffer, sizeof(_octreeElementSizeScale)); sourceBuffer += sizeof(_octreeElementSizeScale); 
// desired boundaryLevelAdjust memcpy(&_boundaryLevelAdjust, sourceBuffer, sizeof(_boundaryLevelAdjust)); sourceBuffer += sizeof(_boundaryLevelAdjust); memcpy(&_cameraCenterRadius, sourceBuffer, sizeof(_cameraCenterRadius)); sourceBuffer += sizeof(_cameraCenterRadius); // check if we have a packed JSON filter uint16_t binaryParametersBytes; memcpy(&binaryParametersBytes, sourceBuffer, sizeof(binaryParametersBytes)); sourceBuffer += sizeof(binaryParametersBytes); if (binaryParametersBytes > 0) { // unpack the binary JSON parameters QByteArray binaryJSONParameters { binaryParametersBytes, 0 }; memcpy(binaryJSONParameters.data(), sourceBuffer, binaryParametersBytes); sourceBuffer += binaryParametersBytes; // grab the parameter object from the packed binary representation of JSON auto newJsonDocument = QJsonDocument::fromBinaryData(binaryJSONParameters); QWriteLocker jsonParameterLocker { &_jsonParametersLock }; _jsonParameters = newJsonDocument.object(); } return sourceBuffer - startPosition; }