// Loads a script module from either AngelScript source text or precompiled bytecode
// ("ASBC" file ID). Returns true on success; on success the module's user data is set
// to this resource so engine callbacks can map the module back to its ScriptFile.
bool ScriptFile::Load(Deserializer& source)
{
    PROFILE(LoadScript);

    ReleaseModule();

    // Create the module. Discard previous module if there was one
    asIScriptEngine* engine = script_->GetScriptEngine();
    scriptModule_ = engine->GetModule(GetName().CString(), asGM_ALWAYS_CREATE);
    if (!scriptModule_)
    {
        LOGERROR("Failed to create script module " + GetName());
        return false;
    }

    // Check if this file is precompiled bytecode
    if (source.ReadFileID() == "ASBC")
    {
        // Adapter that lets AngelScript pull bytecode straight from the Deserializer
        ByteCodeDeserializer deserializer = ByteCodeDeserializer(source);
        if (scriptModule_->LoadByteCode(&deserializer) >= 0)
        {
            LOGINFO("Loaded script module " + GetName() + " from bytecode");
            compiled_ = true;
            // Map script module to script resource with userdata
            scriptModule_->SetUserData(this);
            return true;
        }
        else
            return false;
    }
    else
        source.Seek(0); // Not bytecode: rewind past the 4-byte file ID that was just consumed

    // Not bytecode: add the initial section and check for includes
    if (!AddScriptSection(engine, source))
        return false;

    // Compile. Set script engine logging to retained mode so that potential exceptions can show all error info
    ScriptLogMode oldLogMode = script_->GetLogMode();
    script_->SetLogMode(LOGMODE_RETAINED);
    script_->ClearLogMessages();
    int result = scriptModule_->Build();
    String errors = script_->GetLogMessages();
    // Restore the previous log mode before acting on the result so it is never left retained
    script_->SetLogMode(oldLogMode);
    if (result < 0)
    {
        LOGERROR("Failed to compile script module " + GetName() + ":\n" + errors);
        return false;
    }
    // Build succeeded but may still have produced warnings; surface them
    if (!errors.Empty())
        LOGWARNING(errors);

    LOGINFO("Compiled script module " + GetName());
    compiled_ = true;
    // Map script module to script resource with userdata
    scriptModule_->SetUserData(this);
    return true;
}
// Loads a script module from either AngelScript source text or precompiled bytecode
// ("ASBC" file ID). Returns true on success; on success the module's user data is set
// to this resource so engine callbacks can map the module back to its ScriptFile.
//
// Fix: capture the script engine's retained log messages around Build() so that a
// failed compile reports the actual compiler diagnostics instead of a bare failure
// line (consistent with the sibling implementation of Load in this codebase).
bool ScriptFile::Load(Deserializer& source)
{
    PROFILE(LoadScript);

    ReleaseModule();

    // Create the module. Discard previous module if there was one
    asIScriptEngine* engine = script_->GetScriptEngine();
    scriptModule_ = engine->GetModule(GetName().CString(), asGM_ALWAYS_CREATE);
    if (!scriptModule_)
    {
        LOGERROR("Failed to create script module " + GetName());
        return false;
    }

    // Check if this file is precompiled bytecode
    if (source.ReadFileID() == "ASBC")
    {
        ByteCodeDeserializer deserializer = ByteCodeDeserializer(source);
        if (scriptModule_->LoadByteCode(&deserializer) >= 0)
        {
            LOGINFO("Loaded script module " + GetName() + " from bytecode");
            compiled_ = true;
            // Map script module to script resource with userdata
            scriptModule_->SetUserData(this);
            return true;
        }
        else
            return false;
    }
    else
        source.Seek(0); // Not bytecode: rewind past the 4-byte file ID that was just consumed

    // Not bytecode: add the initial section and check for includes
    if (!AddScriptSection(engine, source))
        return false;

    // Compile. Set script engine logging to retained mode so that compile errors can be
    // included in the error output
    ScriptLogMode oldLogMode = script_->GetLogMode();
    script_->SetLogMode(LOGMODE_RETAINED);
    script_->ClearLogMessages();
    int result = scriptModule_->Build();
    String errors = script_->GetLogMessages();
    // Restore the previous log mode before acting on the result so it is never left retained
    script_->SetLogMode(oldLogMode);
    if (result < 0)
    {
        LOGERROR("Failed to compile script module " + GetName() + ":\n" + errors);
        return false;
    }
    // Build succeeded but may still have produced warnings; surface them
    if (!errors.Empty())
        LOGWARNING(errors);

    LOGINFO("Compiled script module " + GetName());
    compiled_ = true;
    // Map script module to script resource with userdata
    scriptModule_->SetUserData(this);
    return true;
}
// Restores the animation's playback state from a chunk-based stream.
// Chunk headers are read one at a time; recognized chunks are deserialized
// in place, unrecognized ones are skipped via their recorded payload size.
// Iteration stops once the null terminator chunk has been consumed.
void Animation::Deserialize(Deserializer &deserializer)
{
    for (;;)
    {
        Serialization::ChunkHeader chunkHeader;
        deserializer.Read(chunkHeader);

        if (chunkHeader.id == Serialization::ChunkID_Animation_PlaybackState)
        {
            // Playback state: current track, play/pause flags, time, loop start.
            this->currentAnimationTrack.Deserialize(deserializer);
            deserializer.Read(this->isPlaying);
            deserializer.Read(this->isPaused);
            deserializer.Read(this->playbackTime);

            // The loop start index is serialized as a fixed-width 32-bit value.
            uint32_t serializedLoopStartIndex;
            deserializer.Read(serializedLoopStartIndex);
            this->loopStartIndex = static_cast<size_t>(serializedLoopStartIndex);
        }
        else
        {
            // Unknown chunk (including the null terminator): skip its payload.
            deserializer.Seek(chunkHeader.sizeInBytes);
        }

        if (chunkHeader.id == Serialization::ChunkID_Null)
        {
            break;
        }
    }
}
// First (potentially threaded) phase of loading a script file. Creates the module and
// stages raw data; the actual bytecode parse / source compile happens in EndLoad(),
// which must run on the main thread (see comments below).
bool ScriptFile::BeginLoad(Deserializer& source)
{
    ReleaseModule();
    // Drop any bytecode buffer staged by a previous load
    loadByteCode_.Reset();

    asIScriptEngine* engine = script_->GetScriptEngine();

    {
        // Module creation mutates the shared script engine, so serialize it across loaders
        MutexLock lock(script_->GetModuleMutex());

        // Create the module. Discard previous module if there was one
        scriptModule_ = engine->GetModule(GetName().CString(), asGM_ALWAYS_CREATE);
        if (!scriptModule_)
        {
            LOGERROR("Failed to create script module " + GetName());
            return false;
        }
    }

    // Check if this file is precompiled bytecode
    if (source.ReadFileID() == "ASBC")
    {
        // Perform actual parsing in EndLoad(); read data now
        loadByteCodeSize_ = source.GetSize() - source.GetPosition();
        loadByteCode_ = new unsigned char[loadByteCodeSize_];
        source.Read(loadByteCode_.Get(), loadByteCodeSize_);
        return true;
    }
    else
        source.Seek(0); // Not bytecode: rewind past the 4-byte file ID that was just consumed

    // Not bytecode: add the initial section and check for includes.
    // Perform actual building during EndLoad(), as AngelScript can not multithread module compilation,
    // and static initializers may access arbitrary engine functionality which may not be thread-safe
    return AddScriptSection(engine, source);
}
// Restores the mesh's material and geometry from a chunk-based stream. Each chunk
// carries a version; unsupported versions are logged (the chunk payload has by then
// been partially consumed — NOTE(review): no skip happens in that branch, confirm
// upstream writers never emit unsupported versions mid-stream). Unknown chunk IDs
// are skipped via their recorded payload size. Stops after the null chunk.
void Mesh::Deserialize(Deserializer &deserializer)
{
    Serialization::ChunkHeader header;
    do
    {
        deserializer.Read(header);
        switch (header.id)
        {
        case Serialization::ChunkID_Mesh_Material:
            {
                switch (header.version)
                {
                case 1:
                    {
                        // Material is stored as a path, then the material's own serialized state
                        String materialPath;
                        deserializer.ReadString(materialPath);

                        this->SetMaterial(materialPath.c_str());
                        this->material->Deserialize(deserializer);

                        break;
                    }
                default:
                    {
                        m_context.Logf("Error deserializing Mesh. Material chunk is an unsupported version (%d).", header.version);
                        break;
                    }
                }

                break;
            }
        case Serialization::ChunkID_Mesh_Geometry:
            {
                switch (header.version)
                {
                case 1:
                    {
                        // If this mesh owns its current vertex array, release it before replacing
                        if (this->deleteGeometry)
                        {
                            Renderer::DeleteVertexArray(this->geometry);
                        }

                        // The vertex array's real format is read by its own Deserialize();
                        // the VertexFormat() passed here is just a placeholder at creation
                        auto newVA = Renderer::CreateVertexArray(VertexArrayUsage_Static, VertexFormat());
                        newVA->Deserialize(deserializer);

                        this->SetGeometry(newVA);
                        // The freshly created array is now owned by this mesh
                        this->deleteGeometry = true;

                        break;
                    }
                default:
                    {
                        m_context.Logf("Error deserializing Mesh. Geometry chunk is an unsupported version (%d).", header.version);
                        break;
                    }
                }

                break;
            }
        default:
            {
                // We're not aware of the chunk, so we'll skip it.
                deserializer.Seek(header.sizeInBytes);
                break;
            }
        }
    } while (header.id != Serialization::ChunkID_Null);
}
// Loads uncompressed PCM audio from a RIFF/WAVE stream. Returns false (with an error
// logged) on malformed headers or a non-PCM format tag.
//
// Fixes over the previous version:
//  - The skip past the fmt chunk is now only performed when the chunk is larger than
//    the 16 bytes actually read; previously a fmt chunk smaller than 16 bytes made the
//    seek target go backwards and corrupted subsequent parsing.
//  - The data chunk length is clamped to the bytes actually remaining in the stream,
//    protecting against truncated files or a corrupt chunk size field.
bool Sound::LoadWav(Deserializer& source)
{
    WavHeader header;

    // Try to open
    memset(&header, 0, sizeof header);
    source.Read(&header.riffText_, 4);
    header.totalLength_ = source.ReadUInt();
    source.Read(&header.waveText_, 4);
    if (memcmp("RIFF", header.riffText_, 4) || memcmp("WAVE", header.waveText_, 4))
    {
        LOGERROR("Could not read WAV data from " + source.GetName());
        return false;
    }

    // Search for the FORMAT chunk, skipping any unrelated chunks before it
    for (;;)
    {
        source.Read(&header.formatText_, 4);
        header.formatLength_ = source.ReadUInt();
        if (!memcmp("fmt ", &header.formatText_, 4))
            break;

        source.Seek(source.GetPosition() + header.formatLength_);
        // A zero-length chunk or running off the end means a malformed file
        if (!header.formatLength_ || source.GetPosition() >= source.GetSize())
        {
            LOGERROR("Could not read WAV data from " + source.GetName());
            return false;
        }
    }

    // Read the FORMAT chunk (16 bytes of it)
    header.format_ = source.ReadUShort();
    header.channels_ = source.ReadUShort();
    header.frequency_ = source.ReadUInt();
    header.avgBytes_ = source.ReadUInt();
    header.blockAlign_ = source.ReadUShort();
    header.bits_ = source.ReadUShort();

    // Skip data if the format chunk was bigger than what we use. Only seek forward:
    // a fmt chunk shorter than 16 bytes must not move the read position backwards
    if (header.formatLength_ > 16)
        source.Seek(source.GetPosition() + header.formatLength_ - 16);

    // Check for correct format (1 = uncompressed PCM)
    if (header.format_ != 1)
    {
        LOGERROR("Could not read WAV data from " + source.GetName());
        return false;
    }

    // Search for the DATA chunk, skipping any unrelated chunks before it
    for (;;)
    {
        source.Read(&header.dataText_, 4);
        header.dataLength_ = source.ReadUInt();
        if (!memcmp("data", &header.dataText_, 4))
            break;

        source.Seek(source.GetPosition() + header.dataLength_);
        if (!header.dataLength_ || source.GetPosition() >= source.GetSize())
        {
            LOGERROR("Could not read WAV data from " + source.GetName());
            return false;
        }
    }

    // Allocate sound and load audio data. Clamp the declared data length to what is
    // actually left in the stream so a corrupt size field cannot cause an over-read
    unsigned length = header.dataLength_;
    unsigned remaining = source.GetSize() - source.GetPosition();
    if (length > remaining)
        length = remaining;

    SetSize(length);
    SetFormat(header.frequency_, header.bits_ == 16, header.channels_ == 2);
    source.Read(data_.Get(), length);

    // Convert 8-bit audio to signed (WAV stores 8-bit samples unsigned)
    if (!sixteenBit_)
    {
        for (unsigned i = 0; i < length; ++i)
            data_[i] -= 128;
    }

    return true;
}
// Rebuilds the entire model definition (bones, meshes, animation, segments, convex
// hulls) from a chunk-based stream. All existing data is cleared first. Chunks are
// peeked before being consumed so an unknown chunk can abort cleanly. Returns true
// on success; false when an unrecognized chunk ID is encountered.
//
// NOTE(review): malloc() results below are used unchecked, and a Read() failure
// mid-chunk is not detected — assumes the stream is well-formed; verify upstream
// validation if this ever consumes untrusted data.
bool ModelDefinition::Deserialize(Deserializer &deserializer)
{
    // Clear everything.
    this->ClearMeshes();
    this->ClearBones();
    this->ClearAnimations(true);    // <-- 'true' = clear animation segments, too.
    this->ClearConvexHulls();

    // We keep looping until we hit the null or unknown chunk.
    Serialization::ChunkHeader header;
    while (deserializer.Peek(&header, sizeof(header)) == sizeof(header))
    {
        bool finished = false;

        switch (header.id)
        {
        case Serialization::ChunkID_Model_Bones:
            {
                // Consume the header that was only peeked above
                deserializer.Seek(sizeof(header));

                if (header.version == 1)
                {
                    uint32_t boneCount;
                    deserializer.Read(boneCount);

                    for (uint32_t iBone = 0; iBone < boneCount; ++iBone)
                    {
                        // Name.
                        String name;
                        deserializer.ReadString(name);

                        // Local transform.
                        glm::vec3 position;
                        glm::quat rotation;
                        glm::vec3 scale;
                        deserializer.Read(position);
                        deserializer.Read(rotation);
                        deserializer.Read(scale);

                        // 4x4 offset matrix.
                        glm::mat4 offsetMatrix;
                        deserializer.Read(offsetMatrix);

                        auto bone = new Bone;
                        bone->SetName(name.c_str());
                        bone->SetPosition(position);
                        bone->SetRotation(rotation);
                        bone->SetScale(scale);
                        bone->SetOffsetMatrix(offsetMatrix);
                        this->AddBone(bone);

                        // We need to create a channel for this bone. We then need to map that channel to a bone.
                        auto &channel = m_animation.CreateChannel();
                        this->animationChannelBones.Add(bone, &channel);
                    }

                    // Parents: an index of 0xFFFFFFFF means "root" (no parent).
                    auto boneParentIndices = static_cast<uint32_t*>(malloc(boneCount * sizeof(uint32_t)));
                    deserializer.Read(boneParentIndices, boneCount * sizeof(uint32_t));

                    for (uint32_t iBone = 0; iBone < boneCount; ++iBone)
                    {
                        uint32_t parentIndex = boneParentIndices[iBone];
                        if (parentIndex != static_cast<uint32_t>(-1))
                        {
                            m_bones[parentIndex]->AttachChild(*m_bones[iBone]);
                        }
                    }

                    free(boneParentIndices);
                }
                else
                {
                    // Unsupported Version.
                    deserializer.Seek(header.sizeInBytes);
                }

                break;
            }
        case Serialization::ChunkID_Model_Meshes:
            {
                deserializer.Seek(sizeof(header));

                if (header.version == 1)
                {
                    uint32_t meshCount;
                    deserializer.Read(meshCount);

                    for (uint32_t iMesh = 0; iMesh < meshCount; ++iMesh)
                    {
                        ModelDefinition::Mesh newMesh(m_context);

                        // Name.
                        deserializer.ReadString(newMesh.name);

                        // Material
                        String materialName;
                        deserializer.ReadString(materialName);
                        newMesh.material = m_context.GetMaterialLibrary().Create(materialName.c_str());

                        // Geometry
                        VertexFormat vertexFormat;
                        vertexFormat.Deserialize(deserializer);
                        newMesh.geometry = Renderer::CreateVertexArray(VertexArrayUsage_Static, vertexFormat);

                        // Vertices.
                        uint32_t vertexCount;
                        deserializer.Read(vertexCount);

                        if (vertexCount > 0)
                        {
                            // Allocate GPU-side storage first (nullptr data), then stream directly
                            // into the mapped buffer to avoid an intermediate copy.
                            newMesh.geometry->SetVertexData(nullptr, static_cast<size_t>(vertexCount));

                            auto vertexData = newMesh.geometry->MapVertexData();
                            {
                                deserializer.Read(vertexData, vertexCount * vertexFormat.GetSizeInBytes());
                            }
                            newMesh.geometry->UnmapVertexData();
                        }

                        // Indices.
                        uint32_t indexCount;
                        deserializer.Read(indexCount);

                        if (indexCount > 0)
                        {
                            newMesh.geometry->SetIndexData(nullptr, static_cast<size_t>(indexCount));

                            auto indexData = newMesh.geometry->MapIndexData();
                            {
                                deserializer.Read(indexData, indexCount * sizeof(uint32_t));
                            }
                            newMesh.geometry->UnmapIndexData();
                        }

                        // Skinning Vertex Attributes
                        uint32_t skinningVertexAttributeCount;
                        deserializer.Read(skinningVertexAttributeCount);

                        if (skinningVertexAttributeCount > 0)
                        {
                            newMesh.skinningVertexAttributes = new SkinningVertexAttribute[skinningVertexAttributeCount];

                            // Per-vertex bone counts, followed by one flat array of all bone/weight
                            // pairs; the counts partition the flat array among the vertices.
                            auto counts = static_cast<uint16_t*>(malloc(skinningVertexAttributeCount * sizeof(uint16_t)));
                            deserializer.Read(counts, skinningVertexAttributeCount * sizeof(uint16_t));

                            uint32_t totalBoneWeights;
                            deserializer.Read(totalBoneWeights);

                            auto boneWeights = static_cast<BoneWeightPair*>(malloc(totalBoneWeights * sizeof(BoneWeightPair)));
                            deserializer.Read(boneWeights, totalBoneWeights * sizeof(BoneWeightPair));

                            auto currentBoneWeight = boneWeights;
                            for (uint32_t iVertex = 0; iVertex < skinningVertexAttributeCount; ++iVertex)
                            {
                                auto count = counts[iVertex];

                                // Here we allocate the buffer for the bones. We trick the vector here by modifying attributes directly.
                                newMesh.skinningVertexAttributes[iVertex].bones.Reserve(count);
                                newMesh.skinningVertexAttributes[iVertex].bones.count = count;

                                for (uint16_t iBone = 0; iBone < count; ++iBone)
                                {
                                    newMesh.skinningVertexAttributes[iVertex].bones[iBone] = *currentBoneWeight++;
                                }
                            }

                            free(counts);
                            free(boneWeights);
                        }

                        // Uniforms.
                        newMesh.defaultUniforms.Deserialize(deserializer);

                        // Finally, add the mesh.
                        this->AddMesh(newMesh);
                    }
                }
                else
                {
                    // Unsupported Version.
                    deserializer.Seek(header.sizeInBytes);
                }

                break;
            }
        case Serialization::ChunkID_Model_Animation:
            {
                deserializer.Seek(sizeof(header));

                if (header.version == 1)
                {
                    uint32_t keyFrameCount;
                    deserializer.Read(keyFrameCount);

                    for (size_t iKeyFrame = 0; iKeyFrame < keyFrameCount; ++iKeyFrame)
                    {
                        // Key frame time is serialized as float but stored as double
                        float time;
                        deserializer.Read(time);

                        size_t keyFrameIndex = m_animation.AppendKeyFrame(static_cast<double>(time));

                        // With the key frame added, we now need to iterate over each channel in the key frame.
                        uint32_t channelCount;
                        deserializer.Read(channelCount);

                        for (uint32_t iChannel = 0; iChannel < channelCount; ++iChannel)
                        {
                            uint32_t boneIndex;
                            deserializer.Read(boneIndex);

                            // Channels are keyed by bone; the mapping was built while
                            // deserializing the bones chunk (which must precede this one).
                            auto bone = m_bones[boneIndex];
                            assert(bone != nullptr);
                            {
                                auto iChannelBone = this->animationChannelBones.Find(bone);
                                assert(iChannelBone != nullptr);
                                {
                                    auto channel = iChannelBone->value;

                                    glm::vec3 position;
                                    glm::quat rotation;
                                    glm::vec3 scale;
                                    deserializer.Read(position);
                                    deserializer.Read(rotation);
                                    deserializer.Read(scale);

                                    auto key = new TransformAnimationKey(position, rotation, scale);
                                    channel->SetKey(keyFrameIndex, key);

                                    // We need to cache the key.
                                    this->animationKeyCache.PushBack(key);
                                }
                            }
                        }
                    }

                    deserializer.Read(this->animationAABBPadding);
                }
                else
                {
                    // Unsupported Version.
                    deserializer.Seek(header.sizeInBytes);
                }

                break;
            }
        case Serialization::ChunkID_Model_AnimationSegments:
            {
                deserializer.Seek(sizeof(header));

                if (header.version == 1)
                {
                    uint32_t animationSegmentCount;
                    deserializer.Read(animationSegmentCount);

                    for (uint32_t iSegment = 0; iSegment < animationSegmentCount; ++iSegment)
                    {
                        String name;
                        uint32_t startKeyFrame;
                        uint32_t endKeyFrame;

                        deserializer.ReadString(name);
                        deserializer.Read(startKeyFrame);
                        deserializer.Read(endKeyFrame);

                        m_animation.AddNamedSegment(name.c_str(), static_cast<size_t>(startKeyFrame), static_cast<size_t>(endKeyFrame));
                    }
                }
                else
                {
                    // Unsupported Version.
                    deserializer.Seek(header.sizeInBytes);
                }

                break;
            }
        case Serialization::ChunkID_Model_AnimationSequences:
            {
                deserializer.Seek(sizeof(header));

                if (header.version == 1)
                {
                    // Version 1 carries no payload for this chunk.
                }
                else
                {
                    // Unsupported Version.
                    deserializer.Seek(header.sizeInBytes);
                }

                break;
            }
        case Serialization::ChunkID_Model_ConvexHulls:
            {
                deserializer.Seek(sizeof(header));

                if (header.version == 1)
                {
                    uint32_t convexHullCount;
                    deserializer.Read(convexHullCount);

                    // Per-hull vertex/index counts, then flat vertex and index arrays
                    // that the counts partition among the hulls.
                    uint32_t* vertexCounts = static_cast<uint32_t*>(malloc(convexHullCount * sizeof(uint32_t)));
                    uint32_t* indexCounts = static_cast<uint32_t*>(malloc(convexHullCount * sizeof(uint32_t)));
                    deserializer.Read(vertexCounts, convexHullCount * sizeof(uint32_t));
                    deserializer.Read(indexCounts, convexHullCount * sizeof(uint32_t));

                    uint32_t totalVertexCount;
                    deserializer.Read(totalVertexCount);

                    uint32_t totalIndexCount;
                    deserializer.Read(totalIndexCount);

                    auto vertices = static_cast<float* >(malloc(totalVertexCount * sizeof(float)));
                    auto indices = static_cast<uint32_t*>(malloc(totalIndexCount * sizeof(uint32_t)));
                    deserializer.Read(vertices, totalVertexCount * sizeof(float));
                    deserializer.Read(indices, totalIndexCount * sizeof(uint32_t));

                    auto currentVertices = vertices;
                    auto currentIndices = indices;

                    for (uint32_t iConvexHull = 0; iConvexHull < convexHullCount; ++iConvexHull)
                    {
                        size_t vertexCount = static_cast<size_t>(vertexCounts[iConvexHull]);
                        size_t indexCount = static_cast<size_t>(indexCounts[iConvexHull]);

                        m_convexHulls.PushBack(new ConvexHull(currentVertices, vertexCount, currentIndices, indexCount));

                        // Now we need to move our pointers forward.
                        // Vertices advance by 3 floats (x,y,z) per vertex.
                        currentVertices += vertexCount * 3;
                        currentIndices += indexCount;
                    }

                    // Build Settings.
                    deserializer.Read(this->convexHullBuildSettings);

                    free(vertexCounts);
                    free(indexCounts);
                    free(vertices);
                    free(indices);
                }
                else
                {
                    // Unsupported Version.
                    deserializer.Seek(header.sizeInBytes);
                }

                break;
            }
        case Serialization::ChunkID_Null:
            {
                // Terminator chunk: consume its header and finish successfully.
                deserializer.Seek(sizeof(header));
                finished = true;
                break;
            }
        default:
            {
                // Unknown chunk = Error.
                finished = true;
                return false;
            }
        }

        if (finished)
        {
            break;
        }
    }

    // If we get here, we were successful.
    return true;
}
// Restores the staging area (insert/delete/update commands and hierarchy links) from
// its serialized main chunk. The staging area is cleared first so the result reflects
// only the deserialized state. Unsupported chunk versions are logged and skipped.
void SceneStateStackStagingArea::Deserialize(Deserializer &deserializer)
{
    // We should clear the staging area just in case.
    this->Clear();

    Serialization::ChunkHeader header;
    deserializer.Read(header);
    {
        assert(header.id == Serialization::ChunkID_SceneStateStackStagingArea);
        {
            switch (header.version)
            {
            case 1:
                {
                    // Inserts: just a list of scene node IDs.
                    uint32_t insertsCount;
                    deserializer.Read(insertsCount);
                    for (uint32_t i = 0; i < insertsCount; ++i)
                    {
                        uint64_t sceneNodeID;
                        deserializer.Read(sceneNodeID);
                        this->inserts.PushBack(sceneNodeID);
                    }

                    // Deletes: each carries the full serialized scene node so it can be restored.
                    uint32_t deletesCount;
                    deserializer.Read(deletesCount);
                    for (uint32_t i = 0; i < deletesCount; ++i)
                    {
                        uint64_t sceneNodeID;
                        deserializer.Read(sceneNodeID);

                        // The next chunk of data is the serialized data of the scene node. What we do here is read the data into a temp buffer, and then
                        // write that to a new BasicSerializer object.
                        uint32_t serializerSizeInBytes;
                        deserializer.Read(serializerSizeInBytes);

                        void* serializerData = malloc(serializerSizeInBytes);
                        deserializer.Read(serializerData, serializerSizeInBytes);

                        // The BasicSerializer takes a copy of the buffer; ownership of the
                        // serializer itself passes to the 'deletes' map.
                        auto sceneNodeSerializer = new BasicSerializer;
                        sceneNodeSerializer->Write(serializerData, serializerSizeInBytes);

                        this->deletes.Add(sceneNodeID, sceneNodeSerializer);

                        free(serializerData);
                    }

                    // Updates: just a list of scene node IDs.
                    uint32_t updatesCount;
                    deserializer.Read(updatesCount);
                    for (uint32_t i = 0; i < updatesCount; ++i)
                    {
                        uint64_t sceneNodeID;
                        deserializer.Read(sceneNodeID);
                        this->updates.PushBack(sceneNodeID);
                    }

                    // Hierarchy: child -> parent scene node ID pairs.
                    uint32_t hierarchyCount;
                    deserializer.Read(hierarchyCount);
                    for (uint32_t i = 0; i < hierarchyCount; ++i)
                    {
                        uint64_t sceneNodeID;
                        deserializer.Read(sceneNodeID);

                        uint64_t parentSceneNodeID;
                        deserializer.Read(parentSceneNodeID);

                        this->hierarchy.Add(sceneNodeID, parentSceneNodeID);
                    }

                    break;
                }
            default:
                {
                    g_Context->Logf("Error deserializing SceneStateStackStagingArea. The main chunk is an unsupported version (%d).", header.version);
                    deserializer.Seek(header.sizeInBytes);
                    break;
                }
            }
        }
    }
}
// Restores the component's flags and model from its serialized main chunk. Change
// notifications are batched: OnChanged is locked for the duration and fired once at
// the end with the accumulated change flags.
//
// Fix: the early-return path taken when SetModel() fails (e.g. the model file no
// longer exists) previously skipped UnlockOnChanged(), leaking the notification lock
// and dropping any flag change already recorded. It now unlocks and fires pending
// notifications before returning, mirroring the normal exit path.
void ModelComponent::Deserialize(Deserializer &deserializer)
{
    uint32_t whatChanged = 0;
    this->LockOnChanged();

    Serialization::ChunkHeader header;
    deserializer.Read(header);
    assert(header.id == Serialization::ChunkID_ModelComponent_Main);
    {
        // Remember where the payload starts so a partial read can be skipped precisely
        size_t deserializerStart = deserializer.Tell();

        switch (header.version)
        {
        case 1:
            {
                // Flags are first.
                uint32_t newFlags;
                deserializer.Read(newFlags);
                if (newFlags != this->flags)
                {
                    this->flags = newFlags;
                    whatChanged |= ChangeFlag_Flags;
                }

                // Next is a boolean indicating whether or not a model is defined here.
                bool hasModel;
                deserializer.Read(hasModel);

                // We will only have additional data at this point if we have actually have a model defined.
                if (hasModel)
                {
                    auto oldModel = this->model;

                    String modelPath;
                    deserializer.ReadString(modelPath);

                    if (!modelPath.IsEmpty())
                    {
                        this->SetModel(modelPath.c_str());

                        // If we failed to set the model (most likely due to the file not existing) we need to skip this chunk and return.
                        if (this->model == nullptr)
                        {
                            const size_t bytesReadSoFar = deserializer.Tell() - deserializerStart;
                            deserializer.Seek(header.sizeInBytes - bytesReadSoFar);

                            // Release the notification lock and fire any change already
                            // recorded (e.g. flags) before bailing out.
                            this->UnlockOnChanged();
                            if (whatChanged != 0)
                            {
                                this->OnChanged(whatChanged);
                            }
                            return;
                        }
                    }
                    else
                    {
                        // An empty path means a procedural model: attach a model built
                        // from an empty definition.
                        if (this->GetContext() != NULL)
                        {
                            ModelDefinition nullDefinition(*this->GetContext());
                            this->SetModel(new Model(nullDefinition), true);
                        }
                    }

                    assert(this->model != nullptr);
                    {
                        this->model->Deserialize(deserializer);
                    }

                    if (this->model != oldModel)
                    {
                        whatChanged |= ChangeFlag_Model;
                    }
                }

                break;
            }
        default:
            {
                if (this->GetContext() != NULL)
                {
                    this->GetContext()->Logf("Error deserializing ModelComponent. Main chunk has an unsupported version (%d).", header.version);
                }

                break;
            }
        }
    }
    this->UnlockOnChanged();

    if (whatChanged != 0)
    {
        this->OnChanged(whatChanged);
    }
}