////////////////////////////////////////////////////////////////////////// // import //virtual BcBool ScnTexture::import( class CsPackageImporter& Importer, const Json::Value& Object ) { const std::string& FileName = Object[ "source" ].asString(); // Add root dependency. Importer.addDependency( FileName.c_str() ); // Load texture from file and create the data for export. ImgImage* pImage = Img::load( FileName.c_str() ); if( pImage != NULL ) { // Encode the image as a format. BcU8* pEncodedImageData = NULL; BcU32 EncodedImageDataSize = 0; // TODO: Take from parameters. ImgEncodeFormat EncodeFormat = imgEF_RGBA8; eRsTextureFormat TextureFormat = rsTF_RGBA8; if( pImage->encodeAs( EncodeFormat, pEncodedImageData, EncodedImageDataSize ) ) { // Serialize encoded image. BcStream BodyStream( BcFalse, 1024, EncodedImageDataSize ); BodyStream.push( pEncodedImageData, EncodedImageDataSize ); delete pEncodedImageData; pEncodedImageData = NULL; BcStream HeaderStream; THeader Header = { pImage->width(), pImage->height(), 1, TextureFormat }; HeaderStream << Header; // Delete image. delete pImage; // Add chunks and finish up. Importer.addChunk( BcHash( "header" ), HeaderStream.pData(), HeaderStream.dataSize(), 16, csPCF_IN_PLACE ); Importer.addChunk( BcHash( "body" ), BodyStream.pData(), BodyStream.dataSize() ); // return BcTrue; } else { BcPrintf( "Failed to encode image \"%s\"\n", FileName.c_str() ); } } else { BcPrintf( "Failed to load image \"%s\"\n", FileName.c_str() ); } return BcFalse; }
////////////////////////////////////////////////////////////////////////// // import //virtual BcBool AkBank::import( class CsPackageImporter& Importer, const Json::Value& Object ) { // Temporary hack until post-LD. std::string BankName = (*getName()); BcStream HeaderStream; THeader Header = { Importer.addString( BankName.c_str() ) }; HeaderStream << Header; // Add chunks and finish up. Importer.addChunk( BcHash( "header" ), HeaderStream.pData(), HeaderStream.dataSize(), 16, csPCF_IN_PLACE ); return BcTrue; }
////////////////////////////////////////////////////////////////////////// // import //virtual BcBool ScnRenderTarget::import( class CsPackageImporter& Importer, const Json::Value& Object ) { BcU32 Width = Object[ "width" ].asUInt(); BcU32 Height = Object[ "height" ].asUInt(); // Streams. BcStream HeaderStream; // Write header. ScnTextureHeader Header = { Width, Height, 1, 1, rsTT_2D, rsTF_RGBA8 }; HeaderStream << Header; Importer.addChunk( BcHash( "header" ), HeaderStream.pData(), HeaderStream.dataSize(), 16, csPCF_IN_PLACE ); return BcTrue; }
//////////////////////////////////////////////////////////////////////////
// save
// Serializes the imported package to disk at Path. Layout written, in order:
// header, string table, cross refs, dependencies, resource headers, chunk
// headers (with offsets fixed up), then packed chunk data. Writes to a .tmp
// file first and renames over the destination on success. Returns BcTrue on
// success, BcFalse if the temp file could not be opened.
BcBool CsPackageImporter::save( const BcPath& Path )
{
	// Create target folder.
	std::string PackedPath = *CsCore::pImpl()->getPackagePackedPath( "" );
	if( !boost::filesystem::exists( PackedPath ) )
	{
		boost::filesystem::create_directories( PackedPath );
	}

	// Open package output.
	// Write to a temp file so a failed save never clobbers an existing package.
	BcPath TempFile = *Path + ".tmp";
	if( File_.open( (*TempFile).c_str(), bcFM_WRITE ) )
	{
		// Generate string table.
		// Entries are packed back to back; the terminator is appended
		// separately since push() copies only size() bytes.
		BcStream StringTableStream;
		for( BcU32 Idx = 0; Idx < StringList_.size(); ++Idx )
		{
			const std::string& StringEntry( StringList_[ Idx ] );
			StringTableStream.push( StringEntry.c_str(), StringEntry.size() );
			StringTableStream << '\0';
		}

		// Setup header.
		Header_.Magic_ = CsPackageHeader::MAGIC;
		Header_.Version_ = CsPackageHeader::VERSION;
		Header_.Flags_ = csPF_DEFAULT; // TODO: Flags.
		Header_.StringTableBytes_ = (BcU32)StringTableStream.dataSize();
		Header_.TotalPackageCrossRefs_ = (BcU32)PackageCrossRefList_.size();
		// NOTE(review): the count here comes from PackageDependencyList_, but
		// the write loop below iterates PackageDependencyDataList_ -- if these
		// two members can ever differ in size the header won't match the data.
		// Confirm they are kept in lockstep.
		Header_.TotalPackageDependencies_ = (BcU32)PackageDependencyList_.size();
		Header_.TotalResources_ = (BcU32)ResourceHeaders_.size();
		Header_.TotalChunks_ = (BcU32)ChunkHeaders_.size();
		Header_.TotalAllocSize_ = 0;
		Header_.MinAlignment_ = 16; // TODO: Platform specific.
		Header_.MaxAlignment_ = 4096; // TODO: Platform specific.

		// Calculate package alloc size.
		// Sum of all runtime tables, each aligned to the minimum alignment.
		Header_.TotalAllocSize_ += (BcU32)StringTableStream.dataSize();
		Header_.TotalAllocSize_ += (BcU32)BcCalcAlignment( PackageCrossRefList_.size() * sizeof( CsPackageCrossRefData ), Header_.MinAlignment_ );
		Header_.TotalAllocSize_ += (BcU32)BcCalcAlignment( PackageDependencyList_.size() * sizeof( CsPackageDependencyData ), Header_.MinAlignment_ );
		Header_.TotalAllocSize_ += (BcU32)BcCalcAlignment( ResourceHeaders_.size() * sizeof( CsPackageResourceHeader ), Header_.MinAlignment_ );
		Header_.TotalAllocSize_ += (BcU32)BcCalcAlignment( ChunkHeaders_.size() * sizeof( CsPackageChunkHeader ), Header_.MinAlignment_ );
		Header_.TotalAllocSize_ += (BcU32)BcCalcAlignment( ChunkHeaders_.size() * sizeof( CsPackageChunkData ), Header_.MinAlignment_ );

		// Align total size to 1 page for the start of resource data.
		Header_.TotalAllocSize_ = (BcU32)BcCalcAlignment( Header_.TotalAllocSize_, Header_.MaxAlignment_ );

		// Set start of resource data.
		Header_.ResourceDataStart_ = Header_.TotalAllocSize_;

		// Add resource sizes.
		// Only managed chunks contribute their unpacked size to the runtime
		// allocation; each is aligned to its own required alignment.
		for( BcU32 Idx = 0; Idx < ChunkHeaders_.size(); ++Idx )
		{
			const CsPackageChunkHeader& ChunkHeader = ChunkHeaders_[ Idx ];
			if( ChunkHeader.Flags_ & csPCF_MANAGED )
			{
				Header_.TotalAllocSize_ += BcCalcAlignment( ChunkHeader.UnpackedBytes_, ChunkHeader.RequiredAlignment_ );
			}
		}

		// Write header.
		File_.write( &Header_, sizeof( Header_ ) );

		// Write string table.
		File_.write( StringTableStream.pData(), StringTableStream.dataSize() );

		// Write package cross refs.
		for( BcU32 Idx = 0; Idx < PackageCrossRefList_.size(); ++Idx )
		{
			CsPackageCrossRefData& CrossRefData = PackageCrossRefList_[ Idx ];
			File_.write( &CrossRefData, sizeof( CsPackageCrossRefData ) );
		}

		// Write package dependencies.
		for( BcU32 Idx = 0; Idx < PackageDependencyDataList_.size(); ++Idx )
		{
			CsPackageDependencyData& PackageDependencyData = PackageDependencyDataList_[ Idx ];
			File_.write( &PackageDependencyData, sizeof( CsPackageDependencyData ) );
		}

		// Write resource headers.
		for( BcU32 Idx = 0; Idx < ResourceHeaders_.size(); ++Idx )
		{
			CsPackageResourceHeader& ResourceHeader = ResourceHeaders_[ Idx ];
			File_.write( &ResourceHeader, sizeof( CsPackageResourceHeader ) );
		}

		// Write chunk headers.
		// Offsets are relative to the start of the packed chunk data and are
		// fixed up in write order.
		BcU32 Offset = 0;
		for( BcU32 Idx = 0; Idx < ChunkHeaders_.size(); ++Idx )
		{
			CsPackageChunkHeader& ChunkHeader = ChunkHeaders_[ Idx ];

			// Setup chunk offset.
			ChunkHeader.Offset_ = Offset;
			Offset += ChunkHeader.PackedBytes_;

			//
			File_.write( &ChunkHeader, sizeof( CsPackageChunkHeader ) );
		}

		// Write out all chunk data.
		// Assumes ChunkDatas_ and ChunkHeaders_ are parallel arrays of the
		// same length, indexed together.
		for( BcU32 Idx = 0; Idx < ChunkDatas_.size(); ++Idx )
		{
			const CsPackageChunkHeader& ChunkHeader = ChunkHeaders_[ Idx ];
			const CsPackageChunkData& ChunkData = ChunkDatas_[ Idx ];

			File_.write( ChunkData.pPackedData_, ChunkHeader.PackedBytes_ );
		}

		// Close file.
		File_.close();

		// Rename.
		// Remove any existing package first, then move the temp file into place.
		if( boost::filesystem::exists( *Path ) )
		{
			boost::filesystem::remove( *Path );
		}
		boost::filesystem::rename( *TempFile, *Path );

		//
		return BcTrue;
	}

	return BcFalse;
}
////////////////////////////////////////////////////////////////////////// // CsFileWriter BcBool CsFileWriter::save() { const BcChar* pType = BcStrStr( Name_.c_str(), "." ); Header_.ID_ = BcHash( pType ); if( File_.open( Name_.c_str(), bcFM_WRITE ) ) { // Generate string table. BcStream StringTableStream; for( BcU32 Idx = 0; Idx < StringList_.size(); ++Idx ) { const std::string& StringEntry( StringList_[ Idx ] ); StringTableStream.push( StringEntry.c_str(), StringEntry.size() + 1 ); } // Write header. Header_.NoofChunks_ = Chunks_.size(); Header_.StringTableSize_ = StringTableStream.dataSize(); File_.write( &Header_, sizeof( Header_ ) ); // Write string table. File_.write( StringTableStream.pData(), StringTableStream.dataSize() ); // Write chunks. BcU32 Offset = sizeof( CsFileHeader ) + StringTableStream.dataSize() + ( sizeof( CsFileChunk ) * Chunks_.size() ); { CsFileChunkNativeListIterator Iter = Chunks_.begin(); while( Iter != Chunks_.end() ) { CsFileChunkNative Chunk = (*Iter); CsFileChunk FileChunk; FileChunk.ID_ = Chunk.ID_; FileChunk.Offset_ = Offset; FileChunk.Size_ = Chunk.Size_; FileChunk.Hash_ = (BcU32)BcHash( Chunk.pData_, Chunk.Size_ ); Offset += Chunk.Size_; File_.write( &FileChunk, sizeof( FileChunk ) ); // Next ++Iter; } } // Write data. { CsFileChunkNativeListIterator Iter = Chunks_.begin(); while( Iter != Chunks_.end() ) { CsFileChunkNative Chunk = (*Iter); File_.write( Chunk.pData_, Chunk.Size_ ); // Next ++Iter; } } // Done and done. File_.close(); return BcTrue; } return BcFalse; }