// Writes the bytes of a JS buffer to `filename`, creating the parent directory
// first. If `compressed` is true, the buffer holds qCompress-style zlib data
// without its 4-byte size prefix, and `uncompressedSize` is re-attached so
// qUncompress can inflate it before writing.
// Returns a JS Error value when the file cannot be opened, otherwise an
// undefined QJSValue.
// NOTE(review): `category` is not used anywhere in this body — confirm it is
// intentional (e.g. kept for interface symmetry with sibling methods).
QJSValue ConversionOutput::addRawBuffer(const QString &category, const QString &filename, const QJSValue &buffer, bool compressed, int uncompressedSize) {
    QByteArray data = QBufferModule::getData(buffer);
    if (compressed) {
        /* We need to prepend the uncompressed size so qUncompress will do its work... */
        // qUncompress expects a 4-byte big-endian expected-size header; the
        // QDataStream default byte order (big-endian) writes exactly that,
        // followed by the raw deflate payload.
        QByteArray compressedData(data.size() + sizeof(int), Qt::Uninitialized);
        QDataStream dataStream(&compressedData, QIODevice::WriteOnly);
        dataStream << uncompressedSize;
        dataStream.writeRawData(data.data(), data.size());
        data = qUncompress(compressedData);
    }
    // Ensure the destination directory exists before opening the file.
    QFileInfo fi(filename);
    fi.dir().mkpath(".");
    QFile file(filename);
    if (!file.open(QFile::WriteOnly)) {
        QString message = QString("Unable to create %1").arg(filename);
        return QJSExceptionUtils::newError(mCommonJsModule->engine(), message);
    }
    file.write(data);
    return QJSValue();
}
bool Vocabulary::save( const QString& filename ) const { QByteArray data; QDataStream out( &data, QIODevice::WriteOnly ); out.setVersion( QDataStream::Qt_2_1 ); // 0x0010 means 0.10.x version. out << qint32( Vocabulary::magicNumber ) << qint16( 0x0010 ) << *this; QByteArray compressedData( qCompress( data ) ); QFile dataFile( filename ); QFileInfo dataFileInfo( dataFile ); QDir dataFileDir( dataFileInfo.absoluteDir() ); if( !dataFileDir.mkpath( dataFileDir.path() ) ) return( false ); if( !dataFile.open( QIODevice::WriteOnly ) ) return( false ); int ret = dataFile.write( compressedData ); dataFile.close(); if( ret == -1 || dataFile.error() != QFile::NoError ) { dataFile.unsetError(); return( false ); } return( true ); }
void LoadDataFromFile(const std::wstring & filename, std::initializer_list<const char*> dataNames, std::initializer_list<DataTarget*> dataTargets) { if (!dataNames.size()) ShowErrorMessage(L"At least one data chunk must be requested from LoadDataFromFile."); else if (dataNames.size() != dataTargets.size()) ShowErrorMessage(L"The lists passed to LoadDataFromFile must be the same size."); else { std::ifstream file(filename, std::ios_base::binary); file.seekg(0, std::ios_base::end); std::streamoff FileSize = file.tellg(); file.seekg(0, std::ios_base::beg); if (FileSize < 28) ShowErrorMessage(L"Invalid header (too small)"); else { DataFileHeader FileHeader; file.read((char*)&FileHeader, sizeof(DataFileHeader)); if (memcmp(&FileHeader, "PLIB\xDE\xAD\xBA\xBE\x00\x01\x00\x00", 12)) ShowErrorMessage(L"Invalid header (magic string doesn't match)"); //technically I'm subsuming the version in there too but AFAIK there are no other .j2d formats out there so it doesn't really matter else if (FileHeader.FileSize != FileSize) ShowErrorMessage(L"Invalid header (internal filesize doesn't match)"); else if (FileHeader.USize % sizeof(DataSubfileHeader) != 0) ShowErrorMessage(L"Invalid header (subfile header list is an invalid size)"); //in theory there should be a CRC check here but maybe I don't really care else { std::vector<DataSubfileHeader> SubfileHeaders(FileHeader.USize / sizeof(DataSubfileHeader)); std::vector<sf::Uint8> compressedData(FileHeader.CSize); file.read((char*)compressedData.data(), FileHeader.CSize); if (uncompress((Bytef*)SubfileHeaders.data(), (uLongf*)&FileHeader.USize, compressedData.data(), FileHeader.CSize) != Z_OK) ShowErrorMessageF(L"Decompression of %s failed", filename); else { auto name = dataNames.begin(); auto target = dataTargets.begin(); while (true) { for (const auto subfileHeader : SubfileHeaders) { if (!strcmp(*name, subfileHeader.Name)) { //found the right subfile compressedData.resize(subfileHeader.CSize); 
(*target)->resize(subfileHeader.USize); file.seekg(sizeof(DataFileHeader) + FileHeader.CSize + subfileHeader.Location, std::ios_base::beg); file.read((char*)compressedData.data(), subfileHeader.CSize); if (uncompress((*target)->data(), (uLongf*)&subfileHeader.USize, compressedData.data(), subfileHeader.CSize) != Z_OK) ShowErrorMessageF(L"Decompression of subfile %s in %s failed (%u into %u)", WStringFromCharArray(subfileHeader.Name).c_str(), filename.c_str(), subfileHeader.CSize, subfileHeader.USize); break; //don't need to keep looping through subfilenames } } if (++name == dataNames.end()) break; ++target; } } } } file.close(); } }
/** Reads inputSize bytes of deflate data from input and returns a stream over
 *  the inflated result.
 *
 *  Throws Exception(kReadError) if the full compressed payload cannot be read.
 *  The returned MemoryReadStream takes ownership of (and frees) the
 *  decompressed buffer.
 */
SeekableReadStream *decompressDeflate(ReadStream &input, size_t inputSize, size_t outputSize, int windowBits) {
    // Pull the whole compressed payload into a temporary buffer.
    ScopedArray<byte> packed(new byte[inputSize]);
    if (input.read(packed.get(), inputSize) != inputSize)
        throw Exception(kReadError);

    // Delegate the actual inflation to the buffer-based overload, then wrap
    // the result in an owning read stream.
    const byte *inflated = decompressDeflate(packed.get(), inputSize, outputSize, windowBits);
    return new MemoryReadStream(inflated, outputSize, true);
}
/* * This method compresses the given data. */ std::vector<BYTE> SocketCompressor::Compress(std::vector<BYTE>& data) { std::vector<BYTE> compressedData(data.size() + data.size() / 1000 + 12); m_CompressionStream.next_in = &data[0]; m_CompressionStream.avail_in = data.size(); m_CompressionStream.next_out = &compressedData[0]; m_CompressionStream.avail_out = compressedData.size(); deflate(&m_CompressionStream, Z_SYNC_FLUSH); compressedData.resize(compressedData.size() - m_CompressionStream.avail_out); return compressedData; }
// Unit test for SkPDFStream emission: verifies the serialized PDF stream
// object (dictionary + "stream"/"endstream" body) for a short literal, for
// the same stream after inserting an extra dictionary attribute, and — when
// flate is available — for a longer string both uncompressed and compressed.
// The expected /Length values (12, 167, 116) are byte counts pinned to the
// exact literals used below.
static void TestPDFStream(skiatest::Reporter* reporter) {
    char streamBytes[] = "Test\nFoo\tBar";
    SkRefPtr<SkMemoryStream> streamData = new SkMemoryStream(
        streamBytes, strlen(streamBytes), true);
    streamData->unref();  // SkRefPtr and new both took a reference.
    SkRefPtr<SkPDFStream> stream = new SkPDFStream(streamData.get());
    stream->unref();  // SkRefPtr and new both took a reference.
    SimpleCheckObjectOutput(
        reporter, stream.get(),
        "<</Length 12\n>> stream\nTest\nFoo\tBar\nendstream");
    // Adding a dictionary entry must show up in the emitted stream dict.
    stream->insert("Attribute", new SkPDFInt(42))->unref();
    SimpleCheckObjectOutput(reporter, stream.get(),
                            "<</Length 12\n/Attribute 42\n>> stream\n"
                            "Test\nFoo\tBar\nendstream");
    if (SkFlate::HaveFlate()) {
        // Long enough that the "too short to bother compressing" short-circuit
        // does not kick in.
        char streamBytes2[] = "This is a longer string, so that compression "
                              "can do something with it. With shorter strings, "
                              "the short circuit logic cuts in and we end up "
                              "with an uncompressed string.";
        SkAutoDataUnref streamData2(SkData::NewWithCopy(streamBytes2,
                                                        strlen(streamBytes2)));
        SkRefPtr<SkPDFStream> stream = new SkPDFStream(streamData2.get());
        stream->unref();  // SkRefPtr and new both took a reference.

        // Precompute the flate-compressed bytes to build the expected output.
        SkDynamicMemoryWStream compressedByteStream;
        SkFlate::Deflate(streamData2.get(), &compressedByteStream);
        SkAutoDataUnref compressedData(compressedByteStream.copyToData());

        // Check first without compression.
        SkDynamicMemoryWStream expectedResult1;
        expectedResult1.writeText("<</Length 167\n>> stream\n");
        expectedResult1.writeText(streamBytes2);
        expectedResult1.writeText("\nendstream");
        SkAutoDataUnref expectedResultData1(expectedResult1.copyToData());
        CheckObjectOutput(reporter, stream.get(),
                          (const char*) expectedResultData1->data(),
                          expectedResultData1->size(), true, false);

        // Then again with compression.
        SkDynamicMemoryWStream expectedResult2;
        expectedResult2.writeText("<</Filter /FlateDecode\n/Length 116\n"
                                  ">> stream\n");
        expectedResult2.write(compressedData->data(), compressedData->size());
        expectedResult2.writeText("\nendstream");
        SkAutoDataUnref expectedResultData2(expectedResult2.copyToData());
        CheckObjectOutput(reporter, stream.get(),
                          (const char*) expectedResultData2->data(),
                          expectedResultData2->size(), true, true);
    }
}
// Loads a vocabulary previously written by Vocabulary::save(): the file is a
// qCompress'ed QDataStream holding a magic number, a version tag, then the
// serialized vocabulary. On success, scalar fields and all terms are copied
// into this instance. Returns false on open failure, wrong magic number, or
// a data version newer than this build understands (> 0x0010).
bool Vocabulary::load( const QString& filename ) {
    QFile dataFile( filename );
    if( !dataFile.open( QIODevice::ReadOnly ) )
        return( false );

    QByteArray compressedData( dataFile.readAll() );
    QByteArray data( qUncompress( compressedData ) );
    QDataStream in( data );

    qint32 tempMagicNumber;
    qint16 tempVersion;
    Vocabulary tempVocab;

    // The magic/version ints are read before selecting the stream version —
    // this mirrors the write order in save().
    in >> tempMagicNumber >> tempVersion;
    if( tempMagicNumber != Vocabulary::magicNumber ) {
        cerr << "Wrong magic number: Incompatible vocabulary data file." << endl;
        return( false );
    }
    if( tempVersion > 0x0010 ) {
        cerr << "Vocabulary data file is from a more recent version. Upgrade toMOTko." << endl;
        return( false );
    }

    // Must match the QDataStream version save() used for the payload.
    in.setVersion( QDataStream::Qt_2_1 );
    in >> tempVocab;
    dataFile.close();

    // Copy the decoded fields into this instance, then re-add each term.
    id = tempVocab.getId();
    markedForStudy = tempVocab.isMarkedForStudy();
    title = tempVocab.getTitle();
    description = tempVocab.getDescription();
    author = tempVocab.getAuthor();
    creationDate = tempVocab.getCreationDate();
    modificationDate = tempVocab.getModificationDate();
    dirty = tempVocab.isDirty();
    for( TermMap::ConstIterator it = tempVocab.begin(); it != tempVocab.end(); it++ ) {
        const Term& term = *it;
        addTerm( term );
    }
    return( true );
}
// Newer-style unit test for SkPDFStream emission (macro-based assertions):
// checks the emitted PDF stream object for a short literal, after inserting
// an extra dictionary attribute, and for a longer flate-compressed string.
// /Length values (12, 116) are byte counts pinned to the exact literals.
static void TestPDFStream(skiatest::Reporter* reporter) {
    char streamBytes[] = "Test\nFoo\tBar";
    SkAutoTDelete<SkMemoryStream> streamData(new SkMemoryStream(
        streamBytes, strlen(streamBytes), true));
    SkAutoTUnref<SkPDFStream> stream(new SkPDFStream(streamData.get()));
    ASSERT_EMIT_EQ(reporter,
                   *stream,
                   "<</Length 12>> stream\nTest\nFoo\tBar\nendstream");
    // An inserted dictionary entry must appear in the emitted stream dict.
    stream->insertInt("Attribute", 42);
    ASSERT_EMIT_EQ(reporter,
                   *stream,
                   "<</Length 12\n/Attribute 42>> stream\n"
                   "Test\nFoo\tBar\nendstream");
    {
        // Long enough that the "too short to compress" short-circuit does not
        // kick in.
        char streamBytes2[] = "This is a longer string, so that compression "
                              "can do something with it. With shorter strings, "
                              "the short circuit logic cuts in and we end up "
                              "with an uncompressed string.";
        SkAutoDataUnref streamData2(SkData::NewWithCopy(streamBytes2,
                                                        strlen(streamBytes2)));
        SkAutoTUnref<SkPDFStream> stream(new SkPDFStream(streamData2.get()));

        // Precompute the flate-compressed bytes to build the expected output.
        SkDynamicMemoryWStream compressedByteStream;
        SkFlate::Deflate(streamData2.get(), &compressedByteStream);
        SkAutoDataUnref compressedData(compressedByteStream.copyToData());

        SkDynamicMemoryWStream expected;
        expected.writeText("<</Filter /FlateDecode\n/Length 116>> stream\n");
        expected.write(compressedData->data(), compressedData->size());
        expected.writeText("\nendstream");
        SkAutoDataUnref expectedResultData2(expected.copyToData());
        SkString result = emit_to_string(*stream);
        ASSERT_EQL(reporter,
                   result,
                   (const char*)expectedResultData2->data(),
                   expectedResultData2->size());
    }
}
// Decompresses an LZO-compressed Unreal package in place: each compressed
// chunk (or the single chunk of a fully-compressed package) is validated via
// the 0x9E2A83C1 tag, split into its per-block compressed/uncompressed size
// table, decompressed block by block with lzo1x, and the result is written
// back into Package->UPKStream. For normally compressed packages the summary
// is rewritten first with the compression flags cleared.
// Relies on file-scope LZO work buffers `in`/`out` of capacity IN_LEN
// (declared elsewhere in this file). Returns false on any validation or
// decompression failure; on success re-reads the package header.
bool DecompressLZOCompressedPackage(UPKReader *Package) {
    if (!Package->IsCompressed()) {
        _LogError("Package is not compressed!", "DecompressLZO");
        return false;
    }
    if (!Package->IsLZOCompressed() && !Package->IsFullyCompressed()) {
        _LogError("Cannot decompress non-LZO compressed packages!", "DecompressLZO");
        return false;
    }
    /// init lzo library
    int lzo_err;
    lzo_uint in_len;
    lzo_uint out_len;
    lzo_uint new_len;
    if (lzo_init() != LZO_E_OK) {
        _LogError("LZO library internal error: lzo_init() failed!", "DecompressLZO");
        return false;
    }
    lzo_memset(in, 0, IN_LEN);
    std::stringstream decompressed_stream;
    unsigned int NumCompressedChunks = Package->Summary.NumCompressedChunks;
    if (Package->IsFullyCompressed()) {
        // A fully-compressed package is one single chunk starting at offset 0.
        NumCompressedChunks = 1;
    } else {
        _LogDebug("Resetting package compression flags...", "DecompressLZO");
        /// reset compression flags
        Package->Summary.CompressionFlags = 0;
        Package->Summary.PackageFlags ^= (uint32_t)UPackageFlags::Compressed;
        Package->Summary.NumCompressedChunks = 0;
        /// serialize package summary
        std::vector<char> sVect = Package->SerializeSummary();
        decompressed_stream.write(sVect.data(), sVect.size());
    }
    _LogDebug("Decompressing...", "DecompressLZO");
    for (unsigned int i = 0; i < NumCompressedChunks; ++i) {
        if (Package->IsFullyCompressed()) {
            Package->UPKStream.seekg(0);
        } else {
            Package->UPKStream.seekg(Package->Summary.CompressedChunks[i].CompressedOffset);
        }
        _LogDebug("Decompressing chunk #" + ToString(i), "DecompressLZO");
        // Every chunk starts with the Unreal package signature tag.
        uint32_t tag = 0;
        Package->UPKStream.read(reinterpret_cast<char*>(&tag), 4);
        if (tag != 0x9E2A83C1) {
            _LogError("Missing 0x9E2A83C1 signature!", "DecompressLZO");
            return false;
        }
        uint32_t blockSize = 0;
        Package->UPKStream.read(reinterpret_cast<char*>(&blockSize), 4);
        if (blockSize != IN_LEN) {
            _LogError("Incorrect max block size!", "DecompressLZO");
            return false;
        }
        // First read only the chunk-level (compressed, uncompressed) pair;
        // the per-block pairs are read once numBlocks is known.
        std::vector<uint32_t> sizes(2); /// compressed/uncompressed pairs
        Package->UPKStream.read(reinterpret_cast<char*>(sizes.data()), 4 * sizes.size());
        size_t dataSize = sizes[1]; /// uncompressed data chunk size
        unsigned numBlocks = (dataSize + blockSize - 1) / blockSize;
        _LogDebug("numBlocks = " + ToString(numBlocks), "DecompressLZO");
        if (numBlocks < 1) {
            _LogError("Bad data!", "DecompressLZO");
            return false;
        }
        // Grow to hold one (compressed, uncompressed) pair per block, plus
        // the chunk-level pair already read (hence the +8/-8 byte offsets).
        sizes.resize((numBlocks + 1)*2);
        Package->UPKStream.read(reinterpret_cast<char*>(sizes.data()) + 8, 4 * sizes.size() - 8);
        for (unsigned i = 0; i <= numBlocks; ++i) {
            // NOTE(review): the `+ +` below is a stray unary plus on the
            // string literal — harmless, but probably a typo.
            _LogDebug("Compressed size = " + ToString(sizes[i * 2]) +
                      + "\tUncompressed size = " + ToString(sizes[i * 2 + 1]), "DecompressLZO");
        }
        std::vector<unsigned char> dataChunk(dataSize);
        std::vector<unsigned char> compressedData(sizes[0]);
        Package->UPKStream.read(reinterpret_cast<char*>(compressedData.data()), compressedData.size());
        size_t blockOffset = 0;
        size_t dataOffset = 0;
        // Decompress each block through the shared `in`/`out` work buffers.
        for (unsigned i = 1; i <= numBlocks; ++i) {
            out_len = sizes[i * 2]; /// compressed size
            lzo_memcpy(out, compressedData.data() + blockOffset, out_len);
            in_len = sizes[i * 2 + 1]; /// uncompressed size
            new_len = in_len;
            lzo_err = lzo1x_decompress(out, out_len, in, &new_len, NULL);
            if (lzo_err == LZO_E_OK && new_len == in_len) {
                _LogDebug("Decompressed " + ToString(out_len) + " bytes back into " + ToString(in_len), "DecompressLZO");
            } else {
                _LogError("LZO library internal error: decompression failed!", "DecompressLZO");
                return false;
            }
            lzo_memcpy(dataChunk.data() + dataOffset, in, in_len);
            blockOffset += out_len;
            dataOffset += in_len;
        }
        decompressed_stream.write(reinterpret_cast<char*>(dataChunk.data()), dataSize);
    }
    _LogDebug("Package decompressed successfully.", "DecompressLZO");
    // Replace the package stream with the decompressed image and re-parse.
    Package->UPKStream.str(decompressed_stream.str());
    return Package->ReadPackageHeader();
}
// Decodes a CDRM container from `dataStream`: reads the header ('CDRM' magic)
// and block table, inflates/copies each 16-byte-aligned block into one
// contiguous buffer, then dispatches on the payload's leading uint32 (PCD9
// textures, type-6 meshes, or raw .drm dumps). In PACK mode it additionally
// appends a re-encoded CDRM to tigerFiles[4] and returns its tagged offset;
// otherwise returns -1. `size` is an out-parameter receiving the total
// uncompressed payload size. Fatal format errors call exit().
// NOTE(review): uses std::auto_ptr with new[] (mismatched delete) — left
// untouched here as the surrounding codebase relies on this exact flow.
uint32_t TIGER::decodeCDRM(iostream *dataStream, uint32_t &size, string &path, string &name) {
    bool changes = false;
    CDRM_Header table;
    dataStream->read((char*)&table, sizeof(CDRM_Header));
    if (table.magic != 0x4D524443)  // 'CDRM' little-endian
        exit(-1);
    vector<CDRM_BlockHeader> BlockHeader(table.count);
    dataStream->read((char*)BlockHeader.data(), table.count*sizeof(CDRM_BlockHeader));
    size = 0;
    uint32_t total_size = 0;
    uint32_t offset = 0;
    for (uint32_t i = 0; i < BlockHeader.size(); i++) //Calculate total size.
    {
        size += BlockHeader[i].uncompressedSize;
    }
    total_size = size;
    auto_ptr<char> uncompressedData(new char[size]);
    // Gather every block (raw or zlib) into the single output buffer.
    for (uint32_t i = 0; i < BlockHeader.size(); i++) {
        uint32_t pos = dataStream->tellg();
        pos = ((pos + 15) / 16) * 16;
        dataStream->seekg(pos); //Data is 16byte aligned.
        dataStream->seekp(pos); //Data is 16byte aligned.
        if (BlockHeader[i].blockType == 1) {
            // Type 1: stored uncompressed, straight copy.
            dataStream->read(uncompressedData.get() + offset, BlockHeader[i].uncompressedSize);
            offset += BlockHeader[i].uncompressedSize;
        } else if (BlockHeader[i].blockType == 2) {
            // Type 2: zlib-compressed block.
            auto_ptr<char> compressedData(new char[BlockHeader[i].compressedSize]);
            dataStream->read(compressedData.get(), BlockHeader[i].compressedSize);
            int ret = Z_OK;
            uLong uncompressSize = size - offset;
            uint8_t *dataPtr = (uint8_t*)uncompressedData.get() + offset;
            ret = uncompress(dataPtr, &uncompressSize, (Bytef*)compressedData.get(), BlockHeader[i].compressedSize);
            offset += BlockHeader[i].uncompressedSize;
            if ((ret != Z_OK) && (ret != Z_STREAM_END)) {
                exit(ret);
            }
        }
    }
    CDRM_BlockFooter footer;
    dataStream->seekg(((((uint64_t)dataStream->tellg()) + 15) / 16) * 16); //Data is 16byte aligned.
    dataStream->read((char*)&footer, sizeof(footer));
    // Dispatch on the payload's leading magic word.
    switch (((uint32_t*)(uncompressedData.get()))[0]) {
        case '9DCP': //PCD9
        {
            uncompressedData = decodePCD9(uncompressedData, size, path, name);
            changes = true;
        }
        break;
        case 0x00000006:
        {
            // Type 6 payload: dumped as a .mesh file when unpacking.
            if ((currentMode == UNPACK)) {
                try {
                    string fullpath = path + "\\" + name + ".mesh";
                    cout << "Writing \"" << fullpath << "\"\n";
                    //Scene scene(uncompressedData.get() , size);
                    fstream output(fullpath, ios_base::binary | ios_base::out);
                    if (!output.is_open())
                        exit(errno);
                    output.write(uncompressedData.get(), size);
                    output.close();
                } catch (exception e) {
                    cout << e.what();
                }
            }
            //return -1;
        }
        // NOTE(review): no break above — type 6 deliberately(?) falls through
        // to the default .drm dump; confirm this is intended.
        default:
        {
            if (writeDRM && (currentMode == UNPACK)) {
                string fullpath = path + "\\" + name + ".drm";
                cout << "Writing \"" << fullpath << "\"\n";
                fstream output(fullpath, ios_base::binary | ios_base::out);
                if (!output.is_open())
                    exit(errno);
                output.write(uncompressedData.get(), size);
                output.close();
            }
        }
    }
    if (currentMode == PACK /*&& changes*/) {
        if (total_size != size) {
            cout << "Incorrect size\n";
            exit(-1);
        }
        /*Find next free CDRM*/
        // Walk existing CDRM records (each padded to a 0x800 boundary) until
        // a zero magic marks free space.
        tigerFiles[4]->seekg(0);
        tigerFiles[4]->seekp(0);
        uint32_t blockheaderpos = 0;
        CDRM_Header i;
        while (true) {
            uint32_t size = 0;  // shadows the out-param intentionally here
            i.load(tigerFiles[4]);
            if (i.magic == 0)
                break;
            size += i.count*sizeof(CDRM_BlockHeader);
            size = ((size + 15) / 16) * 16;
            CDRM_BlockHeader j;
            for (int k = 0;k < i.count;k++) {
                j.load(tigerFiles[4]);
                size += j.compressedSize;
            }
            size = (((size + 0x800 - 1) / 0x800) * 0x800);
            blockheaderpos += size;
            size -= sizeof(CDRM_Header);
            tigerFiles[4]->seekg(size, ios_base::cur);
            tigerFiles[4]->seekp(size, ios_base::cur);
        }
        tigerFiles[4]->seekg(blockheaderpos);
        tigerFiles[4]->seekp(blockheaderpos);
        // Write header + placeholder block table; the table is rewritten
        // below once the real compressed sizes are known.
        tigerFiles[4]->write((char*)&table, sizeof(CDRM_Header));
        blockheaderpos = tigerFiles[4]->tellp();
        tigerFiles[4]->write((char*)BlockHeader.data(), sizeof(CDRM_BlockHeader)*BlockHeader.size());
        tigerFiles[4]->seekp(((((uint64_t)tigerFiles[4]->tellp()) + 15) / 16) * 16); //Data is 16byte aligned.
        for (uint32_t i = 0; i < BlockHeader.size(); i++) {
            BlockHeader[i].uncompressedSize = size;
            if (BlockHeader[i].blockType == 1) {
                BlockHeader[i].compressedSize = BlockHeader[i].uncompressedSize;
                tigerFiles[4]->write(uncompressedData.get(), BlockHeader[i].uncompressedSize);
            } else if (BlockHeader[i].blockType == 2) {
                auto_ptr<char> compressedData(new char[BlockHeader[i].uncompressedSize]);
                int ret = Z_OK;
                BlockHeader[i].compressedSize = BlockHeader[i].uncompressedSize;
                ret = compress2((Bytef*)compressedData.get(), (uLongf*)(&BlockHeader[i].compressedSize), (Bytef*)uncompressedData.get(), BlockHeader[i].uncompressedSize, 4);
                if ((ret != Z_OK) && (ret != Z_STREAM_END)) {
                    cout << "Error Compressing\n";
                    exit(ret);
                }
                tigerFiles[4]->write(compressedData.get(), BlockHeader[i].compressedSize);
            }
        }
        uint32_t cdrm_footer = tigerFiles[4]->tellp();
        cdrm_footer = ((cdrm_footer + 15) / 16) * 16;
        footer.relative_offset = 0x800 - (cdrm_footer % 0x800);
        tigerFiles[4]->seekp(cdrm_footer); //Data is 16byte aligned.
        tigerFiles[4]->write((char*)&footer, sizeof(CDRM_BlockFooter));
        // Rewrite the block table with the actual compressed sizes.
        tigerFiles[4]->seekp(blockheaderpos);
        tigerFiles[4]->write((char*)BlockHeader.data(), sizeof(CDRM_BlockHeader)*BlockHeader.size());
        tigerFiles[4]->flush();
        return (blockheaderpos & 0xFFFFFF00) | 4;
    }
    return -1;
}
// Round-trip test for SkFlate: deflates `dataSize` bytes of generated test
// data, verifies the input stream was left untouched, inflates the result,
// verifies it matches the original, and finally asserts a minimum
// compression ratio (> 1.2) for the generated data.
static void TestFlate(skiatest::Reporter* reporter, SkMemoryStream* testStream,
                      size_t dataSize) {
    SkASSERT(testStream != NULL);

    SkAutoDataUnref testData(new_test_data(dataSize));
    SkASSERT(testData->size() == dataSize);

    testStream->setMemory(testData->data(), dataSize, /*copyData=*/ true);
    SkDynamicMemoryWStream compressed;
    bool deflateSuccess = SkFlate::Deflate(testStream, &compressed);
    REPORTER_ASSERT(reporter, deflateSuccess);

    // Check that the input data wasn't changed.
    size_t inputSize = testStream->getLength();
    if (inputSize == 0) {
        // Zero-size streams report their size via this special read key.
        inputSize = testStream->read(NULL, SkZeroSizeMemStream::kGetSizeKey);
    }
    REPORTER_ASSERT(reporter, dataSize == inputSize);
    if (dataSize == inputSize) {
        REPORTER_ASSERT(reporter, memcmp(testData->data(),
                                         testStream->getMemoryBase(),
                                         dataSize) == 0);
    }

    // Re-point the stream at the compressed bytes and inflate.
    size_t compressedSize = compressed.getOffset();
    SkAutoDataUnref compressedData(compressed.copyToData());
    testStream->setData(compressedData.get());
    SkDynamicMemoryWStream uncompressed;
    bool inflateSuccess = SkFlate::Inflate(testStream, &uncompressed);
    REPORTER_ASSERT(reporter, inflateSuccess);

    // Check that the input data wasn't changed.
    inputSize = testStream->getLength();
    if (inputSize == 0) {
        inputSize = testStream->read(NULL, SkZeroSizeMemStream::kGetSizeKey);
    }
    REPORTER_ASSERT(reporter, compressedSize == inputSize);
    if (compressedData->size() == inputSize) {
        REPORTER_ASSERT(reporter, memcmp(testStream->getMemoryBase(),
                                         compressedData->data(),
                                         compressedData->size()) == 0);
    }

    // Check that the uncompressed data matches the source data.
    SkAutoDataUnref uncompressedData(uncompressed.copyToData());
    REPORTER_ASSERT(reporter, dataSize == uncompressedData->size());
    if (dataSize == uncompressedData->size()) {
        REPORTER_ASSERT(reporter, memcmp(testData->data(),
                                         uncompressedData->data(),
                                         dataSize) == 0);
    }

    if (compressedSize < 1) {
        return;
    }

    double compressionRatio = static_cast<double>(dataSize) / compressedSize;
    // Assert that some compression took place.
    REPORTER_ASSERT(reporter, compressionRatio > 1.2);

    if (reporter->verbose()) {
        SkDebugf("Flate Test: \t input size: " SK_SIZE_T_SPECIFIER
                 "\tcompressed size: " SK_SIZE_T_SPECIFIER "\tratio: %.4g\n",
                 dataSize, compressedSize, compressionRatio);
    }
}
// CompressHelper
//------------------------------------------------------------------------------
// Reads `fileName` into memory, compresses and round-trips it to verify the
// Compressor is lossless, then benchmarks compression, decompression and a
// plain memcpy over NUM_REPEATS iterations, reporting MB/s for each.
void TestCompressor::CompressHelper( const char * fileName ) const {
    // read some test data into a file
    AutoPtr< void > data;
    size_t dataSize;
    {
        FileStream fs;
        TEST_ASSERT( fs.Open( fileName ) );
        dataSize = (size_t)fs.GetFileSize();
        data = (char *)ALLOC( dataSize );
        TEST_ASSERT( (uint32_t)fs.Read( data.Get(), dataSize ) == dataSize );
    }

    OUTPUT( "File : %s\n", fileName );
    OUTPUT( "Size : %u\n", (uint32_t)dataSize );

    // compress the data to obtain size
    Compressor comp;
    comp.Compress( data.Get(), dataSize );
    size_t compressedSize = comp.GetResultSize();
    // Keep a private copy of the compressed bytes for the decompression runs.
    AutoPtr< char > compressedData( (char *)ALLOC( compressedSize ) );
    memcpy( compressedData.Get(), comp.GetResult(), compressedSize );
    float compressedPerc = ( (float)compressedSize / (float)dataSize ) * 100.0f;
    OUTPUT( "Compressed Size: %u (%2.1f%% of original)\n", (uint32_t)compressedSize, compressedPerc );

    // decompress to check we get original data back
    Compressor decomp;
    decomp.Decompress( compressedData.Get() );
    size_t uncompressedSize = decomp.GetResultSize();
    TEST_ASSERT( uncompressedSize == dataSize );
    for ( size_t i=0; i<uncompressedSize; ++i )
    {
        TEST_ASSERT( ( (char *)data.Get() )[ i ] == ( (char *)decomp.GetResult() )[ i ] );
    }

    // speed checks
    //--------------
    const size_t NUM_REPEATS( 100 );

    // compress the data several times to get more stable throughput value
    Timer t;
    for ( size_t i=0; i<NUM_REPEATS; ++i )
    {
        Compressor c;
        c.Compress( data.Get(), dataSize );
        TEST_ASSERT( c.GetResultSize() == compressedSize );
    }
    float compressTimeTaken = t.GetElapsed();
    double compressThroughputMBs = ( ( (double)dataSize / 1024.0 * (double)NUM_REPEATS ) / compressTimeTaken ) / 1024.0;
    OUTPUT( " Comp Speed: %2.1f MB/s - %2.3fs (%u repeats)\n", (float)compressThroughputMBs, compressTimeTaken, NUM_REPEATS );

    // decompress the data
    Timer t2;
    for ( size_t i=0; i<NUM_REPEATS; ++i )
    {
        Compressor d;
        d.Decompress( compressedData.Get() );
        TEST_ASSERT( d.GetResultSize() == dataSize );
    }
    float decompressTimeTaken = t2.GetElapsed();
    double decompressThroughputMBs = ( ( (double)dataSize / 1024.0 * (double)NUM_REPEATS ) / decompressTimeTaken ) / 1024.0;
    OUTPUT( " Decomp Speed: %2.1f MB/s - %2.3fs (%u repeats)\n", (float)decompressThroughputMBs, decompressTimeTaken, NUM_REPEATS );

    // time memcpy to compare with
    Timer t0;
    for ( size_t i=0; i<NUM_REPEATS; ++i )
    {
        char * mem = (char *)ALLOC( dataSize );
        memcpy( mem, data.Get(), dataSize );
        FREE( mem );
    }
    float memcpyTimeTaken = t0.GetElapsed();
    double memcpyThroughputMBs = ( ( (double)dataSize / 1024.0 * (double)NUM_REPEATS ) / memcpyTimeTaken ) / 1024.0;
    OUTPUT( " MemCpy Speed: %2.1f MB/s - %2.3fs (%u repeats)\n", (float)memcpyThroughputMBs, memcpyTimeTaken, NUM_REPEATS );
}
// Compresses a glTF mesh with Open3DGC (SC3DMC): concatenates all primitive
// triangle indices into one indexed face set (tagging each triangle with its
// primitive index), registers every vertex attribute (position/normal/
// texcoord/color/weight/joint) with per-semantic quantization and prediction
// settings, encodes the set, appends the bitstream to the shared compression
// output stream, and records the compression metadata ("Open3DGC-compression"
// extension) on the mesh.
// floatAttributeIndexMapping receives attributeID -> float-attribute-slot
// entries so decoders can map attributes back.
void encodeOpen3DGCMesh(shared_ptr <GLTFMesh> mesh,
                        shared_ptr<JSONObject> floatAttributeIndexMapping,
                        const GLTFConverterContext& converterContext)
{
    o3dgc::SC3DMCEncodeParams params;
    o3dgc::IndexedFaceSet <unsigned short> ifs;

    //setup options
    // Quantization bit depths per attribute class.
    int qcoord    = 12;
    int qtexCoord = 10;
    int qnormal   = 10;
    int qcolor    = 10;
    int qWeights  = 8;

    GLTFOutputStream *outputStream = converterContext._compressionOutputStream;
    // The encoded blob is appended; remember where it starts for metadata.
    size_t bufferOffset = outputStream->length();

    O3DGCSC3DMCPredictionMode floatAttributePrediction = O3DGC_SC3DMC_PARALLELOGRAM_PREDICTION;

    unsigned int nFloatAttributes = 0;

    PrimitiveVector primitives = mesh->getPrimitives();
    unsigned int primitivesCount = (unsigned int)primitives.size();
    unsigned int allIndicesCount = 0;
    unsigned int allTrianglesCount = 0;

    std::vector <unsigned int> trianglesPerPrimitive;

    //First run through primitives to gather the number of indices and infer the number of triangles.
    for (unsigned int i = 0 ; i < primitivesCount ; i++) {
        shared_ptr<GLTF::GLTFPrimitive> primitive = primitives[i];
        shared_ptr <GLTF::GLTFIndices> uniqueIndices = primitive->getUniqueIndices();
        unsigned int indicesCount = (unsigned int)(uniqueIndices->getCount());
        //FIXME: assumes triangles, but we are guarded from issues by canEncodeOpen3DGCMesh
        allIndicesCount += indicesCount;
        trianglesPerPrimitive.push_back(indicesCount / 3);
    }

    //Then we setup the matIDs array and at the same time concatenate all triangle indices
    unsigned long *primitiveIDs = (unsigned long*)malloc(sizeof(unsigned long) * (allIndicesCount / 3));
    unsigned long *primitiveIDsPtr = primitiveIDs;
    unsigned short* allConcatenatedIndices = (unsigned short*)malloc(allIndicesCount * sizeof(unsigned short));
    unsigned short* allConcatenatedIndicesPtr = allConcatenatedIndices;

    for (unsigned int i = 0 ; i < trianglesPerPrimitive.size() ; i++) {
        unsigned int trianglesCount = trianglesPerPrimitive[i];
        // Every triangle of primitive i gets material/primitive ID i.
        for (unsigned int j = 0 ; j < trianglesCount ; j++) {
            primitiveIDsPtr[j] = i;
        }
        primitiveIDsPtr += trianglesCount;
        allTrianglesCount += trianglesCount;
        shared_ptr<GLTF::GLTFPrimitive> primitive = primitives[i];
        shared_ptr <GLTF::GLTFIndices> uniqueIndices = primitive->getUniqueIndices();
        unsigned int indicesCount = (unsigned int)(uniqueIndices->getCount());
        // Narrowing copy uint32 -> uint16 per index; presumably guarded by
        // canEncodeOpen3DGCMesh upstream — confirm.
        unsigned int* indicesPtr = (unsigned int*)uniqueIndices->getBufferView()->getBufferDataByApplyingOffset();
        for (unsigned int j = 0 ; j < indicesCount ; j++) {
            allConcatenatedIndicesPtr[j] = indicesPtr[j];
        }
        allConcatenatedIndicesPtr += indicesCount;
    }

    //FIXME:Open3DGC SetNCoordIndex is not a good name here (file against o3dgc)
    ifs.SetNCoordIndex(allTrianglesCount);
    ifs.SetCoordIndex((unsigned short * const ) allConcatenatedIndices);
    ifs.SetIndexBufferID(primitiveIDs);

    size_t vertexCount = 0;

    // Register each mesh attribute with the encoder, one slot per attribute.
    std::vector <GLTF::Semantic> semantics = mesh->allSemantics();
    for (unsigned int i = 0 ; i < semantics.size() ; i ++) {
        GLTF::Semantic semantic  = semantics[i];
        size_t attributesCount = mesh->getMeshAttributesCountForSemantic(semantic);
        for (size_t j = 0 ; j < attributesCount ; j++) {
            shared_ptr <GLTFMeshAttribute> meshAttribute = mesh->getMeshAttribute(semantic, j);
            vertexCount = meshAttribute->getCount();
            size_t componentsPerAttribute = meshAttribute->getComponentsPerAttribute();
            char *buffer = (char*)meshAttribute->getBufferView()->getBufferDataByApplyingOffset();
            switch (semantic) {
                case POSITION:
                    params.SetCoordQuantBits(qcoord);
                    params.SetCoordPredMode(floatAttributePrediction);
                    ifs.SetNCoord(vertexCount);
                    ifs.SetCoord((Real * const)buffer);
                    break;
                case NORMAL:
                    params.SetNormalQuantBits(qnormal);
                    params.SetNormalPredMode(O3DGC_SC3DMC_SURF_NORMALS_PREDICTION);
                    ifs.SetNNormal(vertexCount);
                    ifs.SetNormal((Real * const)buffer);
                    break;
                case TEXCOORD:
                    params.SetFloatAttributeQuantBits(nFloatAttributes, qtexCoord);
                    params.SetFloatAttributePredMode(nFloatAttributes, floatAttributePrediction);
                    ifs.SetNFloatAttribute(nFloatAttributes, vertexCount);
                    ifs.SetFloatAttributeDim(nFloatAttributes, componentsPerAttribute);
                    ifs.SetFloatAttributeType(nFloatAttributes, O3DGC_IFS_FLOAT_ATTRIBUTE_TYPE_TEXCOORD);
                    ifs.SetFloatAttribute(nFloatAttributes, (Real * const)buffer);
                    floatAttributeIndexMapping->setUnsignedInt32(meshAttribute->getID(), nFloatAttributes);
                    nFloatAttributes++;
                    break;
                case COLOR:
                    params.SetFloatAttributeQuantBits(nFloatAttributes, qcolor);
                    params.SetFloatAttributePredMode(nFloatAttributes, floatAttributePrediction);
                    ifs.SetNFloatAttribute(nFloatAttributes, vertexCount);
                    ifs.SetFloatAttributeDim(nFloatAttributes, componentsPerAttribute);
                    ifs.SetFloatAttributeType(nFloatAttributes, O3DGC_IFS_FLOAT_ATTRIBUTE_TYPE_COLOR);
                    ifs.SetFloatAttribute(nFloatAttributes, (Real * const)buffer);
                    floatAttributeIndexMapping->setUnsignedInt32(meshAttribute->getID(), nFloatAttributes);
                    nFloatAttributes++;
                    break;
                case WEIGHT:
                    params.SetFloatAttributeQuantBits(nFloatAttributes, qWeights);
                    params.SetFloatAttributePredMode(nFloatAttributes, O3DGC_SC3DMC_DIFFERENTIAL_PREDICTION);
                    ifs.SetNFloatAttribute(nFloatAttributes, vertexCount);
                    ifs.SetFloatAttributeDim(nFloatAttributes, componentsPerAttribute);
                    ifs.SetFloatAttributeType(nFloatAttributes, O3DGC_IFS_FLOAT_ATTRIBUTE_TYPE_WEIGHT);
                    ifs.SetFloatAttribute(nFloatAttributes, (Real * const)buffer);
                    floatAttributeIndexMapping->setUnsignedInt32(meshAttribute->getID(), nFloatAttributes);
                    nFloatAttributes++;
                    break;
                case JOINT:
                    // Joint IDs are encoded as a generic float attribute; the
                    // int-attribute path below is kept for reference.
                    /*
                    params.SetIntAttributePredMode(nIntAttributes, O3DGC_SC3DMC_DIFFERENTIAL_PREDICTION);
                    ifs.SetNIntAttribute(nIntAttributes, jointIDs.size() / numJointsPerVertex);
                    ifs.SetIntAttributeDim(nIntAttributes, numJointsPerVertex);
                    ifs.SetIntAttributeType(nIntAttributes, O3DGC_IFS_INT_ATTRIBUTE_TYPE_JOINT_ID);
                    ifs.SetIntAttribute(nIntAttributes, (long * const ) & (jointIDs[0]));
                    nIntAttributes++;
                    */
                    params.SetFloatAttributeQuantBits(nFloatAttributes, 10);
                    params.SetFloatAttributePredMode(nFloatAttributes, O3DGC_SC3DMC_PARALLELOGRAM_PREDICTION);
                    ifs.SetNFloatAttribute(nFloatAttributes, vertexCount);
                    ifs.SetFloatAttributeDim(nFloatAttributes, componentsPerAttribute);
                    ifs.SetFloatAttributeType(nFloatAttributes, O3DGC_IFS_FLOAT_ATTRIBUTE_TYPE_UNKOWN);
                    ifs.SetFloatAttribute(nFloatAttributes, (Real * const)buffer);
                    floatAttributeIndexMapping->setUnsignedInt32(meshAttribute->getID(), nFloatAttributes);
                    nFloatAttributes++;
                    break;
                default:
                    break;
            }
        }
    }

    params.SetNumFloatAttributes(nFloatAttributes);
    ifs.SetNumFloatAttributes(nFloatAttributes);
    shared_ptr<JSONObject> compressionObject = static_pointer_cast<JSONObject>(mesh->getExtensions()->createObjectIfNeeded("Open3DGC-compression"));

    ifs.ComputeMinMax(O3DGC_SC3DMC_MAX_ALL_DIMS);
    BinaryStream bstream(vertexCount * 8);
    SC3DMCEncoder <unsigned short> encoder;
    shared_ptr<JSONObject> compressedData(new JSONObject());
    compressedData->setInt32("verticesCount", vertexCount);
    compressedData->setInt32("indicesCount", allIndicesCount);
    //Open3DGC binary is disabled
    params.SetStreamType(converterContext.compressionMode == "binary" ? O3DGC_STREAM_TYPE_BINARY : O3DGC_STREAM_TYPE_ASCII);
#if DUMP_O3DGC_OUTPUT
    static int dumpedId = 0;
    COLLADABU::URI outputURI(converterContext.outputFilePath.c_str());
    std::string outputFilePath = outputURI.getPathDir() + GLTFUtils::toString(dumpedId) + ".txt";
    dumpedId++;
    SaveIFS(outputFilePath, ifs);
#endif
    encoder.Encode(params, ifs, bstream);

    // Record where/how the encoded blob can be found by the loader.
    compressedData->setString("mode", converterContext.compressionMode);
    compressedData->setUnsignedInt32("count", bstream.GetSize());
    compressedData->setUnsignedInt32("type", converterContext.profile->getGLenumForString("UNSIGNED_BYTE"));
    compressedData->setUnsignedInt32("byteOffset", bufferOffset);
    compressedData->setValue("floatAttributesIndexes", floatAttributeIndexMapping);
    compressionObject->setValue("compressedData", compressedData);
    //testDecode(mesh, bstream);
    outputStream->write((const char*)bstream.GetBuffer(0), bstream.GetSize());

    // Free the malloc'ed index buffers handed to the face set.
    if (ifs.GetCoordIndex()) {
        free(ifs.GetCoordIndex());
    }
    if (primitiveIDs) {
        free(primitiveIDs);
    }
}