/*static*/ KValueRef Codec::ExtractZipAsync(const ValueList& args)
{
    std::string zipFile = args.GetString(0);
    std::string directory = args.GetString(1);
    AutoPtr<AsyncJob> job = args.GetObject(2).cast<AsyncJob>();
    KMethodRef callback = 0;
    if (args.size() > 3)
    {
        callback = args.GetMethod(3);
    }

    std::ifstream stream(UTF8ToSystem(zipFile).c_str(), std::ios::binary);
    Poco::Zip::Decompress decompressor(stream, directory);
    try
    {
        decompressor.decompressAllFiles();
    }
    catch (std::exception& e)
    {
        Logger::Get("Codec")->Error("exception decompressing: %s", e.what());
        throw ValueException::FromFormat("Exception during extraction: %s", e.what());
    }
    stream.close();

    if (!callback.isNull())
    {
        ValueList args;
        args.push_back(Value::NewString(directory));
        RunOnMainThread(callback, args, true);
    }

    return Value::Undefined;
}

const shared_ptr<std::string> ZCompressedFileImage::stringData() const
{
    shared_ptr<ZLInputStream> stream = ZLFile(myPath).inputStream();

    shared_ptr<std::string> imageData = new std::string();

    if (!stream.isNull() && stream->open())
    {
        stream->seek(myOffset, false);
        ZLZDecompressor decompressor(myCompressedSize);

        static const size_t charBufferSize = 2048;
        char* charBuffer = new char[charBufferSize];
        std::vector<std::string> buffer;

        size_t s;
        do
        {
            s = decompressor.decompress(*stream, charBuffer, charBufferSize);
            if (s != 0)
            {
                buffer.push_back(std::string());
                buffer.back().append(charBuffer, s);
            }
        } while (s == charBufferSize);
        ZLStringUtil::append(*imageData, buffer);
        delete[] charBuffer;
    }

    return imageData;
}

int main(int argc, char **argv)
{
    if (!ReadParameter(argc, argv))
    {
        std::cout << "Bad Parameters.\n";
        return 1;
    }
    SetConfig();
    if (compress)
    {
        // Compress
        db_compress::Compressor compressor(outputFileName, schema, config);
        int iter_cnt = 0;
        while (1)
        {
            std::cout << "Iteration " << ++iter_cnt << " Starts\n";
            std::ifstream inFile(inputFileName);
            std::string str;
            int tuple_cnt = 0;
            while (std::getline(inFile, str))
            {
                std::stringstream sstream(str);
                std::string item;
                db_compress::Tuple tuple(schema.attr_type.size());
                db_compress::IntegerAttrValue attr(++tuple_cnt);
                tuple.attr[0] = &attr;
                size_t count = 0;
                std::vector< std::unique_ptr<ColorAttr> > vec;
                // The first item in each line is the tuple id, so skip it
                // when appending attributes.
                while (std::getline(sstream, item, ' '))
                {
                    if (++count > 1)
                        AppendAttr(std::stod(item), &tuple, count - 1);
                }
                if (count != schema.attr_type.size())
                {
                    std::cerr << "File Format Error!\n";
                }
                compressor.ReadTuple(tuple);
                if (!compressor.RequireFullPass() &&
                    tuple_cnt >= NonFullPassStopPoint)
                {
                    break;
                }
            }
            compressor.EndOfData();
            if (!compressor.RequireMoreIterations())
                break;
        }
    }
    else
    {
        // Decompress
        db_compress::Decompressor decompressor(inputFileName, schema);
        std::ofstream outFile(outputFileName);
        decompressor.Init();
        while (decompressor.HasNext())
        {
            db_compress::Tuple tuple(33);
            decompressor.ReadNextTuple(&tuple);
            for (size_t i = 0; i < schema.attr_type.size(); ++i)
            {
                double attr = ExtractAttr(&tuple, i);
                outFile << attr << (i == schema.attr_type.size() - 1 ? '\n' : ' ');
            }
        }
    }
    return 0;
}

extern "C" int iris_uo_huffman(char *in, int insz, char *out, int outsz)
{
    uo_DecompressingCopier decompressor;
    int in_size = insz, out_size = outsz;
    decompressor.initialise();
    decompressor(out, in, in_size, out_size);
    return out_size;
}

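/* Hypothetical caller sketch (not part of the original source). Whether the
 * returned value reflects the actual number of decompressed bytes depends on
 * uo_DecompressingCopier::operator() updating out_size by reference, which is
 * assumed here; the output capacity is a made-up placeholder. */
#include <cstdlib>

int demo_iris_uo_huffman(char *packed, int packed_len)
{
    const int out_cap = 64 * 1024;  /* assumed worst-case output size */
    char *out = static_cast<char*>(std::malloc(out_cap));
    int written = iris_uo_huffman(packed, packed_len, out, out_cap);
    std::free(out);
    return written;
}
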
void ZLZipHeader::skipEntry(ZLInputStream &stream, const ZLZipHeader &header)
{
    if (header.Flags & 0x08)
    {
        stream.seek(header.ExtraLength);
        ZLZDecompressor decompressor((size_t)-1);
        while (decompressor.decompress(stream, 0, 2048) == 2048)
        {
        }
        stream.seek(16);
    }
    else
    {
        stream.seek(header.ExtraLength + header.CompressedSize);
    }
}

TEST(Decompressor, GZip)
{
    StringWriter* writer = new StringWriter;
    GZipCompressor compressor(writer);
    EXPECT_LT(0, compressor.WriteString(IPSUM));
    EXPECT_LT(writer->GetBuffer().size(), compressor.totalBytesIn());

    StringWriter* outputWriter = new StringWriter;
    GZipDecompressor decompressor(outputWriter);
    EXPECT_EQ(decompressor.WriteString(writer->GetBuffer()), IPSUM.size());
    ASSERT_EQ(compressor.totalBytesIn(), outputWriter->GetBuffer().size());
    for (size_t i = 0; i < outputWriter->GetBuffer().size(); ++i)
    {
        EXPECT_EQ(IPSUM.data()[i], outputWriter->GetBuffer()[i]);
    }
}

status_t PackageReader::_ReadCompressedBuffer(off_t offset, void* buffer,
    size_t compressedSize, size_t uncompressedSize, uint32 compression)
{
    switch (compression) {
        case B_HPKG_COMPRESSION_NONE:
            return _ReadBuffer(offset, buffer, compressedSize);

        case B_HPKG_COMPRESSION_ZLIB:
        {
            // init the decompressor
            BufferDataOutput bufferOutput(buffer, uncompressedSize);
            ZlibDecompressor decompressor(&bufferOutput);
            status_t error = decompressor.Init();
            if (error != B_OK)
                return error;

            while (compressedSize > 0) {
                // read compressed buffer
                size_t toRead = std::min(compressedSize, fScratchBufferSize);
                error = _ReadBuffer(offset, fScratchBuffer, toRead);
                if (error != B_OK)
                    return error;

                // uncompress
                error = decompressor.DecompressNext(fScratchBuffer, toRead);
                if (error != B_OK)
                    return error;

                compressedSize -= toRead;
                offset += toRead;
            }

            error = decompressor.Finish();
            if (error != B_OK)
                return error;

            // verify that all data have been read
            if (bufferOutput.BytesWritten() != uncompressedSize) {
                fErrorOutput->PrintError("Error: Missing bytes in uncompressed "
                    "buffer!\n");
                return B_BAD_DATA;
            }

            return B_OK;
        }

        default:
            return B_BAD_DATA;
    }
}

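// A minimal sketch (not Haiku code) of the same chunked streaming pattern the
// function above implements via ZlibDecompressor, written directly against
// zlib. Assumes zlib is available; error handling is reduced to return codes.
#include <zlib.h>
#include <cstring>
#include <vector>

int inflateChunked(const unsigned char* src, size_t srcSize,
                   std::vector<unsigned char>& dst)
{
    z_stream zs;
    std::memset(&zs, 0, sizeof(zs));
    if (inflateInit(&zs) != Z_OK)  // analogous to ZlibDecompressor::Init()
        return -1;

    unsigned char outBuf[4096];
    zs.next_in = const_cast<unsigned char*>(src);
    zs.avail_in = static_cast<uInt>(srcSize);

    int ret = Z_OK;
    do {  // analogous to repeated DecompressNext() calls
        zs.next_out = outBuf;
        zs.avail_out = sizeof(outBuf);
        ret = inflate(&zs, Z_NO_FLUSH);
        if (ret != Z_OK && ret != Z_STREAM_END) {
            // Z_BUF_ERROR here also covers truncated input (no progress).
            inflateEnd(&zs);
            return -1;
        }
        dst.insert(dst.end(), outBuf, outBuf + (sizeof(outBuf) - zs.avail_out));
    } while (ret != Z_STREAM_END);

    inflateEnd(&zs);  // analogous to Finish()
    return 0;
}
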
Cell::PooledStack LazPerfStorage::read(
        const arbiter::Endpoint& endpoint,
        PointPool& pool,
        const Id& id) const
{
    auto compressed(io::ensureGet(endpoint, m_metadata.basename(id)));
    const Tail tail(*compressed, m_tailFields);

    const Schema& schema(pool.schema());
    const std::size_t pointSize(schema.pointSize());
    const std::size_t numPoints(tail.numPoints());
    const std::size_t numBytes(compressed->size() + tail.size());
    BinaryPointTable table(schema);
    pdal::PointRef pointRef(table, 0);

    if (id >= m_metadata.structure().coldIndexBegin() && !numPoints)
    {
        throw std::runtime_error("Invalid lazperf chunk - no numPoints");
    }
    if (tail.numBytes() && tail.numBytes() != numBytes)
    {
        std::cout << tail.numBytes() << " != " << numBytes << std::endl;
        throw std::runtime_error("Invalid lazperf chunk numBytes");
    }

    Data::PooledStack dataStack(pool.dataPool().acquire(numPoints));
    Cell::PooledStack cellStack(pool.cellPool().acquire(numPoints));

    DecompressionStream stream(*compressed);
    pdal::LazPerfDecompressor<DecompressionStream> decompressor(
            stream,
            schema.pdalLayout().dimTypes());

    for (Cell& cell : cellStack)
    {
        Data::PooledNode dataNode(dataStack.popOne());
        table.setPoint(*dataNode);
        decompressor.decompress(*dataNode, pointSize);

        cell.set(pointRef, std::move(dataNode));
    }

    assert(dataStack.empty());
    return cellStack;
}

std::unique_ptr<std::vector<char>> Compression::decompress(
        const std::vector<char>& data,
        const Schema& schema,
        const std::size_t numPoints)
{
    const std::size_t decompressedSize(numPoints * schema.pointSize());

    DecompressionStream decompressionStream(data);
    pdal::LazPerfDecompressor<DecompressionStream> decompressor(
            decompressionStream,
            schema.pdalLayout().dimTypes());

    std::unique_ptr<std::vector<char>> decompressed(
            new std::vector<char>(decompressedSize));

    decompressor.decompress(decompressed->data(), decompressed->size());

    return decompressed;
}

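// Hypothetical usage sketch (not from the original sources): feeds a
// lazperf-compressed chunk through the Compression::decompress overload above.
// The schema object and the origin of compressedChunk are assumptions; only
// the call shape of decompress() comes from the snippet itself.
#include <cassert>

void demoDecompress(const Schema& schema,
                    const std::vector<char>& compressedChunk,
                    std::size_t numPoints)
{
    std::unique_ptr<std::vector<char>> raw(
            Compression::decompress(compressedChunk, schema, numPoints));

    // Each point occupies schema.pointSize() bytes in the result.
    assert(raw->size() == numPoints * schema.pointSize());
}
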
void ZLZipHeader::skipEntry(ZLInputStream &stream, ZLZipHeader &header)
{
    switch (header.Signature)
    {
        default:
            break;
        case SignatureLocalFile:
            if ((header.Flags & 0x08) == 0x08 && header.CompressionMethod != 0)
            {
                stream.seek(header.ExtraLength, false);
                ZLZDecompressor decompressor((std::size_t)-1);
                std::size_t size;
                do
                {
                    size = decompressor.decompress(stream, 0, 2048);
                    header.UncompressedSize += size;
                } while (size == 2048);
                //stream.seek(16, false);
            }
            else
            {
                stream.seek(header.ExtraLength + header.CompressedSize, false);
            }
            break;
    }
}

std::unique_ptr<std::vector<char>> Compression::decompress(
        const std::vector<char>& data,
        const Schema& nativeSchema,
        const Schema* const wantedSchema,
        const std::size_t numPoints)
{
    if (!wantedSchema || *wantedSchema == nativeSchema)
    {
        return decompress(data, nativeSchema, numPoints);
    }

    // Get decompressor in the native schema.
    DecompressionStream decompressionStream(data);
    pdal::LazPerfDecompressor<DecompressionStream> decompressor(
            decompressionStream,
            nativeSchema.pdalLayout().dimTypes());

    // Allocate room for a single point in the native schema.
    std::vector<char> nativePoint(nativeSchema.pointSize());
    BinaryPointTable table(nativeSchema, nativePoint.data());
    pdal::PointRef pointRef(table, 0);

    // Get our result space, in the desired schema, ready.
    std::unique_ptr<std::vector<char>> decompressed(
            new std::vector<char>(numPoints * wantedSchema->pointSize(), 0));
    char* pos(decompressed->data());
    const char* end(pos + decompressed->size());

    while (pos < end)
    {
        decompressor.decompress(nativePoint.data(), nativePoint.size());

        for (const auto& d : wantedSchema->dims())
        {
            pointRef.getField(pos, d.id(), d.type());
            pos += d.size();
        }
    }

    return decompressed;
}

void read_compressed(config &cfg, std::istream &file, abstract_validator * validator)
{
    // An empty gzip file seems to confuse boost on msvc,
    // so return early if this is the case.
    if (file.peek() == EOF)
    {
        return;
    }

    boost::iostreams::filtering_stream<boost::iostreams::input> filter;
    filter.push(decompressor());
    filter.push(file);

    // This causes gzip_error (and the corresponding bz2 error), both derived
    // from std::ios_base::failure, to be thrown here. save_index_class::data
    // expects that, and config_cache::read_cache and other callers are also
    // capable of catching it.
    // Note that parser(cfg, filter, validator)(); -> tokenizer::tokenizer can
    // throw exceptions too (meaning this function already threw these
    // exceptions before this patch).
    // We try to fix https://svn.boost.org/trac/boost/ticket/5237 by not
    // creating empty gz files.
    filter.exceptions(filter.exceptions() | std::ios_base::badbit);

    /*
     * It sometimes seems the file is not empty but still contains no real
     * data. Filter that case here. The previous test may no longer be
     * required, but we simply keep it.
     */
    // On msvc, filter.peek() != EOF does not imply filter.good().
    // We never create empty compressed gzip files because boost's gzip fails
    // at doing that, but empty compressed bz2 files are possible.
    if (filter.peek() == EOF)
    {
        LOG_CF << "Empty compressed file or error at reading a compressed file.";
        return;
    }

    if (!filter.good())
    {
        LOG_CF << "filter.peek() != EOF but !filter.good(); this indicates a "
                  "malformed gz stream and can make wesnoth crash.";
    }

    parser(cfg, filter, validator)();
}

void ImapTransport::test()
{
#if 0
    qMailLog(IMAP) << "Rfc1951Compressor and Rfc1951Decompressor functional testing running...";
    // Mainly aiming to test for boundary conditions,
    // so make the compression/decompression buffers about the same size as the input/output.
    QByteArray data("This\n is some test data.\r\n The quick brown fox jumps over the lazy dog. 0123456789.\r\n");
    for (int i = 10; i <= 100; ++i) {
        for (int j = 10; j <= 100; ++j) {
            for (int k = 10; k <= 100; ++k) {
                Rfc1951Compressor compressor(i);
                Rfc1951Decompressor decompressor(j);
                QByteArray input(data.left(k));
                input += "\r\n";

                QByteArray compressed;
                {
                    QDataStream stream(&compressed, QIODevice::WriteOnly);
                    compressor.write(&stream, &input);
                }

                {
                    QByteArray output;
                    QBuffer buffer(&compressed);
                    buffer.open(QIODevice::ReadOnly);
                    decompressor.consume(&buffer);
                    while (decompressor.canReadLine()) {
                        output += decompressor.readLine();
                    }
                    if (input != output) {
                        qMailLog(IMAP) << "Test failure: input" << input.toHex() << "output" << output.toHex();
                        Q_ASSERT(input == output);
                    }
                }
            }
        }
    }
    qMailLog(IMAP) << "Rfc1951Compressor and Rfc1951Decompressor functional testing completed OK";
#endif
}

void ZLZipHeader::skipEntry(ZLInputStream &stream, ZLZipHeader &header)
{
    switch (header.Signature)
    {
        default:
            break;
        case SignatureLocalFile:
            if (header.Flags & 0x08)
            {
                stream.seek(header.ExtraLength, false);
                AppLog("ZLZDecompressor decompressor %d", (size_t)-1);
                ZLZDecompressor decompressor((size_t)-1);
                size_t size;
                do
                {
                    //AppLog("ZLZipHeader::skipEntry 1");
                    size = decompressor.decompress(stream, 0, BUFFER_SIZE);
                    //AppLog("decompress size=%d", size);
                    header.UncompressedSize += size;
                } while (size == BUFFER_SIZE);
                AppLog("header.UncompressedSize %d", header.UncompressedSize);
                //stream.seek(16, false);
            }
            else
            {
                stream.seek(header.ExtraLength + header.CompressedSize, false);
            }
            break;
    }
}

PooledInfoStack Compression::decompress(
        const std::vector<char>& data,
        const std::size_t numPoints,
        PointPool& pointPool)
{
    PooledDataStack dataStack(pointPool.dataPool().acquire(numPoints));
    PooledInfoStack infoStack(pointPool.infoPool().acquire(numPoints));

    BinaryPointTable table(pointPool.schema());
    pdal::PointRef pointRef(table, 0);

    const std::size_t pointSize(pointPool.schema().pointSize());

    DecompressionStream decompressionStream(data);
    pdal::LazPerfDecompressor<DecompressionStream> decompressor(
            decompressionStream,
            pointPool.schema().pdalLayout().dimTypes());

    RawInfoNode* info(infoStack.head());
    char* pos(nullptr);

    while (info)
    {
        info->construct(dataStack.popOne());
        pos = info->val().data();

        decompressor.decompress(pos, pointSize);

        table.setPoint(pos);
        info->val().point(pointRef);

        info = info->next();
    }

    return infoStack;
}

RawImage NefDecoder::decodeRawInternal()
{
    vector<TiffIFD*> data = mRootIFD->getIFDsWithTag(CFAPATTERN);

    if (data.empty())
        ThrowRDE("NEF Decoder: No image data found");

    TiffIFD* raw = data[0];
    int compression = raw->getEntry(COMPRESSION)->getInt();

    data = mRootIFD->getIFDsWithTag(MODEL);

    if (data.empty())
        ThrowRDE("NEF Decoder: No model data found");

    TiffEntry *offsets = raw->getEntry(STRIPOFFSETS);
    TiffEntry *counts = raw->getEntry(STRIPBYTECOUNTS);

    if (!data[0]->getEntry(MODEL)->getString().compare("NIKON D100 ")) { /**Sigh**/
        if (!mFile->isValid(offsets->getInt()))
            ThrowRDE("NEF Decoder: Image data outside of file.");
        if (!D100IsCompressed(offsets->getInt())) {
            DecodeD100Uncompressed();
            return mRaw;
        }
    }

    if (compression == 1) {
        DecodeUncompressed();
        return mRaw;
    }

    if (offsets->count != 1) {
        ThrowRDE("NEF Decoder: Multiple Strips found: %u", offsets->count);
    }
    if (counts->count != offsets->count) {
        ThrowRDE("NEF Decoder: Byte count number does not match strip size: "
                 "count:%u, strips:%u ", counts->count, offsets->count);
    }

    if (!mFile->isValid(offsets->getInt() + counts->getInt()))
        ThrowRDE("NEF Decoder: Invalid strip byte count. File probably truncated.");

    if (34713 != compression)
        ThrowRDE("NEF Decoder: Unsupported compression");

    uint32 width = raw->getEntry(IMAGEWIDTH)->getInt();
    uint32 height = raw->getEntry(IMAGELENGTH)->getInt();
    uint32 bitPerPixel = raw->getEntry(BITSPERSAMPLE)->getInt();

    mRaw->dim = iPoint2D(width, height);
    mRaw->createData();

    data = mRootIFD->getIFDsWithTag(MAKERNOTE);
    if (data.empty())
        ThrowRDE("NEF Decoder: No EXIF data found");

    TiffIFD* exif = data[0];
    TiffEntry *makernoteEntry = exif->getEntry(MAKERNOTE);
    const uchar8* makernote = makernoteEntry->getData();
    FileMap makermap((uchar8*)&makernote[10],
                     mFile->getSize() - makernoteEntry->getDataOffset() - 10);
    TiffParser makertiff(&makermap);
    makertiff.parseData();

    data = makertiff.RootIFD()->getIFDsWithTag((TiffTag)0x8c);

    if (data.empty())
        ThrowRDE("NEF Decoder: Decompression info tag not found");

    TiffEntry *meta;
    try {
        meta = data[0]->getEntry((TiffTag)0x96);
    } catch (TiffParserException &) {
        meta = data[0]->getEntry((TiffTag)0x8c);  // Fall back
    }

    try {
        NikonDecompressor decompressor(mFile, mRaw);

        ByteStream* metastream;
        if (getHostEndianness() == data[0]->endian)
            metastream = new ByteStream(meta->getData(), meta->count);
        else
            metastream = new ByteStreamSwap(meta->getData(), meta->count);

        decompressor.DecompressNikon(metastream, width, height, bitPerPixel,
                                     offsets->getInt(), counts->getInt());

        delete metastream;
    } catch (IOException &e) {
        mRaw->setError(e.what());
        // Let's ignore it, it may have delivered somewhat useful data.
    }

    return mRaw;
}

status_t BHttpRequest::_MakeRequest()
{
    if (fSocket == NULL)
        return B_NO_MEMORY;

    _EmitDebug(B_URL_PROTOCOL_DEBUG_TEXT, "Connection to %s on port %d.",
        fUrl.Authority().String(), fRemoteAddr.Port());
    status_t connectError = fSocket->Connect(fRemoteAddr);

    if (connectError != B_OK) {
        _EmitDebug(B_URL_PROTOCOL_DEBUG_ERROR, "Socket connection error %s",
            strerror(connectError));
        return connectError;
    }

    //! ProtocolHook:ConnectionOpened
    if (fListener != NULL)
        fListener->ConnectionOpened(this);

    _EmitDebug(B_URL_PROTOCOL_DEBUG_TEXT, "Connection opened, sending request.");

    _SendRequest();
    _SendHeaders();
    fSocket->Write("\r\n", 2);
    _EmitDebug(B_URL_PROTOCOL_DEBUG_TEXT, "Request sent.");

    if (fRequestMethod == B_HTTP_POST && fOptPostFields != NULL) {
        if (fOptPostFields->GetFormType() != B_HTTP_FORM_MULTIPART) {
            BString outputBuffer = fOptPostFields->RawData();
            _EmitDebug(B_URL_PROTOCOL_DEBUG_TRANSFER_OUT, "%s",
                outputBuffer.String());
            fSocket->Write(outputBuffer.String(), outputBuffer.Length());
        } else {
            for (BHttpForm::Iterator it = fOptPostFields->GetIterator();
                const BHttpFormData* currentField = it.Next(); ) {
                _EmitDebug(B_URL_PROTOCOL_DEBUG_TRANSFER_OUT,
                    it.MultipartHeader().String());
                fSocket->Write(it.MultipartHeader().String(),
                    it.MultipartHeader().Length());

                switch (currentField->Type()) {
                    default:
                    case B_HTTPFORM_UNKNOWN:
                        ASSERT(0);
                        break;

                    case B_HTTPFORM_STRING:
                        fSocket->Write(currentField->String().String(),
                            currentField->String().Length());
                        break;

                    case B_HTTPFORM_FILE:
                    {
                        BFile upFile(currentField->File().Path(), B_READ_ONLY);
                        char readBuffer[kHttpBufferSize];
                        ssize_t readSize;

                        readSize = upFile.Read(readBuffer, sizeof(readBuffer));
                        while (readSize > 0) {
                            fSocket->Write(readBuffer, readSize);
                            readSize = upFile.Read(readBuffer,
                                sizeof(readBuffer));
                        }

                        break;
                    }

                    case B_HTTPFORM_BUFFER:
                        fSocket->Write(currentField->Buffer(),
                            currentField->BufferSize());
                        break;
                }

                fSocket->Write("\r\n", 2);
            }

            BString footer = fOptPostFields->GetMultipartFooter();
            fSocket->Write(footer.String(), footer.Length());
        }
    } else if ((fRequestMethod == B_HTTP_POST || fRequestMethod == B_HTTP_PUT)
        && fOptInputData != NULL) {
        for (;;) {
            char outputTempBuffer[kHttpBufferSize];
            ssize_t read = fOptInputData->Read(outputTempBuffer,
                sizeof(outputTempBuffer));

            if (read <= 0)
                break;

            if (fOptInputDataSize < 0) {
                // Chunked transfer
                char hexSize[16];
                size_t hexLength = sprintf(hexSize, "%ld", read);

                fSocket->Write(hexSize, hexLength);
                fSocket->Write("\r\n", 2);
                fSocket->Write(outputTempBuffer, read);
                fSocket->Write("\r\n", 2);
            } else {
                fSocket->Write(outputTempBuffer, read);
            }
        }

        if (fOptInputDataSize < 0) {
            // Chunked transfer terminating sequence
            fSocket->Write("0\r\n\r\n", 5);
        }
    }

    fRequestStatus = kRequestInitialState;

    // Receive loop
    bool receiveEnd = false;
    bool parseEnd = false;
    bool readByChunks = false;
    bool decompress = false;
    status_t readError = B_OK;
    ssize_t bytesRead = 0;
    ssize_t bytesReceived = 0;
    ssize_t bytesTotal = 0;
    char* inputTempBuffer = new(std::nothrow) char[kHttpBufferSize];
    ssize_t inputTempSize = kHttpBufferSize;
    ssize_t chunkSize = -1;
    DynamicBuffer decompressorStorage;
    BPrivate::ZlibDecompressor decompressor(&decompressorStorage);

    while (!fQuit && !(receiveEnd && parseEnd)) {
        if (!receiveEnd) {
            fSocket->WaitForReadable();
            BNetBuffer chunk(kHttpBufferSize);
            bytesRead = fSocket->Read(chunk.Data(), kHttpBufferSize);

            if (bytesRead < 0) {
                readError = bytesRead;
                break;
            } else if (bytesRead == 0)
                receiveEnd = true;

            fInputBuffer.AppendData(chunk.Data(), bytesRead);
        } else
            bytesRead = 0;

        if (fRequestStatus < kRequestStatusReceived) {
            _ParseStatus();

            //! ProtocolHook:ResponseStarted
            if (fRequestStatus >= kRequestStatusReceived && fListener != NULL)
                fListener->ResponseStarted(this);
        }

        if (fRequestStatus < kRequestHeadersReceived) {
            _ParseHeaders();

            if (fRequestStatus >= kRequestHeadersReceived) {
                _ResultHeaders() = fHeaders;

                //! ProtocolHook:HeadersReceived
                if (fListener != NULL)
                    fListener->HeadersReceived(this);

                // Parse received cookies
                if (fContext != NULL) {
                    for (int32 i = 0; i < fHeaders.CountHeaders(); i++) {
                        if (fHeaders.HeaderAt(i).NameIs("Set-Cookie")) {
                            fContext->GetCookieJar().AddCookie(
                                fHeaders.HeaderAt(i).Value(), fUrl);
                        }
                    }
                }

                if (BString(fHeaders["Transfer-Encoding"]) == "chunked")
                    readByChunks = true;

                BString contentEncoding(fHeaders["Content-Encoding"]);
                if (contentEncoding == "gzip"
                    || contentEncoding == "deflate") {
                    decompress = true;
                    decompressor.Init();
                }

                int32 index = fHeaders.HasHeader("Content-Length");
                if (index != B_ERROR)
                    bytesTotal = atoi(fHeaders.HeaderAt(index).Value());
                else
                    bytesTotal = 0;
            }
        }

        if (fRequestStatus >= kRequestHeadersReceived) {
            // If Transfer-Encoding is chunked, we should read a complete
            // chunk in buffer before handling it
            if (readByChunks) {
                if (chunkSize >= 0) {
                    if ((ssize_t)fInputBuffer.Size() >= chunkSize + 2) {
                        // 2 more bytes to handle the closing CR+LF
                        bytesRead = chunkSize;
                        if (inputTempSize < chunkSize + 2) {
                            delete[] inputTempBuffer;
                            inputTempSize = chunkSize + 2;
                            inputTempBuffer
                                = new(std::nothrow) char[inputTempSize];
                        }

                        if (inputTempBuffer == NULL) {
                            readError = B_NO_MEMORY;
                            break;
                        }

                        fInputBuffer.RemoveData(inputTempBuffer,
                            chunkSize + 2);
                        chunkSize = -1;
                    } else {
                        // Not enough data, try again later
                        bytesRead = -1;
                    }
                } else {
                    BString chunkHeader;
                    if (_GetLine(chunkHeader) == B_ERROR) {
                        chunkSize = -1;
                        bytesRead = -1;
                    } else {
                        // Format of a chunk header:
                        // <chunk size in hex>[; optional data]
                        int32 semiColonIndex = chunkHeader.FindFirst(';', 0);

                        // Cut-off optional data if present
                        if (semiColonIndex != -1) {
                            chunkHeader.Remove(semiColonIndex,
                                chunkHeader.Length() - semiColonIndex);
                        }

                        chunkSize = strtol(chunkHeader.String(), NULL, 16);
                        PRINT(("BHP[%p] Chunk %s=%ld\n", this,
                            chunkHeader.String(), chunkSize));
                        if (chunkSize == 0)
                            fRequestStatus = kRequestContentReceived;

                        bytesRead = -1;
                    }
                }

                // A chunk of 0 bytes indicates the end of the chunked transfer
                if (bytesRead == 0)
                    receiveEnd = true;
            } else {
                bytesRead = fInputBuffer.Size();

                if (bytesRead > 0) {
                    if (inputTempSize < bytesRead) {
                        inputTempSize = bytesRead;
                        delete[] inputTempBuffer;
                        inputTempBuffer = new(std::nothrow) char[bytesRead];
                    }

                    if (inputTempBuffer == NULL) {
                        readError = B_NO_MEMORY;
                        break;
                    }

                    fInputBuffer.RemoveData(inputTempBuffer, bytesRead);
                }
            }

            if (bytesRead > 0) {
                bytesReceived += bytesRead;

                if (fListener != NULL) {
                    if (decompress) {
                        decompressor.DecompressNext(inputTempBuffer,
                            bytesRead);
                        ssize_t size = decompressorStorage.Size();
                        BStackOrHeapArray<char, 4096> buffer(size);
                        size = decompressorStorage.Read(buffer, size);
                        if (size > 0) {
                            fListener->DataReceived(this, buffer, size);
                        }
                    } else {
                        fListener->DataReceived(this, inputTempBuffer,
                            bytesRead);
                    }

                    fListener->DownloadProgress(this, bytesReceived,
                        bytesTotal);
                }

                if (bytesTotal > 0 && bytesReceived >= bytesTotal) {
                    receiveEnd = true;

                    if (decompress) {
                        decompressor.Finish();
                        ssize_t size = decompressorStorage.Size();
                        BStackOrHeapArray<char, 4096> buffer(size);
                        size = decompressorStorage.Read(buffer, size);
                        if (fListener != NULL && size > 0) {
                            fListener->DataReceived(this, buffer, size);
                        }
                    }
                }
            }
        }

        parseEnd = (fInputBuffer.Size() == 0);
    }

    fSocket->Disconnect();
    delete[] inputTempBuffer;

    if (readError != B_OK)
        return readError;

    return fQuit ? B_INTERRUPTED : B_OK;
}

RawImage NefDecoder::decodeRawInternal()
{
    vector<TiffIFD*> data = mRootIFD->getIFDsWithTag(CFAPATTERN);

    if (data.empty())
        ThrowRDE("NEF Decoder: No image data found");

    TiffIFD* raw = data[0];
    int compression = raw->getEntry(COMPRESSION)->getInt();

    data = mRootIFD->getIFDsWithTag(MODEL);

    if (data.empty())
        ThrowRDE("NEF Decoder: No model data found");

    TiffEntry *offsets = raw->getEntry(STRIPOFFSETS);
    TiffEntry *counts = raw->getEntry(STRIPBYTECOUNTS);

    if (!data[0]->getEntry(MODEL)->getString().compare("NIKON D100 ")) { /**Sigh**/
        if (!mFile->isValid(offsets->getInt()))
            ThrowRDE("NEF Decoder: Image data outside of file.");
        if (!D100IsCompressed(offsets->getInt())) {
            DecodeD100Uncompressed();
            return mRaw;
        }
    }

    if (compression == 1
        || (hints.find(string("force_uncompressed")) != hints.end())
        || NEFIsUncompressed(raw)) {
        DecodeUncompressed();
        return mRaw;
    }

    if (NEFIsUncompressedRGB(raw)) {
        DecodeSNefUncompressed();
        return mRaw;
    }

    if (offsets->count != 1) {
        ThrowRDE("NEF Decoder: Multiple Strips found: %u", offsets->count);
    }
    if (counts->count != offsets->count) {
        ThrowRDE("NEF Decoder: Byte count number does not match strip size: "
                 "count:%u, strips:%u ", counts->count, offsets->count);
    }
    if (!mFile->isValid(offsets->getInt(), counts->getInt()))
        ThrowRDE("NEF Decoder: Invalid strip byte count. File probably truncated.");

    if (34713 != compression)
        ThrowRDE("NEF Decoder: Unsupported compression");

    uint32 width = raw->getEntry(IMAGEWIDTH)->getInt();
    uint32 height = raw->getEntry(IMAGELENGTH)->getInt();
    uint32 bitPerPixel = raw->getEntry(BITSPERSAMPLE)->getInt();

    mRaw->dim = iPoint2D(width, height);
    mRaw->createData();

    data = mRootIFD->getIFDsWithTag((TiffTag)0x8c);

    if (data.empty())
        ThrowRDE("NEF Decoder: Decompression info tag not found");

    TiffEntry *meta;
    if (data[0]->hasEntry((TiffTag)0x96)) {
        meta = data[0]->getEntry((TiffTag)0x96);
    } else {
        meta = data[0]->getEntry((TiffTag)0x8c);  // Fall back
    }

    try {
        NikonDecompressor decompressor(mFile, mRaw);
        decompressor.uncorrectedRawValues = uncorrectedRawValues;

        ByteStream* metastream;
        if (getHostEndianness() == data[0]->endian)
            metastream = new ByteStream(meta->getData(), meta->count);
        else
            metastream = new ByteStreamSwap(meta->getData(), meta->count);

        decompressor.DecompressNikon(metastream, width, height, bitPerPixel,
                                     offsets->getInt(), counts->getInt());

        delete metastream;
    } catch (IOException &e) {
        mRaw->setError(e.what());
        // Let's ignore it, it may have delivered somewhat useful data.
    }

    return mRaw;
}

void AbstractDngDecompressor::decompressThreaded(
    const RawDecompressorThread* t) const {
  assert(t);
  assert(mRaw->dim.x > 0);
  assert(mRaw->dim.y > 0);
  assert(mRaw->getCpp() > 0);
  assert(mBps > 0 && mBps <= 32);

  if (compression == 1) {
    for (size_t i = t->start; i < t->end && i < slices.size(); i++) {
      auto e = &slices[i];

      UncompressedDecompressor decompressor(e->bs, mRaw);

      size_t thisTileLength =
          e->offY + e->height > static_cast<uint32>(mRaw->dim.y)
              ? mRaw->dim.y - e->offY
              : e->height;

      if (thisTileLength == 0)
        ThrowRDE("Tile is empty. Can not decode!");

      iPoint2D tileSize(mRaw->dim.x, thisTileLength);
      iPoint2D pos(0, e->offY);

      // FIXME: does bytestream have correct byteorder from the src file?
      bool big_endian = e->bs.getByteOrder() == Endianness::big;

      // DNG spec says that if not 8 or 16 bit/sample, always use big endian
      if (mBps != 8 && mBps != 16)
        big_endian = true;

      try {
        const int inputPitchBits = mRaw->getCpp() * mRaw->dim.x * mBps;
        assert(inputPitchBits > 0);

        const int inputPitch = inputPitchBits / 8;
        if (inputPitch == 0)
          ThrowRDE("Data input pitch is too short. Can not decode!");

        decompressor.readUncompressedRaw(tileSize, pos, inputPitch, mBps,
                                         big_endian ? BitOrder_MSB
                                                    : BitOrder_LSB);
      } catch (RawDecoderException& err) {
        mRaw->setError(err.what());
      } catch (IOException& err) {
        mRaw->setError(err.what());
      }
    }
  } else if (compression == 7) {
    for (size_t i = t->start; i < t->end && i < slices.size(); i++) {
      auto e = &slices[i];
      LJpegDecompressor d(e->bs, mRaw);
      try {
        d.decode(e->offX, e->offY, mFixLjpeg);
      } catch (RawDecoderException& err) {
        mRaw->setError(err.what());
      } catch (IOException& err) {
        mRaw->setError(err.what());
      }
    }
  } else if (compression == 8) {
    /* Deflate compression */
#ifdef HAVE_ZLIB
    std::unique_ptr<unsigned char[]> uBuffer;
    for (size_t i = t->start; i < t->end && i < slices.size(); i++) {
      auto e = &slices[i];
      DeflateDecompressor z(e->bs, mRaw, mPredictor, mBps);
      try {
        z.decode(&uBuffer, e->width, e->height, e->offX, e->offY);
      } catch (RawDecoderException& err) {
        mRaw->setError(err.what());
      } catch (IOException& err) {
        mRaw->setError(err.what());
      }
    }
#else
#pragma message                                                                \
    "ZLIB is not present! Deflate compression will not be supported!"
    ThrowRDE("deflate support is disabled.");
#endif
  } else if (compression == 0x884c) {
    /* Lossy DNG */
#ifdef HAVE_JPEG
    /* Each slice is a JPEG image */
    for (size_t i = t->start; i < t->end && i < slices.size(); i++) {
      auto e = &slices[i];
      JpegDecompressor j(e->bs, mRaw);
      try {
        j.decode(e->offX, e->offY);
      } catch (RawDecoderException& err) {
        mRaw->setError(err.what());
      } catch (IOException& err) {
        mRaw->setError(err.what());
      }
    }
#else
#pragma message "JPEG is not present! Lossy JPEG DNG will not be supported!"
    ThrowRDE("jpeg support is disabled.");
#endif
  } else
    mRaw->setError("AbstractDngDecompressor: Unknown compression");
}

Decompressor* Factory::decompressor(const std::string& dec_method)
{
    return decompressor(method(dec_method));
}

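// Hypothetical usage sketch (not from the original source): the overload above
// maps a method name to an enum via method() and delegates to the enum-keyed
// overload. The name "gzip", the way the Factory is obtained, and the
// ownership of the returned pointer are all assumptions.
void demoFactoryLookup(Factory& factory)
{
    Decompressor* dec = factory.decompressor("gzip");
    (void)dec;  // use dec->...; whether the caller must delete it is unspecified here
}
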
int main(int argc, char **argv)
{
    if (argc == 1)
        PrintHelpInfo();
    else
    {
        if (!ReadParameter(argc, argv))
        {
            std::cerr << "Bad Parameters.\n";
            return 1;
        }
        ReadConfig(configFileName);
        if (compress)
        {
            // Compress
            db_compress::Compressor compressor(outputFileName, schema, config);
            int iter_cnt = 0;
            while (1)
            {
                std::cout << "Iteration " << ++iter_cnt << " Starts\n";
                std::ifstream inFile(inputFileName);
                std::string str;
                int tuple_cnt = 0;
                while (std::getline(inFile, str))
                {
                    std::stringstream sstream(str);
                    std::string item;
                    db_compress::Tuple tuple(schema.attr_type.size());
                    size_t count = 0;
                    while (std::getline(sstream, item, ','))
                    {
                        AppendAttr(&tuple, item, attr_type[count], count);
                        ++count;
                    }
                    // The last item might be an empty string.
                    if (str[str.length() - 1] == ',')
                    {
                        AppendAttr(&tuple, "", attr_type[count], count);
                        ++count;
                    }
                    if (count != attr_type.size())
                    {
                        std::cerr << "File Format Error!\n";
                    }
                    compressor.ReadTuple(tuple);
                    if (!compressor.RequireFullPass() &&
                        ++tuple_cnt >= NonFullPassStopPoint)
                    {
                        break;
                    }
                }
                compressor.EndOfData();
                if (!compressor.RequireMoreIterations())
                    break;
            }
        }
        else
        {
            // Decompress
            db_compress::Decompressor decompressor(inputFileName, schema);
            std::ofstream outFile(outputFileName);
            decompressor.Init();
            while (decompressor.HasNext())
            {
                db_compress::Tuple tuple(attr_type.size());
                decompressor.ReadNextTuple(&tuple);
                for (size_t i = 0; i < attr_type.size(); ++i)
                {
                    std::string str = ExtractAttr(tuple, attr_type[i], i);
                    outFile << str << (i == attr_type.size() - 1 ? '\n' : ',');
                }
            }
        }
    }
    return 0;
}