// Decodes the image at full size, inserts the result into the
// ImageDecodingStore cache, and scales it to |scaledSize| when that differs
// from the full size. Returns the locked cache entry, or 0 if no usable
// frame could be decoded.
const ScaledImageFragment* ImageFrameGenerator::tryToDecodeAndScale(const SkISize& scaledSize)
{
    RefPtr<SharedBuffer> data;
    bool allDataReceived = false;
    {
        // Snapshot the encoded bytes under the lock so decoding below can run
        // without holding m_dataMutex.
        MutexLocker lock(m_dataMutex);

        // FIXME: We should do a shallow copy instead. Now we're restricted by the API of SharedBuffer.
        data = m_data->copy();
        allDataReceived = m_allDataReceived;
    }

    // Prefer a decoder deduced from the data sniffed content type; fall back
    // to the injected factory if one was provided.
    OwnPtr<ImageDecoder> decoder(adoptPtr(ImageDecoder::create(*data.get(), ImageSource::AlphaPremultiplied, ImageSource::GammaAndColorProfileApplied)));
    if (!decoder && m_imageDecoderFactory)
        decoder = m_imageDecoderFactory->create();
    if (!decoder)
        return 0;

    decoder->setData(data.get(), allDataReceived);
    ImageFrame* frame = decoder->frameBufferAtIndex(0);
    if (!frame || frame->status() == ImageFrame::FrameEmpty)
        return 0;

    // A partially decoded frame is still cached, but flagged as incomplete.
    bool isComplete = frame->status() == ImageFrame::FrameComplete;
    SkBitmap fullSizeBitmap = frame->getSkBitmap();
    ASSERT(fullSizeBitmap.width() == m_fullSize.width() && fullSizeBitmap.height() == m_fullSize.height());

    const ScaledImageFragment* fullSizeImage = ImageDecodingStore::instance()->insertAndLockCache(
        this, ScaledImageFragment::create(m_fullSize, fullSizeBitmap, isComplete));

    if (m_fullSize == scaledSize)
        return fullSizeImage;
    return tryToScale(fullSizeImage, scaledSize);
}
template <J_COLOR_SPACE colorSpace> bool outputRows(JPEGImageReader* reader, ImageFrame& buffer) { JSAMPARRAY samples = reader->samples(); jpeg_decompress_struct* info = reader->info(); int width = info->output_width; while (info->output_scanline < info->output_height) { // jpeg_read_scanlines will increase the scanline counter, so we // save the scanline before calling it. int y = info->output_scanline; // Request one scanline: returns 0 or 1 scanlines. if (jpeg_read_scanlines(info, samples, 1) != 1) return false; #if USE(QCMSLIB) if (reader->colorTransform() && colorSpace == JCS_RGB) qcms_transform_data(reader->colorTransform(), *samples, *samples, width); #endif ImageFrame::PixelData* pixel = buffer.getAddr(0, y); for (int x = 0; x < width; ++pixel, ++x) setPixel<colorSpace>(buffer, pixel, samples, x); } buffer.setPixelsChanged(true); return true; }
// Returns the frame at |index| as an SkImage, preferring the lazy-decoding
// path when frame metadata is available; falls back to the eager decoder.
// Returns nullptr on failure.
sk_sp<SkImage> DeferredImageDecoder::createFrameAtIndex(size_t index)
{
    if (m_frameGenerator && m_frameGenerator->decodeFailed())
        return nullptr;

    prepareLazyDecodedFrames();

    if (index < m_frameData.size()) {
        DeferredFrameData& lazyFrame = m_frameData[index];
        lazyFrame.m_frameBytes = m_actualDecoder
            ? m_actualDecoder->frameBytesAtIndex(index)
            : m_size.area() * sizeof(ImageFrame::PixelData);
        // ImageFrameGenerator has the latest known alpha state. There will be a
        // performance boost if this frame is opaque.
        DCHECK(m_frameGenerator);
        return createFrameImageAtIndex(index, !m_frameGenerator->hasAlpha(index));
    }

    if (!m_actualDecoder || m_actualDecoder->failed())
        return nullptr;

    ImageFrame* decodedFrame = m_actualDecoder->frameBufferAtIndex(index);
    if (!decodedFrame || decodedFrame->getStatus() == ImageFrame::FrameEmpty)
        return nullptr;

    if (decodedFrame->getStatus() == ImageFrame::FrameComplete)
        return decodedFrame->finalizePixelsAndGetImage();
    return SkImage::MakeFromBitmap(decodedFrame->bitmap());
}
// Opens one ImageFrame window per micro-image associated with |itemSelected|,
// cascading them from the parent window's top-right corner, and increments
// |nbImg| by the number of images shown. Errors are logged, not rethrown.
ListesAuxShowListeImages::ListesAuxShowListeImages(wxWindow *parent, wxString itemSelected, int &nbImg)
    : wxFrame(parent, wxID_ANY, wxEmptyString)
{
    try {
        // Stack allocation fixes a leak: the previous code `new`ed this helper
        // and never deleted it.
        ObjPhotoGenealogy objPhotoGenealogy;
        vector<wxImage> imgVect = model->arrayImageMicroFromNomLatin(itemSelected);
        wxArrayInt idVect = model->idImageMicroArrayFromNomLatin(itemSelected);
        // size_t index avoids the signed/unsigned comparison of the old `int ijk` loop.
        for (size_t ijk = 0; ijk < imgVect.size(); ijk++) {
            wxImage imgAux = imgVect[ijk];
            wxString title = objPhotoGenealogy.getFullNomFromIdPhoto(idVect[ijk]);
            wxRect rect = parent->GetScreenRect();
            int x0 = rect.x;
            int y0 = rect.y;
            int w0 = rect.width;
            // Cascade each new frame by 16px; x0 appears twice in the x offset
            // in the original code — kept as-is (presumably intentional margin).
            int xPos = x0 + w0 + x0 + (nbImg + (int)ijk) * 16;
            int yPos = y0 + (nbImg + (int)ijk) * 16;
            ImageFrame *imageFrame = new ImageFrame(parent, imgAux, title, wxPoint(xPos, yPos), wxDEFAULT_FRAME_STYLE);
            imageFrame->SetBackgroundColour(Couleurs::backgColor);
            imageFrame->Show();
        }
        nbImg = nbImg + (int)imgVect.size();
    } catch (const exception &e) {
        Aux::logsStr("", e.what(), logPut);
    }
}
// Returns whether the frame at |index| may contain non-opaque pixels.
// Conservatively answers true when the decoder cannot yet tell.
bool ImageSource::frameHasAlphaAtIndex(size_t index)
{
#ifdef ANDROID_ANIMATED_GIF
    // Animated GIF path: ask the GIF decoder's frame buffer directly.
    if (m_decoder.m_gifDecoder) {
        ImageFrame* buffer = m_decoder.m_gifDecoder->frameBufferAtIndex(index);
        if (!buffer || buffer->status() == ImageFrame::FrameEmpty)
            return false;
        return buffer->hasAlpha();
    }
#else
    // Without animated GIF support only frame 0 exists.
    SkASSERT(0 == index);
#endif
    if (NULL == m_decoder.m_image)
        return true; // if we're not sure, assume the worse-case
    const PrivateAndroidImageSourceRec& decoder = *m_decoder.m_image;
    // if we're 16bit, we know even without all the data available
    // (RGB565 has no alpha channel).
    if (decoder.bitmap().getConfig() == SkBitmap::kRGB_565_Config)
        return false;
    if (!decoder.fAllDataReceived)
        return true; // if we're not sure, assume the worse-case
    // All data received: the bitmap's opacity flag is authoritative.
    return !decoder.bitmap().isOpaque();
}
// Applies the image's ICC color profile (via QCMS) to the rows decoded since
// the previous call, writing the transformed pixels into |buffer|.
// |data|/|size| is the encoded WebP stream, used only to locate the profile
// on first use. Tracks progress in m_decodedHeight so each row is
// transformed exactly once.
void WEBPImageDecoder::applyColorProfile(const uint8_t* data, size_t size, ImageFrame& buffer)
{
    int width;
    int decodedHeight;
    // Query how many rows the incremental WebP decoder has produced so far.
    if (!WebPIDecGetRGB(m_decoder, &decodedHeight, &width, 0, 0))
        return; // See also https://bugs.webkit.org/show_bug.cgi?id=74062
    if (decodedHeight <= 0)
        return;

    // Lazily read the color profile once per image.
    if (!m_haveReadProfile) {
        readColorProfile(data, size);
        m_haveReadProfile = true;
    }

    ASSERT(width == scaledSize().width());
    ASSERT(decodedHeight <= scaledSize().height());

    // Process only the newly available rows: [m_decodedHeight, decodedHeight).
    for (int y = m_decodedHeight; y < decodedHeight; ++y) {
        uint8_t* row = reinterpret_cast<uint8_t*>(buffer.getAddr(0, y));
        if (qcms_transform* transform = colorTransform())
            qcms_transform_data_type(transform, row, row, width, QCMS_OUTPUT_RGBX);
        // Re-store the transformed bytes through setRGBA so ImageFrame applies
        // its own pixel packing (and premultiplication, if configured).
        uint8_t* pixel = row;
        for (int x = 0; x < width; ++x, pixel += 4)
            buffer.setRGBA(x, y, pixel[0], pixel[1], pixel[2], pixel[3]);
    }

    m_decodedHeight = decodedHeight;
}
// Streams a multi-frame GIF into the decoder one byte at a time and verifies
// that frame discovery is monotonic and that all frames eventually decode.
TEST(GIFImageDecoderTest, parseAndDecodeByteByByte)
{
    OwnPtr<GIFImageDecoder> decoder = createDecoder();
    RefPtr<SharedBuffer> data = readFile("/LayoutTests/fast/images/resources/animated-gif-with-offsets.gif");
    ASSERT_TRUE(data.get());

    size_t knownFrameCount = 0;
    size_t framesDecoded = 0;

    // Grow the available prefix by one byte per iteration.
    for (size_t prefixLength = 1; prefixLength <= data->size(); ++prefixLength) {
        const bool isLastChunk = prefixLength == data->size();
        RefPtr<SharedBuffer> prefix = SharedBuffer::create(data->data(), prefixLength);
        decoder->setData(prefix.get(), isLastChunk);

        // The number of discovered frames never decreases as data arrives.
        EXPECT_LE(knownFrameCount, decoder->frameCount());
        knownFrameCount = decoder->frameCount();

        ImageFrame* frame = decoder->frameBufferAtIndex(knownFrameCount - 1);
        if (frame && frame->status() == ImageFrame::FrameComplete && framesDecoded < knownFrameCount)
            ++framesDecoded;
    }

    EXPECT_EQ(5u, decoder->frameCount());
    EXPECT_EQ(5u, framesDecoded);
    EXPECT_EQ(cAnimationLoopInfinite, decoder->repetitionCount());
}
// Partially decodes frame 0, decodes the last frame, clears the whole frame
// cache, and then checks that frame 0 can be re-decoded to the baseline pixels.
TEST(GIFImageDecoderTest, resumePartialDecodeAfterClearFrameBufferCache)
{
    RefPtr<SharedBuffer> fullData = readFile("/LayoutTests/fast/images/resources/animated-10color.gif");
    ASSERT_TRUE(fullData.get());

    Vector<unsigned> baselineHashes;
    createDecodingBaseline(fullData.get(), &baselineHashes);
    size_t frameCount = baselineHashes.size();

    OwnPtr<GIFImageDecoder> decoder = createDecoder();

    // Feed growing prefixes until frame 0 is at least partially decoded.
    for (size_t partialSize = 1;; ++partialSize) {
        RefPtr<SharedBuffer> partialData = SharedBuffer::create(fullData->data(), partialSize);
        decoder->setData(partialData.get(), false);
        if (decoder->frameCount() && decoder->frameBufferAtIndex(0)->status() != ImageFrame::FrameEmpty)
            break;
    }

    // Jump to the final frame, then drop every cached frame buffer.
    decoder->setData(fullData.get(), true);
    EXPECT_EQ(frameCount, decoder->frameCount());
    ImageFrame* lastFrame = decoder->frameBufferAtIndex(frameCount - 1);
    EXPECT_EQ(baselineHashes[frameCount - 1], hashSkBitmap(lastFrame->getSkBitmap()));
    decoder->clearCacheExceptFrame(kNotFound);

    // Frame 0 must decode again from scratch and match the baseline.
    ImageFrame* firstFrame = decoder->frameBufferAtIndex(0);
    EXPECT_EQ(ImageFrame::FrameComplete, firstFrame->status());
    EXPECT_EQ(baselineHashes[0], hashSkBitmap(firstFrame->getSkBitmap()));
}
// Fills |bitmap| with the frame at |index|. Prefers the lazily decoded path
// when per-frame metadata exists; otherwise falls back to the eager decoder.
// Returns false when no frame is available.
bool DeferredImageDecoder::createFrameAtIndex(size_t index, SkBitmap* bitmap)
{
    prepareLazyDecodedFrames();

    if (index < m_frameData.size()) {
        // ImageFrameGenerator has the latest known alpha state. There will
        // be a performance boost if this frame is opaque.
        *bitmap = createBitmap(index);
        const bool frameHasAlpha = m_frameGenerator->hasAlpha(index);
        m_frameData[index].m_hasAlpha = frameHasAlpha;
        bitmap->setAlphaType(frameHasAlpha ? kPremul_SkAlphaType : kOpaque_SkAlphaType);
        m_frameData[index].m_frameBytes = m_size.area() * sizeof(ImageFrame::PixelData);
        return true;
    }

    if (!m_actualDecoder)
        return false;

    ImageFrame* buffer = m_actualDecoder->frameBufferAtIndex(index);
    if (!buffer || buffer->status() == ImageFrame::FrameEmpty)
        return false;
    *bitmap = buffer->bitmap();
    return true;
}
// Reproduce a crash that used to happen for a specific file with specific sequence of method calls. TEST(AnimatedWebPTests, reproCrash) { OwnPtr<WEBPImageDecoder> decoder = createDecoder(); RefPtr<SharedBuffer> fullData = readFile("/LayoutTests/fast/images/resources/invalid_vp8_vp8x.webp"); ASSERT_TRUE(fullData.get()); // Parse partial data up to which error in bitstream is not detected. const size_t partialSize = 32768; ASSERT_GT(fullData->size(), partialSize); RefPtr<SharedBuffer> data = SharedBuffer::create(fullData->data(), partialSize); decoder->setData(data.get(), false); EXPECT_EQ(1u, decoder->frameCount()); ImageFrame* frame = decoder->frameBufferAtIndex(0); ASSERT_TRUE(frame); EXPECT_EQ(ImageFrame::FramePartial, frame->status()); EXPECT_FALSE(decoder->failed()); // Parse full data now. The error in bitstream should now be detected. decoder->setData(fullData.get(), true); EXPECT_EQ(1u, decoder->frameCount()); frame = decoder->frameBufferAtIndex(0); ASSERT_TRUE(frame); EXPECT_EQ(ImageFrame::FramePartial, frame->status()); EXPECT_EQ(cAnimationLoopOnce, decoder->repetitionCount()); EXPECT_TRUE(decoder->failed()); }
// Completion callback for the icon fetch. Records load-time and file-size
// histograms, decodes the first frame of the downloaded image, and hands the
// resulting bitmap to m_imageCallback; any failure path reports an empty bitmap.
void NotificationImageLoader::didFinishLoading(unsigned long resourceIdentifier, double finishTime)
{
    // If this has been stopped it is not desirable to trigger further work,
    // there is a shutdown of some sort in progress.
    if (m_stopped)
        return;

    DEFINE_THREAD_SAFE_STATIC_LOCAL(CustomCountHistogram, finishedTimeHistogram, new CustomCountHistogram("Notifications.Icon.LoadFinishTime", 1, 1000 * 60 * 60 /* 1 hour max */, 50 /* buckets */));
    finishedTimeHistogram.count(monotonicallyIncreasingTimeMS() - m_startTime);

    if (m_data) {
        DEFINE_THREAD_SAFE_STATIC_LOCAL(CustomCountHistogram, fileSizeHistogram, new CustomCountHistogram("Notifications.Icon.FileSize", 1, 10000000 /* ~10mb max */, 50 /* buckets */));
        fileSizeHistogram.count(m_data->size());

        std::unique_ptr<ImageDecoder> decoder = ImageDecoder::create(m_data, true /* dataComplete */, ImageDecoder::AlphaPremultiplied, ImageDecoder::ColorSpaceApplied);
        if (decoder) {
            // The |ImageFrame*| is owned by the decoder.
            ImageFrame* imageFrame = decoder->frameBufferAtIndex(0);
            if (imageFrame) {
                (*m_imageCallback)(imageFrame->bitmap());
                return;
            }
        }
    }
    // No data, no decoder, or no decodable frame: still invoke the callback.
    runCallbackWithEmptyBitmap();
}
// Decodes the frame at |index| into |bitmap|, creating (and returning via
// |decoder|) an ImageDecoder if the caller did not supply one. The caller
// owns *decoder afterwards. Returns whether the decoded frame is complete
// enough to cache (fully decoded, or all encoded data received).
bool ImageFrameGenerator::decode(size_t index, ImageDecoder** decoder, SkBitmap* bitmap)
{
    TRACE_EVENT2("blink", "ImageFrameGenerator::decode", "width", m_fullSize.width(), "height", m_fullSize.height());

    ASSERT(decoder);
    SharedBuffer* data = 0;
    bool allDataReceived = false;
    bool newDecoder = false;
    // Takes a ref on the SharedBuffer; released below via setData(0, false).
    m_data.data(&data, &allDataReceived);

    // Try to create an ImageDecoder if we are not given one.
    if (!*decoder) {
        newDecoder = true;
        if (m_imageDecoderFactory)
            *decoder = m_imageDecoderFactory->create().leakPtr();

        if (!*decoder)
            *decoder = ImageDecoder::create(*data, ImageSource::AlphaPremultiplied, ImageSource::GammaAndColorProfileApplied).leakPtr();

        if (!*decoder)
            return false;
    }

    // The allocator is installed only for brand-new single-frame decoders with
    // complete data, i.e. when decoding happens exactly once.
    if (!m_isMultiFrame && newDecoder && allDataReceived) {
        // If we're using an external memory allocator that means we're decoding
        // directly into the output memory and we can save one memcpy.
        ASSERT(m_externalAllocator.get());
        (*decoder)->setMemoryAllocator(m_externalAllocator.get());
    }
    (*decoder)->setData(data, allDataReceived);
    ImageFrame* frame = (*decoder)->frameBufferAtIndex(index);

    // For multi-frame image decoders, we need to know how many frames are
    // in that image in order to release the decoder when all frames are
    // decoded. frameCount() is reliable only if all data is received and set in
    // decoder, particularly with GIF.
    if (allDataReceived)
        m_frameCount = (*decoder)->frameCount();

    (*decoder)->setData(0, false); // Unref SharedBuffer from ImageDecoder.
    (*decoder)->clearCacheExceptFrame(index);
    // Detach the external allocator before the decoder outlives this call.
    (*decoder)->setMemoryAllocator(0);

    if (!frame || frame->status() == ImageFrame::FrameEmpty)
        return false;

    // A cache object is considered complete if we can decode a complete frame.
    // Or we have received all data. The image might not be fully decoded in
    // the latter case.
    const bool isDecodeComplete = frame->status() == ImageFrame::FrameComplete || allDataReceived;
    SkBitmap fullSizeBitmap = frame->getSkBitmap();
    if (!fullSizeBitmap.isNull()) {
        ASSERT(fullSizeBitmap.width() == m_fullSize.width() && fullSizeBitmap.height() == m_fullSize.height());
        setHasAlpha(index, !fullSizeBitmap.isOpaque());
    }
    *bitmap = fullSizeBitmap;
    return isDecodeComplete;
}
// Returns whether the frame at |index| has been fully decoded.
bool ImageSource::frameIsCompleteAtIndex(size_t index)
{
    if (ImageFrame* frame = m_decoder ? m_decoder->frameBufferAtIndex(index) : 0)
        return frame->status() == ImageFrame::FrameComplete;
    return false;
}
// Copies the parsed GIF frame context for |index| into the corresponding
// ImageFrame: clipped frame rect, duration, disposal, and the previous frame
// this one depends on.
void GIFImageDecoder::initializeNewFrame(size_t index)
{
    const GIFFrameContext* context = m_reader->frameContext(index);
    ImageFrame* frame = &m_frameBufferCache[index];
    // Clip the declared frame rectangle to the overall image bounds.
    frame->setOriginalFrameRect(intersection(context->frameRect(), IntRect(IntPoint(), size())));
    frame->setDuration(context->delayTime());
    frame->setDisposalMethod(context->disposalMethod());
    frame->setRequiredPreviousFrameIndex(findRequiredPreviousFrame(index, false));
}
// Decodes both frames of a two-frame animated GIF and checks frame sizes,
// distinct bitmap backing, and the reported repetition count.
TEST(GIFImageDecoderTest, decodeTwoFrames)
{
    OwnPtr<GIFImageDecoder> decoder = createDecoder();

    RefPtr<SharedBuffer> data = readFile("/LayoutTests/fast/images/resources/animated.gif");
    ASSERT_TRUE(data.get());
    decoder->setData(data.get(), true);

    // Before decoding, this GIF reports a single play-through.
    EXPECT_EQ(cAnimationLoopOnce, decoder->repetitionCount());

    ImageFrame* firstFrame = decoder->frameBufferAtIndex(0);
    uint32_t firstGenerationID = firstFrame->getSkBitmap().getGenerationID();
    EXPECT_EQ(ImageFrame::FrameComplete, firstFrame->status());
    EXPECT_EQ(16, firstFrame->getSkBitmap().width());
    EXPECT_EQ(16, firstFrame->getSkBitmap().height());

    ImageFrame* secondFrame = decoder->frameBufferAtIndex(1);
    uint32_t secondGenerationID = secondFrame->getSkBitmap().getGenerationID();
    EXPECT_EQ(ImageFrame::FrameComplete, secondFrame->status());
    EXPECT_EQ(16, secondFrame->getSkBitmap().width());
    EXPECT_EQ(16, secondFrame->getSkBitmap().height());

    // Distinct frames must be backed by distinct bitmap storage.
    EXPECT_TRUE(firstGenerationID != secondGenerationID);

    EXPECT_EQ(2u, decoder->frameCount());
    EXPECT_EQ(cAnimationLoopInfinite, decoder->repetitionCount());
}
// Returns the frame at |index|, triggering a decode if it is not yet
// complete. Returns 0 for an out-of-range index.
ImageFrame* ICOImageDecoder::frameBufferAtIndex(size_t index)
{
    // Ensure |index| is valid.
    if (index >= frameCount())
        return 0;

    ImageFrame* const frame = &m_frameBufferCache[index];
    if (!frame->isComplete())
        decode(index, false);
    return frame;
}
void VisualOdometry::PlotTracking(const ImageFrame &frame0, const ImageFrame &frame1, const std::vector<cv::DMatch> &matches) { cv::Mat output_img = frame0.GetImage().clone(); int thickness = 2; for (int i = 0; i < matches.size(); ++i) { line(output_img, frame0.keypoints()[matches[i].trainIdx].pt, frame1.keypoints()[matches[i].queryIdx].pt, cv::Scalar(255, 0, 0), thickness); } cv::imshow("tracking_result", output_img); cv::waitKey(tracking_wait_time_); }
// Returns whether the frame at |index| is fully available.
bool ImageSource::frameIsCompleteAtIndex(size_t index)
{
#ifdef ANDROID_ANIMATED_GIF
    // Animated GIF path: defer to the GIF decoder's per-frame status.
    if (m_decoder.m_gifDecoder) {
        ImageFrame* buffer = m_decoder.m_gifDecoder->frameBufferAtIndex(index);
        return buffer && buffer->status() == ImageFrame::FrameComplete;
    }
#else
    // Without animated GIF support only frame 0 exists.
    SkASSERT(0 == index);
#endif
    // Static images: complete once every encoded byte has been received.
    return m_decoder.m_image && m_decoder.m_image->fAllDataReceived;
}
static bool decodeBitmap(const void* data, size_t length, SkBitmap* result) { RefPtr<SharedBuffer> buffer = SharedBuffer::create(static_cast<const char*>(data), length); OwnPtr<ImageDecoder> imageDecoder = ImageDecoder::create(*buffer, ImageDecoder::AlphaPremultiplied, ImageDecoder::GammaAndColorProfileIgnored); if (!imageDecoder) return false; imageDecoder->setData(buffer.get(), true); ImageFrame* frame = imageDecoder->frameBufferAtIndex(0); if (!frame) return true; *result = frame->getSkBitmap(); return true; }
// Prepares this image's pixels for GL texture upload. When unpremultiplied
// data is required (or gamma/color correction must be skipped), re-decodes
// from the encoded bytes; otherwise reuses the already-decoded skia image.
// On success fills m_skiaImage, m_alphaOp, m_imageSourceFormat,
// m_imageWidth/m_imageHeight and m_imagePixelData, and returns true.
bool GraphicsContext3D::ImageExtractor::extractImage(bool premultiplyAlpha, bool ignoreGammaAndColorProfile)
{
    if (!m_image)
        return false;

    m_skiaImage = m_image->nativeImageForCurrentFrame();
    m_alphaOp = AlphaDoNothing;
    // Without a decoded image, conservatively assume alpha is present.
    bool hasAlpha = m_skiaImage ? !m_skiaImage->bitmap().isOpaque() : true;

    if ((!m_skiaImage || ignoreGammaAndColorProfile || (hasAlpha && !premultiplyAlpha)) && m_image->data()) {
        // Attempt to get raw unpremultiplied image data.
        OwnPtr<ImageDecoder> decoder(ImageDecoder::create(
            *(m_image->data()), ImageSource::AlphaNotPremultiplied,
            ignoreGammaAndColorProfile ? ImageSource::GammaAndColorProfileIgnored : ImageSource::GammaAndColorProfileApplied));
        if (!decoder)
            return false;
        decoder->setData(m_image->data(), true);
        if (!decoder->frameCount())
            return false;
        ImageFrame* frame = decoder->frameBufferAtIndex(0);
        if (!frame || frame->status() != ImageFrame::FrameComplete)
            return false;
        hasAlpha = frame->hasAlpha();
        // m_nativeImage keeps the freshly decoded pixels alive; m_skiaImage
        // points into it below.
        m_nativeImage = adoptPtr(frame->asNewNativeImage());
        if (!m_nativeImage.get() || !m_nativeImage->isDataComplete() || !m_nativeImage->bitmap().width() || !m_nativeImage->bitmap().height())
            return false;
        // Only 32-bit ARGB bitmaps are supported for upload.
        SkBitmap::Config skiaConfig = m_nativeImage->bitmap().config();
        if (skiaConfig != SkBitmap::kARGB_8888_Config)
            return false;
        m_skiaImage = m_nativeImage.get();
        if (hasAlpha && premultiplyAlpha)
            m_alphaOp = AlphaDoPremultiply;
    } else if (!premultiplyAlpha && hasAlpha) {
        // 1. For texImage2D with HTMLVideoElment input, assume no PremultiplyAlpha had been applied and the alpha value for each pixel is 0xFF
        // which is true at present and may be changed in the future and needs adjustment accordingly.
        // 2. For texImage2D with HTMLCanvasElement input in which Alpha is already Premultiplied in this port,
        // do AlphaDoUnmultiply if UNPACK_PREMULTIPLY_ALPHA_WEBGL is set to false.
        if (m_imageHtmlDomSource != HtmlDomVideo)
            m_alphaOp = AlphaDoUnmultiply;
    }
    if (!m_skiaImage)
        return false;

    // Channel order depends on skia's build-time configuration.
    m_imageSourceFormat = SK_B32_SHIFT ? DataFormatRGBA8 : DataFormatBGRA8;
    m_imageWidth = m_skiaImage->bitmap().width();
    m_imageHeight = m_skiaImage->bitmap().height();
    if (!m_imageWidth || !m_imageHeight)
        return false;
    m_imageSourceUnpackAlignment = 0;
    // NOTE(review): pixels are locked here but not unlocked in this function —
    // presumably the extractor's consumer/destructor unlocks; confirm.
    m_skiaImage->bitmap().lockPixels();
    m_imagePixelData = m_skiaImage->bitmap().getPixels();
    return true;
}
// Verifies progressive GIF decoding: pixels produced while data streams into
// one decoder must match pixels produced by fresh decoders given the same
// truncated inputs.
TEST(GIFImageDecoderTest, progressiveDecode)
{
    RefPtr<SharedBuffer> fullData = readFile("/Source/web/tests/data/radient.gif");
    ASSERT_TRUE(fullData.get());
    const size_t fullLength = fullData->size();

    OwnPtr<GIFImageDecoder> decoder;
    Vector<unsigned> truncatedHashes;
    Vector<unsigned> progressiveHashes;

    // Pass 1: a brand-new decoder for every truncation length.
    for (size_t length = 1; length <= fullLength; ++length) {
        decoder = createDecoder();
        RefPtr<SharedBuffer> data = SharedBuffer::create(fullData->data(), length);
        decoder->setData(data.get(), length == fullLength);
        ImageFrame* frame = decoder->frameBufferAtIndex(0);
        truncatedHashes.append(frame ? hashSkBitmap(frame->getSkBitmap()) : 0);
    }

    // Pass 2: one decoder fed the same prefixes incrementally.
    decoder = createDecoder();
    EXPECT_EQ(cAnimationLoopOnce, decoder->repetitionCount());
    for (size_t length = 1; length <= fullLength; ++length) {
        RefPtr<SharedBuffer> data = SharedBuffer::create(fullData->data(), length);
        decoder->setData(data.get(), length == fullLength);
        ImageFrame* frame = decoder->frameBufferAtIndex(0);
        progressiveHashes.append(frame ? hashSkBitmap(frame->getSkBitmap()) : 0);
    }
    EXPECT_EQ(cAnimationNone, decoder->repetitionCount());

    // Both passes must produce identical pixels at every truncation point.
    bool match = true;
    for (size_t i = 0; i < truncatedHashes.size(); ++i) {
        if (truncatedHashes[i] != progressiveHashes[i]) {
            match = false;
            break;
        }
    }
    EXPECT_TRUE(match);
}
// Test if a BMP decoder returns a proper error while decoding an empty image. TEST(BMPImageDecoderTest, emptyImage) { const char* bmpFile = "/LayoutTests/fast/images/resources/0x0.bmp"; // 0x0 RefPtr<SharedBuffer> data = readFile(bmpFile); ASSERT_TRUE(data.get()); OwnPtr<ImageDecoder> decoder = createDecoder(); decoder->setData(data.get(), true); ImageFrame* frame = decoder->frameBufferAtIndex(0); ASSERT_TRUE(frame); EXPECT_EQ(ImageFrame::FrameEmpty, frame->status()); EXPECT_TRUE(decoder->failed()); }
// Decodes |imageFilePath| under the byte budget |maxDecodedBytes|, reports the
// resulting dimensions via the output parameters, and checks they agree with
// the decoder's advertised decoded size.
void downsample(size_t maxDecodedBytes, unsigned* outputWidth, unsigned* outputHeight, const char* imageFilePath)
{
    RefPtr<SharedBuffer> data = readFile(imageFilePath);
    ASSERT_TRUE(data.get());

    OwnPtr<JPEGImageDecoder> decoder = createDecoder(maxDecodedBytes);
    decoder->setData(data.get(), true);

    ImageFrame* frame = decoder->frameBufferAtIndex(0);
    ASSERT_TRUE(frame);
    const SkBitmap& bitmap = frame->getSkBitmap();
    *outputWidth = bitmap.width();
    *outputHeight = bitmap.height();
    EXPECT_EQ(IntSize(*outputWidth, *outputHeight), decoder->decodedSize());
}
// BMPs are single-frame: any index other than 0 yields no frame. Lazily
// creates the single frame buffer and attempts to advance decoding if the
// frame is not complete yet.
ImageFrame* BMPImageDecoder::frameBufferAtIndex(size_t index)
{
    if (index != 0)
        return 0;

    if (m_frameBufferCache.isEmpty()) {
        m_frameBufferCache.resize(1);
        m_frameBufferCache.first().setPremultiplyAlpha(m_premultiplyAlpha);
    }

    ImageFrame* const frame = &m_frameBufferCache.first();
    if (frame->status() != ImageFrame::FrameComplete)
        decode(false); // May leave the frame partial if data is incomplete.
    return frame;
}
// Returns the frame at |index|, decoding it on demand (with platform
// instrumentation around the decode). Returns 0 for an out-of-range index.
ImageFrame* ImageDecoder::frameBufferAtIndex(size_t index)
{
    if (index >= frameCount())
        return 0;

    ImageFrame* const frame = &m_frameBufferCache[index];
    if (frame->status() != ImageFrame::FrameComplete) {
        PlatformInstrumentation::willDecodeImage(filenameExtension());
        decode(index);
        PlatformInstrumentation::didDecodeImage();
    }
    // Propagate any pixel changes to the backing bitmap's observers.
    frame->notifyBitmapIfPixelsChanged();
    return frame;
}
// Each frame of an animated WebP must be backed by its own bitmap storage,
// i.e. have a distinct skia generation ID.
TEST(AnimatedWebPTests, uniqueGenerationIDs)
{
    OwnPtr<WEBPImageDecoder> decoder = createDecoder();
    RefPtr<SharedBuffer> data = readFile("/LayoutTests/fast/images/resources/webp-animated.webp");
    ASSERT_TRUE(data.get());
    decoder->setData(data.get(), true);

    const uint32_t generationID0 = decoder->frameBufferAtIndex(0)->getSkBitmap().getGenerationID();
    const uint32_t generationID1 = decoder->frameBufferAtIndex(1)->getSkBitmap().getGenerationID();

    EXPECT_TRUE(generationID0 != generationID1);
}
// Verifies progressive WebP decoding: pixels produced while data streams into
// one decoder must match pixels produced by fresh decoders given the same
// truncated inputs.
TEST(AnimatedWebPTests, progressiveDecode)
{
    RefPtr<SharedBuffer> fullData = readFile("/LayoutTests/fast/images/resources/webp-animated.webp");
    ASSERT_TRUE(fullData.get());
    const size_t fullLength = fullData->size();

    OwnPtr<WEBPImageDecoder> decoder;
    Vector<unsigned> truncatedHashes;
    Vector<unsigned> progressiveHashes;

    // Pass 1: a brand-new decoder for every truncation length.
    for (size_t length = 1; length <= fullLength; ++length) {
        decoder = createDecoder();
        RefPtr<SharedBuffer> data = SharedBuffer::create(fullData->data(), length);
        decoder->setData(data.get(), length == fullLength);
        ImageFrame* frame = decoder->frameBufferAtIndex(0);
        truncatedHashes.append(frame ? hashSkBitmap(frame->getSkBitmap()) : 0);
    }

    // Pass 2: one decoder fed the same prefixes incrementally.
    decoder = createDecoder();
    for (size_t length = 1; length <= fullLength; ++length) {
        RefPtr<SharedBuffer> data = SharedBuffer::create(fullData->data(), length);
        decoder->setData(data.get(), length == fullLength);
        ImageFrame* frame = decoder->frameBufferAtIndex(0);
        progressiveHashes.append(frame ? hashSkBitmap(frame->getSkBitmap()) : 0);
    }

    // Both passes must produce identical pixels at every truncation point.
    bool match = true;
    for (size_t i = 0; i < truncatedHashes.size(); ++i) {
        if (truncatedHashes[i] != progressiveHashes[i]) {
            match = false;
            break;
        }
    }
    EXPECT_TRUE(match);
}
// A well-formed 256x256 BMP must decode to a complete frame of the expected
// dimensions without putting the decoder in the failed state.
TEST(BMPImageDecoderTest, parseAndDecode)
{
    RefPtr<SharedBuffer> data = readFile("/LayoutTests/fast/images/resources/lenna.bmp"); // 256x256
    ASSERT_TRUE(data.get());

    OwnPtr<ImageDecoder> decoder = createDecoder();
    decoder->setData(data.get(), true);

    ImageFrame* frame = decoder->frameBufferAtIndex(0);
    ASSERT_TRUE(frame);
    EXPECT_EQ(ImageFrame::FrameComplete, frame->status());
    EXPECT_EQ(256, frame->getSkBitmap().width());
    EXPECT_EQ(256, frame->getSkBitmap().height());
    EXPECT_FALSE(decoder->failed());
}
static bool decodeBitmap(const void* data, size_t length, SkBitmap* result) { std::unique_ptr<ImageDecoder> imageDecoder = ImageDecoder::create(static_cast<const char*>(data), length, ImageDecoder::AlphaPremultiplied, ImageDecoder::GammaAndColorProfileIgnored); if (!imageDecoder) return false; // No need to copy the data; this decodes immediately. RefPtr<SegmentReader> segmentReader = SegmentReader::createFromSkData(adoptRef(SkData::NewWithoutCopy(data, length))); imageDecoder->setData(segmentReader.release(), true); ImageFrame* frame = imageDecoder->frameBufferAtIndex(0); if (!frame) return true; *result = frame->bitmap(); return true; }
// Drains all remaining JPEG scanlines into |buffer|, mapping each source row
// and column through the decoder's scaling tables when downscaling is active.
// Returns false if libjpeg cannot supply a scanline (more input needed).
// NOTE(review): |isScaled| and |colorSpace| are not declared in this function —
// presumably a member flag and a template/compile-time parameter respectively;
// confirm against the enclosing declaration.
bool JPEGImageDecoder::outputScanlines(ImageFrame& buffer)
{
    JSAMPARRAY samples = m_reader->samples();
    jpeg_decompress_struct* info = m_reader->info();
    // Destination width: number of selected columns when scaling, else the
    // full libjpeg output width.
    int width = isScaled ? m_scaledColumns.size() : info->output_width;

    while (info->output_scanline < info->output_height) {
        // jpeg_read_scanlines will increase the scanline counter, so we
        // save the scanline before calling it.
        int sourceY = info->output_scanline;
        /* Request one scanline. Returns 0 or 1 scanlines. */
        if (jpeg_read_scanlines(info, samples, 1) != 1)
            return false;

        // A negative mapping means this source row is dropped by scaling.
        int destY = scaledY(sourceY);
        if (destY < 0)
            continue;

#if USE(QCMSLIB)
        // Color-transform the full source row (not just the kept columns).
        if (m_reader->colorTransform() && colorSpace == JCS_RGB)
            qcms_transform_data(m_reader->colorTransform(), *samples, *samples, info->output_width);
#endif

        ImageFrame::PixelData* currentAddress = buffer.getAddr(0, destY);
        for (int x = 0; x < width; ++x) {
            // When scaling, pick only the pre-selected source columns.
            setPixel<colorSpace>(buffer, currentAddress, samples, isScaled ? m_scaledColumns[x] : x);
            ++currentAddress;
        }
    }
    return true;
}