static void decode_callback(int what) { if (what == 0 && !kParams.done) { int duration = 0; if (kParams.dmux != NULL) { WebPIterator* const curr = &kParams.curr_frame; if (!WebPDemuxNextFrame(curr)) { WebPDemuxReleaseIterator(curr); if (WebPDemuxGetFrame(kParams.dmux, 1, curr)) { --kParams.loop_count; kParams.done = (kParams.loop_count == 0); } else { kParams.decoding_error = 1; kParams.done = 1; return; } } duration = curr->duration; } if (!Decode()) { kParams.decoding_error = 1; kParams.done = 1; } else { glutPostRedisplay(); glutTimerFunc(duration, decode_callback, what); } } }
void WEBPImageDecoder::initializeNewFrame(size_t index) { if (!(m_formatFlags & ANIMATION_FLAG)) { ASSERT(!index); return; } WebPIterator animatedFrame; WebPDemuxGetFrame(m_demux, index + 1, &animatedFrame); ASSERT(animatedFrame.complete == 1); ImageFrame* buffer = &m_frameBufferCache[index]; IntRect frameRect(animatedFrame.x_offset, animatedFrame.y_offset, animatedFrame.width, animatedFrame.height); buffer->setOriginalFrameRect( intersection(frameRect, IntRect(IntPoint(), size()))); buffer->setDuration(animatedFrame.duration); buffer->setDisposalMethod(animatedFrame.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND ? ImageFrame::DisposeOverwriteBgcolor : ImageFrame::DisposeKeep); buffer->setAlphaBlendSource(animatedFrame.blend_method == WEBP_MUX_BLEND ? ImageFrame::BlendAtopPreviousFrame : ImageFrame::BlendAtopBgcolor); buffer->setRequiredPreviousFrameIndex( findRequiredPreviousFrame(index, !animatedFrame.has_alpha)); WebPDemuxReleaseIterator(&animatedFrame); }
static void decode_callback(int what) { if (what == 0 && !kParams.done) { int duration = 0; if (kParams.dmux != NULL) { WebPIterator* const curr = &kParams.curr_frame; if (!WebPDemuxNextFrame(curr)) { WebPDemuxReleaseIterator(curr); if (WebPDemuxGetFrame(kParams.dmux, 1, curr)) { --kParams.loop_count; kParams.done = (kParams.loop_count == 0); if (kParams.done) return; ClearPreviousFrame(); } else { kParams.decoding_error = 1; kParams.done = 1; return; } } duration = curr->duration; // Behavior copied from Chrome, cf: // https://cs.chromium.org/chromium/src/third_party/WebKit/Source/ // platform/graphics/DeferredImageDecoder.cpp? // rcl=b4c33049f096cd283f32be9a58b9a9e768227c26&l=246 if (duration <= 10) duration = 100; } if (!Decode()) { kParams.decoding_error = 1; kParams.done = 1; } else { glutPostRedisplay(); glutTimerFunc(duration, decode_callback, what); } } }
// Returns the decoded buffer for frame |index|, decoding it (and any
// not-yet-complete frames it depends on) on demand. Returns 0 on a
// decode failure or an out-of-range index; may return a frame that is
// still incomplete if more data is needed.
ImageFrame* WEBPImageDecoder::frameBufferAtIndex(size_t index) {
  if (index >= frameCount())
    return 0;

  ImageFrame& frame = m_frameBufferCache[index];
  if (frame.status() == ImageFrame::FrameComplete)
    return &frame;

  // Walk the requiredPreviousFrameIndex() chain backwards, collecting every
  // frame that must be decoded before |index|, stopping at the first frame
  // that is already complete (or at kNotFound, i.e. no dependency).
  Vector<size_t> framesToDecode;
  size_t frameToDecode = index;
  do {
    framesToDecode.append(frameToDecode);
    frameToDecode =
        m_frameBufferCache[frameToDecode].requiredPreviousFrameIndex();
  } while (frameToDecode != kNotFound &&
           m_frameBufferCache[frameToDecode].status() !=
               ImageFrame::FrameComplete);

  ASSERT(m_demux);
  // Decode the collected frames oldest-first (the vector was filled
  // newest-first, so iterate it in reverse).
  for (size_t i = framesToDecode.size(); i > 0; --i) {
    size_t frameIndex = framesToDecode[i - 1];
    if ((m_formatFlags & ANIMATION_FLAG) && !initFrameBuffer(frameIndex))
      return 0;
    WebPIterator webpFrame;
    // WebP frame numbers are 1-based.
    if (!WebPDemuxGetFrame(m_demux, frameIndex + 1, &webpFrame))
      return 0;
    PlatformInstrumentation::willDecodeImage("WEBP");
    decode(webpFrame.fragment.bytes, webpFrame.fragment.size, false,
           frameIndex);
    PlatformInstrumentation::didDecodeImage();
    WebPDemuxReleaseIterator(&webpFrame);
    if (failed())
      return 0;

    // We need more data to continue decoding.
    if (m_frameBufferCache[frameIndex].status() != ImageFrame::FrameComplete)
      break;
  }

  // It is also a fatal error if all data is received and we have decoded all
  // frames available but the file is truncated.
  if (index >= m_frameBufferCache.size() - 1 && isAllDataReceived() &&
      m_demux && m_demuxState != WEBP_DEMUX_DONE)
    setFailed();

  frame.notifyBitmapIfPixelsChanged();
  return &frame;
}
void WEBPImageDecoder::decode(size_t index) { if (failed()) return; Vector<size_t> framesToDecode; size_t frameToDecode = index; do { framesToDecode.append(frameToDecode); frameToDecode = m_frameBufferCache[frameToDecode].requiredPreviousFrameIndex(); } while (frameToDecode != kNotFound && m_frameBufferCache[frameToDecode].getStatus() != ImageFrame::FrameComplete); ASSERT(m_demux); for (auto i = framesToDecode.rbegin(); i != framesToDecode.rend(); ++i) { if ((m_formatFlags & ANIMATION_FLAG) && !initFrameBuffer(*i)) return; WebPIterator webpFrame; if (!WebPDemuxGetFrame(m_demux, *i + 1, &webpFrame)) { setFailed(); } else { decodeSingleFrame(webpFrame.fragment.bytes, webpFrame.fragment.size, *i); WebPDemuxReleaseIterator(&webpFrame); } if (failed()) return; // We need more data to continue decoding. if (m_frameBufferCache[*i].getStatus() != ImageFrame::FrameComplete) break; if (m_purgeAggressively) clearCacheExceptFrame(*i); } // It is also a fatal error if all data is received and we have decoded all // frames available but the file is truncated. if (index >= m_frameBufferCache.size() - 1 && isAllDataReceived() && m_demux && m_demuxState != WEBP_DEMUX_DONE) setFailed(); }
bool QWebpHandler::ensureScanned() const { if (m_scanState != ScanNotScanned) return m_scanState == ScanSuccess; m_scanState = ScanError; if (device()->isSequential()) { qWarning() << "Sequential devices are not supported"; return false; } QWebpHandler *that = const_cast<QWebpHandler *>(this); // FIXME: should not read all when scanning that->m_rawData = device()->readAll(); that->m_webpData.bytes = reinterpret_cast<const uint8_t *>(m_rawData.data()); that->m_webpData.size = m_rawData.size(); that->m_demuxer = WebPDemux(&m_webpData); if (m_demuxer == NULL) return false; that->m_flags = WebPDemuxGetI(m_demuxer, WEBP_FF_FORMAT_FLAGS); that->m_width = WebPDemuxGetI(m_demuxer, WEBP_FF_CANVAS_WIDTH); that->m_height = WebPDemuxGetI(m_demuxer, WEBP_FF_CANVAS_HEIGHT); that->m_loop = WebPDemuxGetI(m_demuxer, WEBP_FF_LOOP_COUNT); that->m_frameCount = WebPDemuxGetI(m_demuxer, WEBP_FF_FRAME_COUNT); if (!WebPDemuxGetFrame(m_demuxer, 1, &(that->m_iter))) { return false; } m_scanState = ScanSuccess; return true; }
bool WEBPImageDecoder::updateDemuxer() { if (failed()) return false; if (m_haveAlreadyParsedThisData) return true; m_haveAlreadyParsedThisData = true; const unsigned webpHeaderSize = 20; if (m_data->size() < webpHeaderSize) return false; // Wait for headers so that WebPDemuxPartial doesn't return null. WebPDemuxDelete(m_demux); WebPData inputData = { reinterpret_cast<const uint8_t*>(m_data->data()), m_data->size() }; m_demux = WebPDemuxPartial(&inputData, &m_demuxState); if (!m_demux || (isAllDataReceived() && m_demuxState != WEBP_DEMUX_DONE)) return setFailed(); if (m_demuxState <= WEBP_DEMUX_PARSING_HEADER) return false; // Not enough data for parsing canvas width/height yet. bool hasAnimation = (m_formatFlags & ANIMATION_FLAG); if (!ImageDecoder::isSizeAvailable()) { m_formatFlags = WebPDemuxGetI(m_demux, WEBP_FF_FORMAT_FLAGS); hasAnimation = (m_formatFlags & ANIMATION_FLAG); if (!hasAnimation) m_repetitionCount = cAnimationNone; else m_formatFlags &= ~ICCP_FLAG; // FIXME: Implement ICC profile support for animated images. #if USE(QCMSLIB) if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile()) m_hasColorProfile = true; #endif if (!setSize(WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_WIDTH), WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_HEIGHT))) return setFailed(); } ASSERT(ImageDecoder::isSizeAvailable()); const size_t newFrameCount = WebPDemuxGetI(m_demux, WEBP_FF_FRAME_COUNT); if (hasAnimation && !m_haveReadAnimationParameters && newFrameCount) { // As we have parsed at least one frame (even if partially), // we must already have parsed the animation properties. // This is because ANIM chunk always precedes ANMF chunks. m_repetitionCount = WebPDemuxGetI(m_demux, WEBP_FF_LOOP_COUNT); ASSERT(m_repetitionCount == (m_repetitionCount & 0xffff)); // Loop count is always <= 16 bits. // |m_repetitionCount| is the total number of animation cycles to show, // with 0 meaning "infinite". 
But ImageSource::repetitionCount() // returns -1 for "infinite", and 0 and up for "show the animation one // cycle more than this value". By subtracting one here, we convert // both finite and infinite cases correctly. --m_repetitionCount; m_haveReadAnimationParameters = true; } const size_t oldFrameCount = m_frameBufferCache.size(); if (newFrameCount > oldFrameCount) { m_frameBufferCache.resize(newFrameCount); for (size_t i = oldFrameCount; i < newFrameCount; ++i) { m_frameBufferCache[i].setPremultiplyAlpha(m_premultiplyAlpha); if (!hasAnimation) { ASSERT(!i); m_frameBufferCache[i].setRequiredPreviousFrameIndex(kNotFound); continue; } WebPIterator animatedFrame; WebPDemuxGetFrame(m_demux, i + 1, &animatedFrame); ASSERT(animatedFrame.complete == 1); m_frameBufferCache[i].setDuration(animatedFrame.duration); m_frameBufferCache[i].setDisposalMethod(animatedFrame.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND ? ImageFrame::DisposeOverwriteBgcolor : ImageFrame::DisposeKeep); m_frameBufferCache[i].setAlphaBlendSource(animatedFrame.blend_method == WEBP_MUX_BLEND ? ImageFrame::BlendAtopPreviousFrame : ImageFrame::BlendAtopBgcolor); IntRect frameRect(animatedFrame.x_offset, animatedFrame.y_offset, animatedFrame.width, animatedFrame.height); // Make sure the frameRect doesn't extend outside the buffer. if (frameRect.maxX() > size().width()) frameRect.setWidth(size().width() - animatedFrame.x_offset); if (frameRect.maxY() > size().height()) frameRect.setHeight(size().height() - animatedFrame.y_offset); m_frameBufferCache[i].setOriginalFrameRect(frameRect); m_frameBufferCache[i].setRequiredPreviousFrameIndex(findRequiredPreviousFrame(i, !animatedFrame.has_alpha)); WebPDemuxReleaseIterator(&animatedFrame); } } return true; }
bool QWebpHandler::write(const QImage &image) { if (image.isNull()) { qWarning() << "source image is null."; return false; } QImage srcImage = image; #if Q_BYTE_ORDER == Q_LITTLE_ENDIAN if (srcImage.format() != QImage::Format_ARGB32) srcImage = srcImage.convertToFormat(QImage::Format_ARGB32); #else /* Q_BIG_ENDIAN */ if (srcImage.format() != QImage::Format_RGBA8888) srcImage = srcImage.convertToFormat(QImage::Format_RGBA8888); #endif WebPPicture picture; WebPConfig config; if (!WebPPictureInit(&picture) || !WebPConfigInit(&config)) { qWarning() << "failed to init webp picture and config"; return false; } picture.width = srcImage.width(); picture.height = srcImage.height(); picture.use_argb = 1; #if Q_BYTE_ORDER == Q_LITTLE_ENDIAN if (!WebPPictureImportBGRA(&picture, srcImage.bits(), srcImage.bytesPerLine())) { #else /* Q_BIG_ENDIAN */ if (!WebPPictureImportRGBA(&picture, srcImage.bits(), srcImage.bytesPerLine())) { #endif qWarning() << "failed to import image data to webp picture."; WebPPictureFree(&picture); return false; } config.lossless = m_lossless; config.quality = m_quality; picture.writer = pictureWriter; picture.custom_ptr = device(); if (!WebPEncode(&config, &picture)) { qWarning() << "failed to encode webp picture, error code: " << picture.error_code; WebPPictureFree(&picture); return false; } WebPPictureFree(&picture); return true; } QVariant QWebpHandler::option(ImageOption option) const { if (!supportsOption(option) || !ensureScanned()) return QVariant(); switch (option) { case Quality: return m_quality; case Size: return QSize(m_width, m_height); case Animation: return (m_flags & ANIMATION_FLAG) == ANIMATION_FLAG; default: return QVariant(); } } void QWebpHandler::setOption(ImageOption option, const QVariant &value) { switch (option) { case Quality: m_quality = qBound(0, value.toInt(), 100); m_lossless = (m_quality >= 100); return; default: break; } return QImageIOHandler::setOption(option, value); } bool QWebpHandler::supportsOption(ImageOption 
option) const { return option == Quality || option == Size || option == Animation; } QByteArray QWebpHandler::name() const { return QByteArrayLiteral("webp"); } int QWebpHandler::imageCount() const { if (!ensureScanned()) return 0; if ((m_flags & ANIMATION_FLAG) == 0) return 1; return m_frameCount; } int QWebpHandler::currentImageNumber() const { if (!ensureScanned()) return 0; return m_iter.frame_num; } QRect QWebpHandler::currentImageRect() const { if (!ensureScanned()) return QRect(); return QRect(m_iter.x_offset, m_iter.y_offset, m_iter.width, m_iter.height); } bool QWebpHandler::jumpToImage(int imageNumber) { if (!ensureScanned()) return false; WebPDemuxReleaseIterator(&m_iter); return WebPDemuxGetFrame(m_demuxer, imageNumber, &m_iter); } bool QWebpHandler::jumpToNextImage() { if (!ensureScanned()) return false; return WebPDemuxNextFrame(&m_iter); } int QWebpHandler::loopCount() const { if (!ensureScanned() || (m_flags & ANIMATION_FLAG) == 0) return 0; return m_loop; } int QWebpHandler::nextImageDelay() const { if (!ensureScanned()) return 0; return m_iter.duration; }