// Feeds all data received so far to the libwebp demuxer and caches the
// container-level properties (canvas size, format flags, animation loop
// count). Returns true when enough of the stream has been parsed to use the
// demuxer; returns false while more data is still needed; calls setFailed()
// (and returns false) on malformed input.
bool WEBPImageDecoder::updateDemuxer()
{
    if (failed())
        return false;
    // Parse at most once per data snapshot; callers reset this flag when new
    // bytes arrive.
    if (m_haveAlreadyParsedThisData)
        return true;
    m_haveAlreadyParsedThisData = true;

    const unsigned webpHeaderSize = 30;
    if (m_data->size() < webpHeaderSize)
        return false; // Await VP8X header so WebPDemuxPartial succeeds.

    // Drop any demuxer built from an earlier, shorter snapshot of the data
    // before building a fresh one over the current buffer.
    WebPDemuxDelete(m_demux);
    WebPData inputData = { reinterpret_cast<const uint8_t*>(m_data->data()), m_data->size() };
    m_demux = WebPDemuxPartial(&inputData, &m_demuxState);
    // A null demuxer, or an incomplete parse when no more data will ever
    // arrive, means the stream is unusable.
    if (!m_demux || (isAllDataReceived() && m_demuxState != WEBP_DEMUX_DONE))
        return setFailed();

    ASSERT(m_demuxState > WEBP_DEMUX_PARSING_HEADER);
    if (!WebPDemuxGetI(m_demux, WEBP_FF_FRAME_COUNT))
        return false; // Wait until the encoded image frame data arrives.

    if (!isDecodedSizeAvailable()) {
        int width = WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_WIDTH);
        int height = WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_HEIGHT);
        if (!setSize(width, height))
            return setFailed();

        m_formatFlags = WebPDemuxGetI(m_demux, WEBP_FF_FORMAT_FLAGS);
        if (!(m_formatFlags & ANIMATION_FLAG)) {
            m_repetitionCount = cAnimationNone;
        } else {
            // Since we have parsed at least one frame, even if partially,
            // the global animation (ANIM) properties have been read since
            // an ANIM chunk must precede the ANMF frame chunks.
            m_repetitionCount = WebPDemuxGetI(m_demux, WEBP_FF_LOOP_COUNT);
            // Repetition count is always <= 16 bits.
            ASSERT(m_repetitionCount == (m_repetitionCount & 0xffff));
            // Repetition count is the number of animation cycles to show,
            // where 0 means "infinite". But ImageSource::repetitionCount()
            // returns -1 for "infinite", and 0 and up for "show the image
            // animation one cycle more than the value". Subtract one here
            // to correctly handle the finite and infinite cases.
            --m_repetitionCount;
            // FIXME: Implement ICC profile support for animated images.
            m_formatFlags &= ~ICCP_FLAG;
        }

#if USE(QCMSLIB)
        if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile())
            readColorProfile();
#endif
    }

    ASSERT(isDecodedSizeAvailable());
    return true;
}
static void HandleKey(unsigned char key, int pos_x, int pos_y) { (void)pos_x; (void)pos_y; if (key == 'q' || key == 'Q' || key == 27 /* Esc */) { #ifdef FREEGLUT glutLeaveMainLoop(); #else ClearParams(); exit(0); #endif } else if (key == 'c') { if (kParams.has_color_profile && !kParams.decoding_error) { kParams.use_color_profile = 1 - kParams.use_color_profile; if (kParams.has_animation) { // Restart the completed animation to pickup the color profile change. if (kParams.done && kParams.loop_count == 0) { kParams.loop_count = (int)WebPDemuxGetI(kParams.dmux, WEBP_FF_LOOP_COUNT) + 1; kParams.done = 0; // Start the decode loop immediately. glutTimerFunc(0, decode_callback, 0); } } else { Decode(); glutPostRedisplay(); } } } else if (key == 'i') { kParams.print_info = 1 - kParams.print_info; glutPostRedisplay(); } }
// Reports how many frames the demuxer currently knows about. When
// updateDemuxer() fails partway through the stream, fall back to the frame
// count we already had so the image never suddenly reports zero frames.
size_t WEBPImageDecoder::decodeFrameCount()
{
    if (!updateDemuxer())
        return m_frameBufferCache.size();
    return WebPDemuxGetI(m_demux, WEBP_FF_FRAME_COUNT);
}
bool QWebpHandler::ensureScanned() const { if (m_scanState != ScanNotScanned) return m_scanState == ScanSuccess; m_scanState = ScanError; if (device()->isSequential()) { qWarning() << "Sequential devices are not supported"; return false; } QWebpHandler *that = const_cast<QWebpHandler *>(this); // FIXME: should not read all when scanning that->m_rawData = device()->readAll(); that->m_webpData.bytes = reinterpret_cast<const uint8_t *>(m_rawData.data()); that->m_webpData.size = m_rawData.size(); that->m_demuxer = WebPDemux(&m_webpData); if (m_demuxer == NULL) return false; that->m_flags = WebPDemuxGetI(m_demuxer, WEBP_FF_FORMAT_FLAGS); that->m_width = WebPDemuxGetI(m_demuxer, WEBP_FF_CANVAS_WIDTH); that->m_height = WebPDemuxGetI(m_demuxer, WEBP_FF_CANVAS_HEIGHT); that->m_loop = WebPDemuxGetI(m_demuxer, WEBP_FF_LOOP_COUNT); that->m_frameCount = WebPDemuxGetI(m_demuxer, WEBP_FF_FRAME_COUNT); if (!WebPDemuxGetFrame(m_demuxer, 1, &(that->m_iter))) { return false; } m_scanState = ScanSuccess; return true; }
// GLUT keyboard callback for the viewer:
//   q/Q/Esc quit; 'c' toggles color-profile use; 'b' toggles drawing the
//   animation background color; 'i' toggles the info display; 'd' toggles
//   showing only inter-frame deltas.
static void HandleKey(unsigned char key, int pos_x, int pos_y) {
  // Note: rescaling the window or toggling some features during an animation
  // generates visual artifacts. This is not fixed because refreshing the frame
  // may require rendering the whole animation from start till current frame.
  (void)pos_x;
  (void)pos_y;
  if (key == 'q' || key == 'Q' || key == 27 /* Esc */) {
#ifdef FREEGLUT
    glutLeaveMainLoop();
#else
    ClearParams();
    exit(0);
#endif
  } else if (key == 'c') {
    // Only meaningful when a profile exists and decoding hasn't failed.
    if (kParams.has_color_profile && !kParams.decoding_error) {
      kParams.use_color_profile = 1 - kParams.use_color_profile;
      if (kParams.has_animation) {
        // Restart the completed animation to pickup the color profile change.
        if (kParams.done && kParams.loop_count == 0) {
          kParams.loop_count =
              (int)WebPDemuxGetI(kParams.dmux, WEBP_FF_LOOP_COUNT) + 1;
          kParams.done = 0;
          // Start the decode loop immediately.
          glutTimerFunc(0, decode_callback, 0);
        }
      } else {
        Decode();
        glutPostRedisplay();
      }
    }
  } else if (key == 'b') {
    kParams.draw_anim_background_color = 1 - kParams.draw_anim_background_color;
    if (!kParams.has_animation) ClearPreviousFrame();
    glutPostRedisplay();
  } else if (key == 'i') {
    kParams.print_info = 1 - kParams.print_info;
    if (!kParams.has_animation) ClearPreviousFrame();
    glutPostRedisplay();
  } else if (key == 'd') {
    kParams.only_deltas = 1 - kParams.only_deltas;
    glutPostRedisplay();
  }
}
char* webpGetXMP(const uint8_t* data, size_t data_size, size_t* metadata_size) { char* metadata = NULL; WebPData webp_data = {data, data_size}; WebPDemuxer* demux = WebPDemux(&webp_data); uint32_t flags = WebPDemuxGetI(demux, WEBP_FF_FORMAT_FLAGS); *metadata_size = 0; if(flags & XMP_FLAG) { WebPChunkIterator it; memset(&it, 0, sizeof(it)); if(WebPDemuxGetChunk(demux, "XMP ", 1, &it)) { if(it.chunk.bytes != NULL && it.chunk.size > 0) { metadata = (char*)malloc(it.chunk.size); memcpy(metadata, it.chunk.bytes, it.chunk.size); *metadata_size = it.chunk.size; } } WebPDemuxReleaseChunkIterator(&it); } WebPDemuxDelete(demux); return metadata; }
// Incrementally decodes the (single-frame) WebP image from the data received
// so far. When onlySize is true, stops once the canvas dimensions are known.
// Returns true when decoding is complete, false when more data is needed,
// and marks the decoder failed on malformed input.
bool WEBPImageDecoder::decode(bool onlySize)
{
    if (failed())
        return false;

    const uint8_t* dataBytes = reinterpret_cast<const uint8_t*>(m_data->data());
    const size_t dataSize = m_data->size();

    if (!ImageDecoder::isSizeAvailable()) {
        // Minimum number of bytes needed before header parsing can succeed.
        static const size_t imageHeaderSize = 30;
        if (dataSize < imageHeaderSize)
            return false;

        int width, height;
#ifdef QCMS_WEBP_COLOR_CORRECTION
        // Color-corrected builds use the demuxer so the format flags (and
        // hence the ICC profile presence) are known up front.
        WebPData inputData = { dataBytes, dataSize };
        WebPDemuxState state;
        WebPDemuxer* demuxer = WebPDemuxPartial(&inputData, &state);
        if (!demuxer)
            return setFailed();
        width = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
        height = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
        m_formatFlags = WebPDemuxGetI(demuxer, WEBP_FF_FORMAT_FLAGS);
        m_hasAlpha = !!(m_formatFlags & ALPHA_FLAG);
        WebPDemuxDelete(demuxer);
        // The demuxer object exists, but the header itself may still be
        // incomplete: wait for more data.
        if (state <= WEBP_DEMUX_PARSING_HEADER)
            return false;
#elif (WEBP_DECODER_ABI_VERSION >= 0x0163)
        WebPBitstreamFeatures features;
        if (WebPGetFeatures(dataBytes, dataSize, &features) != VP8_STATUS_OK)
            return setFailed();
        width = features.width;
        height = features.height;
        m_hasAlpha = features.has_alpha;
#else
        // Earlier version won't be able to display WebP files with alpha.
        if (!WebPGetInfo(dataBytes, dataSize, &width, &height))
            return setFailed();
        m_hasAlpha = false;
#endif
        if (!setSize(width, height))
            return setFailed();
    }

    ASSERT(ImageDecoder::isSizeAvailable());
    if (onlySize)
        return true;

    ASSERT(!m_frameBufferCache.isEmpty());
    ImageFrame& buffer = m_frameBufferCache[0];
    ASSERT(buffer.status() != ImageFrame::FrameComplete);

    // First pass for this frame: allocate the output pixels.
    if (buffer.status() == ImageFrame::FrameEmpty) {
        if (!buffer.setSize(size().width(), size().height()))
            return setFailed();
        buffer.setStatus(ImageFrame::FramePartial);
        buffer.setHasAlpha(m_hasAlpha);
        buffer.setOriginalFrameRect(IntRect(IntPoint(), size()));
    }

    // Create the incremental decoder once, writing directly into the frame
    // buffer's pixel storage.
    if (!m_decoder) {
        WEBP_CSP_MODE mode = outputMode(m_hasAlpha);
        if (!m_premultiplyAlpha)
            mode = outputMode(false);
        if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile())
            mode = MODE_RGBA; // Decode to RGBA for input to libqcms.
        int rowStride = size().width() * sizeof(ImageFrame::PixelData);
        uint8_t* output = reinterpret_cast<uint8_t*>(buffer.getAddr(0, 0));
        int outputSize = size().height() * rowStride;
        m_decoder = WebPINewRGB(mode, output, outputSize, rowStride);
        if (!m_decoder)
            return setFailed();
    }

    switch (WebPIUpdate(m_decoder, dataBytes, dataSize)) {
    case VP8_STATUS_OK:
        // Fully decoded: color-correct (if applicable), finalize, clean up.
        if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile())
            applyColorProfile(dataBytes, dataSize, buffer);
        buffer.setStatus(ImageFrame::FrameComplete);
        clear();
        return true;
    case VP8_STATUS_SUSPENDED:
        // More data needed; color-correct what we have so partial renders
        // look right, then wait for the next call.
        if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile())
            applyColorProfile(dataBytes, dataSize, buffer);
        return false;
    default:
        clear();
        return setFailed();
    }
}
// Re-parses the accumulated data with the libwebp demuxer and synchronizes
// decoder state with it: canvas size, format flags, animation parameters,
// and one ImageFrame cache entry per demuxed frame (with duration, disposal,
// blend mode, frame rect and required-previous-frame dependency for animated
// images). Returns true when the demuxer is usable, false while more data is
// needed, and marks the decoder failed on malformed input.
bool WEBPImageDecoder::updateDemuxer()
{
    if (failed())
        return false;
    // Parse each data snapshot at most once.
    if (m_haveAlreadyParsedThisData)
        return true;
    m_haveAlreadyParsedThisData = true;

    const unsigned webpHeaderSize = 20;
    if (m_data->size() < webpHeaderSize)
        return false; // Wait for headers so that WebPDemuxPartial doesn't return null.

    // Replace any demuxer built over a previous, shorter data snapshot.
    WebPDemuxDelete(m_demux);
    WebPData inputData = { reinterpret_cast<const uint8_t*>(m_data->data()), m_data->size() };
    m_demux = WebPDemuxPartial(&inputData, &m_demuxState);
    if (!m_demux || (isAllDataReceived() && m_demuxState != WEBP_DEMUX_DONE))
        return setFailed();
    if (m_demuxState <= WEBP_DEMUX_PARSING_HEADER)
        return false; // Not enough data for parsing canvas width/height yet.

    bool hasAnimation = (m_formatFlags & ANIMATION_FLAG);
    if (!ImageDecoder::isSizeAvailable()) {
        m_formatFlags = WebPDemuxGetI(m_demux, WEBP_FF_FORMAT_FLAGS);
        hasAnimation = (m_formatFlags & ANIMATION_FLAG);
        if (!hasAnimation)
            m_repetitionCount = cAnimationNone;
        else
            m_formatFlags &= ~ICCP_FLAG; // FIXME: Implement ICC profile support for animated images.
#if USE(QCMSLIB)
        if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile())
            m_hasColorProfile = true;
#endif
        if (!setSize(WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_WIDTH), WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_HEIGHT)))
            return setFailed();
    }
    ASSERT(ImageDecoder::isSizeAvailable());

    const size_t newFrameCount = WebPDemuxGetI(m_demux, WEBP_FF_FRAME_COUNT);
    // Latch the animation parameters the first time any frame is visible.
    if (hasAnimation && !m_haveReadAnimationParameters && newFrameCount) {
        // As we have parsed at least one frame (even if partially),
        // we must already have parsed the animation properties.
        // This is because ANIM chunk always precedes ANMF chunks.
        m_repetitionCount = WebPDemuxGetI(m_demux, WEBP_FF_LOOP_COUNT);
        ASSERT(m_repetitionCount == (m_repetitionCount & 0xffff)); // Loop count is always <= 16 bits.
        // |m_repetitionCount| is the total number of animation cycles to show,
        // with 0 meaning "infinite". But ImageSource::repetitionCount()
        // returns -1 for "infinite", and 0 and up for "show the animation one
        // cycle more than this value". By subtracting one here, we convert
        // both finite and infinite cases correctly.
        --m_repetitionCount;
        m_haveReadAnimationParameters = true;
    }

    // Grow the frame buffer cache to match the demuxer's frame count and
    // fill in the metadata for each newly-visible frame.
    const size_t oldFrameCount = m_frameBufferCache.size();
    if (newFrameCount > oldFrameCount) {
        m_frameBufferCache.resize(newFrameCount);
        for (size_t i = oldFrameCount; i < newFrameCount; ++i) {
            m_frameBufferCache[i].setPremultiplyAlpha(m_premultiplyAlpha);
            if (!hasAnimation) {
                // A still image has exactly one frame with no dependencies.
                ASSERT(!i);
                m_frameBufferCache[i].setRequiredPreviousFrameIndex(kNotFound);
                continue;
            }
            WebPIterator animatedFrame;
            WebPDemuxGetFrame(m_demux, i + 1, &animatedFrame); // Demuxer frames are 1-based.
            ASSERT(animatedFrame.complete == 1);
            m_frameBufferCache[i].setDuration(animatedFrame.duration);
            m_frameBufferCache[i].setDisposalMethod(animatedFrame.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND ? ImageFrame::DisposeOverwriteBgcolor : ImageFrame::DisposeKeep);
            m_frameBufferCache[i].setAlphaBlendSource(animatedFrame.blend_method == WEBP_MUX_BLEND ? ImageFrame::BlendAtopPreviousFrame : ImageFrame::BlendAtopBgcolor);
            IntRect frameRect(animatedFrame.x_offset, animatedFrame.y_offset, animatedFrame.width, animatedFrame.height);
            // Make sure the frameRect doesn't extend outside the buffer.
            if (frameRect.maxX() > size().width())
                frameRect.setWidth(size().width() - animatedFrame.x_offset);
            if (frameRect.maxY() > size().height())
                frameRect.setHeight(size().height() - animatedFrame.y_offset);
            m_frameBufferCache[i].setOriginalFrameRect(frameRect);
            m_frameBufferCache[i].setRequiredPreviousFrameIndex(findRequiredPreviousFrame(i, !animatedFrame.has_alpha));
            WebPDemuxReleaseIterator(&animatedFrame);
        }
    }
    return true;
}
// Parses the data received so far with the libwebp demuxer, holding a
// consolidated copy of the stream (m_consolidatedData) alive for the
// demuxer's lifetime, and caches canvas size, format flags and the
// repetition count. Returns true when the demuxer is usable, false while
// more data is needed, and marks the decoder failed on malformed input.
bool WEBPImageDecoder::updateDemuxer()
{
    if (failed())
        return false;
    // Parse each data snapshot at most once.
    if (m_haveAlreadyParsedThisData)
        return true;
    m_haveAlreadyParsedThisData = true;

    const unsigned webpHeaderSize = 30;
    if (m_data->size() < webpHeaderSize)
        return false; // Await VP8X header so WebPDemuxPartial succeeds.

    WebPDemuxDelete(m_demux);
    // The demuxer reads from this contiguous buffer; keep it referenced for
    // as long as m_demux exists.
    m_consolidatedData = m_data->getAsSkData();
    WebPData inputData = { reinterpret_cast<const uint8_t*>(m_consolidatedData->data()), m_consolidatedData->size()};
    m_demux = WebPDemuxPartial(&inputData, &m_demuxState);
    if (!m_demux || (isAllDataReceived() && m_demuxState != WEBP_DEMUX_DONE)) {
        if (!m_demux)
            m_consolidatedData.reset(); // No demuxer holds the data; release it.
        return setFailed();
    }

    ASSERT(m_demuxState > WEBP_DEMUX_PARSING_HEADER);
    if (!WebPDemuxGetI(m_demux, WEBP_FF_FRAME_COUNT))
        return false; // Wait until the encoded image frame data arrives.

    if (!isDecodedSizeAvailable()) {
        int width = WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_WIDTH);
        int height = WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_HEIGHT);
        if (!setSize(width, height))
            return setFailed();

        m_formatFlags = WebPDemuxGetI(m_demux, WEBP_FF_FORMAT_FLAGS);
        if (!(m_formatFlags & ANIMATION_FLAG)) {
            m_repetitionCount = cAnimationNone;
        } else {
            // Since we have parsed at least one frame, even if partially,
            // the global animation (ANIM) properties have been read since
            // an ANIM chunk must precede the ANMF frame chunks.
            m_repetitionCount = WebPDemuxGetI(m_demux, WEBP_FF_LOOP_COUNT);
            // Repetition count is always <= 16 bits.
            ASSERT(m_repetitionCount == (m_repetitionCount & 0xffff));
            // A bitstream loop count of 0 means "loop forever".
            if (!m_repetitionCount)
                m_repetitionCount = cAnimationLoopInfinite;
            // FIXME: Implement ICC profile support for animated images.
            m_formatFlags &= ~ICCP_FLAG;
        }

        if ((m_formatFlags & ICCP_FLAG) && !ignoresColorSpace())
            readColorProfile();
    }

    ASSERT(isDecodedSizeAvailable());

    size_t frameCount = WebPDemuxGetI(m_demux, WEBP_FF_FRAME_COUNT);
    updateAggressivePurging(frameCount);
    return true;
}