Example #1
void WEBPImageDecoder::initializeNewFrame(size_t index) {
  // Still (non-animated) WebP images keep the single frame's default metadata.
  if (!(m_formatFlags & ANIMATION_FLAG)) {
    ASSERT(!index);
    return;
  }
  WebPIterator animatedFrame;
  // Demuxer frame numbers are 1-based, hence index + 1.
  WebPDemuxGetFrame(m_demux, index + 1, &animatedFrame);
  ASSERT(animatedFrame.complete == 1);
  ImageFrame* buffer = &m_frameBufferCache[index];
  IntRect frameRect(animatedFrame.x_offset, animatedFrame.y_offset,
                    animatedFrame.width, animatedFrame.height);
  buffer->setOriginalFrameRect(
      intersection(frameRect, IntRect(IntPoint(), size())));
  buffer->setDuration(animatedFrame.duration);
  buffer->setDisposalMethod(animatedFrame.dispose_method ==
                                    WEBP_MUX_DISPOSE_BACKGROUND
                                ? ImageFrame::DisposeOverwriteBgcolor
                                : ImageFrame::DisposeKeep);
  buffer->setAlphaBlendSource(animatedFrame.blend_method == WEBP_MUX_BLEND
                                  ? ImageFrame::BlendAtopPreviousFrame
                                  : ImageFrame::BlendAtopBgcolor);
  buffer->setRequiredPreviousFrameIndex(
      findRequiredPreviousFrame(index, !animatedFrame.has_alpha));
  WebPDemuxReleaseIterator(&animatedFrame);
}
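
For reference, a minimal stand-alone sketch of the libwebp demux calls this method builds on; the dumpFrameMetadata helper and the bytes/size buffer are assumptions for illustration, but the WebPIterator fields are the ones initializeNewFrame maps onto ImageFrame:

#include <webp/demux.h>
#include <cstddef>
#include <cstdio>

// Sketch only: iterate the frames of a WebP file already in memory
// (bytes/size are assumed to hold a complete .webp file).
void dumpFrameMetadata(const uint8_t* bytes, size_t size) {
  WebPData data = { bytes, size };
  WebPDemuxer* demux = WebPDemux(&data);
  if (!demux)
    return;
  WebPIterator frame;
  if (WebPDemuxGetFrame(demux, 1, &frame)) {  // Frame numbers are 1-based.
    do {
      std::printf("frame %d: %dx%d at (%d,%d), %d ms, dispose=%s, blend=%s\n",
                  frame.frame_num, frame.width, frame.height,
                  frame.x_offset, frame.y_offset, frame.duration,
                  frame.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND ? "background" : "none",
                  frame.blend_method == WEBP_MUX_BLEND ? "previous" : "none");
    } while (WebPDemuxNextFrame(&frame));
    WebPDemuxReleaseIterator(&frame);
  }
  WebPDemuxDelete(demux);
}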
Example #2
void GIFImageDecoder::initializeNewFrame(size_t index)
{
    ImageFrame* buffer = &m_frameBufferCache[index];
    const GIFFrameContext* frameContext = m_reader->frameContext(index);
    buffer->setOriginalFrameRect(intersection(frameContext->frameRect(), IntRect(IntPoint(), size())));
    buffer->setDuration(frameContext->delayTime());
    buffer->setDisposalMethod(frameContext->disposalMethod());
    buffer->setRequiredPreviousFrameIndex(findRequiredPreviousFrame(index, false));
}
Example #3
void GIFImageDecoder::parse(GIFParseQuery query)
{
    if (failed())
        return;

    if (!m_reader) {
        m_reader = adoptPtr(new GIFImageReader(this));
        m_reader->setData(m_data);
    }

    if (!m_reader->parse(query)) {
        setFailed();
        return;
    }

    const size_t oldSize = m_frameBufferCache.size();
    m_frameBufferCache.resize(m_reader->imagesCount());

    for (size_t i = oldSize; i < m_reader->imagesCount(); ++i) {
        ImageFrame& buffer = m_frameBufferCache[i];
        const GIFFrameContext* frameContext = m_reader->frameContext(i);
        buffer.setPremultiplyAlpha(m_premultiplyAlpha);
        buffer.setRequiredPreviousFrameIndex(findRequiredPreviousFrame(i));
        buffer.setDuration(frameContext->delayTime);
        buffer.setDisposalMethod(frameContext->disposalMethod);

        // Initialize the frame rect in our buffer.
        IntRect frameRect(frameContext->xOffset, frameContext->yOffset, frameContext->width, frameContext->height);

        // Make sure the frameRect doesn't extend outside the buffer.
        if (frameRect.maxX() > size().width())
            frameRect.setWidth(size().width() - frameContext->xOffset);
        if (frameRect.maxY() > size().height())
            frameRect.setHeight(size().height() - frameContext->yOffset);

        buffer.setOriginalFrameRect(frameRect);
    }
}
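
To make the clamping at the end of that loop concrete, a small stand-alone sketch with made-up numbers (plain ints instead of Blink's IntRect):

#include <cassert>

int main() {
    const int canvasW = 100, canvasH = 80;   // size()
    int x = 90, y = 70, w = 30, h = 20;      // declared frame rect
    if (x + w > canvasW)                     // frameRect.maxX() > size().width()
        w = canvasW - x;                     // width becomes 10
    if (y + h > canvasH)                     // frameRect.maxY() > size().height()
        h = canvasH - y;                     // height becomes 10
    assert(w == 10 && h == 10);              // only the part that fits the canvas is kept
    return 0;
}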
Example #4
void resetRequiredPreviousFrames(bool knownOpaque = false)
{
    for (size_t i = 0; i < m_frameBufferCache.size(); ++i)
        m_frameBufferCache[i].setRequiredPreviousFrameIndex(findRequiredPreviousFrame(i, knownOpaque));
}
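
Both decoders delegate the dependency computation to findRequiredPreviousFrame. As a rough, simplified sketch only (this is not Blink's implementation; the FrameInfo type and its field names are stand-ins), the walk it performs looks something like this:

#include <cstddef>
#include <vector>

constexpr size_t kNotFound = static_cast<size_t>(-1);
enum class Dispose { Keep, OverwriteBgcolor, OverwritePrevious };

struct FrameInfo {
    bool coversFullCanvas = false;   // originalFrameRect contains the whole canvas
    bool blendsAtopBgcolor = false;  // frame does not need previous canvas contents
    Dispose dispose = Dispose::Keep;
    size_t requiredPrev = kNotFound;
};

// A frame that fully covers the canvas and does not blend with it stands alone;
// otherwise it depends on the nearest earlier frame that actually leaves pixels behind.
size_t requiredPreviousFrame(const std::vector<FrameInfo>& frames, size_t index, bool knownOpaque)
{
    if (!index)
        return kNotFound; // The first frame never depends on anything.
    const FrameInfo& curr = frames[index];
    if ((knownOpaque || curr.blendsAtopBgcolor) && curr.coversFullCanvas)
        return kNotFound; // The frame overwrites the whole canvas by itself.
    const FrameInfo& prev = frames[index - 1];
    switch (prev.dispose) {
    case Dispose::Keep:
        return index - 1;             // Start from the previous frame's canvas.
    case Dispose::OverwritePrevious:
        return prev.requiredPrev;     // The previous frame is reverted; skip past it.
    case Dispose::OverwriteBgcolor:
        // If the previous frame wipes the whole canvas, start from a blank canvas.
        return prev.coversFullCanvas ? kNotFound : index - 1;
    }
    return index - 1;
}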
Example #5
bool WEBPImageDecoder::updateDemuxer()
{
    if (failed())
        return false;

    if (m_haveAlreadyParsedThisData)
        return true;

    m_haveAlreadyParsedThisData = true;

    const unsigned webpHeaderSize = 20;
    if (m_data->size() < webpHeaderSize)
        return false; // Wait for headers so that WebPDemuxPartial doesn't return null.

    WebPDemuxDelete(m_demux);
    WebPData inputData = { reinterpret_cast<const uint8_t*>(m_data->data()), m_data->size() };
    m_demux = WebPDemuxPartial(&inputData, &m_demuxState);
    if (!m_demux || (isAllDataReceived() && m_demuxState != WEBP_DEMUX_DONE))
        return setFailed();

    if (m_demuxState <= WEBP_DEMUX_PARSING_HEADER)
        return false; // Not enough data for parsing canvas width/height yet.

    bool hasAnimation = (m_formatFlags & ANIMATION_FLAG);
    if (!ImageDecoder::isSizeAvailable()) {
        m_formatFlags = WebPDemuxGetI(m_demux, WEBP_FF_FORMAT_FLAGS);
        hasAnimation = (m_formatFlags & ANIMATION_FLAG);
        if (!hasAnimation)
            m_repetitionCount = cAnimationNone;
        else
            m_formatFlags &= ~ICCP_FLAG; // FIXME: Implement ICC profile support for animated images.
#if USE(QCMSLIB)
        if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile())
            m_hasColorProfile = true;
#endif
        if (!setSize(WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_WIDTH), WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_HEIGHT)))
            return setFailed();
    }

    ASSERT(ImageDecoder::isSizeAvailable());
    const size_t newFrameCount = WebPDemuxGetI(m_demux, WEBP_FF_FRAME_COUNT);
    if (hasAnimation && !m_haveReadAnimationParameters && newFrameCount) {
        // As we have parsed at least one frame (even if partially),
        // we must already have parsed the animation properties.
        // This is because ANIM chunk always precedes ANMF chunks.
        m_repetitionCount = WebPDemuxGetI(m_demux, WEBP_FF_LOOP_COUNT);
        ASSERT(m_repetitionCount == (m_repetitionCount & 0xffff)); // Loop count is always <= 16 bits.
        // |m_repetitionCount| is the total number of animation cycles to show,
        // with 0 meaning "infinite". But ImageSource::repetitionCount()
        // returns -1 for "infinite", and 0 and up for "show the animation one
        // cycle more than this value". By subtracting one here, we convert
        // both finite and infinite cases correctly.
        --m_repetitionCount;
        m_haveReadAnimationParameters = true;
    }

    const size_t oldFrameCount = m_frameBufferCache.size();
    if (newFrameCount > oldFrameCount) {
        m_frameBufferCache.resize(newFrameCount);
        for (size_t i = oldFrameCount; i < newFrameCount; ++i) {
            m_frameBufferCache[i].setPremultiplyAlpha(m_premultiplyAlpha);
            if (!hasAnimation) {
                ASSERT(!i);
                m_frameBufferCache[i].setRequiredPreviousFrameIndex(kNotFound);
                continue;
            }
            WebPIterator animatedFrame;
            WebPDemuxGetFrame(m_demux, i + 1, &animatedFrame);
            ASSERT(animatedFrame.complete == 1);
            m_frameBufferCache[i].setDuration(animatedFrame.duration);
            m_frameBufferCache[i].setDisposalMethod(animatedFrame.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND
                ? ImageFrame::DisposeOverwriteBgcolor : ImageFrame::DisposeKeep);
            m_frameBufferCache[i].setAlphaBlendSource(animatedFrame.blend_method == WEBP_MUX_BLEND
                ? ImageFrame::BlendAtopPreviousFrame : ImageFrame::BlendAtopBgcolor);
            IntRect frameRect(animatedFrame.x_offset, animatedFrame.y_offset, animatedFrame.width, animatedFrame.height);
            // Make sure the frameRect doesn't extend outside the buffer.
            if (frameRect.maxX() > size().width())
                frameRect.setWidth(size().width() - animatedFrame.x_offset);
            if (frameRect.maxY() > size().height())
                frameRect.setHeight(size().height() - animatedFrame.y_offset);
            m_frameBufferCache[i].setOriginalFrameRect(frameRect);
            m_frameBufferCache[i].setRequiredPreviousFrameIndex(findRequiredPreviousFrame(i, !animatedFrame.has_alpha));
            WebPDemuxReleaseIterator(&animatedFrame);
        }
    }

    return true;
}
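
The loop-count adjustment in updateDemuxer is easy to get backwards, so here is a tiny stand-alone illustration of the conversion described in the comment above (plain ints; toRepetitionCount is a made-up name):

#include <cassert>

// WebP's WEBP_FF_LOOP_COUNT: 0 means "loop forever", N means "play N cycles".
// The decoder's convention (per the comment above): -1 means "forever",
// and N >= 0 means "show the animation one cycle more than N".
int toRepetitionCount(int webpLoopCount) {
    return webpLoopCount - 1;
}

int main() {
    assert(toRepetitionCount(0) == -1); // infinite maps to infinite
    assert(toRepetitionCount(1) == 0);  // one cycle: one more than 0
    assert(toRepetitionCount(3) == 2);  // three cycles: one more than 2
    return 0;
}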