// Incrementally decodes one frame from the WebP bitstream into the frame
// buffer cache. Returns true only when the frame reaches FrameComplete;
// returns false both for "need more data" (suspended) and after a fatal
// failure (in which case failed() becomes true via setFailed()).
bool WEBPImageDecoder::decodeSingleFrame(const uint8_t* dataBytes, size_t dataSize, size_t frameIndex)
{
    if (failed())
        return false;

    ASSERT(isDecodedSizeAvailable());

    ASSERT(m_frameBufferCache.size() > frameIndex);
    ImageFrame& buffer = m_frameBufferCache[frameIndex];
    ASSERT(buffer.status() != ImageFrame::FrameComplete);

    // First call for this frame: allocate pixel storage and mark it partial.
    if (buffer.status() == ImageFrame::FrameEmpty) {
        if (!buffer.setSize(size().width(), size().height()))
            return setFailed();
        buffer.setStatus(ImageFrame::FramePartial);
        // The buffer is transparent outside the decoded area while the image is loading.
        // The correct value of 'hasAlpha' for the frame will be set when it is fully decoded.
        buffer.setHasAlpha(true);
        buffer.setOriginalFrameRect(IntRect(IntPoint(), size()));
    }

    const IntRect& frameRect = buffer.originalFrameRect();
    // Lazily create the libwebp incremental decoder. The output buffer is
    // configured as external memory so libwebp writes straight into our
    // ImageFrame pixels; its settings are captured here, at creation time.
    if (!m_decoder) {
        WEBP_CSP_MODE mode = outputMode(m_formatFlags & ALPHA_FLAG);
        if (!m_premultiplyAlpha)
            mode = outputMode(false);
#if USE(QCMSLIB)
        if (colorTransform())
            mode = MODE_RGBA; // Decode to RGBA for input to libqcms.
#endif
        WebPInitDecBuffer(&m_decoderBuffer);
        m_decoderBuffer.colorspace = mode;
        m_decoderBuffer.u.RGBA.stride = size().width() * sizeof(ImageFrame::PixelData);
        m_decoderBuffer.u.RGBA.size = m_decoderBuffer.u.RGBA.stride * frameRect.height();
        m_decoderBuffer.is_external_memory = 1;
        m_decoder = WebPINewDecoder(&m_decoderBuffer);
        if (!m_decoder)
            return setFailed();
    }

    // Re-point the output at the frame's pixels on every call: the frame
    // buffer cache may have moved/reallocated the backing store.
    m_decoderBuffer.u.RGBA.rgba = reinterpret_cast<uint8_t*>(buffer.getAddr(frameRect.x(), frameRect.y()));

    switch (WebPIUpdate(m_decoder, dataBytes, dataSize)) {
    case VP8_STATUS_OK:
        // Frame fully decoded: post-process, finalize alpha, and release the
        // per-frame decoder state.
        applyPostProcessing(frameIndex);
        buffer.setHasAlpha((m_formatFlags & ALPHA_FLAG) || m_frameBackgroundHasAlpha);
        buffer.setStatus(ImageFrame::FrameComplete);
        clearDecoder();
        return true;
    case VP8_STATUS_SUSPENDED:
        // More data is expected. Only keep waiting if the stream is still
        // arriving; a suspended decode with all data present is an error.
        if (!isAllDataReceived() && !frameIsCompleteAtIndex(frameIndex)) {
            applyPostProcessing(frameIndex);
            return false;
        }
        // FALLTHROUGH
    default:
        clear();
        return setFailed();
    }
}
/*
 * Setup state for decoding a strip.
 *
 * Computes the segment (strip or tile) dimensions, (re)creates the libwebp
 * incremental decoder with an internally-allocated RGB/RGBA output buffer,
 * and resets the per-segment decode cursor. Returns 1 on success, 0 on
 * failure (after reporting via TIFFErrorExt).
 */
static int
TWebPPreDecode(TIFF* tif, uint16 s)
{
  static const char module[] = "TWebPPreDecode";
  uint32 segment_width, segment_height;
  WebPState* sp = DecoderState(tif);
  TIFFDirectory* td = &tif->tif_dir;
  (void) s;
  assert(sp != NULL);

  if (isTiled(tif)) {
    segment_width = td->td_tilewidth;
    segment_height = td->td_tilelength;
  } else {
    segment_width = td->td_imagewidth;
    /* The last strip may be shorter than td_rowsperstrip. */
    segment_height = td->td_imagelength - tif->tif_row;
    if (segment_height > td->td_rowsperstrip)
      segment_height = td->td_rowsperstrip;
  }

  if( (sp->state & LSTATE_INIT_DECODE) == 0 )
  {
      /* BUGFIX: propagate setup failures. The return value used to be
         ignored, which let decoding proceed with uninitialized state. */
      if( !tif->tif_setupdecode(tif) )
          return 0;
  }

  /* Discard any decoder left over from a previous segment. */
  if (sp->psDecoder != NULL) {
    WebPIDelete(sp->psDecoder);
    WebPFreeDecBuffer(&sp->sDecBuffer);
    sp->psDecoder = NULL;
  }

  sp->last_y = 0;

  WebPInitDecBuffer(&sp->sDecBuffer);

  /* Let libwebp allocate the output; rows are tightly packed. */
  sp->sDecBuffer.is_external_memory = 0;
  sp->sDecBuffer.width = segment_width;
  sp->sDecBuffer.height = segment_height;
  sp->sDecBuffer.u.RGBA.stride = segment_width * sp->nSamples;
  sp->sDecBuffer.u.RGBA.size = segment_width * sp->nSamples * segment_height;

  if (sp->nSamples > 3) {
    sp->sDecBuffer.colorspace = MODE_RGBA;
  } else {
    sp->sDecBuffer.colorspace = MODE_RGB;
  }

  sp->psDecoder = WebPINewDecoder(&sp->sDecBuffer);

  if (sp->psDecoder == NULL) {
    TIFFErrorExt(tif->tif_clientdata, module,
                 "Unable to allocate WebP decoder.");
    return 0;
  }

  return 1;
}
void nsWEBPDecoder::InitInternal()
{
  // Prepare the libwebp output buffer; failure here indicates a
  // version/ABI mismatch with the libwebp we were compiled against.
  if (!WebPInitDecBuffer(&mDecBuf)) {
    PostDecoderError(NS_ERROR_FAILURE);
    return;
  }

  // Ask libwebp for premultiplied RGBA output, and start tracking
  // decoded rows from the top of the image.
  mDecBuf.colorspace = MODE_rgbA;
  mLastLine = 0;

  // Create the incremental decoder bound to that output buffer.
  mDecoder = WebPINewDecoder(&mDecBuf);
  if (!mDecoder) {
    PostDecoderError(NS_ERROR_FAILURE);
  }
}
/*
 * Incrementally decode the WebP image straight into the locked surface
 * buffer. Returns DFB_OK when the full image was decoded, DFB_FAILURE
 * otherwise.
 */
static DFBResult
WebP_decode_image( IDirectFBImageProvider_WebP_data *data,
                   CoreSurfaceBufferLock            *lock )
{
     VP8StatusCode status = VP8_STATUS_NOT_ENOUGH_DATA;
     DFBResult     ret;
     uint32_t      read_size;
     u8            image[data->image_size];
     WebPIDecoder *WebP_dec;

     IDirectFBDataBuffer *buffer = data->base.buffer;

     /* BUGFIX: configure the external output buffer BEFORE creating the
        incremental decoder. WebPINewDecoder() captures the buffer settings
        at creation time, so fields assigned afterwards were ignored. */
     data->config.output.colorspace = (data->pixelformat == DSPF_ARGB) ? MODE_bgrA : MODE_BGR;
     data->config.output.u.RGBA.rgba = (uint8_t*)lock->addr;
     data->config.output.u.RGBA.stride = lock->pitch;
     data->config.output.u.RGBA.size = lock->pitch * data->height;
     data->config.output.is_external_memory = 1;

     WebP_dec = WebPINewDecoder( &data->config.output );
     if (!WebP_dec)
          return DFB_FAILURE;

     /* Feed the stream chunk by chunk; SUSPENDED means "more data needed"
        and is not an error mid-stream. */
     ret = DFB_OK;
     while (ret != DFB_EOF && buffer->HasData( buffer ) == DFB_OK) {
          ret = buffer->GetData( buffer, data->image_size, image, &read_size );

          status = WebPIAppend( WebP_dec, image, read_size );
          if (!(status == VP8_STATUS_OK || status == VP8_STATUS_SUSPENDED))
               break;
     }

     WebPIDelete( WebP_dec );

     return (status == VP8_STATUS_OK) ? DFB_OK : DFB_FAILURE;
}
/* Open the configured WebP file (by URI or path), create the incremental
 * decoder, probe the image features, and publish the output format.
 * Re-entrant across property changes: a changed file triggers cleanup().
 */
static void
prepare (GeglOperation *operation)
{
  GeglProperties *o = GEGL_PROPERTIES (operation);
  Priv *p = (o->user_data) ? o->user_data : g_new0 (Priv, 1);
  GError *error = NULL;
  GFile *file = NULL;
  guchar *buffer;
  gsize read;

  g_assert (p != NULL);

  /* If the source file changed since the last prepare, drop all state. */
  if (p->file != NULL && (o->uri || o->path))
    {
      if (o->uri && strlen (o->uri) > 0)
        file = g_file_new_for_uri (o->uri);
      else if (o->path && strlen (o->path) > 0)
        file = g_file_new_for_path (o->path);
      if (file != NULL)
        {
          if (!g_file_equal (p->file, file))
            cleanup (operation);
          g_object_unref (file);
        }
    }

  o->user_data = (void*) p;

  if (p->config == NULL)
    {
      p->stream = gegl_gio_open_input_stream (o->uri, o->path, &p->file, &error);
      if (p->stream == NULL)
        {
          /* BUGFIX: "%s" guards against '%' sequences in the error text
             being interpreted as format directives. */
          g_warning ("%s", error->message);
          g_error_free (error);
          cleanup (operation);
          return;
        }

      /* BUGFIX: allocate and initialise the decoder configuration BEFORE
         creating the incremental decoder. The previous order called
         WebPINewDecoder() on the still-uninitialised output buffer and
         dereferenced p->config ahead of its NULL check. */
      p->config = g_try_new (WebPDecoderConfig, 1);
      g_assert (p->config != NULL);

      if (!WebPInitDecoderConfig (p->config))
        {
          g_warning ("could not initialise WebP decoder configuration");
          cleanup (operation);
          return;
        }

      p->decoder = WebPINewDecoder (&p->config->output);

      read = read_from_stream (p->stream, &buffer, IO_BUFFER_SIZE);
      if (WebPGetFeatures (buffer, read, &p->config->input) != VP8_STATUS_OK)
        {
          g_warning ("failed reading WebP image file");
          cleanup (operation);
          g_free (buffer);
          return;
        }

      if (!query_webp (operation))
        {
          g_warning ("could not query WebP image file");
          cleanup (operation);
          g_free (buffer);
          return;
        }

      WebPIAppend (p->decoder, buffer, read);

      g_free (buffer);
    }

  gegl_operation_set_format (operation, "output", p->format);
}
static pixman_image_t * load_webp(FILE *fp) { WebPDecoderConfig config; uint8_t buffer[16 * 1024]; int len; VP8StatusCode status; WebPIDecoder *idec; if (!WebPInitDecoderConfig(&config)) { fprintf(stderr, "Library version mismatch!\n"); return NULL; } /* webp decoding api doesn't seem to specify a min size that's usable for GetFeatures, but 256 works... */ len = fread(buffer, 1, 256, fp); status = WebPGetFeatures(buffer, len, &config.input); if (status != VP8_STATUS_OK) { fprintf(stderr, "failed to parse webp header\n"); WebPFreeDecBuffer(&config.output); return NULL; } config.output.colorspace = MODE_BGRA; config.output.u.RGBA.stride = stride_for_width(config.input.width); config.output.u.RGBA.size = config.output.u.RGBA.stride * config.input.height; config.output.u.RGBA.rgba = malloc(config.output.u.RGBA.stride * config.input.height); config.output.is_external_memory = 1; if (!config.output.u.RGBA.rgba) { WebPFreeDecBuffer(&config.output); return NULL; } rewind(fp); idec = WebPINewDecoder(&config.output); if (!idec) { WebPFreeDecBuffer(&config.output); return NULL; } while (!feof(fp)) { len = fread(buffer, 1, sizeof buffer, fp); status = WebPIAppend(idec, buffer, len); if (status != VP8_STATUS_OK) { fprintf(stderr, "webp decode status %d\n", status); WebPIDelete(idec); WebPFreeDecBuffer(&config.output); return NULL; } } WebPIDelete(idec); WebPFreeDecBuffer(&config.output); return pixman_image_create_bits(PIXMAN_a8r8g8b8, config.input.width, config.input.height, (uint32_t *) config.output.u.RGBA.rgba, config.output.u.RGBA.stride); }
/* Load a WebP image from 'istream' into a cairo ARGB32 surface wrapped in a
 * GthImage. Decoding is incremental (BUFFER_SIZE chunks) and writes directly
 * into the surface's pixel data as external memory. On any failure an empty
 * GthImage (no surface attached) is returned; 'error' may be set by the
 * underlying stream reads. */
GthImage *
_cairo_image_surface_create_from_webp (GInputStream  *istream,
				       GthFileData   *file_data,
				       int            requested_size,
				       int           *original_width,
				       int           *original_height,
				       gboolean      *loaded_original,
				       gpointer       user_data,
				       GCancellable  *cancellable,
				       GError       **error)
{
	GthImage                 *image;
	WebPDecoderConfig         config;
	guchar                   *buffer;
	gssize                    bytes_read;
	int                       width, height;
	cairo_surface_t          *surface;
	cairo_surface_metadata_t *metadata;
	WebPIDecoder             *idec;

	image = gth_image_new ();

	if (! WebPInitDecoderConfig (&config))
		return image;

	/* Read the first chunk and probe the header for dimensions/alpha. */
	buffer = g_new (guchar, BUFFER_SIZE);
	bytes_read = g_input_stream_read (istream, buffer, BUFFER_SIZE, cancellable, error);

	if (WebPGetFeatures (buffer, bytes_read, &config.input) != VP8_STATUS_OK) {
		g_free (buffer);
		return image;
	}

	width = config.input.width;
	height = config.input.height;

	if (original_width != NULL)
		*original_width = width;
	if (original_height != NULL)
		*original_height = height;

#if SCALING_WORKS
	if (requested_size > 0)
		scale_keeping_ratio (&width, &height, requested_size, requested_size, FALSE);
#endif

	surface = cairo_image_surface_create (CAIRO_FORMAT_ARGB32, width, height);
	metadata = _cairo_image_surface_get_metadata (surface);
	_cairo_metadata_set_has_alpha (metadata, config.input.has_alpha);

	config.options.no_fancy_upsampling = 1;
#if SCALING_WORKS
	if (requested_size > 0) {
		config.options.use_scaling = 1;
		config.options.scaled_width = width;
		config.options.scaled_height = height;
	}
#endif

	/* Match libwebp's output byte order to cairo's native ARGB32 layout. */
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
	config.output.colorspace = MODE_BGRA;
#elif G_BYTE_ORDER == G_BIG_ENDIAN
	config.output.colorspace = MODE_ARGB;
#endif
	/* Decode straight into the surface pixels as external memory; libwebp
	   will not allocate or free this buffer. */
	config.output.u.RGBA.rgba = (uint8_t *) _cairo_image_surface_flush_and_get_data (surface);
	config.output.u.RGBA.stride = cairo_image_surface_get_stride (surface);
	config.output.u.RGBA.size = cairo_image_surface_get_stride (surface) * height;
	config.output.is_external_memory = 1;

	idec = WebPINewDecoder (&config.output);
	if (idec == NULL) {
		g_free (buffer);
		return image;
	}

	/* Feed chunks until EOF or a fatal status; SUSPENDED just means the
	   decoder wants more data. */
	do {
		VP8StatusCode status = WebPIAppend (idec, buffer, bytes_read);
		if ((status != VP8_STATUS_OK) && (status != VP8_STATUS_SUSPENDED))
			break;
	}
	while ((bytes_read = g_input_stream_read (istream, buffer, BUFFER_SIZE, cancellable, error)) > 0);

	cairo_surface_mark_dirty (surface);
	if (cairo_surface_status (surface) == CAIRO_STATUS_SUCCESS)
		gth_image_set_cairo_surface (image, surface);

	WebPIDelete (idec);
	WebPFreeDecBuffer (&config.output);
	g_free (buffer);

	return image;
}
// Incrementally decodes one frame from the WebP bitstream into the frame
// buffer cache. Returns true only when the frame reaches FrameComplete;
// returns false both for "need more data" (suspended) and after a fatal
// failure (in which case failed() becomes true via setFailed()).
bool WEBPImageDecoder::decodeSingleFrame(const uint8_t* dataBytes, size_t dataSize, size_t frameIndex) {
  if (failed())
    return false;

  ASSERT(isDecodedSizeAvailable());

  ASSERT(m_frameBufferCache.size() > frameIndex);
  ImageFrame& buffer = m_frameBufferCache[frameIndex];
  ASSERT(buffer.getStatus() != ImageFrame::FrameComplete);

  // First call for this frame: allocate pixel storage and mark it partial.
  if (buffer.getStatus() == ImageFrame::FrameEmpty) {
    if (!buffer.setSizeAndColorSpace(size().width(), size().height(), colorSpace()))
      return setFailed();
    buffer.setStatus(ImageFrame::FramePartial);
    // The buffer is transparent outside the decoded area while the image is
    // loading. The correct alpha value for the frame will be set when it is
    // fully decoded.
    buffer.setHasAlpha(true);
    buffer.setOriginalFrameRect(IntRect(IntPoint(), size()));
  }

  const IntRect& frameRect = buffer.originalFrameRect();
  // Lazily create the libwebp incremental decoder. The output buffer is
  // configured as external memory so libwebp writes straight into our
  // ImageFrame pixels; its settings are captured here, at creation time.
  if (!m_decoder) {
    WEBP_CSP_MODE mode = outputMode(m_formatFlags & ALPHA_FLAG);
    if (!m_premultiplyAlpha)
      mode = outputMode(false);
    if (colorTransform()) {
      // Swizzling between RGBA and BGRA is zero cost in a color transform.
      // So when we have a color transform, we should decode to whatever is
      // easiest for libwebp, and then let the color transform swizzle if
      // necessary.
      // Lossy webp is encoded as YUV (so RGBA and BGRA are the same cost).
      // Lossless webp is encoded as BGRA. This means decoding to BGRA is
      // either faster or the same cost as RGBA.
      mode = MODE_BGRA;
    }
    WebPInitDecBuffer(&m_decoderBuffer);
    m_decoderBuffer.colorspace = mode;
    m_decoderBuffer.u.RGBA.stride = size().width() * sizeof(ImageFrame::PixelData);
    m_decoderBuffer.u.RGBA.size = m_decoderBuffer.u.RGBA.stride * frameRect.height();
    m_decoderBuffer.is_external_memory = 1;
    m_decoder = WebPINewDecoder(&m_decoderBuffer);
    if (!m_decoder)
      return setFailed();
  }

  // Re-point the output at the frame's pixels on every call: the frame
  // buffer cache may have moved/reallocated the backing store.
  m_decoderBuffer.u.RGBA.rgba = reinterpret_cast<uint8_t*>(buffer.getAddr(frameRect.x(), frameRect.y()));

  switch (WebPIUpdate(m_decoder, dataBytes, dataSize)) {
  case VP8_STATUS_OK:
    // Frame fully decoded: post-process, finalize alpha, and release the
    // per-frame decoder state.
    applyPostProcessing(frameIndex);
    buffer.setHasAlpha((m_formatFlags & ALPHA_FLAG) || m_frameBackgroundHasAlpha);
    buffer.setStatus(ImageFrame::FrameComplete);
    clearDecoder();
    return true;
  case VP8_STATUS_SUSPENDED:
    // More data is expected. Only keep waiting if the stream is still
    // arriving; a suspended decode with all data present is an error.
    if (!isAllDataReceived() && !frameIsCompleteAtIndex(frameIndex)) {
      applyPostProcessing(frameIndex);
      return false;
    }
    // FALLTHROUGH
  default:
    clear();
    return setFailed();
  }
}