/*
 * Decode a strip/tile of WebP-compressed data into the caller's buffer.
 *
 * tif - TIFF handle; raw compressed bytes are taken from tif->tif_rawcp.
 * op  - destination pixel buffer, filled with whole scanlines only.
 * occ - number of bytes requested; must be a multiple of the row stride.
 * s   - sample plane (unused; WebP is always interleaved).
 *
 * Returns 1 on success, 0 on failure (error reported via TIFFErrorExt).
 */
static int
TWebPDecode(TIFF* tif, uint8* op, tmsize_t occ, uint16 s)
{
  static const char module[] = "WebPDecode";
  VP8StatusCode status = VP8_STATUS_OK;
  WebPState *sp = DecoderState(tif);
  (void) s;

  assert(sp != NULL);
  assert(sp->state == LSTATE_INIT_DECODE);

  /* Only whole scanlines can be handed back to the caller. */
  if (occ % sp->sDecBuffer.u.RGBA.stride)
  {
    TIFFErrorExt(tif->tif_clientdata, module,
                 "Fractional scanlines cannot be read");
    return 0;
  }

  /* Feed the pending raw bytes to the incremental WebP decoder.
     VP8_STATUS_SUSPENDED simply means more input is needed later. */
  status = WebPIAppend(sp->psDecoder, tif->tif_rawcp, tif->tif_rawcc);

  if (status != VP8_STATUS_OK && status != VP8_STATUS_SUSPENDED) {
    if (status == VP8_STATUS_INVALID_PARAM) {
      TIFFErrorExt(tif->tif_clientdata, module,
                   "Invalid parameter used.");
    } else if (status == VP8_STATUS_OUT_OF_MEMORY) {
      TIFFErrorExt(tif->tif_clientdata, module,
                   "Out of memory.");
    } else {
      TIFFErrorExt(tif->tif_clientdata, module,
                   "Unrecognized error.");
    }
    return 0;
  } else {
    int current_y, stride;
    uint8_t* buf;

    /* Returns the RGB/A image decoded so far.
       FIX: the address-of operator had been garbled into "¤t_y"
       (mojibake of "&current_y"); restored. */
    buf = WebPIDecGetRGB(sp->psDecoder, &current_y, NULL, NULL, &stride);

    /* FIX: compute the available-bytes product in tmsize_t to avoid
       int overflow for very tall/wide images. */
    if ((buf != NULL) &&
        (occ <= (tmsize_t)stride * (current_y - sp->last_y))) {
      /* Copy the rows decoded since the previous call. */
      memcpy(op, buf + (sp->last_y * stride), occ);

      /* The raw input was fully consumed by WebPIAppend above. */
      tif->tif_rawcp += tif->tif_rawcc;
      tif->tif_rawcc = 0;
      sp->last_y += occ / sp->sDecBuffer.u.RGBA.stride;
      return 1;
    } else {
      TIFFErrorExt(tif->tif_clientdata, module, "Unable to decode WebP data.");
      return 0;
    }
  }
}
/*
 * Stream bytes from `stream` into the incremental WebP `decoder`.
 *
 * Reads IO_BUFFER_SIZE chunks and appends them until the decoder reports
 * VP8_STATUS_OK (image complete) or the stream is exhausted.
 *
 * Returns the total number of bytes consumed, or (gsize) -1 on error.
 * NOTE(review): the -1 sentinel in an unsigned return type is kept for
 * caller compatibility; callers must compare against (gsize) -1.
 */
static gsize
decode_from_stream (GInputStream *stream, WebPIDecoder *decoder)
{
  GError *error = NULL;
  const gsize size = IO_BUFFER_SIZE;
  guchar *buffer;
  gsize read, total = 0;
  VP8StatusCode status;
  gboolean success;

  buffer = g_try_new (guchar, size);
  g_assert (buffer != NULL);

  do
    {
      success = g_input_stream_read_all (G_INPUT_STREAM (stream),
                                         (void *) buffer, size,
                                         &read, NULL, &error);
      if (!success || error != NULL)
        {
          /* FIX: never pass non-literal text as a printf-style format
             string, and guard against a NULL GError. */
          if (error != NULL)
            {
              g_warning ("%s", error->message);
              g_error_free (error);
            }
          g_free (buffer);   /* FIX: buffer was leaked on this path */
          return -1;
        }
      else if (read > 0)
        {
          total += read;
          status = WebPIAppend (decoder, buffer, read);
          if (status != VP8_STATUS_OK && status != VP8_STATUS_SUSPENDED)
            {
              g_free (buffer);   /* FIX: buffer was leaked on this path */
              return -1;
            }
          else if (status == VP8_STATUS_OK)
            break;   /* image fully decoded */
        }
    }
  while (success && read > 0);

  g_free (buffer);   /* FIX: buffer was leaked on the success path too */
  return total;
}
/*
 * Decode the provider's WebP stream directly into a locked surface buffer.
 *
 * data - provider state (holds the data buffer, image size and pixel format).
 * lock - locked destination surface; pixels are written in place.
 *
 * Returns DFB_OK when the image decoded completely, DFB_FAILURE otherwise.
 */
static DFBResult
WebP_decode_image( IDirectFBImageProvider_WebP_data *data,
                   CoreSurfaceBufferLock            *lock )
{
     VP8StatusCode status = VP8_STATUS_NOT_ENOUGH_DATA;
     DFBResult ret;
     /* FIX: initialise so a failing GetData() cannot leave it undefined. */
     uint32_t read_size = 0;

     /* NOTE(review): VLA sized by data->image_size lives on the stack;
        a very large image could overflow it — confirm upstream limits. */
     u8 image[data->image_size];

     WebPIDecoder* WebP_dec;
     IDirectFBDataBuffer *buffer = data->base.buffer;

     /* FIX: configure the external output buffer BEFORE handing it to
        WebPINewDecoder(); the decoder snapshots/uses this description. */
     data->config.output.colorspace = (data->pixelformat == DSPF_ARGB) ? MODE_bgrA : MODE_BGR;
     data->config.output.u.RGBA.rgba = (uint8_t*)lock->addr;
     data->config.output.u.RGBA.stride = lock->pitch;
     data->config.output.u.RGBA.size = lock->pitch * data->height;
     data->config.output.is_external_memory = 1;

     WebP_dec = WebPINewDecoder( &data->config.output );
     if (!WebP_dec)                     /* FIX: creation can fail */
          return DFB_FAILURE;

     ret = DFB_OK;
     while (ret != DFB_EOF && buffer->HasData( buffer ) == DFB_OK) {
          ret = buffer->GetData( buffer, data->image_size, image, &read_size );

          /* SUSPENDED means "feed me more"; anything else is fatal. */
          status = WebPIAppend( WebP_dec, image, read_size );
          if (!(status == VP8_STATUS_OK || status == VP8_STATUS_SUSPENDED))
               break;
     }

     WebPIDelete( WebP_dec );

     return (status == VP8_STATUS_OK) ? DFB_OK : DFB_FAILURE;
}
/*
 * GEGL prepare hook: (re)open the input stream, read the WebP header and
 * set up the incremental decoder, then publish the output format.
 *
 * Private state lives in o->user_data (Priv); if the uri/path changed since
 * the last call, cleanup() resets that state so decoding restarts.
 */
static void
prepare (GeglOperation *operation)
{
  GeglProperties *o = GEGL_PROPERTIES (operation);
  Priv *p = (o->user_data) ? o->user_data : g_new0 (Priv, 1);
  GError *error = NULL;
  GFile *file = NULL;
  guchar *buffer;
  gsize read;

  g_assert (p != NULL);

  /* If a file is already open but the source property changed, drop it. */
  if (p->file != NULL && (o->uri || o->path))
    {
      if (o->uri && strlen (o->uri) > 0)
        file = g_file_new_for_uri (o->uri);
      else if (o->path && strlen (o->path) > 0)
        file = g_file_new_for_path (o->path);
      if (file != NULL)
        {
          if (!g_file_equal (p->file, file))
            cleanup (operation);
          g_object_unref (file);
        }
    }

  o->user_data = (void*) p;

  if (p->config == NULL)
    {
      p->stream = gegl_gio_open_input_stream (o->uri, o->path, &p->file, &error);
      if (p->stream == NULL)
        {
          /* FIX: never pass non-literal text as a format string. */
          g_warning ("%s", error->message);
          g_error_free (error);
          cleanup (operation);
          return;
        }

      p->config = g_try_new (WebPDecoderConfig, 1);
      g_assert (p->config != NULL);

      /* FIX: initialise the config BEFORE creating the decoder from its
         output buffer; previously WebPINewDecoder() received an
         uninitialised WebPDecBuffer. */
      if (!WebPInitDecoderConfig (p->config))
        {
          g_warning ("could not initialise WebP decoder configuration");
          cleanup (operation);
          return;
        }

      p->decoder = WebPINewDecoder (&p->config->output);

      /* Read enough of the file to parse the WebP feature header. */
      read = read_from_stream (p->stream, &buffer, IO_BUFFER_SIZE);
      if (WebPGetFeatures (buffer, read, &p->config->input) != VP8_STATUS_OK)
        {
          g_warning ("failed reading WebP image file");
          cleanup (operation);
          g_free (buffer);
          return;
        }

      if (!query_webp (operation))
        {
          g_warning ("could not query WebP image file");
          cleanup (operation);
          g_free (buffer);
          return;
        }

      /* Feed the header bytes we already consumed into the decoder. */
      WebPIAppend (p->decoder, buffer, read);

      g_free (buffer);
    }

  gegl_operation_set_format (operation, "output", p->format);
}
static pixman_image_t * load_webp(FILE *fp) { WebPDecoderConfig config; uint8_t buffer[16 * 1024]; int len; VP8StatusCode status; WebPIDecoder *idec; if (!WebPInitDecoderConfig(&config)) { fprintf(stderr, "Library version mismatch!\n"); return NULL; } /* webp decoding api doesn't seem to specify a min size that's usable for GetFeatures, but 256 works... */ len = fread(buffer, 1, 256, fp); status = WebPGetFeatures(buffer, len, &config.input); if (status != VP8_STATUS_OK) { fprintf(stderr, "failed to parse webp header\n"); WebPFreeDecBuffer(&config.output); return NULL; } config.output.colorspace = MODE_BGRA; config.output.u.RGBA.stride = stride_for_width(config.input.width); config.output.u.RGBA.size = config.output.u.RGBA.stride * config.input.height; config.output.u.RGBA.rgba = malloc(config.output.u.RGBA.stride * config.input.height); config.output.is_external_memory = 1; if (!config.output.u.RGBA.rgba) { WebPFreeDecBuffer(&config.output); return NULL; } rewind(fp); idec = WebPINewDecoder(&config.output); if (!idec) { WebPFreeDecBuffer(&config.output); return NULL; } while (!feof(fp)) { len = fread(buffer, 1, sizeof buffer, fp); status = WebPIAppend(idec, buffer, len); if (status != VP8_STATUS_OK) { fprintf(stderr, "webp decode status %d\n", status); WebPIDelete(idec); WebPFreeDecBuffer(&config.output); return NULL; } } WebPIDelete(idec); WebPFreeDecBuffer(&config.output); return pixman_image_create_bits(PIXMAN_a8r8g8b8, config.input.width, config.input.height, (uint32_t *) config.output.u.RGBA.rgba, config.output.u.RGBA.stride); }
GthImage * _cairo_image_surface_create_from_webp (GInputStream *istream, GthFileData *file_data, int requested_size, int *original_width, int *original_height, gboolean *loaded_original, gpointer user_data, GCancellable *cancellable, GError **error) { GthImage *image; WebPDecoderConfig config; guchar *buffer; gssize bytes_read; int width, height; cairo_surface_t *surface; cairo_surface_metadata_t *metadata; WebPIDecoder *idec; image = gth_image_new (); if (! WebPInitDecoderConfig (&config)) return image; buffer = g_new (guchar, BUFFER_SIZE); bytes_read = g_input_stream_read (istream, buffer, BUFFER_SIZE, cancellable, error); if (WebPGetFeatures (buffer, bytes_read, &config.input) != VP8_STATUS_OK) { g_free (buffer); return image; } width = config.input.width; height = config.input.height; if (original_width != NULL) *original_width = width; if (original_height != NULL) *original_height = height; #if SCALING_WORKS if (requested_size > 0) scale_keeping_ratio (&width, &height, requested_size, requested_size, FALSE); #endif surface = cairo_image_surface_create (CAIRO_FORMAT_ARGB32, width, height); metadata = _cairo_image_surface_get_metadata (surface); _cairo_metadata_set_has_alpha (metadata, config.input.has_alpha); config.options.no_fancy_upsampling = 1; #if SCALING_WORKS if (requested_size > 0) { config.options.use_scaling = 1; config.options.scaled_width = width; config.options.scaled_height = height; } #endif #if G_BYTE_ORDER == G_LITTLE_ENDIAN config.output.colorspace = MODE_BGRA; #elif G_BYTE_ORDER == G_BIG_ENDIAN config.output.colorspace = MODE_ARGB; #endif config.output.u.RGBA.rgba = (uint8_t *) _cairo_image_surface_flush_and_get_data (surface); config.output.u.RGBA.stride = cairo_image_surface_get_stride (surface); config.output.u.RGBA.size = cairo_image_surface_get_stride (surface) * height; config.output.is_external_memory = 1; idec = WebPINewDecoder (&config.output); if (idec == NULL) { g_free (buffer); return image; } do { VP8StatusCode status = 
WebPIAppend (idec, buffer, bytes_read); if ((status != VP8_STATUS_OK) && (status != VP8_STATUS_SUSPENDED)) break; } while ((bytes_read = g_input_stream_read (istream, buffer, BUFFER_SIZE, cancellable, error)) > 0); cairo_surface_mark_dirty (surface); if (cairo_surface_status (surface) == CAIRO_STATUS_SUCCESS) gth_image_set_cairo_surface (image, surface); WebPIDelete (idec); WebPFreeDecBuffer (&config.output); g_free (buffer); return image; }
/*
 * Decode a WebP stream from pSrc->channel into vbitmap.
 *
 * Reads the header, validates it, computes source/destination rectangles
 * (crop + scale) from options, then incrementally feeds the remaining
 * bytes to libwebp, which writes directly into the locked Vbitmap pixels
 * (external-memory mode, no intermediate copy).
 *
 * Returns the number of decoded lines (origHeight, or VbitmapHeight for a
 * bounds-only decode) on success, 0 on failure.
 */
static int WEBPDecode(WEBPDec* pSrc, Vbitmap *vbitmap, YmagineFormatOptions *options)
{
  int contentSize;
  int origWidth = 0;
  int origHeight = 0;
  int quality;
  unsigned char header[WEBP_HEADER_SIZE + 32];
  int headerlen;
  int toRead;
  /* NOTE(review): `input` is never assigned after this point; the
     `if (input)` free at the end looks like dead code — confirm. */
  unsigned char *input = NULL;
  int inputlen;
  int oformat;
  int opitch;
  unsigned char *odata;
  int rc;
  Vrect srcrect;
  Vrect destrect;
  WebPIDecoder* idec;

  if (options == NULL) {
    /* Options argument is mandatory */
    return 0;
  }

  /* Pull in enough bytes to parse the container + VP8 headers. */
  headerlen = YchannelRead(pSrc->channel, (char *) header, sizeof(header));
  if (headerlen < WEBP_HEADER_SIZE) {
    return 0;
  }

  /* Check WEBP header */
  contentSize = WebpCheckHeader((const char*) header, headerlen);
  if (contentSize <= 0) {
    return 0;
  }

  if (WebPGetInfo(header, headerlen, &origWidth, &origHeight) == 0) {
    ALOGD("invalid VP8 header");
    return 0;
  }

  if (origWidth <= 0 || origHeight <= 0) {
    return 0;
  }

  /* Give the caller's callback a chance to veto this decode. */
  if (YmagineFormatOptions_invokeCallback(options, YMAGINE_IMAGEFORMAT_WEBP,
                                          origWidth, origHeight) != YMAGINE_OK) {
    return 0;
  }

  /* Compute crop (srcrect) and output (destrect) geometry. */
  if (YmaginePrepareTransform(vbitmap, options, origWidth, origHeight,
                              &srcrect, &destrect) != YMAGINE_OK) {
    return 0;
  }

#if YMAGINE_DEBUG_WEBP
  ALOGD("size: %dx%d req: %dx%d %s -> output: %dx%d",
        origWidth, origHeight,
        destrect.width, destrect.height,
        (options->scalemode == YMAGINE_SCALE_CROP) ? "crop" :
        (options->scalemode == YMAGINE_SCALE_FIT ? "fit" : "letterbox"),
        destrect.width, destrect.height);
#endif

  if (vbitmap != NULL) {
    if (options->resizable) {
      destrect.x = 0;
      destrect.y = 0;
      if (VbitmapResize(vbitmap, destrect.width, destrect.height) != YMAGINE_OK) {
        return 0;
      }
    }
    if (VbitmapType(vbitmap) == VBITMAP_NONE) {
      /* Decode bounds only, return positive number (number of lines) on success */
      return VbitmapHeight(vbitmap);
    }
  }

  pSrc->bitmap = vbitmap;

  inputlen = contentSize;
  /* Bytes of the payload still unread (the header was consumed above). */
  toRead = inputlen - headerlen;

  /* NOTE(review): vbitmap may be NULL here (the bounds-only shortcut only
     triggers for VBITMAP_NONE) — confirm VbitmapLock(NULL) is safe. */
  rc = VbitmapLock(vbitmap);
  if (rc != YMAGINE_OK) {
    ALOGE("VbitmapLock() failed (code %d)", rc);
    rc = YMAGINE_ERROR;
  } else {
    odata = VbitmapBuffer(vbitmap);
    opitch = VbitmapPitch(vbitmap);
    oformat = VbitmapColormode(vbitmap);

    pSrc->inwidth = origWidth;
    pSrc->inheight = origHeight;
    pSrc->outwidth = destrect.width;
    pSrc->outheight = destrect.height;

    if (odata == NULL) {
      ALOGD("failed to get reference to pixel buffer");
      rc = YMAGINE_ERROR;
    } else {
      WebPDecoderConfig config;
      int supported = 1;
      int webpcolorspace;

      /* Map the Vbitmap color mode onto a libwebp output colorspace. */
      switch(oformat) {
      case VBITMAP_COLOR_RGBA:
        webpcolorspace = MODE_RGBA;
        break;
      case VBITMAP_COLOR_RGB:
        webpcolorspace = MODE_RGB;
        break;
      case VBITMAP_COLOR_rgbA:
        webpcolorspace = MODE_rgbA;
        break;
      case VBITMAP_COLOR_ARGB:
        webpcolorspace = MODE_ARGB;
        break;
      case VBITMAP_COLOR_Argb:
        webpcolorspace = MODE_Argb;
        break;
      case VBITMAP_COLOR_GRAYSCALE:
      case VBITMAP_COLOR_YUV:
      case VBITMAP_COLOR_CMYK:
      case VBITMAP_COLOR_YCbCr:
      default:
        supported = 0;
        break;
      }

      if (!supported) {
        ALOGD("currently only support RGB, RGBA webp decoding");
        rc = YMAGINE_ERROR;
      } else {
        /* Decode straight into the locked bitmap at destrect's origin. */
        pSrc->isdirect = 1;
        pSrc->outformat = oformat;
        pSrc->outbpp = VbitmapBpp(vbitmap);
        pSrc->outstride = opitch;
        pSrc->outbuffer = odata + destrect.x * pSrc->outbpp + destrect.y * pSrc->outstride;

        WebPInitDecoderConfig(&config);

        /* Lower quality requests trade filtering for speed. */
        quality = YmagineFormatOptions_normalizeQuality(options);
        if (quality < 90) {
          config.options.no_fancy_upsampling = 1;
        }
        if (quality < 60) {
          config.options.bypass_filtering = 1;
        }
        config.options.use_threads = 1;

        if (srcrect.x != 0 || srcrect.y != 0 ||
            srcrect.width != origWidth || srcrect.height != origHeight) {
          /* Crop on source */
          config.options.use_cropping = 1;
          config.options.crop_left = srcrect.x;
          config.options.crop_top = srcrect.y;
          config.options.crop_width = srcrect.width;
          config.options.crop_height = srcrect.height;
        }
        if (pSrc->outwidth != pSrc->inwidth ||
            pSrc->outheight != pSrc->inheight) {
          config.options.use_scaling = 1;
          config.options.scaled_width = pSrc->outwidth;
          config.options.scaled_height = pSrc->outheight;
        }

        /* Assume failure until the decoder reports completion. */
        rc = YMAGINE_ERROR;

        // Specify the desired output colorspace:
        config.output.colorspace = webpcolorspace;
        // Have config.output point to an external buffer:
        config.output.u.RGBA.rgba = (uint8_t*) pSrc->outbuffer;
        config.output.u.RGBA.stride = pSrc->outstride;
        config.output.u.RGBA.size = pSrc->outstride * pSrc->outheight;
        config.output.is_external_memory = 1;

        idec = WebPIDecode(NULL, 0, &config);
        if (idec != NULL) {
          VP8StatusCode status;

          /* Replay the header bytes already consumed from the channel. */
          status = WebPIAppend(idec, header, headerlen);
          if (status == VP8_STATUS_OK || status == VP8_STATUS_SUSPENDED) {
            int bytes_remaining = toRead;
            int bytes_read;
            int bytes_req;
            unsigned char rbuf[8192];
            // See WebPIUpdate(idec, buffer, size_of_transmitted_buffer);
            bytes_req = sizeof(rbuf);
            /* Pump the rest of the payload in 8 KiB chunks until the
               decoder reports OK (done), the channel runs dry, or a
               hard error occurs. */
            while (bytes_remaining > 0) {
              if (bytes_req > bytes_remaining) {
                bytes_req = bytes_remaining;
              }
              bytes_read = YchannelRead(pSrc->channel, rbuf, bytes_req);
              if (bytes_read <= 0) {
                break;
              }
              status = WebPIAppend(idec, (uint8_t*) rbuf, bytes_read);
              if (status == VP8_STATUS_OK) {
                rc = YMAGINE_OK;
                break;
              } else if (status == VP8_STATUS_SUSPENDED) {
                /* More input needed; account for what was consumed. */
                if (bytes_remaining > 0) {
                  bytes_remaining -= bytes_read;
                }
              } else {
                /* error */
                break;
              }
              // The above call decodes the current available buffer.
              // Part of the image can now be refreshed by calling
              // WebPIDecGetRGB()/WebPIDecGetYUVA() etc.
            }
          }
        }
        // the object doesn't own the image memory, so it can now be deleted.
        WebPIDelete(idec);
        WebPFreeDecBuffer(&config.output);
      }
    }
    VbitmapUnlock(vbitmap);
  }

  if (input) {
    Ymem_free((char*) input);
  }
  /* NOTE(review): when the colormode is unsupported, isdirect stays 0 but
     pSrc->outbuffer may be stale/uninitialised — confirm WEBPDec is
     zero-initialised by its creator before relying on this free. */
  if (!pSrc->isdirect) {
    Ymem_free(pSrc->outbuffer);
  }

  if (rc == YMAGINE_OK) {
    return origHeight;
  }
  return 0;
}
/*
 * Feed aCount bytes of the WebP stream to the incremental decoder, then
 * copy any newly decoded rows into the image frame (with R/B swap) and
 * post an invalidation for them. Called repeatedly as data arrives.
 */
void
nsWEBPDecoder::WriteInternal(const char *aBuffer, uint32_t aCount)
{
  MOZ_ASSERT(!HasError(), "Shouldn't call WriteInternal after error!");

  const uint8_t* buf = (const uint8_t*)aBuffer;
  VP8StatusCode rv = WebPIAppend(mDecoder, buf, aCount);
  /* Map libwebp status codes onto imagelib's error categories. */
  if (rv == VP8_STATUS_OUT_OF_MEMORY) {
    PostDecoderError(NS_ERROR_OUT_OF_MEMORY);
    return;
  } else if (rv == VP8_STATUS_INVALID_PARAM ||
             rv == VP8_STATUS_BITSTREAM_ERROR) {
    PostDataError();
    return;
  } else if (rv == VP8_STATUS_UNSUPPORTED_FEATURE ||
             rv == VP8_STATUS_USER_ABORT) {
    PostDecoderError(NS_ERROR_FAILURE);
    return;
  }

  // Catch any remaining erroneous return value.
  if (rv != VP8_STATUS_OK && rv != VP8_STATUS_SUSPENDED) {
    PostDecoderError(NS_ERROR_FAILURE);
    return;
  }

  int lastLineRead = -1;
  int height = 0;
  int width = 0;
  int stride = 0;

  /* Snapshot of everything decoded so far; lastLineRead is the first
     row NOT yet available. */
  mData = WebPIDecGetRGB(mDecoder, &lastLineRead, &width, &height, &stride);

  // The only valid format for WebP decoding for both alpha and non-alpha
  // images is BGRA, where Opaque images have an A of 255.
  // Assume transparency for all images.
  // XXX: This could be compositor-optimized by doing a one-time check for
  // all-255 alpha pixels, but that might interfere with progressive
  // decoding. Probably not worth it?
  PostHasTransparency();

  if (lastLineRead == -1 || !mData)
    return;

  if (width <= 0 || height <= 0) {
    PostDataError();
    return;
  }

  if (!HasSize())
    PostSize(width, height);

  if (IsSizeDecode())
    return;

  if (!mImageData) {
    PostDecoderError(NS_ERROR_FAILURE);
    return;
  }

  // Transfer from mData to mImageData
  /* NOTE(review): the offsets below index mData with 4*(line*width+pix),
     i.e. they assume stride == 4*width; stride from WebPIDecGetRGB is not
     used — confirm libwebp guarantees a packed buffer here. */
  if (lastLineRead > mLastLine) {
    for (int line = mLastLine; line < lastLineRead; line++) {
      for (int pix = 0; pix < width; pix++) {
        // RGBA -> BGRA
        uint32_t DataOffset = 4 * (line * width + pix);
        mImageData[DataOffset+0] = mData[DataOffset+2];
        mImageData[DataOffset+1] = mData[DataOffset+1];
        mImageData[DataOffset+2] = mData[DataOffset+0];
        mImageData[DataOffset+3] = mData[DataOffset+3];
      }
    }

    // Invalidate
    /* NOTE(review): nsIntRect takes (x, y, width, height); passing
       lastLineRead as the height of a rect starting at y == mLastLine
       over-invalidates — looks like it should be
       lastLineRead - mLastLine. Confirm against imgFrame expectations. */
    nsIntRect r(0, mLastLine, width, lastLineRead);
    PostInvalidation(r);
  }

  mLastLine = lastLineRead;
  return;
}
/*
 * GdkPixbuf incremental-load callback: on the first call, parse the WebP
 * header, allocate the pixbuf and decode buffer, and create the decoder;
 * on every call, append the new bytes, then copy all rows decoded so far
 * into the pixbuf and notify the update callback.
 *
 * Returns TRUE to keep loading, FALSE (with *error set) on failure.
 */
static gboolean
gdk_pixbuf__webp_image_load_increment (gpointer context,
                                       const guchar *buf,
                                       guint size,
                                       GError **error)
{
    gint w, h, stride;
    WebPContext *data = (WebPContext *) context;
    g_return_val_if_fail(data != NULL, FALSE);

    if (!data->got_header) {
        gint rc;
        rc = WebPGetInfo (buf, size, &w, &h);
        if (rc == 0) {
            g_set_error (error,
                         GDK_PIXBUF_ERROR,
                         GDK_PIXBUF_ERROR_CORRUPT_IMAGE,
                         "Cannot read WebP image header.");
            return FALSE;
        }
        /* Tightly packed 3-byte RGB rows for libwebp's buffer. */
        stride = w * 3;  /* TODO Update when alpha support released */
        data->got_header = TRUE;
        if (data->size_func) {
            /* NOTE(review): size_func may rewrite w/h, but `stride` was
               computed from the original w — confirm scaling via
               size_func is actually supported here. */
            (* data->size_func) (&w, &h,
                                 data->user_data);
        }
        data->pixbuf = gdk_pixbuf_new (GDK_COLORSPACE_RGB,
                                       FALSE,
                                       8,
                                       w,
                                       h);
        data->decbuf = g_try_malloc (h * stride);
        if (!data->decbuf) {
            g_set_error (error,
                         GDK_PIXBUF_ERROR,
                         GDK_PIXBUF_ERROR_INSUFFICIENT_MEMORY,
                         "Cannot allocate memory for decoded image data.");
            return FALSE;
        }
        data->idec = WebPINewRGB (MODE_RGB,
                                  data->decbuf,
                                  h * stride,
                                  stride);
        if (!data->idec) {
            g_set_error (error,
                         GDK_PIXBUF_ERROR,
                         GDK_PIXBUF_ERROR_FAILED,
                         "Cannot create WebP decoder.");
            return FALSE;
        }
        if (data->prepare_func) {
            (* data->prepare_func) (data->pixbuf,
                                    NULL,
                                    data->user_data);
        }
    }

    /* Append size bytes to decoder's buffer */
    const VP8StatusCode status = WebPIAppend (data->idec, buf, size);
    if (status != VP8_STATUS_SUSPENDED && status != VP8_STATUS_OK) {
        g_set_error (error,
                     GDK_PIXBUF_ERROR,
                     GDK_PIXBUF_ERROR_CORRUPT_IMAGE,
                     "WebP decoder failed with status code %d.",
                     status);
        return FALSE;
    }

    /* Decode decoder's updated buffer */
    guint8 *dec_output;
    dec_output = WebPIDecGetRGB (data->idec, &data->last_y, &w, &h, &stride);
    if (dec_output == NULL && status != VP8_STATUS_SUSPENDED) {
        g_set_error (error,
                     GDK_PIXBUF_ERROR,
                     GDK_PIXBUF_ERROR_FAILED,
                     "Bad inputs to WebP decoder.");
        return FALSE;
    }

    /* Copy decoder output to pixbuf */
    gint y, row;
    guchar *dptr;
    dptr = gdk_pixbuf_get_pixels (data->pixbuf);
    /* The pixbuf rowstride is 4-byte aligned while the decode buffer is
       packed (3*w). The per-row padding is (4 - 3w mod 4) mod 4, which
       equals w mod 4, so advancing dptr by `offset` each row makes
       dptr + y*stride land on pixbuf row y. */
    const guint8 offset = w % 4;  /* decoded width will be divisible by 4 */
    for (y = 0; y < data->last_y; ++y, dptr += offset) {
        row = y * stride;
        /* Rows are re-copied from 0 every call; redundant but safe. */
        g_memmove (dptr + row,
                   dec_output + row,
                   stride);
    }
    if (data->update_func) {
        (* data->update_func) (data->pixbuf, 0, 0,
                               w,
                               data->last_y,
                               data->user_data);
    }
    return TRUE;
}
/*
 * Older variant of the incremental WebP write hook: append the new bytes,
 * lazily create the destination frame on the first decoded rows, then
 * pack newly decoded RGBA rows into the frame as ARGB pixels and post an
 * invalidation for the updated region.
 */
void
nsWEBPDecoder::WriteInternal(const char *aBuffer, uint32_t aCount, DecodeStrategy)
{
  NS_ABORT_IF_FALSE(!HasError(), "Shouldn't call WriteInternal after error!");

  const uint8_t* buf = (const uint8_t*)aBuffer;
  VP8StatusCode rv = WebPIAppend(mDecoder, buf, aCount);
  /* Map libwebp status codes onto imagelib's error categories. */
  if (rv == VP8_STATUS_OUT_OF_MEMORY) {
    PostDecoderError(NS_ERROR_OUT_OF_MEMORY);
    return;
  } else if (rv == VP8_STATUS_INVALID_PARAM ||
             rv == VP8_STATUS_BITSTREAM_ERROR) {
    PostDataError();
    return;
  } else if (rv == VP8_STATUS_UNSUPPORTED_FEATURE ||
             rv == VP8_STATUS_USER_ABORT) {
    PostDecoderError(NS_ERROR_FAILURE);
    return;
  }

  // Catch any remaining erroneous return value.
  if (rv != VP8_STATUS_OK && rv != VP8_STATUS_SUSPENDED) {
    PostDecoderError(NS_ERROR_FAILURE);
    return;
  }

  int lastLineRead = -1;
  int height = 0;
  int width = 0;
  int stride = 0;

  /* Snapshot of everything decoded so far; lastLineRead is the first
     row NOT yet available. */
  mData = WebPIDecGetRGB(mDecoder, &lastLineRead, &width, &height, &stride);

  if (lastLineRead == -1 || !mData)
    return;

  if (width <= 0 || height <= 0) {
    PostDataError();
    return;
  }

  if (!HasSize())
    PostSize(width, height);

  if (IsSizeDecode())
    return;

  uint32_t imagelength;
  // First incremental Image data chunk. Special handling required.
  if (mLastLine == 0 && lastLineRead > 0) {
    imgFrame* aFrame;
    /* Allocate the ARGB32 frame lazily, once real pixel rows exist. */
    nsresult res = mImage.EnsureFrame(0, 0, 0, width, height,
                                      gfxASurface::ImageFormatARGB32,
                                      (uint8_t**)&mImageData, &imagelength, &aFrame);
    if (NS_FAILED(res) || !mImageData) {
      PostDecoderError(NS_ERROR_FAILURE);
      return;
    }
  }

  if (!mImageData) {
    PostDecoderError(NS_ERROR_FAILURE);
    return;
  }

  if (lastLineRead > mLastLine) {
    for (int line = mLastLine; line < lastLineRead; line++) {
      /* NOTE(review): mImageData is addressed as bytes, so
         mImageData + (line * width) advances only `width` BYTES per
         32-bit-pixel row — this looks like a missing 4* factor
         (cf. cptr8, which correctly uses line * stride). Confirm
         mImageData's declared type before relying on this. */
      uint32_t *cptr32 = (uint32_t*)(mImageData + (line * width));
      uint8_t *cptr8 = mData + (line * stride);
      for (int pix = 0; pix < width; pix++, cptr8 += 4) {
        // if((cptr8[3] != 0) && (cptr8[0] != 0) && (cptr8[1] != 0) && (cptr8[2] != 0))
        /* Pack RGBA bytes as (A, R, G, B) into a native ARGB pixel. */
        *cptr32++ = gfxPackedPixel(cptr8[3], cptr8[0], cptr8[1], cptr8[2]);
      }
    }

    // Invalidate
    /* NOTE(review): nsIntRect takes (x, y, width, height); passing
       lastLineRead as the height of a rect starting at y == mLastLine
       over-invalidates — looks like it should be
       lastLineRead - mLastLine. Confirm against imgFrame expectations. */
    nsIntRect r(0, mLastLine, width, lastLineRead);
    PostInvalidation(r);
  }

  mLastLine = lastLineRead;
  return;
}