static void
cleanup (GeglOperation *operation)
{
  GeglProperties *o = GEGL_PROPERTIES (operation);
  Priv *p = (Priv*) o->user_data;

  if (p != NULL)
    {
      if (p->decoder != NULL)
        WebPIDelete (p->decoder);
      p->decoder = NULL;

      if (p->config != NULL)
        WebPFreeDecBuffer (&p->config->output);

      if (p->config != NULL)
        g_free (p->config);
      p->config = NULL;

      if (p->stream != NULL)
        g_input_stream_close (G_INPUT_STREAM (p->stream), NULL, NULL);
      if (p->stream != NULL)
        g_clear_object (&p->stream);

      if (p->file != NULL)
        g_clear_object (&p->file);

      p->width = p->height = 0;
      p->format = NULL;
    }
}
static void
TWebPCleanup(TIFF* tif)
{
  WebPState* sp = LState(tif);

  assert(sp != 0);

  tif->tif_tagmethods.vgetfield = sp->vgetparent;
  tif->tif_tagmethods.vsetfield = sp->vsetparent;

  if (sp->state & LSTATE_INIT_ENCODE) {
    WebPPictureFree(&sp->sPicture);
  }

  if (sp->psDecoder != NULL) {
    WebPIDelete(sp->psDecoder);
    WebPFreeDecBuffer(&sp->sDecBuffer);
    sp->psDecoder = NULL;
    sp->last_y = 0;
  }

  if (sp->pBuffer != NULL) {
    _TIFFfree(sp->pBuffer);
    sp->pBuffer = NULL;
  }

  _TIFFfree(tif->tif_data);
  tif->tif_data = NULL;

  _TIFFSetDefaultCompressionState(tif);
}
void WEBPImageDecoder::clearDecoder()
{
    WebPIDelete(m_decoder);
    m_decoder = 0;
    m_decodedHeight = 0;
    m_frameBackgroundHasAlpha = false;
}
static gboolean
process (GeglOperation       *operation,
         GeglBuffer          *output,
         const GeglRectangle *result,
         gint                 level)
{
  GeglProperties *o = GEGL_PROPERTIES (operation);
  Priv *p = (Priv*) o->user_data;

  if (p->config != NULL)
    {
      if (p->decoder != NULL)
        {
          if (decode_from_stream (p->stream, p->decoder) < 0)
            {
              g_warning ("failed decoding WebP image file");
              cleanup (operation);
              return FALSE;
            }

          g_input_stream_close (G_INPUT_STREAM (p->stream), NULL, NULL);
          g_clear_object (&p->stream);

          WebPIDelete (p->decoder);
          p->decoder = NULL;
        }

      gegl_buffer_set (output, result, 0, p->format,
                       p->config->output.u.RGBA.rgba,
                       p->config->output.u.RGBA.stride);
    }

  return FALSE;
}
/*
 * Setup state for decoding a strip.
 */
static int
TWebPPreDecode(TIFF* tif, uint16 s)
{
  static const char module[] = "TWebPPreDecode";
  uint32 segment_width, segment_height;
  WebPState* sp = DecoderState(tif);
  TIFFDirectory* td = &tif->tif_dir;
  (void) s;

  assert(sp != NULL);

  if (isTiled(tif)) {
    segment_width = td->td_tilewidth;
    segment_height = td->td_tilelength;
  } else {
    segment_width = td->td_imagewidth;
    segment_height = td->td_imagelength - tif->tif_row;
    if (segment_height > td->td_rowsperstrip)
      segment_height = td->td_rowsperstrip;
  }

  if( (sp->state & LSTATE_INIT_DECODE) == 0 )
    tif->tif_setupdecode(tif);

  if (sp->psDecoder != NULL) {
    WebPIDelete(sp->psDecoder);
    WebPFreeDecBuffer(&sp->sDecBuffer);
    sp->psDecoder = NULL;
  }

  sp->last_y = 0;

  WebPInitDecBuffer(&sp->sDecBuffer);

  sp->sDecBuffer.is_external_memory = 0;
  sp->sDecBuffer.width = segment_width;
  sp->sDecBuffer.height = segment_height;
  sp->sDecBuffer.u.RGBA.stride = segment_width * sp->nSamples;
  sp->sDecBuffer.u.RGBA.size = segment_width * sp->nSamples * segment_height;

  if (sp->nSamples > 3) {
    sp->sDecBuffer.colorspace = MODE_RGBA;
  } else {
    sp->sDecBuffer.colorspace = MODE_RGB;
  }

  sp->psDecoder = WebPINewDecoder(&sp->sDecBuffer);

  if (sp->psDecoder == NULL) {
    TIFFErrorExt(tif->tif_clientdata, module,
                 "Unable to allocate WebP decoder.");
    return 0;
  }

  return 1;
}
void WEBPImageDecoder::clear()
{
#ifdef QCMS_WEBP_COLOR_CORRECTION
    if (m_transform)
        qcms_transform_release(m_transform);
    m_transform = 0;
#endif
    if (m_decoder)
        WebPIDelete(m_decoder);
    m_decoder = 0;
}
static gboolean
gdk_pixbuf__webp_image_stop_load (gpointer context, GError **error)
{
  WebPContext *data = (WebPContext *) context;
  g_return_val_if_fail(data != NULL, TRUE);

  if (data->pixbuf) {
    g_object_unref (data->pixbuf);
  }
  if (data->idec) {
    WebPIDelete (data->idec);
  }
  if (data->decbuf) {
    g_free (data->decbuf);
  }
  return TRUE;
}
void
nsWEBPDecoder::FinishInternal()
{
  // Flush the Decoder and let it free the output image buffer.
  WebPIDelete(mDecoder);
  WebPFreeDecBuffer(&mDecBuf);

  // We should never make multiple frames
  MOZ_ASSERT(GetFrameCount() <= 1, "Multiple WebP frames?");

  // Send notifications if appropriate
  if (!IsSizeDecode() && (GetFrameCount() == 1)) {
    PostFrameStop();
    PostDecodeDone();
  }
}
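/*
 * A minimal sketch of the teardown pattern the call sites above share:
 * WebPIDelete() on the incremental decoder first, then WebPFreeDecBuffer()
 * on the WebPDecBuffer it wrote into.  The DecState struct and
 * dec_state_reset() below are illustrative placeholders only, not part of
 * any of the projects quoted here.
 */
#include <stddef.h>
#include "webp/decode.h"

typedef struct {
  WebPIDecoder *idec;    /* from WebPINewDecoder()/WebPINewRGB()/WebPIDecode() */
  WebPDecBuffer output;  /* buffer the decoder writes into */
} DecState;

static void
dec_state_reset (DecState *s)
{
  if (s == NULL)
    return;
  /* Delete the incremental decoder first; it may still reference the
   * output buffer internally.  WebPIDelete() accepts NULL. */
  WebPIDelete (s->idec);
  s->idec = NULL;
  /* Release decoder-owned pixel memory.  When is_external_memory is set,
   * the caller's pixels are untouched; only private memory is freed. */
  WebPFreeDecBuffer (&s->output);
}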
VP8StatusCode DecodeWebPIncremental(
    const uint8_t* const data, size_t data_size,
    int verbose, WebPDecoderConfig* const config) {
  Stopwatch stop_watch;
  VP8StatusCode status = VP8_STATUS_OK;
  if (config == NULL) return VP8_STATUS_INVALID_PARAM;

  PrintAnimationWarning(config);

  StopwatchReset(&stop_watch);

  // Decoding call.
  {
    WebPIDecoder* const idec = WebPIDecode(data, data_size, config);
    if (idec == NULL) {
      fprintf(stderr, "Failed during WebPINewDecoder().\n");
      return VP8_STATUS_OUT_OF_MEMORY;
    } else {
#ifdef WEBP_EXPERIMENTAL_FEATURES
      size_t size = 0;
      const size_t incr = 2 + (data_size / 20);
      while (size < data_size) {
        size_t next_size = size + (rand() % incr);
        if (next_size > data_size) next_size = data_size;
        status = WebPIUpdate(idec, data, next_size);
        if (status != VP8_STATUS_OK && status != VP8_STATUS_SUSPENDED) break;
        size = next_size;
      }
#else
      status = WebPIUpdate(idec, data, data_size);
#endif
      WebPIDelete(idec);
    }
  }

  if (verbose) {
    const double decode_time = StopwatchReadAndReset(&stop_watch);
    fprintf(stderr, "Time to decode picture: %.3fs\n", decode_time);
  }
  return status;
}
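/*
 * Hedged usage sketch for DecodeWebPIncremental() above: a caller would
 * typically pair it with WebPInitDecoderConfig() and WebPFreeDecBuffer().
 * read_whole_file() is an assumed helper that loads the .webp bytes into a
 * malloc'd buffer; it is not part of libwebp or the example tool.
 */
#include <stdlib.h>
#include "webp/decode.h"

int read_whole_file(const char* path, uint8_t** data, size_t* data_size);

static int decode_one(const char* path) {
  WebPDecoderConfig config;
  uint8_t* data = NULL;
  size_t data_size = 0;
  VP8StatusCode status;

  if (!WebPInitDecoderConfig(&config)) return 0;   /* version mismatch */
  if (!read_whole_file(path, &data, &data_size)) return 0;

  config.output.colorspace = MODE_RGBA;            /* request interleaved RGBA */
  status = DecodeWebPIncremental(data, data_size, /*verbose=*/1, &config);
  if (status == VP8_STATUS_OK) {
    /* config.output.u.RGBA.rgba / .stride describe the decoded pixels. */
  }
  WebPFreeDecBuffer(&config.output);               /* decoder-owned memory */
  free(data);
  return status == VP8_STATUS_OK;
}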
static DFBResult
WebP_decode_image( IDirectFBImageProvider_WebP_data *data,
                   CoreSurfaceBufferLock            *lock )
{
     VP8StatusCode status = VP8_STATUS_NOT_ENOUGH_DATA;
     DFBResult     ret;
     uint32_t      read_size;
     u8            image[data->image_size];

     WebPIDecoder *WebP_dec;

     IDirectFBDataBuffer *buffer = data->base.buffer;

     WebP_dec = WebPINewDecoder( &data->config.output );

     data->config.output.colorspace = (data->pixelformat == DSPF_ARGB) ? MODE_bgrA : MODE_BGR;
     data->config.output.u.RGBA.rgba = (uint8_t*)lock->addr;
     data->config.output.u.RGBA.stride = lock->pitch;
     data->config.output.u.RGBA.size = lock->pitch * data->height;
     data->config.output.is_external_memory = 1;

     ret = DFB_OK;
     while (ret != DFB_EOF && buffer->HasData( buffer ) == DFB_OK) {
          ret = buffer->GetData( buffer, data->image_size, image, &read_size );

          status = WebPIAppend( WebP_dec, image, read_size );
          if (!(status == VP8_STATUS_OK || status == VP8_STATUS_SUSPENDED))
               break;
     }

     WebPIDelete( WebP_dec );

     return (status == VP8_STATUS_OK) ? DFB_OK : DFB_FAILURE;
}
bool WEBPImageDecoder::decode(bool onlySize)
{
#if PLATFORM(CHROMIUM)
    TRACE_EVENT("WEBPImageDecoder::decode", this, 0);
#endif
    if (failed())
        return false;

    const uint8_t* dataBytes = reinterpret_cast<const uint8_t*>(m_data->data());
    const size_t dataSize = m_data->size();

    if (!ImageDecoder::isSizeAvailable()) {
        static const size_t imageHeaderSize = 30;
        if (dataSize < imageHeaderSize)
            return false;
        int width, height;
        if (!WebPGetInfo(dataBytes, dataSize, &width, &height))
            return setFailed();
        if (!setSize(width, height))
            return setFailed();
    }

    ASSERT(ImageDecoder::isSizeAvailable());
    if (onlySize)
        return true;

    ASSERT(!m_frameBufferCache.isEmpty());
    ImageFrame& buffer = m_frameBufferCache[0];
    ASSERT(buffer.status() != ImageFrame::FrameComplete);

    if (buffer.status() == ImageFrame::FrameEmpty) {
        if (!buffer.setSize(size().width(), size().height()))
            return setFailed();
        buffer.setStatus(ImageFrame::FramePartial);
        buffer.setHasAlpha(false); // FIXME: webp does not support alpha yet.
        buffer.setOriginalFrameRect(IntRect(IntPoint(), size()));
    }

    if (!m_decoder) {
        int rowStride = size().width() * sizeof(ImageFrame::PixelData);
        uint8_t* output = reinterpret_cast<uint8_t*>(buffer.getAddr(0, 0));
        int outputSize = size().height() * rowStride;
        m_decoder = WebPINewRGB(outputMode(), output, outputSize, rowStride);
        if (!m_decoder)
            return setFailed();
    }

    switch (WebPIUpdate(m_decoder, dataBytes, dataSize)) {
    case VP8_STATUS_OK:
        buffer.setStatus(ImageFrame::FrameComplete);
        WebPIDelete(m_decoder);
        m_decoder = 0;
        return true;
    case VP8_STATUS_SUSPENDED:
        return false;
    default:
        WebPIDelete(m_decoder);
        m_decoder = 0;
        return setFailed();
    }
}
static int
TWebPSetupEncode(TIFF* tif)
{
  static const char module[] = "WebPSetupEncode";
  uint16 nBitsPerSample = tif->tif_dir.td_bitspersample;
  uint16 sampleFormat = tif->tif_dir.td_sampleformat;

  WebPState* sp = EncoderState(tif);
  assert(sp != NULL);

  sp->nSamples = tif->tif_dir.td_samplesperpixel;

  /* check band count */
  if ( sp->nSamples != 3
#if WEBP_ENCODER_ABI_VERSION >= 0x0100
    && sp->nSamples != 4
#endif
  )
  {
    TIFFErrorExt(tif->tif_clientdata, module,
                 "WEBP driver doesn't support %d bands. Must be 3 (RGB) "
#if WEBP_ENCODER_ABI_VERSION >= 0x0100
                 "or 4 (RGBA) "
#endif
                 "bands.",
                 sp->nSamples );
    return 0;
  }

  /* check bits per sample and data type */
  if ((nBitsPerSample != 8) && (sampleFormat != 1)) {
    TIFFErrorExt(tif->tif_clientdata, module,
                 "WEBP driver requires 8 bit unsigned data");
    return 0;
  }

  if (sp->state & LSTATE_INIT_DECODE) {
    WebPIDelete(sp->psDecoder);
    WebPFreeDecBuffer(&sp->sDecBuffer);
    sp->psDecoder = NULL;
    sp->last_y = 0;
    sp->state = 0;
  }

  sp->state |= LSTATE_INIT_ENCODE;

  if (!WebPPictureInit(&sp->sPicture)) {
    TIFFErrorExt(tif->tif_clientdata, module,
                 "Error initializing WebP picture.");
    return 0;
  }

  if (!WebPConfigInitInternal(&sp->sEncoderConfig, WEBP_PRESET_DEFAULT,
                              sp->quality_level,
                              WEBP_ENCODER_ABI_VERSION)) {
    TIFFErrorExt(tif->tif_clientdata, module,
                 "Error creating WebP encoder configuration.");
    return 0;
  }

  // WebPConfigInitInternal above sets lossless to false
#if WEBP_ENCODER_ABI_VERSION >= 0x0100
  sp->sEncoderConfig.lossless = sp->lossless;
  if (sp->lossless) {
    sp->sPicture.use_argb = 1;
  }
#endif

  if (!WebPValidateConfig(&sp->sEncoderConfig)) {
    TIFFErrorExt(tif->tif_clientdata, module,
                 "Error with WebP encoder configuration.");
    return 0;
  }

  return 1;
}
bool WEBPImageDecoder::decode(bool onlySize)
{
    if (failed())
        return false;

#if defined(__LB_SHELL__)
    // We don't want progressive decoding.
    if (!isAllDataReceived())
        return false;
#endif

    const uint8_t* dataBytes = reinterpret_cast<const uint8_t*>(m_data->data());
    const size_t dataSize = m_data->size();

    if (!ImageDecoder::isSizeAvailable()) {
        static const size_t imageHeaderSize = 30;
        if (dataSize < imageHeaderSize)
            return false;
        int width, height;
#if (WEBP_DECODER_ABI_VERSION >= 0x0163)
        WebPBitstreamFeatures features;
        if (WebPGetFeatures(dataBytes, dataSize, &features) != VP8_STATUS_OK)
            return setFailed();
        width = features.width;
        height = features.height;
        m_hasAlpha = features.has_alpha;
#else
        // Earlier version won't be able to display WebP files with alpha.
        if (!WebPGetInfo(dataBytes, dataSize, &width, &height))
            return setFailed();
        m_hasAlpha = false;
#endif
        if (!setSize(width, height))
            return setFailed();
    }

    ASSERT(ImageDecoder::isSizeAvailable());
    if (onlySize)
        return true;

    ASSERT(!m_frameBufferCache.isEmpty());
    ImageFrame& buffer = m_frameBufferCache[0];
    ASSERT(buffer.status() != ImageFrame::FrameComplete);

    if (buffer.status() == ImageFrame::FrameEmpty) {
        if (!buffer.setSize(size().width(), size().height()))
            return setFailed();
        buffer.setStatus(ImageFrame::FramePartial);
        buffer.setHasAlpha(m_hasAlpha);
        buffer.setOriginalFrameRect(IntRect(IntPoint(), size()));
    }

    if (!m_decoder) {
        int rowStride = size().width() * sizeof(ImageFrame::PixelData);
        uint8_t* output = reinterpret_cast<uint8_t*>(buffer.getAddr(0, 0));
        int outputSize = size().height() * rowStride;
        m_decoder = WebPINewRGB(outputMode(m_hasAlpha), output, outputSize, rowStride);
        if (!m_decoder)
            return setFailed();
    }

    switch (WebPIUpdate(m_decoder, dataBytes, dataSize)) {
    case VP8_STATUS_OK:
        buffer.setStatus(ImageFrame::FrameComplete);
        WebPIDelete(m_decoder);
        m_decoder = 0;
        return true;
    case VP8_STATUS_SUSPENDED:
        return false;
    default:
        WebPIDelete(m_decoder);
        m_decoder = 0;
        return setFailed();
    }
}
WEBPImageDecoder::~WEBPImageDecoder()
{
    if (m_decoder)
        WebPIDelete(m_decoder);
    m_decoder = 0;
}
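/*
 * Stripped of the WebKit plumbing, the decode() methods above follow
 * libwebp's external-buffer pattern: create the decoder once with
 * WebPINewRGB() pointing at caller-owned pixels, then pass the full byte
 * stream seen so far to WebPIUpdate() until it stops returning
 * VP8_STATUS_SUSPENDED.  Minimal hedged sketch; the decode_step() wrapper
 * and buffer layout are illustrative, not WebKit's.
 */
#include <stddef.h>
#include "webp/decode.h"

/* Returns 1 when the frame is complete, 0 while suspended, -1 on error.
 * '*idec' persists across calls, like m_decoder in the classes above. */
static int decode_step(WebPIDecoder** idec, uint8_t* pixels,
                       int width, int height,
                       const uint8_t* data, size_t data_size) {
  const int stride = width * 4;                     /* RGBA: 4 bytes/pixel */
  if (*idec == NULL) {
    *idec = WebPINewRGB(MODE_RGBA, pixels, (size_t)stride * height, stride);
    if (*idec == NULL)
      return -1;
  }
  /* WebPIUpdate() expects all data from the start, not just the new bytes. */
  switch (WebPIUpdate(*idec, data, data_size)) {
  case VP8_STATUS_OK:                               /* frame complete */
    WebPIDelete(*idec);
    *idec = NULL;
    return 1;
  case VP8_STATUS_SUSPENDED:                        /* need more bytes */
    return 0;
  default:                                          /* decode error */
    WebPIDelete(*idec);
    *idec = NULL;
    return -1;
  }
}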
static pixman_image_t *
load_webp(FILE *fp)
{
	WebPDecoderConfig config;
	uint8_t buffer[16 * 1024];
	int len;
	VP8StatusCode status;
	WebPIDecoder *idec;

	if (!WebPInitDecoderConfig(&config)) {
		fprintf(stderr, "Library version mismatch!\n");
		return NULL;
	}

	/* webp decoding api doesn't seem to specify a min size that's
	   usable for GetFeatures, but 256 works... */
	len = fread(buffer, 1, 256, fp);
	status = WebPGetFeatures(buffer, len, &config.input);
	if (status != VP8_STATUS_OK) {
		fprintf(stderr, "failed to parse webp header\n");
		WebPFreeDecBuffer(&config.output);
		return NULL;
	}

	config.output.colorspace = MODE_BGRA;
	config.output.u.RGBA.stride = stride_for_width(config.input.width);
	config.output.u.RGBA.size =
		config.output.u.RGBA.stride * config.input.height;
	config.output.u.RGBA.rgba =
		malloc(config.output.u.RGBA.stride * config.input.height);
	config.output.is_external_memory = 1;
	if (!config.output.u.RGBA.rgba) {
		WebPFreeDecBuffer(&config.output);
		return NULL;
	}

	rewind(fp);
	idec = WebPINewDecoder(&config.output);
	if (!idec) {
		WebPFreeDecBuffer(&config.output);
		return NULL;
	}

	while (!feof(fp)) {
		len = fread(buffer, 1, sizeof buffer, fp);
		status = WebPIAppend(idec, buffer, len);
		if (status != VP8_STATUS_OK) {
			fprintf(stderr, "webp decode status %d\n", status);
			WebPIDelete(idec);
			WebPFreeDecBuffer(&config.output);
			return NULL;
		}
	}

	WebPIDelete(idec);
	WebPFreeDecBuffer(&config.output);

	return pixman_image_create_bits(PIXMAN_a8r8g8b8,
					config.input.width,
					config.input.height,
					(uint32_t *) config.output.u.RGBA.rgba,
					config.output.u.RGBA.stride);
}
GthImage *
_cairo_image_surface_create_from_webp (GInputStream  *istream,
				       GthFileData   *file_data,
				       int            requested_size,
				       int           *original_width,
				       int           *original_height,
				       gboolean      *loaded_original,
				       gpointer       user_data,
				       GCancellable  *cancellable,
				       GError       **error)
{
	GthImage                 *image;
	WebPDecoderConfig         config;
	guchar                   *buffer;
	gssize                    bytes_read;
	int                       width, height;
	cairo_surface_t          *surface;
	cairo_surface_metadata_t *metadata;
	WebPIDecoder             *idec;

	image = gth_image_new ();

	if (! WebPInitDecoderConfig (&config))
		return image;

	buffer = g_new (guchar, BUFFER_SIZE);
	bytes_read = g_input_stream_read (istream, buffer, BUFFER_SIZE, cancellable, error);

	if (WebPGetFeatures (buffer, bytes_read, &config.input) != VP8_STATUS_OK) {
		g_free (buffer);
		return image;
	}

	width = config.input.width;
	height = config.input.height;

	if (original_width != NULL)
		*original_width = width;
	if (original_height != NULL)
		*original_height = height;

#if SCALING_WORKS
	if (requested_size > 0)
		scale_keeping_ratio (&width, &height, requested_size, requested_size, FALSE);
#endif

	surface = cairo_image_surface_create (CAIRO_FORMAT_ARGB32, width, height);
	metadata = _cairo_image_surface_get_metadata (surface);
	_cairo_metadata_set_has_alpha (metadata, config.input.has_alpha);

	config.options.no_fancy_upsampling = 1;

#if SCALING_WORKS
	if (requested_size > 0) {
		config.options.use_scaling = 1;
		config.options.scaled_width = width;
		config.options.scaled_height = height;
	}
#endif

#if G_BYTE_ORDER == G_LITTLE_ENDIAN
	config.output.colorspace = MODE_BGRA;
#elif G_BYTE_ORDER == G_BIG_ENDIAN
	config.output.colorspace = MODE_ARGB;
#endif
	config.output.u.RGBA.rgba = (uint8_t *) _cairo_image_surface_flush_and_get_data (surface);
	config.output.u.RGBA.stride = cairo_image_surface_get_stride (surface);
	config.output.u.RGBA.size = cairo_image_surface_get_stride (surface) * height;
	config.output.is_external_memory = 1;

	idec = WebPINewDecoder (&config.output);
	if (idec == NULL) {
		g_free (buffer);
		return image;
	}

	do {
		VP8StatusCode status = WebPIAppend (idec, buffer, bytes_read);
		if ((status != VP8_STATUS_OK) && (status != VP8_STATUS_SUSPENDED))
			break;
	}
	while ((bytes_read = g_input_stream_read (istream, buffer, BUFFER_SIZE, cancellable, error)) > 0);

	cairo_surface_mark_dirty (surface);
	if (cairo_surface_status (surface) == CAIRO_STATUS_SUCCESS)
		gth_image_set_cairo_surface (image, surface);

	WebPIDelete (idec);
	WebPFreeDecBuffer (&config.output);
	g_free (buffer);

	return image;
}
static int
WEBPDecode(WEBPDec* pSrc, Vbitmap *vbitmap, YmagineFormatOptions *options)
{
  int contentSize;
  int origWidth = 0;
  int origHeight = 0;
  int quality;
  unsigned char header[WEBP_HEADER_SIZE + 32];
  int headerlen;
  int toRead;
  unsigned char *input = NULL;
  int inputlen;
  int oformat;
  int opitch;
  unsigned char *odata;
  int rc;
  Vrect srcrect;
  Vrect destrect;
  WebPIDecoder* idec;

  if (options == NULL) {
    /* Options argument is mandatory */
    return 0;
  }

  headerlen = YchannelRead(pSrc->channel, (char *) header, sizeof(header));
  if (headerlen < WEBP_HEADER_SIZE) {
    return 0;
  }

  /* Check WEBP header */
  contentSize = WebpCheckHeader((const char*) header, headerlen);
  if (contentSize <= 0) {
    return 0;
  }

  if (WebPGetInfo(header, headerlen, &origWidth, &origHeight) == 0) {
    ALOGD("invalid VP8 header");
    return 0;
  }

  if (origWidth <= 0 || origHeight <= 0) {
    return 0;
  }

  if (YmagineFormatOptions_invokeCallback(options, YMAGINE_IMAGEFORMAT_WEBP,
                                          origWidth, origHeight) != YMAGINE_OK) {
    return 0;
  }

  if (YmaginePrepareTransform(vbitmap, options,
                              origWidth, origHeight,
                              &srcrect, &destrect) != YMAGINE_OK) {
    return 0;
  }

#if YMAGINE_DEBUG_WEBP
  ALOGD("size: %dx%d req: %dx%d %s -> output: %dx%d",
        origWidth, origHeight,
        destrect.width, destrect.height,
        (options->scalemode == YMAGINE_SCALE_CROP) ? "crop" :
        (options->scalemode == YMAGINE_SCALE_FIT ? "fit" : "letterbox"),
        destrect.width, destrect.height);
#endif

  if (vbitmap != NULL) {
    if (options->resizable) {
      destrect.x = 0;
      destrect.y = 0;
      if (VbitmapResize(vbitmap, destrect.width, destrect.height) != YMAGINE_OK) {
        return 0;
      }
    }
    if (VbitmapType(vbitmap) == VBITMAP_NONE) {
      /* Decode bounds only, return positive number (number of lines) on success */
      return VbitmapHeight(vbitmap);
    }
  }

  pSrc->bitmap = vbitmap;

  inputlen = contentSize;
  toRead = inputlen - headerlen;

  rc = VbitmapLock(vbitmap);
  if (rc != YMAGINE_OK) {
    ALOGE("VbitmapLock() failed (code %d)", rc);
    rc = YMAGINE_ERROR;
  } else {
    odata = VbitmapBuffer(vbitmap);
    opitch = VbitmapPitch(vbitmap);
    oformat = VbitmapColormode(vbitmap);

    pSrc->inwidth = origWidth;
    pSrc->inheight = origHeight;
    pSrc->outwidth = destrect.width;
    pSrc->outheight = destrect.height;

    if (odata == NULL) {
      ALOGD("failed to get reference to pixel buffer");
      rc = YMAGINE_ERROR;
    } else {
      WebPDecoderConfig config;
      int supported = 1;
      int webpcolorspace;

      switch(oformat) {
      case VBITMAP_COLOR_RGBA:
        webpcolorspace = MODE_RGBA;
        break;
      case VBITMAP_COLOR_RGB:
        webpcolorspace = MODE_RGB;
        break;
      case VBITMAP_COLOR_rgbA:
        webpcolorspace = MODE_rgbA;
        break;
      case VBITMAP_COLOR_ARGB:
        webpcolorspace = MODE_ARGB;
        break;
      case VBITMAP_COLOR_Argb:
        webpcolorspace = MODE_Argb;
        break;
      case VBITMAP_COLOR_GRAYSCALE:
      case VBITMAP_COLOR_YUV:
      case VBITMAP_COLOR_CMYK:
      case VBITMAP_COLOR_YCbCr:
      default:
        supported = 0;
        break;
      }

      if (!supported) {
        ALOGD("currently only support RGB, RGBA webp decoding");
        rc = YMAGINE_ERROR;
      } else {
        pSrc->isdirect = 1;
        pSrc->outformat = oformat;
        pSrc->outbpp = VbitmapBpp(vbitmap);
        pSrc->outstride = opitch;
        pSrc->outbuffer = odata + destrect.x * pSrc->outbpp + destrect.y * pSrc->outstride;

        WebPInitDecoderConfig(&config);

        quality = YmagineFormatOptions_normalizeQuality(options);
        if (quality < 90) {
          config.options.no_fancy_upsampling = 1;
        }
        if (quality < 60) {
          config.options.bypass_filtering = 1;
        }
        config.options.use_threads = 1;

        if (srcrect.x != 0 || srcrect.y != 0 ||
            srcrect.width != origWidth || srcrect.height != origHeight) {
          /* Crop on source */
          config.options.use_cropping = 1;
          config.options.crop_left = srcrect.x;
          config.options.crop_top = srcrect.y;
          config.options.crop_width = srcrect.width;
          config.options.crop_height = srcrect.height;
        }
        if (pSrc->outwidth != pSrc->inwidth || pSrc->outheight != pSrc->inheight) {
          config.options.use_scaling = 1;
          config.options.scaled_width = pSrc->outwidth;
          config.options.scaled_height = pSrc->outheight;
        }

        rc = YMAGINE_ERROR;

        // Specify the desired output colorspace:
        config.output.colorspace = webpcolorspace;
        // Have config.output point to an external buffer:
        config.output.u.RGBA.rgba = (uint8_t*) pSrc->outbuffer;
        config.output.u.RGBA.stride = pSrc->outstride;
        config.output.u.RGBA.size = pSrc->outstride * pSrc->outheight;
        config.output.is_external_memory = 1;

        idec = WebPIDecode(NULL, 0, &config);
        if (idec != NULL) {
          VP8StatusCode status;

          status = WebPIAppend(idec, header, headerlen);
          if (status == VP8_STATUS_OK || status == VP8_STATUS_SUSPENDED) {
            int bytes_remaining = toRead;
            int bytes_read;
            int bytes_req;
            unsigned char rbuf[8192];

            // See WebPIUpdate(idec, buffer, size_of_transmitted_buffer);
            bytes_req = sizeof(rbuf);
            while (bytes_remaining > 0) {
              if (bytes_req > bytes_remaining) {
                bytes_req = bytes_remaining;
              }
              bytes_read = YchannelRead(pSrc->channel, rbuf, bytes_req);
              if (bytes_read <= 0) {
                break;
              }
              status = WebPIAppend(idec, (uint8_t*) rbuf, bytes_read);
              if (status == VP8_STATUS_OK) {
                rc = YMAGINE_OK;
                break;
              } else if (status == VP8_STATUS_SUSPENDED) {
                if (bytes_remaining > 0) {
                  bytes_remaining -= bytes_read;
                }
              } else {
                /* error */
                break;
              }
              // The above call decodes the current available buffer.
              // Part of the image can now be refreshed by calling
              // WebPIDecGetRGB()/WebPIDecGetYUVA() etc.
            }
          }
        }

        // the object doesn't own the image memory, so it can now be deleted.
        WebPIDelete(idec);
        WebPFreeDecBuffer(&config.output);
      }
    }

    VbitmapUnlock(vbitmap);
  }

  if (input) {
    Ymem_free((char*) input);
  }
  if (!pSrc->isdirect) {
    Ymem_free(pSrc->outbuffer);
  }

  if (rc == YMAGINE_OK) {
    return origHeight;
  }

  return 0;
}
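/*
 * The snippet above only mentions reading partial output in a comment
 * (WebPIDecGetRGB()/WebPIDecGetYUVA()).  Minimal hedged sketch of that
 * pattern using a decoder-owned output buffer instead of external memory;
 * read_chunk() is an assumed input source, not a real API.
 */
#include <stddef.h>
#include "webp/decode.h"

size_t read_chunk(uint8_t* buf, size_t len);  /* placeholder: returns 0 at end of stream */

static int stream_decode(void) {
  /* NULL output buffer: the decoder allocates and owns an RGB buffer. */
  WebPIDecoder* idec = WebPINewDecoder(NULL);
  uint8_t chunk[4096];
  size_t len;
  int ok = 0;

  if (idec == NULL)
    return 0;

  while ((len = read_chunk(chunk, sizeof(chunk))) > 0) {
    const VP8StatusCode status = WebPIAppend(idec, chunk, len);
    if (status == VP8_STATUS_OK) {
      ok = 1;                       /* picture fully decoded */
      break;
    } else if (status == VP8_STATUS_SUSPENDED) {
      /* Rows decoded so far can be consumed incrementally. */
      int last_y = 0, width = 0, height = 0, stride = 0;
      uint8_t* rows = WebPIDecGetRGB(idec, &last_y, &width, &height, &stride);
      (void)rows;                   /* e.g. blit rows [0, last_y) */
    } else {
      break;                        /* hard decode error */
    }
  }

  /* Deleting the decoder also releases its internally-owned buffer. */
  WebPIDelete(idec);
  return ok;
}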