void webp_reader<T>::read(unsigned x0, unsigned y0, image_data_32& image)
{
    WebPDecoderConfig config;
    config_guard guard(config);
    if (!WebPInitDecoderConfig(&config))
    {
        throw image_reader_exception("WEBP reader: WebPInitDecoderConfig failed");
    }

    config.options.use_cropping = 1;
    config.options.crop_left = x0;
    config.options.crop_top = y0;
    config.options.crop_width = std::min(width_ - x0, image.width());
    config.options.crop_height = std::min(height_ - y0, image.height());

    if (WebPGetFeatures(buffer_->data(), buffer_->size(), &config.input) != VP8_STATUS_OK)
    {
        throw image_reader_exception("WEBP reader: WebPGetFeatures failed");
    }

    config.output.colorspace = MODE_RGBA;
    config.output.u.RGBA.rgba = (uint8_t *)image.getBytes();
    config.output.u.RGBA.stride = 4 * image.width();
    config.output.u.RGBA.size = image.width() * image.height() * 4;
    config.output.is_external_memory = 1;

    if (WebPDecode(buffer_->data(), buffer_->size(), &config) != VP8_STATUS_OK)
    {
        throw image_reader_exception("WEBP reader: WebPDecode failed");
    }
}
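/* A minimal, self-contained sketch of the advanced-decoding pattern used above
 * and throughout these examples (WebPInitDecoderConfig -> WebPGetFeatures ->
 * configure config.output -> WebPDecode). The helper name decode_rgba and its
 * error handling are illustrative only and are not taken from any of the
 * projects quoted here. */
#include <stdint.h>
#include <stdlib.h>
#include "webp/decode.h"

/* Decode a WebP byte buffer into a newly malloc'd RGBA buffer.
 * Returns 1 on success (caller frees *out_rgba), 0 on failure. */
static int decode_rgba(const uint8_t* data, size_t data_size,
                       uint8_t** out_rgba, int* out_width, int* out_height) {
  WebPDecoderConfig config;
  uint8_t* pixels;
  if (!WebPInitDecoderConfig(&config)) return 0;    /* library version mismatch */
  if (WebPGetFeatures(data, data_size, &config.input) != VP8_STATUS_OK) return 0;
  pixels = (uint8_t*)malloc((size_t)config.input.width * config.input.height * 4);
  if (pixels == NULL) return 0;
  config.output.colorspace = MODE_RGBA;             /* request 4-byte RGBA output */
  config.output.u.RGBA.rgba = pixels;
  config.output.u.RGBA.stride = config.input.width * 4;
  config.output.u.RGBA.size = (size_t)config.input.width * config.input.height * 4;
  config.output.is_external_memory = 1;             /* decode into our own buffer */
  if (WebPDecode(data, data_size, &config) != VP8_STATUS_OK) {
    free(pixels);
    return 0;
  }
  *out_rgba = pixels;
  *out_width = config.input.width;
  *out_height = config.input.height;
  return 1;
}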
int main(int argc, char *argv[]) {
  int c;
  WebPDecoderConfig* const config = &kParams.config;
  WebPIterator* const curr = &kParams.curr_frame;
  WebPIterator* const prev = &kParams.prev_frame;

  if (!WebPInitDecoderConfig(config)) {
    fprintf(stderr, "Library version mismatch!\n");
    return -1;
  }
  config->options.dithering_strength = 50;
  config->options.alpha_dithering_strength = 100;
  kParams.use_color_profile = 1;

  for (c = 1; c < argc; ++c) {
    int parse_error = 0;
    if (!strcmp(argv[c], "-h") || !strcmp(argv[c], "-help")) {
      Help();
      return 0;
    } else if (!strcmp(argv[c], "-noicc")) {
      kParams.use_color_profile = 0;
    } else if (!strcmp(argv[c], "-nofancy")) {
      config->options.no_fancy_upsampling = 1;
    } else if (!strcmp(argv[c], "-nofilter")) {
      config->options.bypass_filtering = 1;
    } else if (!strcmp(argv[c], "-noalphadither")) {
      config->options.alpha_dithering_strength = 0;
    } else if (!strcmp(argv[c], "-dither") && c + 1 < argc) {
      config->options.dithering_strength =
          ExUtilGetInt(argv[++c], 0, &parse_error);
    } else if (!strcmp(argv[c], "-info")) {
      kParams.print_info = 1;
    } else if (!strcmp(argv[c], "-version")) {
      const int dec_version = WebPGetDecoderVersion();
      const int dmux_version = WebPGetDemuxVersion();
      printf("WebP Decoder version: %d.%d.%d\nWebP Demux version: %d.%d.%d\n",
             (dec_version >> 16) & 0xff, (dec_version >> 8) & 0xff,
             dec_version & 0xff, (dmux_version >> 16) & 0xff,
             (dmux_version >> 8) & 0xff, dmux_version & 0xff);
      return 0;
    } else if (!strcmp(argv[c], "-mt")) {
      config->options.use_threads = 1;
    } else if (!strcmp(argv[c], "--")) {
      if (c < argc - 1) kParams.file_name = argv[++c];
      break;
    } else if (argv[c][0] == '-') {
      printf("Unknown option '%s'\n", argv[c]);
      Help();
      return -1;
    } else {
      kParams.file_name = argv[c];
    }
    if (parse_error) {
      Help();
      return -1;
    }
  }
PyObject* WebPDecode_wrapper(PyObject* self, PyObject* args)
{
    PyBytesObject *webp_string;
    uint8_t *webp;
    Py_ssize_t size;
    PyObject *ret, *bytes, *pymode;
    WebPDecoderConfig config;
    VP8StatusCode vp8_status_code = VP8_STATUS_OK;
    char* mode = "RGB";

    if (!PyArg_ParseTuple(args, "S", &webp_string)) {
        Py_RETURN_NONE;
    }

    if (!WebPInitDecoderConfig(&config)) {
        Py_RETURN_NONE;
    }

    PyBytes_AsStringAndSize((PyObject *) webp_string, (char**)&webp, &size);
    vp8_status_code = WebPGetFeatures(webp, size, &config.input);

    if (vp8_status_code == VP8_STATUS_OK) {
        // If we don't set it, we don't get alpha.
        // Initialized to MODE_RGB
        if (config.input.has_alpha) {
            config.output.colorspace = MODE_RGBA;
            mode = "RGBA";
        }
        vp8_status_code = WebPDecode(webp, size, &config);
    }

    if (vp8_status_code != VP8_STATUS_OK) {
        WebPFreeDecBuffer(&config.output);
        Py_RETURN_NONE;
    }

    if (config.output.colorspace < MODE_YUV) {
        bytes = PyBytes_FromStringAndSize((char *)config.output.u.RGBA.rgba,
                                          config.output.u.RGBA.size);
    } else {
        // Skipping YUV for now. Need Test Images.
        // UNDONE -- unclear if we'll ever get here if we set mode_rgb*
        bytes = PyBytes_FromStringAndSize((char *)config.output.u.YUVA.y,
                                          config.output.u.YUVA.y_size);
    }

#if PY_VERSION_HEX >= 0x03000000
    pymode = PyUnicode_FromString(mode);
#else
    pymode = PyString_FromString(mode);
#endif

    ret = Py_BuildValue("SiiS", bytes, config.output.width,
                        config.output.height, pymode);
    WebPFreeDecBuffer(&config.output);
    return ret;
}
static Read *
read_new( const char *filename, void *data, size_t length )
{
    Read *read;

    if( !(read = VIPS_NEW( NULL, Read )) )
        return( NULL );

    read->filename = g_strdup( filename );
    read->data = data;
    read->length = length;
    read->fd = 0;
    read->idec = NULL;

    if( read->filename ) {
        /* libwebp makes streaming from a file source very hard. We
         * have to read to a full memory buffer, then copy to out.
         *
         * mmap the input file, it's slightly quicker.
         */
        if( (read->fd = vips__open_image_read( read->filename )) < 0 ||
            (read->length = vips_file_length( read->fd )) < 0 ||
            !(read->data = vips__mmap( read->fd, FALSE, read->length, 0 )) ) {
            read_free( read );
            return( NULL );
        }
    }

    WebPInitDecoderConfig( &read->config );
    if( WebPGetFeatures( read->data, MINIMAL_HEADER,
        &read->config.input ) != VP8_STATUS_OK ) {
        read_free( read );
        return( NULL );
    }

    if( read->config.input.has_alpha )
        read->config.output.colorspace = MODE_RGBA;
    else
        read->config.output.colorspace = MODE_RGB;

    read->config.options.use_threads = TRUE;

    return( read );
}
NS_CC_BEGIN

bool CCImage::_initWithWebpData(void *pData, int nDataLen)
{
#if ENABLE_WEBP
    bool bRet = false;
    do
    {
        WebPDecoderConfig config;
        if (WebPInitDecoderConfig(&config) == 0) break;
        if (WebPGetFeatures((uint8_t*)pData, nDataLen, &config.input) != VP8_STATUS_OK) break;
        if (config.input.width == 0 || config.input.height == 0) break;

        config.output.colorspace = MODE_RGBA;
        m_nBitsPerComponent = 8;
        m_nWidth = config.input.width;
        m_nHeight = config.input.height;
        m_bHasAlpha = true;

        int bufferSize = m_nWidth * m_nHeight * 4;
        m_pData = new unsigned char[bufferSize];

        config.output.u.RGBA.rgba = (uint8_t*)m_pData;
        config.output.u.RGBA.stride = m_nWidth * 4;
        config.output.u.RGBA.size = bufferSize;
        config.output.is_external_memory = 1;

        if (WebPDecode((uint8_t*)pData, nDataLen, &config) != VP8_STATUS_OK)
        {
            delete [] m_pData;
            m_pData = NULL;
            break;
        }

        bRet = true;
    } while (0);
    return bRet;
#else
    return false;
#endif
}
int main(int argc, char *argv[]) {
  WebPDecoderConfig config;
  int c;

  if (!WebPInitDecoderConfig(&config)) {
    fprintf(stderr, "Library version mismatch!\n");
    return -1;
  }
  kParams.config = &config;
  kParams.use_color_profile = 1;

  for (c = 1; c < argc; ++c) {
    if (!strcmp(argv[c], "-h") || !strcmp(argv[c], "-help")) {
      Help();
      return 0;
    } else if (!strcmp(argv[c], "-noicc")) {
      kParams.use_color_profile = 0;
    } else if (!strcmp(argv[c], "-nofancy")) {
      config.options.no_fancy_upsampling = 1;
    } else if (!strcmp(argv[c], "-nofilter")) {
      config.options.bypass_filtering = 1;
    } else if (!strcmp(argv[c], "-info")) {
      kParams.print_info = 1;
    } else if (!strcmp(argv[c], "-version")) {
      const int dec_version = WebPGetDecoderVersion();
      const int dmux_version = WebPGetDemuxVersion();
      printf("WebP Decoder version: %d.%d.%d\nWebP Demux version: %d.%d.%d\n",
             (dec_version >> 16) & 0xff, (dec_version >> 8) & 0xff,
             dec_version & 0xff, (dmux_version >> 16) & 0xff,
             (dmux_version >> 8) & 0xff, dmux_version & 0xff);
      return 0;
    } else if (!strcmp(argv[c], "-mt")) {
      config.options.use_threads = 1;
    } else if (argv[c][0] == '-') {
      printf("Unknown option '%s'\n", argv[c]);
      Help();
      return -1;
    } else {
      kParams.file_name = argv[c];
    }
  }
static Read *
read_new( const char *filename, void *buf, size_t len )
{
    Read *read;
    unsigned char header[MINIMAL_HEADER];

    if( !(read = VIPS_NEW( NULL, Read )) )
        return( NULL );

    read->filename = g_strdup( filename );
    read->buf = buf;
    read->len = len;
    read->idec = NULL;

    WebPInitDecoderConfig( &read->config );

    if( filename ) {
        if( vips__get_bytes( filename, header, MINIMAL_HEADER ) &&
            WebPGetFeatures( header, MINIMAL_HEADER,
                &read->config.input ) != VP8_STATUS_OK ) {
            read_free( read );
            return( NULL );
        }
    }
    else {
        if( WebPGetFeatures( read->buf, read->len,
            &read->config.input ) != VP8_STATUS_OK ) {
            read_free( read );
            return( NULL );
        }
    }

    if( read->config.input.has_alpha )
        read->config.output.colorspace = MODE_RGBA;
    else
        read->config.output.colorspace = MODE_RGB;

    read->config.options.use_threads = TRUE;

    return( read );
}
NS_CC_BEGIN

bool Image::_initWithWebpData(void *pData, int nDataLen)
{
    bool bRet = false;
    do
    {
        WebPDecoderConfig config;
        if (WebPInitDecoderConfig(&config) == 0) break;
        if (WebPGetFeatures((uint8_t*)pData, nDataLen, &config.input) != VP8_STATUS_OK) break;
        if (config.input.width == 0 || config.input.height == 0) break;

        config.output.colorspace = MODE_RGBA;
        _bitsPerComponent = 8;
        _width = config.input.width;
        _height = config.input.height;
        _hasAlpha = true;

        int bufferSize = _width * _height * 4;
        _data = new unsigned char[bufferSize];

        config.output.u.RGBA.rgba = (uint8_t*)_data;
        config.output.u.RGBA.stride = _width * 4;
        config.output.u.RGBA.size = bufferSize;
        config.output.is_external_memory = 1;

        if (WebPDecode((uint8_t*)pData, nDataLen, &config) != VP8_STATUS_OK)
        {
            delete [] _data;
            _data = NULL;
            break;
        }

        bRet = true;
    } while (0);
    return bRet;
}
int webp_decode(const char *in_file, const char *out_file,
                const FfiWebpDecodeConfig *decode_config) {
  int return_value = -1;
  WebPDecoderConfig config;
  WebPDecBuffer* const output_buffer = &config.output;
  WebPBitstreamFeatures* const bitstream = &config.input;
  OutputFileFormat format = PNG;

  if (!WebPInitDecoderConfig(&config)) {
    //fprintf(stderr, "Library version mismatch!\n");
    return 1;
  }

  if (decode_config->output_format != format) {
    format = decode_config->output_format;
  }
  if (decode_config->no_fancy_upsampling > 0) {
    config.options.no_fancy_upsampling = 1;
  }
  if (decode_config->bypass_filtering > 0) {
    config.options.bypass_filtering = 1;
  }
  if (decode_config->use_threads > 0) {
    config.options.use_threads = 1;
  }
  if ((decode_config->crop_w | decode_config->crop_h) > 0) {
    config.options.use_cropping = 1;
    config.options.crop_left = decode_config->crop_x;
    config.options.crop_top = decode_config->crop_y;
    config.options.crop_width = decode_config->crop_w;
    config.options.crop_height = decode_config->crop_h;
  }
  if ((decode_config->resize_w | decode_config->resize_h) > 0) {
    config.options.use_scaling = 1;
    config.options.scaled_width = decode_config->resize_w;
    config.options.scaled_height = decode_config->resize_h;
  }

  VP8StatusCode status = VP8_STATUS_OK;
  size_t data_size = 0;
  const uint8_t* data = NULL;

  if (!UtilReadFile(in_file, &data, &data_size)) return -1;

  status = WebPGetFeatures(data, data_size, bitstream);
  if (status != VP8_STATUS_OK) {
    //fprintf(stderr, "This is invalid webp image!\n");
    return_value = 2;
    goto Error;
  }

  switch (format) {
    case PNG:
      output_buffer->colorspace = bitstream->has_alpha ? MODE_RGBA : MODE_RGB;
      break;
    case PAM:
      output_buffer->colorspace = MODE_RGBA;
      break;
    case PPM:
      output_buffer->colorspace = MODE_RGB;  // drops alpha for PPM
      break;
    case PGM:
      output_buffer->colorspace = bitstream->has_alpha ? MODE_YUVA : MODE_YUV;
      break;
    case ALPHA_PLANE_ONLY:
      output_buffer->colorspace = MODE_YUVA;
      break;
    default:
      free((void*)data);
      return 3;
  }

  status = WebPDecode(data, data_size, &config);
  if (status != VP8_STATUS_OK) {
    //fprintf(stderr, "Decoding of %s failed.\n", in_file);
    return_value = 4;
    goto Error;
  }

  UtilSaveOutput(output_buffer, format, out_file);
  return_value = 0;

 Error:
  free((void*)data);
  WebPFreeDecBuffer(output_buffer);
  return return_value;
}
int main(int argc, const char *argv[]) {
  const char *in_file = NULL;
  const char *out_file = NULL;

  WebPDecoderConfig config;
  WebPDecBuffer* const output_buffer = &config.output;
  WebPBitstreamFeatures* const bitstream = &config.input;
  OutputFileFormat format = PNG;
  int c;

  if (!WebPInitDecoderConfig(&config)) {
    fprintf(stderr, "Library version mismatch!\n");
    return -1;
  }

  for (c = 1; c < argc; ++c) {
    if (!strcmp(argv[c], "-h") || !strcmp(argv[c], "-help")) {
      Help();
      return 0;
    } else if (!strcmp(argv[c], "-o") && c < argc - 1) {
      out_file = argv[++c];
    } else if (!strcmp(argv[c], "-alpha")) {
      format = ALPHA_PLANE_ONLY;
    } else if (!strcmp(argv[c], "-nofancy")) {
      config.options.no_fancy_upsampling = 1;
    } else if (!strcmp(argv[c], "-nofilter")) {
      config.options.bypass_filtering = 1;
    } else if (!strcmp(argv[c], "-pam")) {
      format = PAM;
    } else if (!strcmp(argv[c], "-ppm")) {
      format = PPM;
    } else if (!strcmp(argv[c], "-version")) {
      const int version = WebPGetDecoderVersion();
      printf("%d.%d.%d\n",
             (version >> 16) & 0xff, (version >> 8) & 0xff, version & 0xff);
      return 0;
    } else if (!strcmp(argv[c], "-pgm")) {
      format = PGM;
    } else if (!strcmp(argv[c], "-yuv")) {
      format = YUV;
    } else if (!strcmp(argv[c], "-mt")) {
      config.options.use_threads = 1;
    } else if (!strcmp(argv[c], "-crop") && c < argc - 4) {
      config.options.use_cropping = 1;
      config.options.crop_left   = strtol(argv[++c], NULL, 0);
      config.options.crop_top    = strtol(argv[++c], NULL, 0);
      config.options.crop_width  = strtol(argv[++c], NULL, 0);
      config.options.crop_height = strtol(argv[++c], NULL, 0);
    } else if (!strcmp(argv[c], "-scale") && c < argc - 2) {
      config.options.use_scaling = 1;
      config.options.scaled_width  = strtol(argv[++c], NULL, 0);
      config.options.scaled_height = strtol(argv[++c], NULL, 0);
    } else if (!strcmp(argv[c], "-v")) {
      verbose = 1;
#ifndef WEBP_DLL
    } else if (!strcmp(argv[c], "-noasm")) {
      VP8GetCPUInfo = NULL;
#endif
    } else if (argv[c][0] == '-') {
      fprintf(stderr, "Unknown option '%s'\n", argv[c]);
      Help();
      return -1;
    } else {
      in_file = argv[c];
    }
  }
int ReadWebP(const uint8_t* const data, size_t data_size,
             WebPPicture* const pic,
             int keep_alpha, Metadata* const metadata) {
  int ok = 0;
  VP8StatusCode status = VP8_STATUS_OK;
  WebPDecoderConfig config;
  WebPDecBuffer* const output_buffer = &config.output;
  WebPBitstreamFeatures* const bitstream = &config.input;

  if (data == NULL || data_size == 0 || pic == NULL) return 0;

  // TODO(jzern): add Exif/XMP/ICC extraction.
  if (metadata != NULL) {
    fprintf(stderr, "Warning: metadata extraction from WebP is unsupported.\n");
  }

  if (!WebPInitDecoderConfig(&config)) {
    fprintf(stderr, "Library version mismatch!\n");
    return 0;
  }

  status = WebPGetFeatures(data, data_size, bitstream);
  if (status != VP8_STATUS_OK) {
    PrintWebPError("input data", status);
    return 0;
  }

  {
    const int has_alpha = keep_alpha && bitstream->has_alpha;
    if (pic->use_argb) {
      output_buffer->colorspace = has_alpha ? MODE_RGBA : MODE_RGB;
    } else {
      output_buffer->colorspace = has_alpha ? MODE_YUVA : MODE_YUV;
    }

    status = DecodeWebP(data, data_size, 0, &config);
    if (status == VP8_STATUS_OK) {
      pic->width = output_buffer->width;
      pic->height = output_buffer->height;
      if (pic->use_argb) {
        const uint8_t* const rgba = output_buffer->u.RGBA.rgba;
        const int stride = output_buffer->u.RGBA.stride;
        ok = has_alpha ? WebPPictureImportRGBA(pic, rgba, stride)
                       : WebPPictureImportRGB(pic, rgba, stride);
      } else {
        pic->colorspace = has_alpha ? WEBP_YUV420A : WEBP_YUV420;
        ok = WebPPictureAlloc(pic);
        if (!ok) {
          status = VP8_STATUS_OUT_OF_MEMORY;
        } else {
          const WebPYUVABuffer* const yuva = &output_buffer->u.YUVA;
          const int uv_width = (pic->width + 1) >> 1;
          const int uv_height = (pic->height + 1) >> 1;
          ImgIoUtilCopyPlane(yuva->y, yuva->y_stride,
                             pic->y, pic->y_stride, pic->width, pic->height);
          ImgIoUtilCopyPlane(yuva->u, yuva->u_stride,
                             pic->u, pic->uv_stride, uv_width, uv_height);
          ImgIoUtilCopyPlane(yuva->v, yuva->v_stride,
                             pic->v, pic->uv_stride, uv_width, uv_height);
          if (has_alpha) {
            ImgIoUtilCopyPlane(yuva->a, yuva->a_stride,
                               pic->a, pic->a_stride, pic->width, pic->height);
          }
        }
      }
    }
  }
static void
gth_metadata_provider_image_read (GthMetadataProvider *self,
                                  GthFileData         *file_data,
                                  const char          *attributes,
                                  GCancellable        *cancellable)
{
    gboolean          format_recognized;
    GFileInputStream *stream;
    char             *description = NULL;
    int               width;
    int               height;
    const char       *mime_type = NULL;

    format_recognized = FALSE;

    stream = g_file_read (file_data->file, cancellable, NULL);
    if (stream != NULL) {
        int     buffer_size;
        guchar *buffer;
        gssize  size;

        buffer_size = BUFFER_SIZE;
        buffer = g_new (guchar, buffer_size);
        size = g_input_stream_read (G_INPUT_STREAM (stream),
                                    buffer,
                                    buffer_size,
                                    cancellable,
                                    NULL);
        if (size >= 0) {
            if ((size >= 24)
                /* PNG signature */
                && (buffer[0] == 0x89)
                && (buffer[1] == 0x50)
                && (buffer[2] == 0x4E)
                && (buffer[3] == 0x47)
                && (buffer[4] == 0x0D)
                && (buffer[5] == 0x0A)
                && (buffer[6] == 0x1A)
                && (buffer[7] == 0x0A)
                /* IHDR Image header */
                && (buffer[12] == 0x49)
                && (buffer[13] == 0x48)
                && (buffer[14] == 0x44)
                && (buffer[15] == 0x52))
            {
                /* PNG */
                width = (buffer[16] << 24) + (buffer[17] << 16) + (buffer[18] << 8) + buffer[19];
                height = (buffer[20] << 24) + (buffer[21] << 16) + (buffer[22] << 8) + buffer[23];
                description = _("PNG");
                mime_type = "image/png";
                format_recognized = TRUE;
            }
#if HAVE_LIBJPEG
            else if ((size >= 4)
                     && (buffer[0] == 0xff)
                     && (buffer[1] == 0xd8)
                     && (buffer[2] == 0xff))
            {
                /* JPEG */
                GthTransform orientation;

                if (g_seekable_can_seek (G_SEEKABLE (stream))) {
                    g_seekable_seek (G_SEEKABLE (stream), 0, G_SEEK_SET, cancellable, NULL);
                }
                else {
                    g_object_unref (stream);
                    stream = g_file_read (file_data->file, cancellable, NULL);
                }

                if (_jpeg_get_image_info (G_INPUT_STREAM (stream),
                                          &width,
                                          &height,
                                          &orientation,
                                          cancellable,
                                          NULL))
                {
                    description = _("JPEG");
                    mime_type = "image/jpeg";
                    format_recognized = TRUE;

                    if ((orientation == GTH_TRANSFORM_ROTATE_90)
                        || (orientation == GTH_TRANSFORM_ROTATE_270)
                        || (orientation == GTH_TRANSFORM_TRANSPOSE)
                        || (orientation == GTH_TRANSFORM_TRANSVERSE))
                    {
                        int tmp = width;
                        width = height;
                        height = tmp;
                    }
                }
            }
#endif /* HAVE_LIBJPEG */
#if HAVE_LIBWEBP
            else if ((size > 15) && (memcmp (buffer + 8, "WEBPVP8", 7) == 0)) {
                WebPDecoderConfig config;

                if (WebPInitDecoderConfig (&config)) {
                    if (WebPGetFeatures (buffer, buffer_size, &config.input) == VP8_STATUS_OK) {
                        width = config.input.width;
                        height = config.input.height;
                        description = _("WebP");
                        mime_type = "image/webp";
                        format_recognized = TRUE;
                    }
                    WebPFreeDecBuffer (&config.output);
                }
            }
#endif /* HAVE_LIBWEBP */
            else if ((size >= 26) && (strncmp ((char *) buffer, "gimp xcf ", 9) == 0)) {
                /* XCF */
                GInputStream     *mem_stream;
                GDataInputStream *data_stream;

                mem_stream = g_memory_input_stream_new_from_data (buffer, BUFFER_SIZE, NULL);
                data_stream = g_data_input_stream_new (mem_stream);
                g_data_input_stream_set_byte_order (data_stream, G_DATA_STREAM_BYTE_ORDER_BIG_ENDIAN);

                if (g_seekable_seek (G_SEEKABLE (data_stream), 14, G_SEEK_SET, NULL, NULL)) {
                    int base_type;

                    width = g_data_input_stream_read_uint32 (data_stream, NULL, NULL);
                    height = g_data_input_stream_read_uint32 (data_stream, NULL, NULL);
                    base_type = g_data_input_stream_read_uint32 (data_stream, NULL, NULL);
                    if (base_type == 0)
                        description = "XCF RGB";
                    else if (base_type == 1)
                        description = "XCF grayscale";
                    else if (base_type == 2)
                        description = "XCF indexed";
                    else
                        description = "XCF";
                    mime_type = "image/x-xcf";
                    format_recognized = TRUE;
                }

                g_object_unref (data_stream);
                g_object_unref (mem_stream);
            }
        }

        g_free (buffer);
        g_object_unref (stream);
    }

    if (! format_recognized) {
        /* use gdk_pixbuf_get_file_info */

        char *filename;

        filename = g_file_get_path (file_data->file);
        if (filename != NULL) {
            GdkPixbufFormat *format;

            format = gdk_pixbuf_get_file_info (filename, &width, &height);
            if (format != NULL) {
                format_recognized = TRUE;
                description = gdk_pixbuf_format_get_description (format);
            }

            g_free (filename);
        }
    }

    if (format_recognized) {
        char *size;

        g_file_info_set_attribute_string (file_data->info, "general::format", description);

        g_file_info_set_attribute_int32 (file_data->info, "image::width", width);
        g_file_info_set_attribute_int32 (file_data->info, "image::height", height);
        g_file_info_set_attribute_int32 (file_data->info, "frame::width", width);
        g_file_info_set_attribute_int32 (file_data->info, "frame::height", height);

        if (mime_type != NULL)
            gth_file_data_set_mime_type (file_data, mime_type);

        size = g_strdup_printf (_("%d × %d"), width, height);
        g_file_info_set_attribute_string (file_data->info, "general::dimensions", size);

        g_free (size);
    }
}
static jobject WebPFactory_decodeBuffer
  (JNIEnv *jniEnv, const uint8_t *buffer, size_t length, jobject options)
{
  // Validate image
  int bitmapWidth = 0;
  int bitmapHeight = 0;
  if (!WebPGetInfo(buffer, length, &bitmapWidth, &bitmapHeight))
  {
    jniEnv->ThrowNew(jrefs::java::lang::RuntimeException->jclassRef, "Invalid WebP format");
    return NULL;
  }

  // Check if size is all what we were requested to do
  if (options && jniEnv->GetBooleanField(options, jrefs::android::graphics::BitmapFactory->Options.inJustDecodeBounds) == JNI_TRUE)
  {
    jniEnv->SetIntField(options, jrefs::android::graphics::BitmapFactory->Options.outWidth, bitmapWidth);
    jniEnv->SetIntField(options, jrefs::android::graphics::BitmapFactory->Options.outHeight, bitmapHeight);
    return NULL;
  }

  // Initialize decoder config and configure scaling if requested
  WebPDecoderConfig config;
  if (!WebPInitDecoderConfig(&config))
  {
    jniEnv->ThrowNew(jrefs::java::lang::RuntimeException->jclassRef, "Unable to init WebP decoder config");
    return NULL;
  }

  if (options)
  {
    jint inSampleSize = jniEnv->GetIntField(options, jrefs::android::graphics::BitmapFactory->Options.inSampleSize);
    if (inSampleSize > 1)
    {
      config.options.use_scaling = 1;
      config.options.scaled_width = bitmapWidth /= inSampleSize;
      config.options.scaled_height = bitmapHeight /= inSampleSize;
    }
  }

  __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "Decoding into %dx%d bitmap", bitmapWidth, bitmapHeight);

  // Create bitmap
  jobject value__ARGB_8888 = jniEnv->GetStaticObjectField(
    jrefs::android::graphics::Bitmap->Config.jclassRef,
    jrefs::android::graphics::Bitmap->Config.ARGB_8888);
  jobject outputBitmap = jniEnv->CallStaticObjectMethod(
    jrefs::android::graphics::Bitmap->jclassRef,
    jrefs::android::graphics::Bitmap->createBitmap,
    (jint)bitmapWidth, (jint)bitmapHeight,
    value__ARGB_8888);
  if (!outputBitmap)
  {
    jniEnv->ThrowNew(jrefs::java::lang::RuntimeException->jclassRef, "Failed to allocate Bitmap");
    return NULL;
  }

  // Get information about bitmap passed
  AndroidBitmapInfo bitmapInfo;
  if (AndroidBitmap_getInfo(jniEnv, outputBitmap, &bitmapInfo) != ANDROID_BITMAP_RESULT_SUCCESS)
  {
    jniEnv->ThrowNew(jrefs::java::lang::RuntimeException->jclassRef, "Failed to get Bitmap information");
    return NULL;
  }

  // Lock pixels
  void *bitmapPixels = 0;
  if (AndroidBitmap_lockPixels(jniEnv, outputBitmap, &bitmapPixels) != ANDROID_BITMAP_RESULT_SUCCESS)
  {
    jniEnv->ThrowNew(jrefs::java::lang::RuntimeException->jclassRef, "Failed to lock Bitmap pixels");
    return NULL;
  }

  // Decode to ARGB
  config.output.colorspace = MODE_RGBA;
  config.output.u.RGBA.rgba = (uint8_t*)bitmapPixels;
  config.output.u.RGBA.stride = bitmapInfo.stride;
  config.output.u.RGBA.size = bitmapInfo.height * bitmapInfo.stride;
  config.output.is_external_memory = 1;
  if (WebPDecode(buffer, length, &config) != VP8_STATUS_OK)
  {
    AndroidBitmap_unlockPixels(jniEnv, outputBitmap);
    jniEnv->ThrowNew(jrefs::java::lang::RuntimeException->jclassRef, "Failed to decode WebP pixel data");
    return NULL;
  }

  // Unlock pixels
  if (AndroidBitmap_unlockPixels(jniEnv, outputBitmap) != ANDROID_BITMAP_RESULT_SUCCESS)
  {
    jniEnv->ThrowNew(jrefs::java::lang::RuntimeException->jclassRef, "Failed to unlock Bitmap pixels");
    return NULL;
  }

  return outputBitmap;
}
static int
WEBPDecode(WEBPDec* pSrc, Vbitmap *vbitmap, YmagineFormatOptions *options)
{
  int contentSize;
  int origWidth = 0;
  int origHeight = 0;
  int quality;
  unsigned char header[WEBP_HEADER_SIZE + 32];
  int headerlen;
  int toRead;
  unsigned char *input = NULL;
  int inputlen;
  int oformat;
  int opitch;
  unsigned char *odata;
  int rc;
  Vrect srcrect;
  Vrect destrect;
  WebPIDecoder* idec;

  if (options == NULL) {
    /* Options argument is mandatory */
    return 0;
  }

  headerlen = YchannelRead(pSrc->channel, (char *) header, sizeof(header));
  if (headerlen < WEBP_HEADER_SIZE) {
    return 0;
  }

  /* Check WEBP header */
  contentSize = WebpCheckHeader((const char*) header, headerlen);
  if (contentSize <= 0) {
    return 0;
  }

  if (WebPGetInfo(header, headerlen, &origWidth, &origHeight) == 0) {
    ALOGD("invalid VP8 header");
    return 0;
  }

  if (origWidth <= 0 || origHeight <= 0) {
    return 0;
  }

  if (YmagineFormatOptions_invokeCallback(options, YMAGINE_IMAGEFORMAT_WEBP,
                                          origWidth, origHeight) != YMAGINE_OK) {
    return 0;
  }

  if (YmaginePrepareTransform(vbitmap, options, origWidth, origHeight,
                              &srcrect, &destrect) != YMAGINE_OK) {
    return 0;
  }

#if YMAGINE_DEBUG_WEBP
  ALOGD("size: %dx%d req: %dx%d %s -> output: %dx%d",
        origWidth, origHeight,
        destrect.width, destrect.height,
        (options->scalemode == YMAGINE_SCALE_CROP) ? "crop" :
        (options->scalemode == YMAGINE_SCALE_FIT ? "fit" : "letterbox"),
        destrect.width, destrect.height);
#endif

  if (vbitmap != NULL) {
    if (options->resizable) {
      destrect.x = 0;
      destrect.y = 0;
      if (VbitmapResize(vbitmap, destrect.width, destrect.height) != YMAGINE_OK) {
        return 0;
      }
    }
    if (VbitmapType(vbitmap) == VBITMAP_NONE) {
      /* Decode bounds only, return positive number (number of lines) on success */
      return VbitmapHeight(vbitmap);
    }
  }

  pSrc->bitmap = vbitmap;

  inputlen = contentSize;
  toRead = inputlen - headerlen;

  rc = VbitmapLock(vbitmap);
  if (rc != YMAGINE_OK) {
    ALOGE("VbitmapLock() failed (code %d)", rc);
    rc = YMAGINE_ERROR;
  } else {
    odata = VbitmapBuffer(vbitmap);
    opitch = VbitmapPitch(vbitmap);
    oformat = VbitmapColormode(vbitmap);

    pSrc->inwidth = origWidth;
    pSrc->inheight = origHeight;
    pSrc->outwidth = destrect.width;
    pSrc->outheight = destrect.height;

    if (odata == NULL) {
      ALOGD("failed to get reference to pixel buffer");
      rc = YMAGINE_ERROR;
    } else {
      WebPDecoderConfig config;
      int supported = 1;
      int webpcolorspace;

      switch (oformat) {
        case VBITMAP_COLOR_RGBA:
          webpcolorspace = MODE_RGBA;
          break;
        case VBITMAP_COLOR_RGB:
          webpcolorspace = MODE_RGB;
          break;
        case VBITMAP_COLOR_rgbA:
          webpcolorspace = MODE_rgbA;
          break;
        case VBITMAP_COLOR_ARGB:
          webpcolorspace = MODE_ARGB;
          break;
        case VBITMAP_COLOR_Argb:
          webpcolorspace = MODE_Argb;
          break;
        case VBITMAP_COLOR_GRAYSCALE:
        case VBITMAP_COLOR_YUV:
        case VBITMAP_COLOR_CMYK:
        case VBITMAP_COLOR_YCbCr:
        default:
          supported = 0;
          break;
      }

      if (!supported) {
        ALOGD("currently only support RGB, RGBA webp decoding");
        rc = YMAGINE_ERROR;
      } else {
        pSrc->isdirect = 1;
        pSrc->outformat = oformat;
        pSrc->outbpp = VbitmapBpp(vbitmap);
        pSrc->outstride = opitch;
        pSrc->outbuffer = odata + destrect.x * pSrc->outbpp + destrect.y * pSrc->outstride;

        WebPInitDecoderConfig(&config);

        quality = YmagineFormatOptions_normalizeQuality(options);
        if (quality < 90) {
          config.options.no_fancy_upsampling = 1;
        }
        if (quality < 60) {
          config.options.bypass_filtering = 1;
        }
        config.options.use_threads = 1;

        if (srcrect.x != 0 || srcrect.y != 0 ||
            srcrect.width != origWidth || srcrect.height != origHeight) {
          /* Crop on source */
          config.options.use_cropping = 1;
          config.options.crop_left = srcrect.x;
          config.options.crop_top = srcrect.y;
          config.options.crop_width = srcrect.width;
          config.options.crop_height = srcrect.height;
        }

        if (pSrc->outwidth != pSrc->inwidth || pSrc->outheight != pSrc->inheight) {
          config.options.use_scaling = 1;
          config.options.scaled_width = pSrc->outwidth;
          config.options.scaled_height = pSrc->outheight;
        }

        rc = YMAGINE_ERROR;

        // Specify the desired output colorspace:
        config.output.colorspace = webpcolorspace;
        // Have config.output point to an external buffer:
        config.output.u.RGBA.rgba = (uint8_t*) pSrc->outbuffer;
        config.output.u.RGBA.stride = pSrc->outstride;
        config.output.u.RGBA.size = pSrc->outstride * pSrc->outheight;
        config.output.is_external_memory = 1;

        idec = WebPIDecode(NULL, 0, &config);
        if (idec != NULL) {
          VP8StatusCode status;

          status = WebPIAppend(idec, header, headerlen);
          if (status == VP8_STATUS_OK || status == VP8_STATUS_SUSPENDED) {
            int bytes_remaining = toRead;
            int bytes_read;
            int bytes_req;
            unsigned char rbuf[8192];

            // See WebPIUpdate(idec, buffer, size_of_transmitted_buffer);
            bytes_req = sizeof(rbuf);
            while (bytes_remaining > 0) {
              if (bytes_req > bytes_remaining) {
                bytes_req = bytes_remaining;
              }
              bytes_read = YchannelRead(pSrc->channel, rbuf, bytes_req);
              if (bytes_read <= 0) {
                break;
              }
              status = WebPIAppend(idec, (uint8_t*) rbuf, bytes_read);
              if (status == VP8_STATUS_OK) {
                rc = YMAGINE_OK;
                break;
              } else if (status == VP8_STATUS_SUSPENDED) {
                if (bytes_remaining > 0) {
                  bytes_remaining -= bytes_read;
                }
              } else {
                /* error */
                break;
              }
              // The above call decodes the current available buffer.
              // Part of the image can now be refreshed by calling
              // WebPIDecGetRGB()/WebPIDecGetYUVA() etc.
            }
          }
        }

        // the object doesn't own the image memory, so it can now be deleted.
        WebPIDelete(idec);
        WebPFreeDecBuffer(&config.output);
      }
    }

    VbitmapUnlock(vbitmap);
  }

  if (input) {
    Ymem_free((char*) input);
  }
  if (!pSrc->isdirect) {
    Ymem_free(pSrc->outbuffer);
  }

  if (rc == YMAGINE_OK) {
    return origHeight;
  }

  return 0;
}
/**
Decode a WebP image and return a FIBITMAP image
@param webp_image Raw WebP image
@param flags FreeImage load flags
@return Returns a dib if successful, returns NULL otherwise
*/
static FIBITMAP *
DecodeImage(WebPData *webp_image, int flags) {
  FIBITMAP *dib = NULL;

  const uint8_t* data = webp_image->bytes;   // raw image data
  const size_t data_size = webp_image->size; // raw image size

  VP8StatusCode webp_status = VP8_STATUS_OK;

  BOOL header_only = (flags & FIF_LOAD_NOPIXELS) == FIF_LOAD_NOPIXELS;

  // Main object storing the configuration for advanced decoding
  WebPDecoderConfig decoder_config;
  // Output buffer
  WebPDecBuffer* const output_buffer = &decoder_config.output;
  // Features gathered from the bitstream
  WebPBitstreamFeatures* const bitstream = &decoder_config.input;

  try {
    // Initialize the configuration as empty
    // This function must always be called first, unless WebPGetFeatures() is to be called
    if(!WebPInitDecoderConfig(&decoder_config)) {
      throw "Library version mismatch";
    }

    // Retrieve features from the bitstream
    // The bitstream structure is filled with information gathered from the bitstream
    webp_status = WebPGetFeatures(data, data_size, bitstream);
    if(webp_status != VP8_STATUS_OK) {
      throw FI_MSG_ERROR_PARSING;
    }

    // Allocate output dib
    unsigned bpp = bitstream->has_alpha ? 32 : 24;
    unsigned width = (unsigned)bitstream->width;
    unsigned height = (unsigned)bitstream->height;
    dib = FreeImage_AllocateHeader(header_only, width, height, bpp, FI_RGBA_RED_MASK, FI_RGBA_GREEN_MASK, FI_RGBA_BLUE_MASK);
    if(!dib) {
      throw FI_MSG_ERROR_DIB_MEMORY;
    }

    if(header_only) {
      WebPFreeDecBuffer(output_buffer);
      return dib;
    }

    // --- Set decoding options ---

    // use multi-threaded decoding
    decoder_config.options.use_threads = 1;
    // set output color space
    output_buffer->colorspace = bitstream->has_alpha ? MODE_BGRA : MODE_BGR;

    // ---

    // decode the input stream, taking 'config' into account.
    webp_status = WebPDecode(data, data_size, &decoder_config);
    if(webp_status != VP8_STATUS_OK) {
      throw FI_MSG_ERROR_PARSING;
    }

    // fill the dib with the decoded data
    const BYTE *src_bitmap = output_buffer->u.RGBA.rgba;
    const unsigned src_pitch = (unsigned)output_buffer->u.RGBA.stride;

    switch(bpp) {
      case 24:
        for(unsigned y = 0; y < height; y++) {
          const BYTE *src_bits = src_bitmap + y * src_pitch;
          BYTE *dst_bits = (BYTE*)FreeImage_GetScanLine(dib, height-1-y);
          for(unsigned x = 0; x < width; x++) {
            dst_bits[FI_RGBA_BLUE]  = src_bits[0]; // B
            dst_bits[FI_RGBA_GREEN] = src_bits[1]; // G
            dst_bits[FI_RGBA_RED]   = src_bits[2]; // R
            src_bits += 3;
            dst_bits += 3;
          }
        }
        break;
      case 32:
        for(unsigned y = 0; y < height; y++) {
          const BYTE *src_bits = src_bitmap + y * src_pitch;
          BYTE *dst_bits = (BYTE*)FreeImage_GetScanLine(dib, height-1-y);
          for(unsigned x = 0; x < width; x++) {
            dst_bits[FI_RGBA_BLUE]  = src_bits[0]; // B
            dst_bits[FI_RGBA_GREEN] = src_bits[1]; // G
            dst_bits[FI_RGBA_RED]   = src_bits[2]; // R
            dst_bits[FI_RGBA_ALPHA] = src_bits[3]; // A
            src_bits += 4;
            dst_bits += 4;
          }
        }
        break;
    }

    // Free the decoder
    WebPFreeDecBuffer(output_buffer);

    return dib;

  } catch (const char *text) {
    if(dib) {
      FreeImage_Unload(dib);
    }
    WebPFreeDecBuffer(output_buffer);
    if(NULL != text) {
      FreeImage_OutputMessageProc(s_format_id, text);
    }
    return NULL;
  }
}
GDALDataset *WEBPDataset::Open( GDALOpenInfo * poOpenInfo )

{
    if( !Identify( poOpenInfo ) || poOpenInfo->fpL == NULL )
        return NULL;

    int nWidth, nHeight;
    if (!WebPGetInfo((const uint8_t*)poOpenInfo->pabyHeader, (uint32_t)poOpenInfo->nHeaderBytes,
                     &nWidth, &nHeight))
        return NULL;

    int nBands = 3;

#if WEBP_DECODER_ABI_VERSION >= 0x0002
    WebPDecoderConfig config;
    if (!WebPInitDecoderConfig(&config))
        return NULL;

    int bOK = WebPGetFeatures(poOpenInfo->pabyHeader, poOpenInfo->nHeaderBytes, &config.input) == VP8_STATUS_OK;

    if (config.input.has_alpha)
        nBands = 4;

    WebPFreeDecBuffer(&config.output);

    if (!bOK)
        return NULL;
#endif

    if( poOpenInfo->eAccess == GA_Update )
    {
        CPLError( CE_Failure, CPLE_NotSupported,
                  "The WEBP driver does not support update access to existing"
                  " datasets.\n" );
        return NULL;
    }

/* -------------------------------------------------------------------- */
/*      Create a corresponding GDALDataset.                              */
/* -------------------------------------------------------------------- */
    WEBPDataset *poDS;

    poDS = new WEBPDataset();
    poDS->nRasterXSize = nWidth;
    poDS->nRasterYSize = nHeight;
    poDS->fpImage = poOpenInfo->fpL;
    poOpenInfo->fpL = NULL;

/* -------------------------------------------------------------------- */
/*      Create band information objects.                                 */
/* -------------------------------------------------------------------- */
    for( int iBand = 0; iBand < nBands; iBand++ )
        poDS->SetBand( iBand+1, new WEBPRasterBand( poDS, iBand+1 ) );

/* -------------------------------------------------------------------- */
/*      Initialize any PAM information.                                  */
/* -------------------------------------------------------------------- */
    poDS->SetDescription( poOpenInfo->pszFilename );
    poDS->TryLoadXML( poOpenInfo->GetSiblingFiles() );

/* -------------------------------------------------------------------- */
/*      Open overviews.                                                  */
/* -------------------------------------------------------------------- */
    poDS->oOvManager.Initialize( poDS, poOpenInfo->pszFilename,
                                 poOpenInfo->GetSiblingFiles() );

    return poDS;
}
PyObject* WebPDecode_wrapper(PyObject* self, PyObject* args)
{
    PyBytesObject *webp_string;
    uint8_t *webp;
    Py_ssize_t size;
    PyObject *ret, *bytes, *pymode, *icc_profile = Py_None, *exif = Py_None;
    WebPDecoderConfig config;
    VP8StatusCode vp8_status_code = VP8_STATUS_OK;
    char* mode = "RGB";

    if (!PyArg_ParseTuple(args, "S", &webp_string)) {
        Py_RETURN_NONE;
    }

    if (!WebPInitDecoderConfig(&config)) {
        Py_RETURN_NONE;
    }

    PyBytes_AsStringAndSize((PyObject *) webp_string, (char**)&webp, &size);
    vp8_status_code = WebPGetFeatures(webp, size, &config.input);

    if (vp8_status_code == VP8_STATUS_OK) {
        // If we don't set it, we don't get alpha.
        // Initialized to MODE_RGB
        if (config.input.has_alpha) {
            config.output.colorspace = MODE_RGBA;
            mode = "RGBA";
        }

#ifndef HAVE_WEBPMUX
        vp8_status_code = WebPDecode(webp, size, &config);
#else
        {
            int copy_data = 0;
            WebPData data = { webp, size };
            WebPMuxFrameInfo image;
            WebPData icc_profile_data = {0};
            WebPData exif_data = {0};

            WebPMux* mux = WebPMuxCreate(&data, copy_data);
            WebPMuxGetFrame(mux, 1, &image);
            webp = (uint8_t*)image.bitstream.bytes;
            size = image.bitstream.size;

            vp8_status_code = WebPDecode(webp, size, &config);

            WebPMuxGetChunk(mux, "ICCP", &icc_profile_data);
            if (icc_profile_data.size > 0) {
                icc_profile = PyBytes_FromStringAndSize((const char*)icc_profile_data.bytes, icc_profile_data.size);
            }

            WebPMuxGetChunk(mux, "EXIF", &exif_data);
            if (exif_data.size > 0) {
                exif = PyBytes_FromStringAndSize((const char*)exif_data.bytes, exif_data.size);
            }

            WebPMuxDelete(mux);
        }
#endif
    }

    if (vp8_status_code != VP8_STATUS_OK) {
        WebPFreeDecBuffer(&config.output);
        Py_RETURN_NONE;
    }

    if (config.output.colorspace < MODE_YUV) {
        bytes = PyBytes_FromStringAndSize((char *)config.output.u.RGBA.rgba,
                                          config.output.u.RGBA.size);
    } else {
        // Skipping YUV for now. Need Test Images.
        // UNDONE -- unclear if we'll ever get here if we set mode_rgb*
        bytes = PyBytes_FromStringAndSize((char *)config.output.u.YUVA.y,
                                          config.output.u.YUVA.y_size);
    }

#if PY_VERSION_HEX >= 0x03000000
    pymode = PyUnicode_FromString(mode);
#else
    pymode = PyString_FromString(mode);
#endif

    ret = Py_BuildValue("SiiSSS", bytes, config.output.width, config.output.height,
                        pymode, icc_profile, exif);

    WebPFreeDecBuffer(&config.output);
    return ret;
}
static void
prepare (GeglOperation *operation)
{
  GeglProperties *o = GEGL_PROPERTIES (operation);
  Priv *p = (o->user_data) ? o->user_data : g_new0 (Priv, 1);
  GError *error = NULL;
  GFile *file = NULL;
  guchar *buffer;
  gsize read;

  g_assert (p != NULL);

  if (p->file != NULL && (o->uri || o->path))
    {
      if (o->uri && strlen (o->uri) > 0)
        file = g_file_new_for_uri (o->uri);
      else if (o->path && strlen (o->path) > 0)
        file = g_file_new_for_path (o->path);
      if (file != NULL)
        {
          if (!g_file_equal (p->file, file))
            cleanup (operation);
          g_object_unref (file);
        }
    }

  o->user_data = (void*) p;

  if (p->config == NULL)
    {
      p->stream = gegl_gio_open_input_stream (o->uri, o->path, &p->file, &error);
      if (p->stream == NULL)
        {
          g_warning (error->message);
          g_error_free (error);
          cleanup (operation);
          return;
        }

      p->config = g_try_new (WebPDecoderConfig, 1);
      p->decoder = WebPINewDecoder (&p->config->output);

      g_assert (p->config != NULL);

      if (!WebPInitDecoderConfig (p->config))
        {
          g_warning ("could not initialise WebP decoder configuration");
          cleanup (operation);
          return;
        }

      read = read_from_stream (p->stream, &buffer, IO_BUFFER_SIZE);
      if (WebPGetFeatures (buffer, read, &p->config->input) != VP8_STATUS_OK)
        {
          g_warning ("failed reading WebP image file");
          cleanup (operation);
          g_free (buffer);
          return;
        }

      if (!query_webp (operation))
        {
          g_warning ("could not query WebP image file");
          cleanup (operation);
          g_free (buffer);
          return;
        }

      WebPIAppend (p->decoder, buffer, read);

      g_free (buffer);
    }

  gegl_operation_set_format (operation, "output", p->format);
}
static pixman_image_t *
load_webp(FILE *fp)
{
  WebPDecoderConfig config;
  uint8_t buffer[16 * 1024];
  int len;
  VP8StatusCode status;
  WebPIDecoder *idec;

  if (!WebPInitDecoderConfig(&config)) {
    fprintf(stderr, "Library version mismatch!\n");
    return NULL;
  }

  /* webp decoding api doesn't seem to specify a min size that's
     usable for GetFeatures, but 256 works... */
  len = fread(buffer, 1, 256, fp);
  status = WebPGetFeatures(buffer, len, &config.input);
  if (status != VP8_STATUS_OK) {
    fprintf(stderr, "failed to parse webp header\n");
    WebPFreeDecBuffer(&config.output);
    return NULL;
  }

  config.output.colorspace = MODE_BGRA;
  config.output.u.RGBA.stride = stride_for_width(config.input.width);
  config.output.u.RGBA.size =
    config.output.u.RGBA.stride * config.input.height;
  config.output.u.RGBA.rgba =
    malloc(config.output.u.RGBA.stride * config.input.height);
  config.output.is_external_memory = 1;
  if (!config.output.u.RGBA.rgba) {
    WebPFreeDecBuffer(&config.output);
    return NULL;
  }

  rewind(fp);
  idec = WebPINewDecoder(&config.output);
  if (!idec) {
    WebPFreeDecBuffer(&config.output);
    return NULL;
  }

  while (!feof(fp)) {
    len = fread(buffer, 1, sizeof buffer, fp);
    status = WebPIAppend(idec, buffer, len);
    if (status != VP8_STATUS_OK) {
      fprintf(stderr, "webp decode status %d\n", status);
      WebPIDelete(idec);
      WebPFreeDecBuffer(&config.output);
      return NULL;
    }
  }

  WebPIDelete(idec);
  WebPFreeDecBuffer(&config.output);

  return pixman_image_create_bits(PIXMAN_a8r8g8b8,
                                  config.input.width,
                                  config.input.height,
                                  (uint32_t *) config.output.u.RGBA.rgba,
                                  config.output.u.RGBA.stride);
}
GthImage *
_cairo_image_surface_create_from_webp (GInputStream  *istream,
                                       GthFileData   *file_data,
                                       int            requested_size,
                                       int           *original_width,
                                       int           *original_height,
                                       gboolean      *loaded_original,
                                       gpointer       user_data,
                                       GCancellable  *cancellable,
                                       GError       **error)
{
    GthImage                 *image;
    WebPDecoderConfig         config;
    guchar                   *buffer;
    gssize                    bytes_read;
    int                       width, height;
    cairo_surface_t          *surface;
    cairo_surface_metadata_t *metadata;
    WebPIDecoder             *idec;

    image = gth_image_new ();

    if (! WebPInitDecoderConfig (&config))
        return image;

    buffer = g_new (guchar, BUFFER_SIZE);
    bytes_read = g_input_stream_read (istream, buffer, BUFFER_SIZE, cancellable, error);

    if (WebPGetFeatures (buffer, bytes_read, &config.input) != VP8_STATUS_OK) {
        g_free (buffer);
        return image;
    }

    width = config.input.width;
    height = config.input.height;

    if (original_width != NULL)
        *original_width = width;
    if (original_height != NULL)
        *original_height = height;

#if SCALING_WORKS
    if (requested_size > 0)
        scale_keeping_ratio (&width, &height, requested_size, requested_size, FALSE);
#endif

    surface = cairo_image_surface_create (CAIRO_FORMAT_ARGB32, width, height);
    metadata = _cairo_image_surface_get_metadata (surface);
    _cairo_metadata_set_has_alpha (metadata, config.input.has_alpha);

    config.options.no_fancy_upsampling = 1;
#if SCALING_WORKS
    if (requested_size > 0) {
        config.options.use_scaling = 1;
        config.options.scaled_width = width;
        config.options.scaled_height = height;
    }
#endif

#if G_BYTE_ORDER == G_LITTLE_ENDIAN
    config.output.colorspace = MODE_BGRA;
#elif G_BYTE_ORDER == G_BIG_ENDIAN
    config.output.colorspace = MODE_ARGB;
#endif
    config.output.u.RGBA.rgba = (uint8_t *) _cairo_image_surface_flush_and_get_data (surface);
    config.output.u.RGBA.stride = cairo_image_surface_get_stride (surface);
    config.output.u.RGBA.size = cairo_image_surface_get_stride (surface) * height;
    config.output.is_external_memory = 1;

    idec = WebPINewDecoder (&config.output);
    if (idec == NULL) {
        g_free (buffer);
        return image;
    }

    do {
        VP8StatusCode status = WebPIAppend (idec, buffer, bytes_read);
        if ((status != VP8_STATUS_OK) && (status != VP8_STATUS_SUSPENDED))
            break;
    }
    while ((bytes_read = g_input_stream_read (istream, buffer, BUFFER_SIZE, cancellable, error)) > 0);

    cairo_surface_mark_dirty (surface);
    if (cairo_surface_status (surface) == CAIRO_STATUS_SUCCESS)
        gth_image_set_cairo_surface (image, surface);

    WebPIDelete (idec);
    WebPFreeDecBuffer (&config.output);
    g_free (buffer);

    return image;
}
static Read *
read_new( const char *filename, const void *data, size_t length, int shrink )
{
    Read *read;

    if( !(read = VIPS_NEW( NULL, Read )) )
        return( NULL );

    read->filename = g_strdup( filename );
    read->data = data;
    read->length = length;
    read->shrink = shrink;
    read->fd = 0;
    read->idec = NULL;

    if( read->filename ) {
        /* libwebp makes streaming from a file source very hard. We
         * have to read to a full memory buffer, then copy to out.
         *
         * mmap the input file, it's slightly quicker.
         */
        if( (read->fd = vips__open_image_read( read->filename )) < 0 ||
            (read->length = vips_file_length( read->fd )) < 0 ||
            !(read->data = vips__mmap( read->fd, FALSE, read->length, 0 )) ) {
            read_free( read );
            return( NULL );
        }
    }

    WebPInitDecoderConfig( &read->config );
    if( WebPGetFeatures( read->data, MINIMAL_HEADER,
        &read->config.input ) != VP8_STATUS_OK ) {
        read_free( read );
        return( NULL );
    }

    if( read->config.input.has_alpha )
        read->config.output.colorspace = MODE_RGBA;
    else
        read->config.output.colorspace = MODE_RGB;

    read->config.options.use_threads = 1;

    read->width = read->config.input.width / read->shrink;
    read->height = read->config.input.height / read->shrink;
    if( read->width == 0 || read->height == 0 ) {
        vips_error( "webp", "%s", _( "bad setting for shrink" ) );
        return( NULL );
    }

    if( read->shrink > 1 ) {
        read->config.options.use_scaling = 1;
        read->config.options.scaled_width = read->width;
        read->config.options.scaled_height = read->height;
    }

    return( read );
}
jobject doDecode(
    JNIEnv* env,
    uint8_t* encoded_image,
    unsigned encoded_image_length,
    jobject bitmapOptions,
    jfloat scale) {

  // Options manipulation is taken from https://github.com/android/platform_frameworks_base/blob/master/core/jni/android/graphics/BitmapFactory.cpp
  int image_width = 0;
  int image_height = 0;
  jobject bitmap = nullptr;

  WebPGetInfo(
      encoded_image,
      encoded_image_length,
      &image_width,
      &image_height);

  WebPDecoderConfig config;
  WebPInitDecoderConfig(&config);

  if ((bitmapOptions != nullptr) &&
      (setOutDimensions(env, bitmapOptions, image_width, image_height))) {
    return {};
  }

  if (scale != 1.0f) {
    image_width = int(image_width * scale + 0.5f);
    image_height = int(image_height * scale + 0.5f);
    config.options.use_scaling = 1;
    config.options.scaled_width = image_width;
    config.options.scaled_height = image_height;
  }

  bitmap = createBitmap(env, image_width, image_height, bitmapOptions);
  RETURN_NULL_IF_EXCEPTION(env);

  void* raw_pixels = nullptr;

  int rc = AndroidBitmap_lockPixels(env, bitmap, (void**) &raw_pixels);
  if (rc != ANDROID_BITMAP_RESULT_SUCCESS) {
    env->ThrowNew(runtimeExceptionClass, "Decode error locking pixels");
    return JNI_FALSE;
  }

  config.output.colorspace = MODE_RGBA;
  config.output.u.RGBA.rgba = (uint8_t*) raw_pixels;
  config.output.u.RGBA.stride = image_width * 4;
  config.output.u.RGBA.size = image_width * image_height * 4;
  config.output.is_external_memory = 1;

  WebPDecode(encoded_image, encoded_image_length, &config);

  rc = AndroidBitmap_unlockPixels(env, bitmap);
  if (rc != ANDROID_BITMAP_RESULT_SUCCESS) {
    env->ThrowNew(runtimeExceptionClass, "Decode error unlocking pixels");
    return {};
  }

  if (bitmapOptions != nullptr) {
    setBitmapSize(env, bitmapOptions, image_width, image_height);
  }

  return bitmap;
}