Example #1
void webp_reader<T>::read(unsigned x0, unsigned y0, image_data_32& image)
{
    WebPDecoderConfig config;
    config_guard guard(config);
    if (!WebPInitDecoderConfig(&config))
    {
        throw image_reader_exception("WEBP reader: WebPInitDecoderConfig failed");
    }

    config.options.use_cropping = 1;
    config.options.crop_left = x0;
    config.options.crop_top = y0;
    config.options.crop_width = std::min(width_ - x0, image.width());
    config.options.crop_height = std::min(height_ - y0, image.height());

    if (WebPGetFeatures(buffer_->data(), buffer_->size(), &config.input) != VP8_STATUS_OK)
    {
        throw image_reader_exception("WEBP reader: WebPGetFeatures failed");
    }

    config.output.colorspace = MODE_RGBA;
    config.output.u.RGBA.rgba = (uint8_t *)image.getBytes();
    config.output.u.RGBA.stride = 4 * image.width();
    config.output.u.RGBA.size = image.width() * image.height() * 4;
    config.output.is_external_memory = 1;
    if (WebPDecode(buffer_->data(), buffer_->size(), &config) != VP8_STATUS_OK)
    {
        throw image_reader_exception("WEBP reader: WebPDecode failed");
    }
}
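
This mapnik reader drives libwebp's advanced decoding API so it can crop and decode straight into caller-owned memory. When no cropping or scaling is needed, the one-shot helpers are simpler; a minimal sketch (libwebp allocates the buffer, which is released with WebPFree(), or plain free() in very old releases):

/* Minimal one-shot decode from webp/decode.h, for contrast with the
 * WebPDecoderConfig path above; the library allocates the RGBA buffer
 * and reports the dimensions. */
static uint8_t* decode_whole_image(const uint8_t* data, size_t data_size,
                                   int* width, int* height)
{
    return WebPDecodeRGBA(data, data_size, width, height);
}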
Example #2
static int
read_image( Read *read, VipsImage *out )
{
	VipsImage **t = (VipsImage **) 
		vips_object_local_array( VIPS_OBJECT( out ), 3 );

	t[0] = vips_image_new_memory();
	if( read_header( read, t[0] ) )
		return( -1 );
	if( vips_image_write_prepare( t[0] ) ) 
		return( -1 );

	read->config.output.u.RGBA.rgba = VIPS_IMAGE_ADDR( t[0], 0, 0 );
	read->config.output.u.RGBA.stride = VIPS_IMAGE_SIZEOF_LINE( t[0] );
	read->config.output.u.RGBA.size = VIPS_IMAGE_SIZEOF_IMAGE( t[0] );
	read->config.output.is_external_memory = 1;

	if( WebPDecode( (uint8_t *) read->data, read->length, 
		&read->config) != VP8_STATUS_OK ) {
		vips_error( "webp2vips", "%s", _( "unable to read pixels" ) ); 
		return( -1 );
	}

	if( vips_image_write( t[0], out ) )
		return( -1 );

	return( 0 );
}
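
The vips reader above derives stride and size from the image's own layout macros. Distilled, the external-memory pattern shared by these examples looks like this (a sketch with error handling trimmed; pixels, stride_bytes, height, data and data_size are caller-supplied placeholders):

WebPDecoderConfig config;
if (!WebPInitDecoderConfig(&config))
    return -1;
config.output.colorspace = MODE_RGBA;
config.output.u.RGBA.rgba = pixels;            /* caller-owned buffer */
config.output.u.RGBA.stride = stride_bytes;    /* bytes per row, >= width * 4 */
config.output.u.RGBA.size = (size_t)stride_bytes * height;
config.output.is_external_memory = 1;          /* decoder neither allocates nor frees */
if (WebPDecode(data, data_size, &config) != VP8_STATUS_OK)
    return -1;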
Example #3
PyObject* WebPDecode_wrapper(PyObject* self, PyObject* args)
{
    PyBytesObject *webp_string;
    uint8_t *webp;
    Py_ssize_t size;
    PyObject *ret, *bytes, *pymode;
    WebPDecoderConfig config;
    VP8StatusCode vp8_status_code = VP8_STATUS_OK;
    char* mode = "RGB";

    if (!PyArg_ParseTuple(args, "S", &webp_string)) {
        Py_RETURN_NONE;
    }

    if (!WebPInitDecoderConfig(&config)) {
        Py_RETURN_NONE;
    }       

    PyBytes_AsStringAndSize((PyObject *) webp_string, (char**)&webp, &size);

    vp8_status_code = WebPGetFeatures(webp, size, &config.input);
    if (vp8_status_code == VP8_STATUS_OK) {
        // If we don't set it, we don't get alpha. 
        // Initialized to MODE_RGB
        if (config.input.has_alpha) {
            config.output.colorspace = MODE_RGBA;
            mode = "RGBA";
        }
        vp8_status_code = WebPDecode(webp, size, &config);
    }   
    
    if (vp8_status_code != VP8_STATUS_OK) {
        WebPFreeDecBuffer(&config.output);
        Py_RETURN_NONE;
    }   
    
    if (config.output.colorspace < MODE_YUV) {
        bytes = PyBytes_FromStringAndSize((char *)config.output.u.RGBA.rgba, 
                                          config.output.u.RGBA.size);
    } else {
        // Skipping YUV for now. Need Test Images.
        // UNDONE -- unclear if we'll ever get here if we set mode_rgb*
        bytes = PyBytes_FromStringAndSize((char *)config.output.u.YUVA.y, 
                                          config.output.u.YUVA.y_size);
    }

#if PY_VERSION_HEX >= 0x03000000
    pymode = PyUnicode_FromString(mode);
#else
    pymode = PyString_FromString(mode);
#endif
    ret = Py_BuildValue("SiiS", bytes, config.output.width, 
                        config.output.height, pymode);
    WebPFreeDecBuffer(&config.output);
    return ret;
}
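
One caveat in this wrapper: Py_BuildValue's "S" format increments the refcount of the object it packs, so the references created by PyBytes_FromStringAndSize and PyUnicode_FromString/PyString_FromString are never released. A leak-free tail would drop them once the tuple is built:

    ret = Py_BuildValue("SiiS", bytes, config.output.width,
                        config.output.height, pymode);
    Py_XDECREF(bytes);   /* "S" took its own reference */
    Py_XDECREF(pymode);
    WebPFreeDecBuffer(&config.output);
    return ret;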
Example #4
CGImageRef NZCGImageCreateUsingWebPData(CFDataRef webPData)
{
	uint8 *y = NULL, *u = NULL, *v = NULL;
	int32_t width, height;
	
	if (CFDataGetLength(webPData) > INT_MAX)	// highly unlikely to happen; just checking anyway
		return NULL;
	
	// Step 1: Decode the data.
	if (WebPDecode(CFDataGetBytePtr(webPData), (int)CFDataGetLength(webPData), &y, &u, &v, &width, &height) == webp_success)
	{
		const int32_t depth = 32;
		const int wordsPerLine = (width*depth+31)/32;
		size_t pixelBytesLength = 4*height*wordsPerLine;	// Google's documentation is incorrect here; the length has to be quadrupled or we'll have an overrun
		uint32 *pixelBytes = malloc(pixelBytesLength);
		CFDataRef pixelData;
		CGDataProviderRef dataProvider;
		CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
		CGImageRef theImage;
		
		// Step 2: Convert the YUV data into RGB.
		YUV420toRGBA(y, u, v, wordsPerLine, width, height, pixelBytes);
		
		// Step 3: Convert the RGB data into a CGImageRef.
		pixelData = CFDataCreateWithBytesNoCopy(NULL, (const UInt8 *)pixelBytes, pixelBytesLength, NULL);
		dataProvider = CGDataProviderCreateWithCFData(pixelData);
		theImage = CGImageCreate(width,
								 height,
								 8,		// each component is one byte or 8 bits large
								 32,	// our data has four components
								 wordsPerLine*4,	// there are 32 bits or 4 bytes in a word
								 colorSpace,
								 kCGBitmapByteOrder32Host,	// our data is in host-endian format
								 dataProvider,
								 NULL,	// we don't care about decode arrays
								 true,	// sure, why not interpolate?
								 kCGRenderingIntentDefault);
		
		// Finally, clean up memory.
		CGColorSpaceRelease(colorSpace);
		CGDataProviderRelease(dataProvider);
		CFRelease(pixelData);
		free(y);
		return theImage;
	}
	fprintf(stderr, "NZCGWebPFunctions: The data provided is not in WebP format.\n");
	return NULL;
}
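
This snippet targets the original 2010-era libwebp interface, in which WebPDecode() returned separate Y/U/V planes; it no longer compiles against modern headers. A rough modern equivalent (a sketch, not the original author's code) decodes straight to RGBA and skips the manual YUV conversion:

int width = 0, height = 0;
uint8_t *rgba = WebPDecodeRGBA(CFDataGetBytePtr(webPData),
                               (size_t)CFDataGetLength(webPData),
                               &width, &height);
if (rgba == NULL)
    return NULL;
/* ... wrap rgba in a CGDataProvider/CGImageCreate as above; the buffer
   must outlive the provider and is then released with WebPFree(rgba). */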
Example #5
VP8StatusCode DecodeWebP(const uint8_t* const data, size_t data_size,
                         int verbose, WebPDecoderConfig* const config) {
  Stopwatch stop_watch;
  VP8StatusCode status = VP8_STATUS_OK;
  if (config == NULL) return VP8_STATUS_INVALID_PARAM;

  PrintAnimationWarning(config);

  StopwatchReset(&stop_watch);

  // Decoding call.
  status = WebPDecode(data, data_size, config);

  if (verbose) {
    const double decode_time = StopwatchReadAndReset(&stop_watch);
    fprintf(stderr, "Time to decode picture: %.3fs\n", decode_time);
  }
  return status;
}
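
DecodeWebP() here is a thin timing wrapper from libwebp's example tools; the caller is expected to fill the config first. A hypothetical call site (the MODE_RGBA choice and the verbose flag are illustrative):

WebPDecoderConfig config;
if (!WebPInitDecoderConfig(&config))
    return 1;
config.output.colorspace = MODE_RGBA;
if (DecodeWebP(data, data_size, 1 /* verbose */, &config) != VP8_STATUS_OK)
    return 1;
/* ... use config.output ... */
WebPFreeDecBuffer(&config.output);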
Example #6
NS_CC_BEGIN

bool CCImage::_initWithWebpData(void *pData, int nDataLen)
{
#if ENABLE_WEBP
	bool bRet = false;
	do
	{
        WebPDecoderConfig config;
        if (WebPInitDecoderConfig(&config) == 0) break;
        if (WebPGetFeatures((uint8_t*)pData, nDataLen, &config.input) != VP8_STATUS_OK) break;
        if (config.input.width == 0 || config.input.height == 0) break;
        
        config.output.colorspace = MODE_RGBA;
        m_nBitsPerComponent = 8;
        m_nWidth    = config.input.width;
        m_nHeight   = config.input.height;
        m_bHasAlpha = true;
        
        int bufferSize = m_nWidth * m_nHeight * 4;
        m_pData = new unsigned char[bufferSize];
        
        config.output.u.RGBA.rgba = (uint8_t*)m_pData;
        config.output.u.RGBA.stride = m_nWidth * 4;
        config.output.u.RGBA.size = bufferSize;
        config.output.is_external_memory = 1;

        if (WebPDecode((uint8_t*)pData, nDataLen, &config) != VP8_STATUS_OK)
        {
            delete []m_pData;
            m_pData = NULL;
            break;
        }
               
        bRet = true;
	} while (0);
	return bRet;
#else
	return false;
#endif
}
Example #7
NS_CC_BEGIN

bool Image::_initWithWebpData(void *pData, int nDataLen)
{
	bool bRet = false;
	do
	{
        WebPDecoderConfig config;
        if (WebPInitDecoderConfig(&config) == 0) break;
        if (WebPGetFeatures((uint8_t*)pData, nDataLen, &config.input) != VP8_STATUS_OK) break;
        if (config.input.width == 0 || config.input.height == 0) break;
        
        config.output.colorspace = MODE_RGBA;
        _bitsPerComponent = 8;
        _width    = config.input.width;
        _height   = config.input.height;
        _hasAlpha = true;
        
        int bufferSize = _width * _height * 4;
        _data = new unsigned char[bufferSize];
        
        config.output.u.RGBA.rgba = (uint8_t*)_data;
        config.output.u.RGBA.stride = _width * 4;
        config.output.u.RGBA.size = bufferSize;
        config.output.is_external_memory = 1;

        if (WebPDecode((uint8_t*)pData, nDataLen, &config) != VP8_STATUS_OK)
        {
            delete []_data;
            _data = NULL;
            break;
        }
               
        bRet = true;
	} while (0);
	return bRet;
}
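
Both cocos2d-x variants compute the buffer size as a signed int multiplication, which can overflow for very large images before the allocation. A defensive sketch (not cocos2d-x code; SIZE_MAX comes from <stdint.h>) does the arithmetic in size_t with an explicit guard:

size_t w = (size_t)config.input.width;
size_t h = (size_t)config.input.height;
if (w == 0 || h == 0 || w > SIZE_MAX / 4 / h)
    break;                            /* w * h * 4 would overflow */
size_t bufferSize = w * h * 4;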
Example #8
static int Decode(void) {   // Fills kParams.curr_frame
  const WebPIterator* const curr = &kParams.curr_frame;
  WebPDecoderConfig* const config = &kParams.config;
  WebPDecBuffer* const output_buffer = &config->output;
  int ok = 0;

  ClearPreviousPic();
  output_buffer->colorspace = MODE_RGBA;
  ok = (WebPDecode(curr->fragment.bytes, curr->fragment.size,
                   config) == VP8_STATUS_OK);
  if (!ok) {
    fprintf(stderr, "Decoding of frame #%d failed!\n", curr->frame_num);
  } else {
    kParams.pic = output_buffer;
    if (kParams.use_color_profile) {
      ok = ApplyColorProfile(&kParams.iccp.chunk, output_buffer);
      if (!ok) {
        fprintf(stderr, "Applying color profile to frame #%d failed!\n",
                curr->frame_num);
      }
    }
  }
  return ok;
}
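
The fragment fed to WebPDecode() here comes from the demux API's frame iterator. A sketch of how such an iterator is typically driven (assuming a WebPData webp_data holding the whole file):

WebPDemuxer* demux = WebPDemux(&webp_data);
if (demux != NULL) {
    WebPIterator iter;
    if (WebPDemuxGetFrame(demux, 1, &iter)) {
        do {
            /* iter.fragment.bytes / iter.fragment.size feed WebPDecode() */
        } while (WebPDemuxNextFrame(&iter));
        WebPDemuxReleaseIterator(&iter);
    }
    WebPDemuxRelease(demux);
}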
Example #9
static jobject WebPFactory_decodeBuffer
  (JNIEnv *jniEnv, const uint8_t *buffer, size_t length, jobject options)
{
	// Validate image
	int bitmapWidth = 0;
	int bitmapHeight = 0;
	if (!WebPGetInfo(buffer, length, &bitmapWidth, &bitmapHeight))
	{
		jniEnv->ThrowNew(jrefs::java::lang::RuntimeException->jclassRef, "Invalid WebP format");
		return NULL;
	}

	// If the caller only asked for the bounds (inJustDecodeBounds), report them and stop
	if(options && jniEnv->GetBooleanField(options, jrefs::android::graphics::BitmapFactory->Options.inJustDecodeBounds) == JNI_TRUE)
	{
		jniEnv->SetIntField(options, jrefs::android::graphics::BitmapFactory->Options.outWidth, bitmapWidth);
		jniEnv->SetIntField(options, jrefs::android::graphics::BitmapFactory->Options.outHeight, bitmapHeight);
		return NULL;
	}

	// Initialize decoder config and configure scaling if requested
	WebPDecoderConfig config;
	if (!WebPInitDecoderConfig(&config))
	{
		jniEnv->ThrowNew(jrefs::java::lang::RuntimeException->jclassRef, "Unable to init WebP decoder config");
		return NULL;
	}

	if (options)
	{
		jint inSampleSize = jniEnv->GetIntField(options, jrefs::android::graphics::BitmapFactory->Options.inSampleSize);
		if (inSampleSize > 1)
		{
			config.options.use_scaling = 1;
			config.options.scaled_width = bitmapWidth /= inSampleSize;
			config.options.scaled_height = bitmapHeight /= inSampleSize;
		}
	}

	__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "Decoding into %dx%d bitmap", bitmapWidth, bitmapHeight);

	// Create bitmap
	jobject value__ARGB_8888 = jniEnv->GetStaticObjectField(jrefs::android::graphics::Bitmap->Config.jclassRef, jrefs::android::graphics::Bitmap->Config.ARGB_8888);
	jobject outputBitmap = jniEnv->CallStaticObjectMethod(jrefs::android::graphics::Bitmap->jclassRef, jrefs::android::graphics::Bitmap->createBitmap,
		(jint)bitmapWidth, (jint)bitmapHeight,
		value__ARGB_8888);
	if (!outputBitmap)
	{
		jniEnv->ThrowNew(jrefs::java::lang::RuntimeException->jclassRef, "Failed to allocate Bitmap");
		return NULL;
	}

	// Get information about bitmap passed
	AndroidBitmapInfo bitmapInfo;
	if (AndroidBitmap_getInfo(jniEnv, outputBitmap, &bitmapInfo) != ANDROID_BITMAP_RESULT_SUCCESS)
	{
		jniEnv->ThrowNew(jrefs::java::lang::RuntimeException->jclassRef, "Failed to get Bitmap information");
		return NULL;
	}

	// Lock pixels
	void *bitmapPixels = 0;
	if (AndroidBitmap_lockPixels(jniEnv, outputBitmap, &bitmapPixels) != ANDROID_BITMAP_RESULT_SUCCESS)
	{
		jniEnv->ThrowNew(jrefs::java::lang::RuntimeException->jclassRef, "Failed to lock Bitmap pixels");
		return NULL;
	}

	// Decode to RGBA directly into the locked bitmap pixels
	config.output.colorspace = MODE_RGBA;
	config.output.u.RGBA.rgba = (uint8_t*)bitmapPixels;
	config.output.u.RGBA.stride = bitmapInfo.stride;
	config.output.u.RGBA.size = bitmapInfo.height * bitmapInfo.stride;
	config.output.is_external_memory = 1;
	if (WebPDecode(buffer, length, &config) != VP8_STATUS_OK)
	{
		AndroidBitmap_unlockPixels(jniEnv, outputBitmap);
		jniEnv->ThrowNew(jrefs::java::lang::RuntimeException->jclassRef, "Failed to decode WebP pixel data");
		return NULL;
	}

	// Unlock pixels
	if (AndroidBitmap_unlockPixels(jniEnv, outputBitmap) != ANDROID_BITMAP_RESULT_SUCCESS)
	{
		jniEnv->ThrowNew(jrefs::java::lang::RuntimeException->jclassRef, "Failed to unlock Bitmap pixels");
		return NULL;
	}

	return outputBitmap;
}
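
A note on the colorspace choice: ARGB_8888 bitmaps store their bytes as R,G,B,A in memory, which is why MODE_RGBA lines up, but they are premultiplied by default while MODE_RGBA output is straight alpha. libwebp can premultiply during decode with a one-line change:

config.output.colorspace = MODE_rgbA;  /* lowercase "rgb", uppercase "A" = premultiplied alpha */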
Example #10
/**
Decodes a WebP image and returns a FIBITMAP image
@param webp_image Raw WebP image
@param flags FreeImage load flags
@return Returns a dib if successful, returns NULL otherwise
*/
static FIBITMAP *
DecodeImage(WebPData *webp_image, int flags) {
	FIBITMAP *dib = NULL;

	const uint8_t* data = webp_image->bytes;	// raw image data
	const size_t data_size = webp_image->size;	// raw image size

    VP8StatusCode webp_status = VP8_STATUS_OK;

	BOOL header_only = (flags & FIF_LOAD_NOPIXELS) == FIF_LOAD_NOPIXELS;

	// Main object storing the configuration for advanced decoding
	WebPDecoderConfig decoder_config;
	// Output buffer
	WebPDecBuffer* const output_buffer = &decoder_config.output;
	// Features gathered from the bitstream
	WebPBitstreamFeatures* const bitstream = &decoder_config.input;

	try {
		// Initialize the configuration as empty
		// This function must always be called first, unless WebPGetFeatures() is to be called
		if(!WebPInitDecoderConfig(&decoder_config)) {
			throw "Library version mismatch";
		}

		// Retrieve features from the bitstream
		// The bitstream structure is filled with information gathered from the bitstream
		webp_status = WebPGetFeatures(data, data_size, bitstream);
		if(webp_status != VP8_STATUS_OK) {
			throw FI_MSG_ERROR_PARSING;
		}

		// Allocate output dib

		unsigned bpp = bitstream->has_alpha ? 32 : 24;	
		unsigned width = (unsigned)bitstream->width;
		unsigned height = (unsigned)bitstream->height;

		dib = FreeImage_AllocateHeader(header_only, width, height, bpp, FI_RGBA_RED_MASK, FI_RGBA_GREEN_MASK, FI_RGBA_BLUE_MASK);
		if(!dib) {
			throw FI_MSG_ERROR_DIB_MEMORY;
		}

		if(header_only) {
			WebPFreeDecBuffer(output_buffer);
			return dib;
		}

		// --- Set decoding options ---

		// use multi-threaded decoding
		decoder_config.options.use_threads = 1;
		// set output color space
		output_buffer->colorspace = bitstream->has_alpha ? MODE_BGRA : MODE_BGR;

		// ---

		// decode the input stream, taking 'config' into account. 
		
		webp_status = WebPDecode(data, data_size, &decoder_config);
		if(webp_status != VP8_STATUS_OK) {
			throw FI_MSG_ERROR_PARSING;
		}

		// fill the dib with the decoded data

		const BYTE *src_bitmap = output_buffer->u.RGBA.rgba;
		const unsigned src_pitch = (unsigned)output_buffer->u.RGBA.stride;

		switch(bpp) {
			case 24:
				for(unsigned y = 0; y < height; y++) {
					const BYTE *src_bits = src_bitmap + y * src_pitch;						
					BYTE *dst_bits = (BYTE*)FreeImage_GetScanLine(dib, height-1-y);
					for(unsigned x = 0; x < width; x++) {
						dst_bits[FI_RGBA_BLUE]	= src_bits[0];	// B
						dst_bits[FI_RGBA_GREEN]	= src_bits[1];	// G
						dst_bits[FI_RGBA_RED]	= src_bits[2];	// R
						src_bits += 3;
						dst_bits += 3;
					}
				}
				break;
			case 32:
				for(unsigned y = 0; y < height; y++) {
					const BYTE *src_bits = src_bitmap + y * src_pitch;						
					BYTE *dst_bits = (BYTE*)FreeImage_GetScanLine(dib, height-1-y);
					for(unsigned x = 0; x < width; x++) {
						dst_bits[FI_RGBA_BLUE]	= src_bits[0];	// B
						dst_bits[FI_RGBA_GREEN]	= src_bits[1];	// G
						dst_bits[FI_RGBA_RED]	= src_bits[2];	// R
						dst_bits[FI_RGBA_ALPHA]	= src_bits[3];	// A
						src_bits += 4;
						dst_bits += 4;
					}
				}
				break;
		}

		// Free the decoder
		WebPFreeDecBuffer(output_buffer);

		return dib;

	} catch (const char *text) {
		if(dib) {
			FreeImage_Unload(dib);
		}
		WebPFreeDecBuffer(output_buffer);

		if(NULL != text) {
			FreeImage_OutputMessageProc(s_format_id, text);
		}

		return NULL;
	}
}
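
The per-scanline copy also flips rows, because FreeImage stores bitmaps bottom-up while libwebp emits them top-down. Newer libwebp releases expose a vertical-flip decode option that could let the decoder produce bottom-up rows directly (a sketch, untested against FreeImage):

decoder_config.options.flip = 1;  /* emit rows bottom-up during decode */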
Example #11
int webp_decode(const char *in_file, const char *out_file, const FfiWebpDecodeConfig *decode_config) {
  int return_value = -1;
  WebPDecoderConfig config;
  WebPDecBuffer* const output_buffer = &config.output;
  WebPBitstreamFeatures* const bitstream = &config.input;
  OutputFileFormat format = PNG;

  if (!WebPInitDecoderConfig(&config)) {
    //fprintf(stderr, "Library version mismatch!\n");
    return 1;
  }
  
  if (decode_config->output_format != format){
    format = decode_config->output_format;
  }
  if (decode_config->no_fancy_upsampling > 0){
    config.options.no_fancy_upsampling = 1;
  }
  if (decode_config->bypass_filtering > 0){
    config.options.bypass_filtering = 1;
  }
  if (decode_config->use_threads > 0){
    config.options.use_threads = 1;
  }
  if ((decode_config->crop_w | decode_config->crop_h) > 0){
    config.options.use_cropping = 1;
    config.options.crop_left   = decode_config->crop_x;
    config.options.crop_top    = decode_config->crop_y;
    config.options.crop_width  = decode_config->crop_w;
    config.options.crop_height = decode_config->crop_h;
  }
  if ((decode_config->resize_w | decode_config->resize_h) > 0){
    config.options.use_scaling = 1;
    config.options.scaled_width  = decode_config->resize_w;
    config.options.scaled_height = decode_config->resize_h;
  }
  
  VP8StatusCode status = VP8_STATUS_OK;
  size_t data_size = 0;
  const uint8_t* data = NULL;
  
  if (!UtilReadFile(in_file, &data, &data_size)) return -1;
  
  status = WebPGetFeatures(data, data_size, bitstream);
  if (status != VP8_STATUS_OK) {
    //fprintf(stderr, "This is invalid webp image!\n");
    return_value = 2;
    goto Error;
  }
  
  switch (format) {
    case PNG:
      output_buffer->colorspace = bitstream->has_alpha ? MODE_RGBA : MODE_RGB;
      break;
    case PAM:
      output_buffer->colorspace = MODE_RGBA;
      break;
    case PPM:
      output_buffer->colorspace = MODE_RGB;  // drops alpha for PPM
      break;
    case PGM:
      output_buffer->colorspace = bitstream->has_alpha ? MODE_YUVA : MODE_YUV;
      break;
    case ALPHA_PLANE_ONLY:
      output_buffer->colorspace = MODE_YUVA;
      break;
    default:
      free((void*)data);
      return 3;
  }
  status = WebPDecode(data, data_size, &config);
  
  if (status != VP8_STATUS_OK) {
    //fprintf(stderr, "Decoding of %s failed.\n", in_file);
    return_value = 4;
    goto Error;
  }
  UtilSaveOutput(output_buffer, format, out_file);
  return_value = 0;
  
Error:  
  free((void*)data);
  WebPFreeDecBuffer(output_buffer);
  return return_value;
}
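
When cropping and scaling are both enabled, as they can be here, the decoder applies the crop first and then scales the cropped region. An illustrative combination (values are arbitrary):

config.options.use_cropping = 1;
config.options.crop_left   = 10;    /* take a 200x200 window at (10,10)... */
config.options.crop_top    = 10;
config.options.crop_width  = 200;
config.options.crop_height = 200;
config.options.use_scaling = 1;
config.options.scaled_width  = 100; /* ...then resize it to 100x100 */
config.options.scaled_height = 100;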
Example #12
PyObject* WebPDecode_wrapper(PyObject* self, PyObject* args)
{
    PyBytesObject *webp_string;
    uint8_t *webp;
    Py_ssize_t size;
    PyObject *ret, *bytes, *pymode, *icc_profile = Py_None, *exif = Py_None;
    WebPDecoderConfig config;
    VP8StatusCode vp8_status_code = VP8_STATUS_OK;
    char* mode = "RGB";

    if (!PyArg_ParseTuple(args, "S", &webp_string)) {
        Py_RETURN_NONE;
    }

    if (!WebPInitDecoderConfig(&config)) {
        Py_RETURN_NONE;
    }

    PyBytes_AsStringAndSize((PyObject *) webp_string, (char**)&webp, &size);

    vp8_status_code = WebPGetFeatures(webp, size, &config.input);
    if (vp8_status_code == VP8_STATUS_OK) {
        // If we don't set it, we don't get alpha.
        // Initialized to MODE_RGB
        if (config.input.has_alpha) {
            config.output.colorspace = MODE_RGBA;
            mode = "RGBA";
        }

#ifndef HAVE_WEBPMUX
        vp8_status_code = WebPDecode(webp, size, &config);
#else
       {
        int copy_data = 0;
        WebPData data = { webp, size };
        WebPMuxFrameInfo image;
        WebPData icc_profile_data = {0};
        WebPData exif_data = {0};

        WebPMux* mux = WebPMuxCreate(&data, copy_data);
        WebPMuxGetFrame(mux, 1, &image);
        webp = (uint8_t*)image.bitstream.bytes;
        size = image.bitstream.size;

        vp8_status_code = WebPDecode(webp, size, &config);

        WebPMuxGetChunk(mux, "ICCP", &icc_profile_data);
        if (icc_profile_data.size > 0) {
            icc_profile = PyBytes_FromStringAndSize((const char*)icc_profile_data.bytes, icc_profile_data.size);
        }

        WebPMuxGetChunk(mux, "EXIF", &exif_data);
        if (exif_data.size > 0) {
            exif = PyBytes_FromStringAndSize((const char*)exif_data.bytes, exif_data.size);
        }

        WebPMuxDelete(mux);
        }
#endif
    }

    if (vp8_status_code != VP8_STATUS_OK) {
        WebPFreeDecBuffer(&config.output);
        Py_RETURN_NONE;
    }

    if (config.output.colorspace < MODE_YUV) {
        bytes = PyBytes_FromStringAndSize((char *)config.output.u.RGBA.rgba,
                                          config.output.u.RGBA.size);
    } else {
        // Skipping YUV for now. Need Test Images.
        // UNDONE -- unclear if we'll ever get here if we set mode_rgb*
        bytes = PyBytes_FromStringAndSize((char *)config.output.u.YUVA.y,
                                          config.output.u.YUVA.y_size);
    }

#if PY_VERSION_HEX >= 0x03000000
    pymode = PyUnicode_FromString(mode);
#else
    pymode = PyString_FromString(mode);
#endif
    ret = Py_BuildValue("SiiSSS", bytes, config.output.width,
                        config.output.height, pymode, icc_profile, exif);
    WebPFreeDecBuffer(&config.output);
    return ret;
}
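
The mux branch assumes WebPMuxCreate() and WebPMuxGetFrame() succeed. A hardened sketch (error paths only; recent libwebp documents the returned frame bitstream as caller-owned, hence the WebPDataClear()):

WebPMux* mux = WebPMuxCreate(&data, copy_data);
if (mux != NULL) {
    WebPMuxFrameInfo image;
    if (WebPMuxGetFrame(mux, 1, &image) == WEBP_MUX_OK) {
        vp8_status_code = WebPDecode(image.bitstream.bytes,
                                     image.bitstream.size, &config);
        WebPDataClear(&image.bitstream);
    }
    WebPMuxDelete(mux);
} else {
    vp8_status_code = WebPDecode(webp, size, &config);
}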
Example #13
jobject doDecode(
    JNIEnv* env,
    uint8_t* encoded_image,
    unsigned encoded_image_length,
    jobject bitmapOptions,
    jfloat scale) {

  // Options manipulation is taken from https://github.com/android/platform_frameworks_base/blob/master/core/jni/android/graphics/BitmapFactory.cpp
  int image_width = 0;
  int image_height = 0;

  jobject bitmap = nullptr;

  WebPGetInfo(
      encoded_image,
      encoded_image_length,
      &image_width,
      &image_height);

  WebPDecoderConfig config;
  WebPInitDecoderConfig(&config);

  if ((bitmapOptions != nullptr) &&
      (setOutDimensions(env, bitmapOptions, image_width, image_height))) {
    return {};
  }

  if (scale != 1.0f) {
    image_width = int(image_width * scale + 0.5f);
    image_height = int(image_height * scale + 0.5f);
    config.options.use_scaling = 1;
    config.options.scaled_width = image_width;
    config.options.scaled_height = image_height;
  }

  bitmap = createBitmap(env, image_width, image_height, bitmapOptions);
  RETURN_NULL_IF_EXCEPTION(env);

  void* raw_pixels = nullptr;

  int rc = AndroidBitmap_lockPixels(env, bitmap, (void**) &raw_pixels);
  if (rc != ANDROID_BITMAP_RESULT_SUCCESS) {
    env->ThrowNew(runtimeExceptionClass, "Decode error locking pixels");
    return {};
  }

  config.output.colorspace = MODE_RGBA;
  config.output.u.RGBA.rgba = (uint8_t*) raw_pixels;
  config.output.u.RGBA.stride = image_width * 4;
  config.output.u.RGBA.size = image_width * image_height * 4;
  config.output.is_external_memory = 1;

  WebPDecode(encoded_image, encoded_image_length, &config);

  rc = AndroidBitmap_unlockPixels(env, bitmap);
  if (rc != ANDROID_BITMAP_RESULT_SUCCESS) {
    env->ThrowNew(runtimeExceptionClass, "Decode error unlocking pixels");
    return {};
  }

  if (bitmapOptions != nullptr) {
    setBitmapSize(env, bitmapOptions, image_width, image_height);
  }

  return bitmap;
}