// Decode the next frame of the (possibly animated) WebP stream into *image.
// Returns false on scan failure, sequential (non-seekable) devices, demux
// exhaustion, or decode errors.
bool QWebpHandler::read(QImage *image) {
    // Requires random access: the demuxer needs the whole stream scanned first.
    if (!ensureScanned() || device()->isSequential())
        return false;
    if (m_hasFirstFrameRead) {
        // Subsequent calls advance the demux iterator; false means no more frames.
        if (!WebPDemuxNextFrame(&m_iter))
            return false;
    } else {
        // First call decodes the frame m_iter already points at.
        m_hasFirstFrameRead = true;
    }
    // Query width/height of the current frame's bitstream fragment.
    WebPBitstreamFeatures features;
    VP8StatusCode status = WebPGetFeatures(m_iter.fragment.bytes, m_iter.fragment.size, &features);
    if (status != VP8_STATUS_OK)
        return false;
    QImage result(features.width, features.height, QImage::Format_ARGB32);
    uint8_t *output = result.bits();
    size_t output_size = result.byteCount();
    // QImage::Format_ARGB32 is BGRA in memory on little-endian hosts, so pick
    // the matching libwebp output order per host byte order.
#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN
    if (!WebPDecodeBGRAInto(
            reinterpret_cast<const uint8_t*>(m_iter.fragment.bytes), m_iter.fragment.size,
            output, output_size, result.bytesPerLine()))
#else
    if (!WebPDecodeARGBInto(
            reinterpret_cast<const uint8_t*>(m_iter.fragment.bytes), m_iter.fragment.size,
            output, output_size, result.bytesPerLine()))
#endif
        return false;
    *image = result;
    return true;
}
/* Python wrapper: parse a bytes object containing WebP data and return its
 * has_alpha flag as a Python int ("b" format); returns None on argument or
 * parse errors.
 *
 * Fixes over the previous version:
 *  - `WebPBitstreamFeatures* const features` was an UNINITIALIZED pointer
 *    passed to WebPGetFeatures and then dereferenced — undefined behavior.
 *    Use a stack-allocated struct instead.
 *  - `free((void*)webp)` freed the internal buffer of the Python bytes
 *    object (owned by the interpreter) — heap corruption. Removed.
 *  - PyBytes_AsStringAndSize's return value is now checked.
 */
PyObject* WebPGetFeatures_wrapper(PyObject* self, PyObject* args) {
    PyBytesObject *webp_string;
    const uint8_t* webp = NULL;
    VP8StatusCode vp8_status_code = VP8_STATUS_OK;
    Py_ssize_t size;
    WebPBitstreamFeatures features;

    if (!PyArg_ParseTuple(args, "S", &webp_string)) {
        Py_INCREF(Py_None);
        return Py_None;
    }

    if (PyBytes_AsStringAndSize((PyObject *) webp_string,
                                (char**)&webp, &size) < 0) {
        Py_INCREF(Py_None);
        return Py_None;
    }

    vp8_status_code = WebPGetFeatures(webp, size, &features);
    if (vp8_status_code != VP8_STATUS_OK) {
        // TODO: raise some sort of error
        printf("Error occured checking webp file with code: %d\n", vp8_status_code);
        Py_INCREF(Py_None);
        return Py_None;
    }

    /* `webp` points into webp_string's internal storage; do NOT free it. */
    return Py_BuildValue("b", features.has_alpha);
}
// Build a single-frame demuxer for a raw (headerless) VP8/VP8L bitstream held
// entirely in 'mem'. On success stores the new demuxer in *demuxer and returns
// PARSE_OK; otherwise *demuxer is NULL and the status distinguishes
// "need more data" from a hard parse error.
static ParseStatus CreateRawImageDemuxer(MemBuffer* const mem, WebPDemuxer** demuxer) {
  WebPBitstreamFeatures features;
  const VP8StatusCode status =
      WebPGetFeatures(mem->buf_, mem->buf_size_, &features);
  *demuxer = NULL;
  if (status != VP8_STATUS_OK) {
    // NOT_ENOUGH_DATA is recoverable (caller may supply more bytes).
    return (status == VP8_STATUS_NOT_ENOUGH_DATA) ? PARSE_NEED_MORE_DATA
                                                  : PARSE_ERROR;
  }

  {
    WebPDemuxer* const dmux = (WebPDemuxer*)WebPSafeCalloc(1ULL, sizeof(*dmux));
    Frame* const frame = (Frame*)WebPSafeCalloc(1ULL, sizeof(*frame));
    if (dmux == NULL || frame == NULL) goto Error;
    InitDemux(dmux, mem);
    // The whole buffer is frame #1 and is complete.
    SetFrameInfo(0, mem->buf_size_, 1 /*frame_num*/, 1 /*complete*/, &features,
                 frame);
    if (!AddFrame(dmux, frame)) goto Error;  // frame ownership passes on success
    dmux->state_ = WEBP_DEMUX_DONE;
    dmux->canvas_width_ = frame->width_;
    dmux->canvas_height_ = frame->height_;
    dmux->feature_flags_ |= frame->has_alpha_ ? ALPHA_FLAG : 0;
    dmux->num_frames_ = 1;
    assert(IsValidSimpleFormat(dmux));
    *demuxer = dmux;
    return PARSE_OK;

 Error:
    // Safe even when AddFrame failed: frame was not adopted on failure.
    WebPSafeFree(dmux);
    WebPSafeFree(frame);
    return PARSE_ERROR;
  }
}
// Decode a cropped region of the WebP buffer, starting at (x0, y0), directly
// into the caller-owned image_data_32 raster. Throws image_reader_exception
// on any libwebp failure.
void webp_reader<T>::read(unsigned x0, unsigned y0,image_data_32& image)
{
    WebPDecoderConfig config;
    config_guard guard(config);  // RAII release of config.output
    if (!WebPInitDecoderConfig(&config))
    {
        throw image_reader_exception("WEBP reader: WebPInitDecoderConfig failed");
    }

    config.options.use_cropping = 1;
    config.options.crop_left = x0;
    config.options.crop_top = y0;
    // Clamp the crop window to the source dimensions.
    // NOTE(review): width_ - x0 is unsigned arithmetic — if a caller ever
    // passes x0 > width_ this wraps to a huge value; presumably callers
    // guarantee x0/y0 are in range. TODO confirm.
    config.options.crop_width = std::min(width_ - x0, image.width());
    config.options.crop_height = std::min(height_ - y0, image.height());

    if (WebPGetFeatures(buffer_->data(), buffer_->size(), &config.input) != VP8_STATUS_OK)
    {
        throw image_reader_exception("WEBP reader: WebPGetFeatures failed");
    }

    // Decode straight into the caller's buffer (no internal allocation).
    config.output.colorspace = MODE_RGBA;
    config.output.u.RGBA.rgba = (uint8_t *)image.getBytes();
    config.output.u.RGBA.stride = 4 * image.width();
    config.output.u.RGBA.size = image.width() * image.height() * 4;
    config.output.is_external_memory = 1;

    if (WebPDecode(buffer_->data(), buffer_->size(), &config) != VP8_STATUS_OK)
    {
        throw image_reader_exception("WEBP reader: WebPDecode failed");
    }
}
/*!
 * readHeaderMemWebP()
 *
 *      Input:  data
 *              size (100 bytes is sufficient)
 *              &w (<return> width)
 *              &h (<return> height)
 *              &spp (<return> spp (3 or 4))
 *      Return: 0 if OK, 1 on error
 *
 *  Notes:
 *      (1) Extracts width, height and samples/pixel from the WebP header
 *          without decoding the image.
 *
 *  Fixes: PROCNAME previously said "readHeaderWebP", misattributing error
 *  messages to the wrong function; and `size` was cast to l_int32, which
 *  could truncate (or go negative) for large buffers — WebPGetFeatures
 *  takes a size_t, so pass it through unchanged.
 */
l_int32
readHeaderMemWebP(const l_uint8 *data,
                  size_t size,
                  l_int32 *pw,
                  l_int32 *ph,
                  l_int32 *pspp)
{
WebPBitstreamFeatures  features;

    PROCNAME("readHeaderMemWebP");

    if (pw) *pw = 0;
    if (ph) *ph = 0;
    if (pspp) *pspp = 0;
    if (!data)
        return ERROR_INT("data not defined", procName, 1);
    if (!pw || !ph || !pspp)
        return ERROR_INT("input ptr(s) not defined", procName, 1);

    if (WebPGetFeatures(data, size, &features))
        return ERROR_INT("invalid WebP file", procName, 1);
    *pw = features.width;
    *ph = features.height;
    *pspp = (features.has_alpha) ? 4 : 3;
    return 0;
}
DEFINE_FUNC_1(webp_get_features, data_buffer_value) { if (!val_is_buffer(data_buffer_value)) { val_throw(alloc_string("webp_get_features: Expected to be a buffer")); return alloc_null(); } buffer data_buffer = val_to_buffer(data_buffer_value); int data_len = buffer_size(data_buffer); char *data_ptr = buffer_data(data_buffer); WebPBitstreamFeatures features = {0}; VP8StatusCode code = WebPGetFeatures((const unsigned char *)data_ptr, data_len, &features); if (code != VP8_STATUS_OK) { val_throw(alloc_string("webp_get_features: Error: (code != VP8_STATUS_OK)")); return alloc_null(); } value array = alloc_array(7); val_array_set_i(array, 0, alloc_int(features.width)); val_array_set_i(array, 1, alloc_int(features.height)); val_array_set_i(array, 2, alloc_int(features.has_alpha)); //val_array_set_i(array, 3, alloc_int(features.bitstream_version)); val_array_set_i(array, 3, alloc_int(0)); val_array_set_i(array, 4, alloc_int(features.no_incremental_decoding)); val_array_set_i(array, 5, alloc_int(features.rotate)); val_array_set_i(array, 6, alloc_int(features.uv_sampling)); return array; }
// Decode a WebP buffer into p_image as RGB8 or RGBA8 (chosen by the
// bitstream's has_alpha flag). Returns OK, ERR_INVALID_PARAMETER for a null
// image, or ERR_FILE_CORRUPT on decode failure.
Error webp_load_image_from_buffer(Image *p_image, const uint8_t *p_buffer, int p_buffer_len) {

	ERR_FAIL_NULL_V(p_image, ERR_INVALID_PARAMETER);

	WebPBitstreamFeatures features;
	if (WebPGetFeatures(p_buffer, p_buffer_len, &features) != VP8_STATUS_OK) {
		// ERR_EXPLAIN("Error decoding WEBP image");
		ERR_FAIL_V(ERR_FILE_CORRUPT);
	}

	PoolVector<uint8_t> dst_image;
	// NOTE(review): width * height * 4 is int arithmetic — could overflow for
	// pathological dimensions; presumably limited upstream. TODO confirm.
	int datasize = features.width * features.height * (features.has_alpha ? 4 : 3);
	dst_image.resize(datasize);

	PoolVector<uint8_t>::Write dst_w = dst_image.write();

	bool errdec = false;
	if (features.has_alpha) {
		errdec = WebPDecodeRGBAInto(p_buffer, p_buffer_len, dst_w.ptr(), datasize, 4 * features.width) == NULL;
	} else {
		errdec = WebPDecodeRGBInto(p_buffer, p_buffer_len, dst_w.ptr(), datasize, 3 * features.width) == NULL;
	}

	// Release the write lock before handing the pool vector to the image.
	dst_w = PoolVector<uint8_t>::Write();

	//ERR_EXPLAIN("Error decoding webp!");
	ERR_FAIL_COND_V(errdec, ERR_FILE_CORRUPT);

	p_image->create(features.width, features.height, 0, features.has_alpha ? Image::FORMAT_RGBA8 : Image::FORMAT_RGB8, dst_image);

	return OK;
}
// Probe the WebP header and record alpha/width/height for later decoding.
// NOTE(review): if WebPGetFeatures fails, alpha/width/height are left
// untouched — presumably the ImageReader base initializes them; verify.
WebPImageReader::WebPImageReader(unsigned char* src, size_t len) : ImageReader(src, len) {
    // Only config.input is used here; WebPGetFeatures fills it in.
    WebPDecoderConfig config;
    if (WebPGetFeatures(source, length, &config.input) == VP8_STATUS_OK) {
        alpha = config.input.has_alpha;
        width = config.input.width;
        height = config.input.height;
    }
}
/* Python wrapper: decode a WebP bytes object and return the tuple
 * (pixel_bytes, width, height, mode) where mode is "RGB" or "RGBA"
 * depending on the bitstream's alpha flag. Returns None on failure. */
PyObject* WebPDecode_wrapper(PyObject* self, PyObject* args)
{
    PyBytesObject *webp_string;
    uint8_t *webp;
    Py_ssize_t size;
    PyObject *ret, *bytes, *pymode;
    WebPDecoderConfig config;
    VP8StatusCode vp8_status_code = VP8_STATUS_OK;
    char* mode = "RGB";

    if (!PyArg_ParseTuple(args, "S", &webp_string)) {
        Py_RETURN_NONE;
    }

    if (!WebPInitDecoderConfig(&config)) {
        Py_RETURN_NONE;
    }

    PyBytes_AsStringAndSize((PyObject *) webp_string, (char**)&webp, &size);
    vp8_status_code = WebPGetFeatures(webp, size, &config.input);

    if (vp8_status_code == VP8_STATUS_OK) {
        // If we don't set it, we don't get alpha.
        // Initialized to MODE_RGB
        if (config.input.has_alpha) {
            config.output.colorspace = MODE_RGBA;
            mode = "RGBA";
        }
        vp8_status_code = WebPDecode(webp, size, &config);
    }

    if (vp8_status_code != VP8_STATUS_OK) {
        WebPFreeDecBuffer(&config.output);
        Py_RETURN_NONE;
    }

    // RGB-family outputs expose the pixels through the RGBA union member.
    if (config.output.colorspace < MODE_YUV) {
        bytes = PyBytes_FromStringAndSize((char *)config.output.u.RGBA.rgba,
                                          config.output.u.RGBA.size);
    } else {
        // Skipping YUV for now. Need Test Images.
        // UNDONE -- unclear if we'll ever get here if we set mode_rgb*
        bytes = PyBytes_FromStringAndSize((char *)config.output.u.YUVA.y,
                                          config.output.u.YUVA.y_size);
    }

#if PY_VERSION_HEX >= 0x03000000
    pymode = PyUnicode_FromString(mode);
#else
    pymode = PyString_FromString(mode);
#endif
    ret = Py_BuildValue("SiiS", bytes, config.output.width,
                        config.output.height, pymode);
    WebPFreeDecBuffer(&config.output);
    return ret;
}
/* imlib2 loader entry point: fill in image metadata (and, when requested,
 * decode pixel data) for a WebP file.  Returns 1 on success, 0 on failure.
 * Uses WebPGetFeatures when available (ABI >= 0x200), falling back to
 * WebPGetInfo (no alpha detection) for older libwebp. */
char load(ImlibImage * im, ImlibProgressFunction progress, char progress_granularity, char immediate_load)
{
    uint8_t *data;
    size_t size;
    int w,h;
    int has_alpha;
#if (WEBP_DECODER_ABI_VERSION >= 0x200)
    WebPBitstreamFeatures features;
#endif
    char ret = 0;

    /* Already loaded. */
    if(im->data)
        return 0;

    /* Whole file is slurped into memory: libwebp decodes memory-to-memory. */
    if(!(data = read_file(im->real_file, &size, progress)))
        return 0;

#if (WEBP_DECODER_ABI_VERSION >= 0x200)
    if(WebPGetFeatures(data, size, &features) != VP8_STATUS_OK)
        goto EXIT;
    w = features.width;
    h = features.height;
    has_alpha = features.has_alpha;
#else /* compatibility with versions <= 0.1.3 */
    if (!WebPGetInfo(data, size, &w, &h))
        goto EXIT;
    has_alpha = 0;
#endif

    /* Header-only pass: record dimensions, alpha flag and format name. */
    if(!im->loader && !im->data)
    {
        im->w = w;
        im->h = h;

        if(!IMAGE_DIMENSIONS_OK(w, h))
            goto EXIT;

        if(!has_alpha)
            UNSET_FLAGS(im->flags, F_HAS_ALPHA);
        else
            SET_FLAGS(im->flags, F_HAS_ALPHA);

        im->format = strdup("webp");
    }

    /* Full decode when a loader/progress callback asks for pixels.
     * NOTE(review): the WebPDecodeBGRA result is not NULL-checked and
     * ret is set to 1 regardless — decode failure is silently reported
     * as success; confirm whether callers tolerate im->data == NULL. */
    if((!im->data && im->loader) || immediate_load || progress)
        im->data = (DATA32*)WebPDecodeBGRA(data, size, &w, &h);

    if(progress)
        progress(im, 100, 0, 0, 0, 0);

    ret = 1;

EXIT:
    free(data);
    return ret;
}
/* Allocate and initialise a Read state for either a named file or an
 * in-memory buffer.  For files, only MINIMAL_HEADER bytes are fetched to
 * sniff the features; for buffers the whole buffer is probed.  Returns
 * NULL (after freeing the partial state) on any failure. */
static Read *
read_new( const char *filename, void *buf, size_t len )
{
	Read *read;
	unsigned char header[MINIMAL_HEADER];

	if( !(read = VIPS_NEW( NULL, Read )) )
		return( NULL );

	read->filename = g_strdup( filename );
	read->buf = buf;
	read->len = len;
	read->idec = NULL;

	WebPInitDecoderConfig( &read->config );
	if( filename ) {
		/* Header sniff only — full data is read later. */
		if( vips__get_bytes( filename, header, MINIMAL_HEADER ) &&
			WebPGetFeatures( header, MINIMAL_HEADER, 
				&read->config.input ) != VP8_STATUS_OK ) {
			read_free( read );
			return( NULL );
		}
	}
	else {
		if( WebPGetFeatures( read->buf, read->len, 
			&read->config.input ) != VP8_STATUS_OK ) {
			read_free( read );
			return( NULL );
		}
	}

	/* Pick output colourspace from the bitstream's alpha flag. */
	if( read->config.input.has_alpha )
		read->config.output.colorspace = MODE_RGBA;
	else
		read->config.output.colorspace = MODE_RGB;

	read->config.options.use_threads = TRUE;

	return( read );
}
bool WebPDecoder::checkSignature(const String & signature) const { bool ret = false; if(signature.size() >= WEBP_HEADER_SIZE) { WebPBitstreamFeatures features; if(VP8_STATUS_OK == WebPGetFeatures((uint8_t *)signature.c_str(), WEBP_HEADER_SIZE, &features)) { ret = true; } } return ret; }
// Verify that SkEncodeBitmap picks the lossy WebP path for quality 99 and
// the lossless path for quality 100, by sniffing the encoded bitstream's
// 'format' field with WebPGetFeatures.
DEF_TEST(Encode_WebpQuality, r) {
    SkBitmap bm;
    bm.allocN32Pixels(100, 100);
    bm.eraseColor(SK_ColorBLUE);
    // Quality 100 is the documented trigger for lossless encoding.
    auto dataLossy    = SkEncodeBitmap(bm, SkEncodedImageFormat::kWEBP, 99);
    auto dataLossLess = SkEncodeBitmap(bm, SkEncodedImageFormat::kWEBP, 100);

    // Mirrors WebPBitstreamFeatures::format values.
    enum Format {
        kMixed    = 0,
        kLossy    = 1,
        kLossless = 2,
    };

    auto test = [&r](const sk_sp<SkData>& data, Format expected) {
        auto printFormat = [](int f) {
            switch (f) {
                case kMixed:    return "mixed";
                case kLossy:    return "lossy";
                case kLossless: return "lossless";
                default:        return "unknown";
            }
        };

        if (!data) {
            ERRORF(r, "Failed to encode. Expected %s", printFormat(expected));
            return;
        }

        WebPBitstreamFeatures features;
        auto status = WebPGetFeatures(data->bytes(), data->size(), &features);
        if (status != VP8_STATUS_OK) {
            ERRORF(r, "Encode had an error %i. Expected %s", status, printFormat(expected));
            return;
        }

        if (expected != features.format) {
            ERRORF(r, "Expected %s encode, but got format %s", printFormat(expected),
                                                               printFormat(features.format));
        }
    };

    test(dataLossy, kLossy);
    test(dataLossLess, kLossless);
}
// Read just enough of the WebP source (file or in-memory buffer) to fill in
// m_width/m_height/m_type/channels. Returns false if the header does not
// parse. For file sources the stream is left open for the later decode.
bool WebPDecoder::readHeader()
{
    uint8_t header[WEBP_HEADER_SIZE] = { 0 };
    if (m_buf.empty())
    {
        fs.open(m_filename.c_str(), std::ios::binary);
        fs.seekg(0, std::ios::end);
        fs_size = safeCastToSizeT(fs.tellg(), "File is too large");
        fs.seekg(0, std::ios::beg);
        CV_Assert(fs && "File stream error");
        CV_CheckGE(fs_size, WEBP_HEADER_SIZE, "File is too small");
        CV_CheckLE(fs_size, param_maxFileSize, "File is too large. Increase OPENCV_IMGCODECS_WEBP_MAX_FILE_SIZE parameter if you want to process large files");

        fs.read((char*)header, sizeof(header));
        CV_Assert(fs && "Can't read WEBP_HEADER_SIZE bytes");
    }
    else
    {
        CV_CheckGE(m_buf.total(), WEBP_HEADER_SIZE, "Buffer is too small");
        memcpy(header, m_buf.ptr(), sizeof(header));
        data = m_buf;  // decode stage will read pixels from the user buffer
    }

    // Header bytes alone are sufficient for WebPGetFeatures.
    WebPBitstreamFeatures features;
    if (VP8_STATUS_OK == WebPGetFeatures(header, sizeof(header), &features))
    {
        m_width  = features.width;
        m_height = features.height;

        // Alpha flag decides 3- vs 4-channel output.
        if (features.has_alpha)
        {
            m_type = CV_8UC4;
            channels = 4;
        }
        else
        {
            m_type = CV_8UC3;
            channels = 3;
        }

        return true;
    }

    return false;
}
/*!
 * \brief   pixReadMemWebP()
 *
 * \param[in]    filedata    webp compressed data in memory
 * \param[in]    filesize    number of bytes in data
 * \return  pix 32 bpp, or NULL on error
 *
 * <pre>
 * Notes:
 *      (1) When the encoded data only has 3 channels (no alpha),
 *          WebPDecodeRGBAInto() generates a raster of 32-bit pixels, with
 *          the alpha channel set to opaque (255).
 *      (2) We don't need to use the gnu runtime functions like fmemopen()
 *          for redirecting data from a stream to memory, because
 *          the webp library has been written with memory-to-memory
 *          functions at the lowest level (which is good!).  And, in
 *          any event, fmemopen() doesn't work with l_binaryReadStream().
 * </pre>
 */
PIX *
pixReadMemWebP(const l_uint8  *filedata,
               size_t          filesize)
{
l_uint8   *out = NULL;
l_int32    w, h, has_alpha, wpl, stride;
l_uint32  *data;
size_t     size;
PIX       *pix;
WebPBitstreamFeatures  features;

    PROCNAME("pixReadMemWebP");

    if (!filedata)
        return (PIX *)ERROR_PTR("filedata not defined", procName, NULL);

    /* Header-only probe for dimensions and alpha. */
    if (WebPGetFeatures(filedata, filesize, &features))
        return (PIX *)ERROR_PTR("Invalid WebP file", procName, NULL);
    w = features.width;
    h = features.height;
    has_alpha = features.has_alpha;

        /* Write from compressed Y,U,V arrays to pix raster data */
    pix = pixCreate(w, h, 32);
    pixSetInputFormat(pix, IFF_WEBP);
    if (has_alpha) pixSetSpp(pix, 4);
    data = pixGetData(pix);
    wpl = pixGetWpl(pix);
    stride = wpl * 4;  /* bytes per row of the pix raster */
    size = stride * h;
    out = WebPDecodeRGBAInto(filedata, filesize, (uint8_t *)data, size,
                             stride);
    if (out == NULL) {  /* error: out should also point to data */
        pixDestroy(&pix);
        return (PIX *)ERROR_PTR("WebP decode failed", procName, NULL);
    }

        /* The WebP API expects data in RGBA order.  The pix stores
         * in host-dependent order with R as the MSB and A as the LSB.
         * On little-endian machines, the bytes in the word must
         * be swapped; e.g., R goes from byte 0 (LSB) to byte 3 (MSB).
         * No swapping is necessary for big-endians. */
    pixEndianByteSwap(pix);
    return pix;
}
/* Allocate a Read state for a named file (mmap'd) or an in-memory buffer.
 * Returns NULL, freeing the partial state, on any failure. */
static Read *
read_new( const char *filename, void *data, size_t length )
{
	Read *read;

	if( !(read = VIPS_NEW( NULL, Read )) )
		return( NULL );

	read->filename = g_strdup( filename );
	read->data = data;
	read->length = length;
	read->fd = 0;
	read->idec = NULL;

	if( read->filename ) { 
		/* libwebp makes streaming from a file source very hard. We 
		 * have to read to a full memory buffer, then copy to out.
		 *
		 * mmap the input file, it's slightly quicker.
		 */
		if( (read->fd = vips__open_image_read( read->filename )) < 0 ||
			(read->length = vips_file_length( read->fd )) < 0 ||
			!(read->data = vips__mmap( read->fd, 
				FALSE, read->length, 0 )) ) {
			read_free( read );
			return( NULL );
		}
	}

	WebPInitDecoderConfig( &read->config );
	/* Only the first MINIMAL_HEADER bytes are probed for features.
	 * NOTE(review): for memory buffers shorter than MINIMAL_HEADER this
	 * reads past the end of the supplied data — presumably callers
	 * always pass at least that much; verify. */
	if( WebPGetFeatures( read->data, MINIMAL_HEADER, 
		&read->config.input ) != VP8_STATUS_OK ) {
		read_free( read );
		return( NULL );
	}

	if( read->config.input.has_alpha )
		read->config.output.colorspace = MODE_RGBA;
	else
		read->config.output.colorspace = MODE_RGB;

	read->config.options.use_threads = TRUE;

	return( read );
}
// Decoder functions PyObject* _anim_decoder_new(PyObject* self, PyObject* args) { PyBytesObject *webp_string; const uint8_t *webp; Py_ssize_t size; WebPData webp_src; char* mode; WebPDecoderConfig config; WebPAnimDecoderObject* decp = NULL; WebPAnimDecoder* dec = NULL; if (!PyArg_ParseTuple(args, "S", &webp_string)) { return NULL; } PyBytes_AsStringAndSize((PyObject *)webp_string, (char**)&webp, &size); webp_src.bytes = webp; webp_src.size = size; // Sniff the mode, since the decoder API doesn't tell us mode = "RGBA"; if (WebPGetFeatures(webp, size, &config.input) == VP8_STATUS_OK) { if (!config.input.has_alpha) { mode = "RGBX"; } } // Create the decoder (default mode is RGBA, if no options passed) decp = PyObject_New(WebPAnimDecoderObject, &WebPAnimDecoder_Type); if (decp) { decp->mode = mode; if (WebPDataCopy(&webp_src, &(decp->data))) { dec = WebPAnimDecoderNew(&(decp->data), NULL); if (dec) { if (WebPAnimDecoderGetInfo(dec, &(decp->info))) { decp->dec = dec; return (PyObject*)decp; } } } PyObject_Del(decp); } PyErr_SetString(PyExc_RuntimeError, "could not create decoder object"); return NULL; }
int webpGetInfo( const uint8_t* data, size_t data_size, int* width, int* height, int* has_alpha ) { WebPBitstreamFeatures features; if (WebPGetFeatures(data, data_size, &features) != VP8_STATUS_OK) { return 0; } if(width != NULL) { *width = features.width; } if(height != NULL) { *height = features.height; } if(has_alpha != NULL) { *has_alpha = features.has_alpha; } return 1; }
NS_CC_BEGIN

// Decode an in-memory WebP buffer into the image's RGBA8888 pixel storage.
// Populates m_nWidth/m_nHeight/m_pData; returns false on any decode failure
// (or always false when ENABLE_WEBP is off).
bool CCImage::_initWithWebpData(void *pData, int nDataLen)
{
#if ENABLE_WEBP
	bool bRet = false;
	do
	{
        WebPDecoderConfig config;
        if (WebPInitDecoderConfig(&config) == 0) break;
        if (WebPGetFeatures((uint8_t*)pData, nDataLen, &config.input) != VP8_STATUS_OK) break;
        if (config.input.width == 0 || config.input.height == 0) break;

        // Always decode to RGBA, even for opaque sources.
        config.output.colorspace = MODE_RGBA;
        m_nBitsPerComponent = 8;
        m_nWidth    = config.input.width;
        m_nHeight   = config.input.height;
        m_bHasAlpha = true;

        int bufferSize = m_nWidth * m_nHeight * 4;
        m_pData = new unsigned char[bufferSize];

        // Decode directly into our buffer; libwebp does not take ownership.
        config.output.u.RGBA.rgba = (uint8_t*)m_pData;
        config.output.u.RGBA.stride = m_nWidth * 4;
        config.output.u.RGBA.size = bufferSize;
        config.output.is_external_memory = 1;

        if (WebPDecode((uint8_t*)pData, nDataLen, &config) != VP8_STATUS_OK)
        {
            delete []m_pData;
            m_pData = NULL;
            break;
        }

        bRet = true;
	} while (0);
	return bRet;
#else 
	return false;
#endif
}
// Read 'in_file' fully into *data/*data_size and parse its bitstream
// features into 'bitstream' (or a throwaway local when NULL is passed).
// On parse failure the file buffer is freed and the outputs are reset.
// Returns 1 on success, 0 on failure.
int LoadWebP(const char* const in_file,
             const uint8_t** data, size_t* data_size,
             WebPBitstreamFeatures* bitstream) {
  VP8StatusCode status;
  WebPBitstreamFeatures local_features;
  if (!ImgIoUtilReadFile(in_file, data, data_size)) return 0;

  if (bitstream == NULL) {
    bitstream = &local_features;
  }

  status = WebPGetFeatures(*data, *data_size, bitstream);
  if (status != VP8_STATUS_OK) {
    // Caller must not keep the buffer on failure; free and null it here.
    free((void*)*data);
    *data = NULL;
    *data_size = 0;
    PrintWebPError(in_file, status);
    return 0;
  }
  return 1;
}
/* Python 2 wrapper: parse a str (bytes) object containing WebP data and
 * return (width, height, has_alpha).  Returns None on bad arguments and
 * raises RuntimeError on libwebp errors.
 *
 * Fixes over the previous version:
 *  - The cast `(struct PyObject *)` declared an unrelated incomplete
 *    struct type instead of the Python object type; use `(PyObject *)`.
 *  - `&webp` is `uint8_t **` but PyString_AsStringAndSize expects
 *    `char **`; add the explicit cast.
 *  - PyString_AsStringAndSize's failure (-1) is now propagated.
 */
PyObject* WebPGetFeatures_wrapper(PyObject* self, PyObject* args)
{
    PyStringObject *webp_string;
    if (!PyArg_ParseTuple(args, "S", &webp_string)) {
        Py_INCREF(Py_None);
        return Py_None;
    }

    uint8_t *webp;
    Py_ssize_t size;
    if (PyString_AsStringAndSize((PyObject *) webp_string,
                                 (char **) &webp, &size) < 0) {
        return NULL;  /* TypeError already set */
    }

    WebPBitstreamFeatures features;
    VP8StatusCode err = WebPGetFeatures(webp, size, &features);
    if (VP8_STATUS_OK != err) {
        return PyErr_Format(PyExc_RuntimeError,
                            "WebPGetFeatures returned error: %d", err);
    }
    return Py_BuildValue("iii",
                         features.width, features.height, features.has_alpha);
}
// Unpack a buffer of the form "WEBP" + webp bitstream (Godot's lossy-packed
// texture format) into an Image.  Returns an empty Ref on any error.
static Ref<Image> _webp_lossy_unpack(const PoolVector<uint8_t> &p_buffer) {

	// First 4 bytes are the literal "WEBP" tag; the bitstream follows.
	int size = p_buffer.size() - 4;
	ERR_FAIL_COND_V(size <= 0, Ref<Image>());
	PoolVector<uint8_t>::Read r = p_buffer.read();

	ERR_FAIL_COND_V(r[0] != 'W' || r[1] != 'E' || r[2] != 'B' || r[3] != 'P', Ref<Image>());
	WebPBitstreamFeatures features;
	if (WebPGetFeatures(&r[4], size, &features) != VP8_STATUS_OK) {
		ERR_EXPLAIN("Error unpacking WEBP image:");
		ERR_FAIL_V(Ref<Image>());
	}

	/*
	print_line("width: "+itos(features.width));
	print_line("height: "+itos(features.height));
	print_line("alpha: "+itos(features.has_alpha));
	*/

	PoolVector<uint8_t> dst_image;
	int datasize = features.width * features.height * (features.has_alpha ? 4 : 3);
	dst_image.resize(datasize);

	PoolVector<uint8_t>::Write dst_w = dst_image.write();

	bool errdec = false;
	if (features.has_alpha) {
		errdec = WebPDecodeRGBAInto(&r[4], size, dst_w.ptr(), datasize, 4 * features.width) == NULL;
	} else {
		errdec = WebPDecodeRGBInto(&r[4], size, dst_w.ptr(), datasize, 3 * features.width) == NULL;
	}

	//ERR_EXPLAIN("Error decoding webp! - "+p_file);
	ERR_FAIL_COND_V(errdec, Ref<Image>());

	// Release the write lock before constructing the Image from the pool.
	dst_w = PoolVector<uint8_t>::Write();

	Ref<Image> img = memnew(Image(features.width, features.height, 0, features.has_alpha ? Image::FORMAT_RGBA8 : Image::FORMAT_RGB8, dst_image));

	return img;
}
/*!
 *  pixReadMemWebP()
 *
 *      Input:  filedata (webp compressed data in memory)
 *              filesize (number of bytes in data)
 *      Return: pix (32 bpp), or null on error
 *
 *  Notes:
 *      (1) When the encoded data only has 3 channels (no alpha),
 *          WebPDecodeRGBAInto() generates a raster of 32-bit pixels, with
 *          the alpha channel set to opaque (255).
 *      (2) We don't need to use the gnu runtime functions like fmemopen()
 *          for redirecting data from a stream to memory, because
 *          the webp library has been written with memory-to-memory
 *          functions at the lowest level (which is good!).  And, in
 *          any event, fmemopen() doesn't work with l_binaryReadStream().
 */
PIX *
pixReadMemWebP(const l_uint8  *filedata,
               size_t          filesize)
{
l_uint8   *out = NULL;
l_int32    w, h, has_alpha, wpl, stride;
l_uint32  *data;
size_t     size;
PIX       *pix;
WebPBitstreamFeatures  features;

    PROCNAME("pixReadMemWebP");

    if (!filedata)
        return (PIX *)ERROR_PTR("filedata not defined", procName, NULL);

    /* Header-only probe for dimensions and alpha. */
    if (WebPGetFeatures(filedata, filesize, &features))
        return (PIX *)ERROR_PTR("Invalid WebP file", procName, NULL);
    w = features.width;
    h = features.height;
    has_alpha = features.has_alpha;

        /* Write from compressed Y,U,V arrays to pix raster data */
    pix = pixCreate(w, h, 32);
    if (has_alpha) pixSetSpp(pix, 4);
    data = pixGetData(pix);
    wpl = pixGetWpl(pix);
    stride = wpl * 4;  /* bytes per raster row */
    size = stride * h;
    out = WebPDecodeRGBAInto(filedata, filesize, (uint8_t *)data, size,
                             stride);
    if (out == NULL) {  /* error: out should also point to data */
        pixDestroy(&pix);
        return (PIX *)ERROR_PTR("WebP decode failed", procName, NULL);
    }

        /* WebP decoder emits opposite byte order for RGBA components */
    pixEndianByteSwap(pix);
    return pix;
}
NS_CC_BEGIN

// Decode an in-memory WebP buffer into the image's RGBA8888 pixel storage.
// Populates _width/_height/_data; returns false on any decode failure.
bool Image::_initWithWebpData(void *pData, int nDataLen)
{
	bool bRet = false;
	do
	{
        WebPDecoderConfig config;
        if (WebPInitDecoderConfig(&config) == 0) break;
        if (WebPGetFeatures((uint8_t*)pData, nDataLen, &config.input) != VP8_STATUS_OK) break;
        if (config.input.width == 0 || config.input.height == 0) break;

        // Always decode to RGBA, even for opaque sources.
        config.output.colorspace = MODE_RGBA;
        _bitsPerComponent = 8;
        _width  = config.input.width;
        _height = config.input.height;
        _hasAlpha = true;

        int bufferSize = _width * _height * 4;
        _data = new unsigned char[bufferSize];

        // Decode directly into our buffer; libwebp does not take ownership.
        config.output.u.RGBA.rgba = (uint8_t*)_data;
        config.output.u.RGBA.stride = _width * 4;
        config.output.u.RGBA.size = bufferSize;
        config.output.is_external_memory = 1;

        if (WebPDecode((uint8_t*)pData, nDataLen, &config) != VP8_STATUS_OK)
        {
            delete []_data;
            _data = NULL;
            break;
        }

        bRet = true;
	} while (0);
	return bRet;
}
// Decode a WebP bitstream into a WebPPicture, importing as ARGB or YUV(A)
// depending on pic->use_argb.  'keep_alpha' gates whether an alpha plane is
// preserved.  Returns 1 on success, 0 on failure.
// NOTE(review): the tail of this function (error reporting, output-buffer
// cleanup and the final return) appears to be cut off here — likely an
// extraction artifact; confirm against the original source before editing.
int ReadWebP(const uint8_t* const data, size_t data_size,
             WebPPicture* const pic,
             int keep_alpha, Metadata* const metadata) {
  int ok = 0;
  VP8StatusCode status = VP8_STATUS_OK;
  WebPDecoderConfig config;
  WebPDecBuffer* const output_buffer = &config.output;
  WebPBitstreamFeatures* const bitstream = &config.input;

  if (data == NULL || data_size == 0 || pic == NULL) return 0;

  // TODO(jzern): add Exif/XMP/ICC extraction.
  if (metadata != NULL) {
    fprintf(stderr, "Warning: metadata extraction from WebP is unsupported.\n");
  }

  if (!WebPInitDecoderConfig(&config)) {
    fprintf(stderr, "Library version mismatch!\n");
    return 0;
  }

  status = WebPGetFeatures(data, data_size, bitstream);
  if (status != VP8_STATUS_OK) {
    PrintWebPError("input data", status);
    return 0;
  }

  {
    const int has_alpha = keep_alpha && bitstream->has_alpha;
    // Pick the decode colorspace to match the picture's import mode.
    if (pic->use_argb) {
      output_buffer->colorspace = has_alpha ? MODE_RGBA : MODE_RGB;
    } else {
      output_buffer->colorspace = has_alpha ? MODE_YUVA : MODE_YUV;
    }

    status = DecodeWebP(data, data_size, 0, &config);
    if (status == VP8_STATUS_OK) {
      pic->width = output_buffer->width;
      pic->height = output_buffer->height;
      if (pic->use_argb) {
        // RGB(A) path: import interleaved rows into the picture.
        const uint8_t* const rgba = output_buffer->u.RGBA.rgba;
        const int stride = output_buffer->u.RGBA.stride;
        ok = has_alpha ? WebPPictureImportRGBA(pic, rgba, stride)
                       : WebPPictureImportRGB(pic, rgba, stride);
      } else {
        // YUV path: allocate planes, then copy Y/U/V (and A) plane by plane.
        pic->colorspace = has_alpha ? WEBP_YUV420A : WEBP_YUV420;
        ok = WebPPictureAlloc(pic);
        if (!ok) {
          status = VP8_STATUS_OUT_OF_MEMORY;
        } else {
          const WebPYUVABuffer* const yuva = &output_buffer->u.YUVA;
          const int uv_width = (pic->width + 1) >> 1;   // chroma is 4:2:0
          const int uv_height = (pic->height + 1) >> 1;
          ImgIoUtilCopyPlane(yuva->y, yuva->y_stride,
                             pic->y, pic->y_stride, pic->width, pic->height);
          ImgIoUtilCopyPlane(yuva->u, yuva->u_stride,
                             pic->u, pic->uv_stride, uv_width, uv_height);
          ImgIoUtilCopyPlane(yuva->v, yuva->v_stride,
                             pic->v, pic->uv_stride, uv_width, uv_height);
          if (has_alpha) {
            ImgIoUtilCopyPlane(yuva->a, yuva->a_stride,
                               pic->a, pic->a_stride, pic->width, pic->height);
          }
        }
      }
    }
  }
}
static Read * read_new( const char *filename, const void *data, size_t length, int shrink ) { Read *read; if( !(read = VIPS_NEW( NULL, Read )) ) return( NULL ); read->filename = g_strdup( filename ); read->data = data; read->length = length; read->shrink = shrink; read->fd = 0; read->idec = NULL; if( read->filename ) { /* libwebp makes streaming from a file source very hard. We * have to read to a full memory buffer, then copy to out. * * mmap the input file, it's slightly quicker. */ if( (read->fd = vips__open_image_read( read->filename )) < 0 || (read->length = vips_file_length( read->fd )) < 0 || !(read->data = vips__mmap( read->fd, FALSE, read->length, 0 )) ) { read_free( read ); return( NULL ); } } WebPInitDecoderConfig( &read->config ); if( WebPGetFeatures( read->data, MINIMAL_HEADER, &read->config.input ) != VP8_STATUS_OK ) { read_free( read ); return( NULL ); } if( read->config.input.has_alpha ) read->config.output.colorspace = MODE_RGBA; else read->config.output.colorspace = MODE_RGB; read->config.options.use_threads = 1; read->width = read->config.input.width / read->shrink; read->height = read->config.input.height / read->shrink; if( read->width == 0 || read->height == 0 ) { vips_error( "webp", "%s", _( "bad setting for shrink" ) ); return( NULL ); } if( read->shrink > 1 ) { read->config.options.use_scaling = 1; read->config.options.scaled_width = read->width; read->config.options.scaled_height = read->height; } return( read ); }
static WebPMuxError DisplayInfo(const WebPMux* mux) { int width, height; uint32_t flag; WebPMuxError err = WebPMuxGetCanvasSize(mux, &width, &height); assert(err == WEBP_MUX_OK); // As WebPMuxCreate() was successful earlier. printf("Canvas size: %d x %d\n", width, height); err = WebPMuxGetFeatures(mux, &flag); if (flag & FRAGMENTS_FLAG) err = WEBP_MUX_INVALID_ARGUMENT; RETURN_IF_ERROR("Failed to retrieve features\n"); if (flag == 0) { fprintf(stderr, "No features present.\n"); return err; } // Print the features present. printf("Features present:"); if (flag & ANIMATION_FLAG) printf(" animation"); if (flag & FRAGMENTS_FLAG) printf(" image fragments"); if (flag & ICCP_FLAG) printf(" ICC profile"); if (flag & EXIF_FLAG) printf(" EXIF metadata"); if (flag & XMP_FLAG) printf(" XMP metadata"); if (flag & ALPHA_FLAG) printf(" transparency"); printf("\n"); if ((flag & ANIMATION_FLAG) || (flag & FRAGMENTS_FLAG)) { const int is_anim = !!(flag & ANIMATION_FLAG); const WebPChunkId id = is_anim ? WEBP_CHUNK_ANMF : WEBP_CHUNK_FRGM; const char* const type_str = is_anim ? "frame" : "fragment"; int nFrames; if (is_anim) { WebPMuxAnimParams params; err = WebPMuxGetAnimationParams(mux, ¶ms); assert(err == WEBP_MUX_OK); printf("Background color : 0x%.8X Loop Count : %d\n", params.bgcolor, params.loop_count); } err = WebPMuxNumChunks(mux, id, &nFrames); assert(err == WEBP_MUX_OK); printf("Number of %ss: %d\n", type_str, nFrames); if (nFrames > 0) { int i; printf("No.: width height alpha x_offset y_offset "); if (is_anim) printf("duration dispose blend "); printf("image_size\n"); for (i = 1; i <= nFrames; i++) { WebPMuxFrameInfo frame; err = WebPMuxGetFrame(mux, i, &frame); if (err == WEBP_MUX_OK) { WebPBitstreamFeatures features; const VP8StatusCode status = WebPGetFeatures( frame.bitstream.bytes, frame.bitstream.size, &features); assert(status == VP8_STATUS_OK); // Checked by WebPMuxCreate(). 
(void)status; printf("%3d: %5d %5d %5s %8d %8d ", i, features.width, features.height, features.has_alpha ? "yes" : "no", frame.x_offset, frame.y_offset); if (is_anim) { const char* const dispose = (frame.dispose_method == WEBP_MUX_DISPOSE_NONE) ? "none" : "background"; const char* const blend = (frame.blend_method == WEBP_MUX_BLEND) ? "yes" : "no"; printf("%8d %10s %5s ", frame.duration, dispose, blend); } printf("%10d\n", (int)frame.bitstream.size); } WebPDataClear(&frame.bitstream); RETURN_IF_ERROR3("Failed to retrieve %s#%d\n", type_str, i); } } } if (flag & ICCP_FLAG) { WebPData icc_profile; err = WebPMuxGetChunk(mux, "ICCP", &icc_profile); assert(err == WEBP_MUX_OK); printf("Size of the ICC profile data: %d\n", (int)icc_profile.size); } if (flag & EXIF_FLAG) { WebPData exif; err = WebPMuxGetChunk(mux, "EXIF", &exif); assert(err == WEBP_MUX_OK); printf("Size of the EXIF metadata: %d\n", (int)exif.size); } if (flag & XMP_FLAG) { WebPData xmp; err = WebPMuxGetChunk(mux, "XMP ", &xmp); assert(err == WEBP_MUX_OK); printf("Size of the XMP metadata: %d\n", (int)xmp.size); } if ((flag & ALPHA_FLAG) && !(flag & (ANIMATION_FLAG | FRAGMENTS_FLAG))) { WebPMuxFrameInfo image; err = WebPMuxGetFrame(mux, 1, &image); if (err == WEBP_MUX_OK) { printf("Size of the image (with alpha): %d\n", (int)image.bitstream.size); } WebPDataClear(&image.bitstream); RETURN_IF_ERROR("Failed to retrieve the image\n"); } return WEBP_MUX_OK; }
// GDAL open hook: validate the WebP header, determine raster size and band
// count (4 when the bitstream carries alpha), and build the dataset.
// Returns NULL when the file is not a usable WebP or update access is asked.
//
// Fix: config.input.has_alpha was read BEFORE checking that
// WebPGetFeatures succeeded — on failure config.input is uninitialized
// memory.  The alpha test is now gated on bOK.
GDALDataset *WEBPDataset::Open( GDALOpenInfo * poOpenInfo )

{
    if( !Identify( poOpenInfo ) ||
        poOpenInfo->fpL == NULL )
        return NULL;

    int nWidth, nHeight;
    if (!WebPGetInfo((const uint8_t*)poOpenInfo->pabyHeader,
                     (uint32_t)poOpenInfo->nHeaderBytes,
                     &nWidth, &nHeight))
        return NULL;

    int nBands = 3;

#if WEBP_DECODER_ABI_VERSION >= 0x0002
    WebPDecoderConfig config;
    if (!WebPInitDecoderConfig(&config))
        return NULL;

    int bOK = WebPGetFeatures(poOpenInfo->pabyHeader,
                              poOpenInfo->nHeaderBytes,
                              &config.input) == VP8_STATUS_OK;

    // Only trust the parsed features once WebPGetFeatures reported success.
    if (bOK && config.input.has_alpha)
        nBands = 4;

    WebPFreeDecBuffer(&config.output);

    if (!bOK)
        return NULL;
#endif

    if( poOpenInfo->eAccess == GA_Update )
    {
        CPLError( CE_Failure, CPLE_NotSupported,
                  "The WEBP driver does not support update access to existing"
                  " datasets.\n" );
        return NULL;
    }

/* -------------------------------------------------------------------- */
/*      Create a corresponding GDALDataset.                             */
/* -------------------------------------------------------------------- */
    WEBPDataset *poDS;
    poDS = new WEBPDataset();
    poDS->nRasterXSize = nWidth;
    poDS->nRasterYSize = nHeight;
    poDS->fpImage = poOpenInfo->fpL;
    poOpenInfo->fpL = NULL;  // dataset takes ownership of the file handle

/* -------------------------------------------------------------------- */
/*      Create band information objects.                                */
/* -------------------------------------------------------------------- */
    for( int iBand = 0; iBand < nBands; iBand++ )
        poDS->SetBand( iBand+1, new WEBPRasterBand( poDS, iBand+1 ) );

/* -------------------------------------------------------------------- */
/*      Initialize any PAM information.                                 */
/* -------------------------------------------------------------------- */
    poDS->SetDescription( poOpenInfo->pszFilename );
    poDS->TryLoadXML( poOpenInfo->GetSiblingFiles() );

/* -------------------------------------------------------------------- */
/*      Open overviews.                                                 */
/* -------------------------------------------------------------------- */
    poDS->oOvManager.Initialize( poDS, poOpenInfo->pszFilename,
                                 poOpenInfo->GetSiblingFiles() );

    return poDS;
}
// Store image bearing chunks to 'frame'.
//
// Scans chunks starting at the current position of 'mem', recording the
// location/size of the (at most one) ALPH chunk and the (at most one)
// VP8/VP8L image chunk into 'frame'. Parsing stops at the first chunk that
// is neither ALPH, VP8 nor VP8L (the chunk header is rewound so the caller
// can handle it), at the end of the RIFF payload, or when data runs out.
//
// Returns PARSE_OK on success, PARSE_NEED_MORE_DATA if fewer than
// 'min_size' bytes are available or a chunk is truncated, and PARSE_ERROR
// on malformed input (oversized/invalid chunk sizes, duplicate alpha, or a
// bitstream WebPGetFeatures() rejects despite being fully available).
static ParseStatus StoreFrame(int frame_num, uint32_t min_size,
                              MemBuffer* const mem, Frame* const frame) {
  int alpha_chunks = 0;
  int image_chunks = 0;
  // Refuse to start until at least 'min_size' bytes are buffered.
  int done = (MemDataSize(mem) < min_size);
  ParseStatus status = PARSE_OK;

  if (done) return PARSE_NEED_MORE_DATA;

  do {
    const size_t chunk_start_offset = mem->start_;
    const uint32_t fourcc = ReadLE32(mem);
    const uint32_t payload_size = ReadLE32(mem);
    // Chunk payloads are padded to an even number of bytes.
    const uint32_t payload_size_padded = payload_size + (payload_size & 1);
    // Clamp to what is actually buffered; a shortfall marks the chunk as
    // incomplete (status becomes PARSE_NEED_MORE_DATA below).
    const size_t payload_available = (payload_size_padded > MemDataSize(mem))
                                   ? MemDataSize(mem) : payload_size_padded;
    const size_t chunk_size = CHUNK_HEADER_SIZE + payload_available;

    if (payload_size > MAX_CHUNK_PAYLOAD) return PARSE_ERROR;
    if (SizeIsInvalid(mem, payload_size_padded)) return PARSE_ERROR;
    if (payload_size_padded > MemDataSize(mem)) status = PARSE_NEED_MORE_DATA;

    switch (fourcc) {
      case MKFOURCC('A', 'L', 'P', 'H'):
        // Record only the first alpha chunk; component slot [1] holds alpha.
        if (alpha_chunks == 0) {
          ++alpha_chunks;
          frame->img_components_[1].offset_ = chunk_start_offset;
          frame->img_components_[1].size_ = chunk_size;
          frame->has_alpha_ = 1;
          frame->frame_num_ = frame_num;
          Skip(mem, payload_available);
        } else {
          // Duplicate ALPH: treat as the end of this frame's chunks.
          goto Done;
        }
        break;
      case MKFOURCC('V', 'P', '8', 'L'):
        if (alpha_chunks > 0) return PARSE_ERROR;  // VP8L has its own alpha
        // fall through
      case MKFOURCC('V', 'P', '8', ' '):
        // Record only the first image chunk; component slot [0] holds it.
        if (image_chunks == 0) {
          // Extract the bitstream features, tolerating failures when the data
          // is incomplete.
          WebPBitstreamFeatures features;
          const VP8StatusCode vp8_status =
              WebPGetFeatures(mem->buf_ + chunk_start_offset, chunk_size,
                              &features);
          if (status == PARSE_NEED_MORE_DATA &&
              vp8_status == VP8_STATUS_NOT_ENOUGH_DATA) {
            return PARSE_NEED_MORE_DATA;
          } else if (vp8_status != VP8_STATUS_OK) {
            // We have enough data, and yet WebPGetFeatures() failed.
            return PARSE_ERROR;
          }
          ++image_chunks;
          frame->img_components_[0].offset_ = chunk_start_offset;
          frame->img_components_[0].size_ = chunk_size;
          frame->width_ = features.width;
          frame->height_ = features.height;
          // An ALPH chunk may already have set has_alpha_; OR in the
          // bitstream's own alpha flag.
          frame->has_alpha_ |= features.has_alpha;
          frame->frame_num_ = frame_num;
          // Complete only if the padded payload was fully buffered.
          frame->complete_ = (status == PARSE_OK);
          Skip(mem, payload_available);
        } else {
          // Duplicate image chunk: treat as the end of this frame's chunks.
          goto Done;
        }
        break;
 Done:
      // 'Done:' shares the default handler: any unrecognized chunk (or a
      // duplicate reached via goto) ends this frame's chunk list.
      default:
        // Restore fourcc/size when moving up one level in parsing.
        Rewind(mem, CHUNK_HEADER_SIZE);
        done = 1;
        break;
    }

    // Stop at the end of the RIFF payload; otherwise require a full chunk
    // header before the next iteration.
    if (mem->start_ == mem->riff_end_) {
      done = 1;
    } else if (MemDataSize(mem) < CHUNK_HEADER_SIZE) {
      status = PARSE_NEED_MORE_DATA;
    }
  } while (!done && status == PARSE_OK);

  return status;
}
// Incrementally decodes the WebP data buffered so far into frame 0.
//
// If 'onlySize' is true, only parses the header far enough to establish the
// image dimensions (and alpha/format flags) and returns. Returns true when
// the requested stage (size, or full frame) is complete; false when more
// data is needed or — after calling setFailed() — when the data is invalid.
// Safe to call repeatedly as more data arrives: the incremental decoder
// state is kept in m_decoder between calls.
bool WEBPImageDecoder::decode(bool onlySize)
{
    if (failed())
        return false;

    const uint8_t* dataBytes = reinterpret_cast<const uint8_t*>(m_data->data());
    const size_t dataSize = m_data->size();

    if (!ImageDecoder::isSizeAvailable()) {
        // Don't bother probing until enough bytes for the header are in.
        static const size_t imageHeaderSize = 30;
        if (dataSize < imageHeaderSize)
            return false;
        int width, height;
#ifdef QCMS_WEBP_COLOR_CORRECTION
        // With color correction enabled, use the demuxer so the container's
        // format flags (ICC profile, alpha) are available.
        WebPData inputData = { dataBytes, dataSize };
        WebPDemuxState state;
        WebPDemuxer* demuxer = WebPDemuxPartial(&inputData, &state);
        if (!demuxer)
            return setFailed();
        width = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
        height = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
        m_formatFlags = WebPDemuxGetI(demuxer, WEBP_FF_FORMAT_FLAGS);
        m_hasAlpha = !!(m_formatFlags & ALPHA_FLAG);
        WebPDemuxDelete(demuxer);
        // Header not fully parsed yet: wait for more data.
        if (state <= WEBP_DEMUX_PARSING_HEADER)
            return false;
#elif (WEBP_DECODER_ABI_VERSION >= 0x0163)
        // libwebp new enough to report alpha via WebPGetFeatures().
        WebPBitstreamFeatures features;
        if (WebPGetFeatures(dataBytes, dataSize, &features) != VP8_STATUS_OK)
            return setFailed();
        width = features.width;
        height = features.height;
        m_hasAlpha = features.has_alpha;
#else
        // Earlier version won't be able to display WebP files with alpha.
        if (!WebPGetInfo(dataBytes, dataSize, &width, &height))
            return setFailed();
        m_hasAlpha = false;
#endif
        if (!setSize(width, height))
            return setFailed();
    }

    ASSERT(ImageDecoder::isSizeAvailable());
    if (onlySize)
        return true;

    ASSERT(!m_frameBufferCache.isEmpty());
    ImageFrame& buffer = m_frameBufferCache[0];
    ASSERT(buffer.status() != ImageFrame::FrameComplete);

    // First pass for this frame: allocate the pixel buffer and mark it
    // partially decoded.
    if (buffer.status() == ImageFrame::FrameEmpty) {
        if (!buffer.setSize(size().width(), size().height()))
            return setFailed();
        buffer.setStatus(ImageFrame::FramePartial);
        buffer.setHasAlpha(m_hasAlpha);
        buffer.setOriginalFrameRect(IntRect(IntPoint(), size()));
    }

    // Lazily create the incremental decoder, writing directly into the
    // frame buffer's pixels.
    if (!m_decoder) {
        WEBP_CSP_MODE mode = outputMode(m_hasAlpha);
        // Without premultiplication, use the non-premultiplied output mode.
        if (!m_premultiplyAlpha)
            mode = outputMode(false);
        if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile())
            mode = MODE_RGBA; // Decode to RGBA for input to libqcms.
        int rowStride = size().width() * sizeof(ImageFrame::PixelData);
        uint8_t* output = reinterpret_cast<uint8_t*>(buffer.getAddr(0, 0));
        int outputSize = size().height() * rowStride;
        m_decoder = WebPINewRGB(mode, output, outputSize, rowStride);
        if (!m_decoder)
            return setFailed();
    }

    // Feed all buffered bytes to the incremental decoder.
    switch (WebPIUpdate(m_decoder, dataBytes, dataSize)) {
    case VP8_STATUS_OK:
        // Fully decoded: color-correct if an ICC profile applies, then
        // finalize the frame and release decoder state.
        if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile())
            applyColorProfile(dataBytes, dataSize, buffer);
        buffer.setStatus(ImageFrame::FrameComplete);
        clear();
        return true;
    case VP8_STATUS_SUSPENDED:
        // More data needed; color-correct what has been decoded so far.
        if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile())
            applyColorProfile(dataBytes, dataSize, buffer);
        return false;
    default:
        // Any other status is a hard decode error.
        clear();
        return setFailed();
    }
}