Example #1
int ReadTIFF(const char* const filename,
             WebPPicture* const pic, int keep_alpha,
             Metadata* const metadata) {
  TIFF* const tif = TIFFOpen(filename, "r");
  uint32 width, height;
  uint32* raster;
  int ok = 0;
  tdir_t dircount;

  if (tif == NULL) {
    fprintf(stderr, "Error! Cannot open TIFF file '%s'\n", filename);
    return 0;
  }

  dircount = TIFFNumberOfDirectories(tif);
  if (dircount > 1) {
    fprintf(stderr, "Warning: multi-directory TIFF files are not supported.\n"
                    "Only the first will be used, %d will be ignored.\n",
                    dircount - 1);
  }

  if (!(TIFFGetField(tif, TIFFTAG_IMAGEWIDTH, &width) &&
        TIFFGetField(tif, TIFFTAG_IMAGELENGTH, &height))) {
    fprintf(stderr, "Error! Cannot retrieve TIFF image dimensions.\n");
    return 0;
  }
  raster = (uint32*)_TIFFmalloc(width * height * sizeof(*raster));
  if (raster != NULL) {
    if (TIFFReadRGBAImageOriented(tif, width, height, raster,
                                  ORIENTATION_TOPLEFT, 1)) {
      const int stride = width * sizeof(*raster);
      pic->width = width;
      pic->height = height;
      // TIFF data is ABGR
#ifdef WORDS_BIGENDIAN
      TIFFSwabArrayOfLong(raster, width * height);
#endif
      ok = keep_alpha
         ? WebPPictureImportRGBA(pic, (const uint8_t*)raster, stride)
         : WebPPictureImportRGBX(pic, (const uint8_t*)raster, stride);
    }
    _TIFFfree(raster);
  } else {
    fprintf(stderr, "Error allocating TIFF RGBA memory!\n");
  }

  if (ok) {
    if (metadata != NULL) {
      ok = ExtractMetadataFromTIFF(tif, metadata);
      if (!ok) {
        fprintf(stderr, "Error extracting TIFF metadata!\n");
        MetadataFree(metadata);
        WebPPictureFree(pic);
      }
    }
  }

  TIFFClose(tif);
  return ok;
}
Example #2
void WebPAnimEncoderDelete(WebPAnimEncoder* enc) {
  if (enc != NULL) {
    WebPPictureFree(&enc->curr_canvas_copy_);
    WebPPictureFree(&enc->prev_canvas_);
    WebPPictureFree(&enc->prev_canvas_disposed_);
    if (enc->encoded_frames_ != NULL) {
      size_t i;
      for (i = 0; i < enc->size_; ++i) {
        FrameRelease(&enc->encoded_frames_[i]);
      }
      WebPSafeFree(enc->encoded_frames_);
    }
    WebPMuxDelete(enc->mux_);
    WebPSafeFree(enc);
  }
}
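For context, here is a minimal sketch (not taken from any of the projects above) of the public lifecycle that ends in the WebPAnimEncoderDelete() call shown in Example #2. The function name encode_animation and the frames/frame_count/frame_duration_ms parameters are illustrative assumptions; error handling is reduced to the essentials.

#include <webp/encode.h>
#include <webp/mux.h>

/* Illustrative sketch: encode 'frame_count' pre-filled WebPPictures of size
 * width x height into an animated WebP held in 'out'. Returns 1 on success. */
static int encode_animation(WebPPicture* const frames, int frame_count,
                            int frame_duration_ms, int width, int height,
                            WebPData* const out) {
  WebPAnimEncoderOptions enc_options;
  WebPConfig config;
  WebPAnimEncoder* enc;
  int i, ok = 0;

  if (!WebPAnimEncoderOptionsInit(&enc_options) || !WebPConfigInit(&config)) {
    return 0;  // version mismatch between headers and library
  }
  WebPDataInit(out);
  enc = WebPAnimEncoderNew(width, height, &enc_options);
  if (enc == NULL) return 0;

  for (i = 0; i < frame_count; ++i) {
    if (!WebPAnimEncoderAdd(enc, &frames[i], i * frame_duration_ms, &config)) {
      goto End;
    }
  }
  // A final NULL frame marks the end of the animation.
  if (!WebPAnimEncoderAdd(enc, NULL, frame_count * frame_duration_ms, NULL)) {
    goto End;
  }
  ok = WebPAnimEncoderAssemble(enc, out);

 End:
  WebPAnimEncoderDelete(enc);  // frees the encoder's internal canvases and frames
  return ok;
}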
Example #3
static void
TWebPCleanup(TIFF* tif)
{
  WebPState* sp = LState(tif);

  assert(sp != 0);

  tif->tif_tagmethods.vgetfield = sp->vgetparent;
  tif->tif_tagmethods.vsetfield = sp->vsetparent;

  if (sp->state & LSTATE_INIT_ENCODE) {
    WebPPictureFree(&sp->sPicture);
  }

  if (sp->psDecoder != NULL) {
    WebPIDelete(sp->psDecoder);
    WebPFreeDecBuffer(&sp->sDecBuffer);
    sp->psDecoder = NULL;
    sp->last_y = 0;
  }
  
  if (sp->pBuffer != NULL) {
    _TIFFfree(sp->pBuffer);
    sp->pBuffer = NULL;
  }

  _TIFFfree(tif->tif_data);
  tif->tif_data = NULL;

  _TIFFSetDefaultCompressionState(tif);
}
Example #4
File: _webp.c Project: dvska/Pillow
PyObject* _anim_encoder_dealloc(PyObject* self)
{
    WebPAnimEncoderObject* encp = (WebPAnimEncoderObject*)self;
    WebPPictureFree(&(encp->frame));
    WebPAnimEncoderDelete(encp->enc);
    Py_RETURN_NONE;
}
Example #5
static bool encodePixels(IntSize imageSize, const unsigned char* pixels, bool premultiplied, int quality, Vector<unsigned char>* output)
{
    if (imageSize.width() <= 0 || imageSize.width() > WEBP_MAX_DIMENSION)
        return false;
    if (imageSize.height() <= 0 || imageSize.height() > WEBP_MAX_DIMENSION)
        return false;

    WebPConfig config;
    if (!WebPConfigInit(&config))
        return false;
    WebPPicture picture;
    if (!WebPPictureInit(&picture))
        return false;

    picture.width = imageSize.width();
    picture.height = imageSize.height();

    if (premultiplied && !platformPremultipliedImportPicture(pixels, &picture))
        return false;
    if (!premultiplied && !importPictureRGBX<false>(pixels, &picture))
        return false;

    picture.custom_ptr = output;
    picture.writer = &writeOutput;
    config.quality = quality;
    config.method = 3;

    bool success = WebPEncode(&config, &picture);
    WebPPictureFree(&picture);
    return success;
}
Example #6
static size_t encode(const uint8_t* rgba, int width, int height,
                     uint8_t** output)
{
    WebPPicture pic;
    WebPConfig config;
    WebPMemoryWriter wrt;
    int ok;

    if (!WebPConfigPreset(&config, WEBP_PRESET_DEFAULT, 100) ||
            !WebPPictureInit(&pic))
    {
        return 0;
    }

    config.lossless = true;
    config.method = 6;
    pic.use_argb = true;
    pic.width = width;
    pic.height = height;
    pic.writer = WebPMemoryWrite;
    pic.custom_ptr = &wrt;
    WebPMemoryWriterInit(&wrt);

    ok = WebPPictureImportRGBA(&pic, rgba, width * 4) &&
         WebPEncode(&config, &pic);
    WebPPictureFree(&pic);
    if (!ok) {
        WebPMemoryWriterClear(&wrt);  // release any partially written output
        return 0;
    }
    *output = wrt.mem;
    return wrt.size;
}
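A minimal caller sketch for the helper above, not part of the original example: it assumes save_webp and its parameters are ours, that the static encode() helper is visible in the same translation unit, and that the buffer handed back through *output (libwebp's memory-writer buffer) is released by the caller.

#include <stdio.h>
#include <stdint.h>
#include <webp/encode.h>

/* Hypothetical caller for the encode() helper above. 'rgba' is assumed to be
 * a tightly packed RGBA buffer of width * height pixels. */
static int save_webp(const char* path, const uint8_t* rgba,
                     int width, int height)
{
    uint8_t* data = NULL;
    const size_t size = encode(rgba, width, height, &data);
    FILE* f = NULL;
    if (size == 0)
        return 0;                  /* encoding failed */
    f = fopen(path, "wb");
    if (f != NULL) {
        fwrite(data, 1, size, f);
        fclose(f);
    }
    WebPFree(data);                /* buffer was allocated by the memory writer */
    return f != NULL;
}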
Example #7
int WebPPictureDistortion(const WebPPicture* src, const WebPPicture* ref,
                          int type, float results[5]) {
  int w, h, c;
  int ok = 0;
  WebPPicture p0, p1;
  double total_size = 0., total_distortion = 0.;
  if (src == NULL || ref == NULL ||
      src->width != ref->width || src->height != ref->height ||
      results == NULL) {
    return 0;
  }

  VP8SSIMDspInit();
  if (!WebPPictureInit(&p0) || !WebPPictureInit(&p1)) return 0;
  w = src->width;
  h = src->height;
  if (!WebPPictureView(src, 0, 0, w, h, &p0)) goto Error;
  if (!WebPPictureView(ref, 0, 0, w, h, &p1)) goto Error;

  // We always measure distortion in ARGB space.
  if (p0.use_argb == 0 && !WebPPictureYUVAToARGB(&p0)) goto Error;
  if (p1.use_argb == 0 && !WebPPictureYUVAToARGB(&p1)) goto Error;
  for (c = 0; c < 4; ++c) {
    float distortion;
    const size_t stride0 = 4 * (size_t)p0.argb_stride;
    const size_t stride1 = 4 * (size_t)p1.argb_stride;
    if (!WebPPlaneDistortion((const uint8_t*)p0.argb + c, stride0,
                             (const uint8_t*)p1.argb + c, stride1,
                             w, h, 4, type, &distortion, results + c)) {
      goto Error;
    }
    total_distortion += distortion;
    total_size += w * h;
  }

  results[4] = (type == 1) ? (float)GetLogSSIM(total_distortion, total_size)
                           : (float)GetPSNR(total_distortion, total_size);
  ok = 1;

 Error:
  WebPPictureFree(&p0);
  WebPPictureFree(&p1);
  return ok;
}
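A short usage sketch, assumed rather than taken from libwebp, showing how a caller might drive the function above: pic_a and pic_b stand for two already initialized, same-sized pictures (e.g. filled via WebPPictureImportRGBA), and print_overall_psnr is an illustrative name.

#include <stdio.h>
#include <webp/encode.h>

/* Illustrative helper: print the aggregate PSNR between two pictures. */
static int print_overall_psnr(const WebPPicture* const pic_a,
                              const WebPPicture* const pic_b) {
  float results[5];  // per-channel values in [0..3], aggregate in [4]
  if (!WebPPictureDistortion(pic_a, pic_b, 0 /* 0 = PSNR, 1 = SSIM */,
                             results)) {
    return 0;
  }
  printf("overall PSNR: %.2f dB\n", results[4]);
  return 1;
}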
Example #8
bool
WebpOutput::close()
{
    if (m_file)
    {
        WebPPictureFree(&m_webp_picture);
        fclose(m_file);
        m_file = NULL;
    }
    return true;
}
Example #9
int GIFReadFrame(GifFileType* const gif, int transparent_index,
                 GIFFrameRect* const gif_rect, WebPPicture* const picture) {
  WebPPicture sub_image;
  const GifImageDesc* const image_desc = &gif->Image;
  uint32_t* dst = NULL;
  uint8_t* tmp = NULL;
  int ok = 0;
  GIFFrameRect rect = {
      image_desc->Left, image_desc->Top, image_desc->Width, image_desc->Height
  };
  *gif_rect = rect;

  // Use a view for the sub-picture:
  if (!WebPPictureView(picture, rect.x_offset, rect.y_offset,
                       rect.width, rect.height, &sub_image)) {
    fprintf(stderr, "Sub-image %dx%d at position %d,%d is invalid!\n",
            rect.width, rect.height, rect.x_offset, rect.y_offset);
    return 0;
  }
  dst = sub_image.argb;

  tmp = (uint8_t*)malloc(rect.width * sizeof(*tmp));
  if (tmp == NULL) goto End;

  if (image_desc->Interlace) {  // Interlaced image.
    // We need 4 passes, with the following offsets and jumps.
    const int interlace_offsets[] = { 0, 4, 2, 1 };
    const int interlace_jumps[]   = { 8, 8, 4, 2 };
    int pass;
    for (pass = 0; pass < 4; ++pass) {
      int y;
      for (y = interlace_offsets[pass]; y < rect.height;
           y += interlace_jumps[pass]) {
        if (DGifGetLine(gif, tmp, rect.width) == GIF_ERROR) goto End;
        Remap(gif, tmp, rect.width, transparent_index,
              dst + y * sub_image.argb_stride);
      }
    }
  } else {  // Non-interlaced image.
    int y;
    for (y = 0; y < rect.height; ++y) {
      if (DGifGetLine(gif, tmp, rect.width) == GIF_ERROR) goto End;
      Remap(gif, tmp, rect.width, transparent_index,
            dst + y * sub_image.argb_stride);
    }
  }
  ok = 1;

 End:
  if (!ok) picture->error_code = sub_image.error_code;
  WebPPictureFree(&sub_image);
  free(tmp);
  return ok;
}
Example #10
File: _webp.c Project: dvska/Pillow
// Encoder functions
PyObject* _anim_encoder_new(PyObject* self, PyObject* args)
{
    int width, height;
    uint32_t bgcolor;
    int loop_count;
    int minimize_size;
    int kmin, kmax;
    int allow_mixed;
    int verbose;
    WebPAnimEncoderOptions enc_options;
    WebPAnimEncoderObject* encp = NULL;
    WebPAnimEncoder* enc = NULL;

    if (!PyArg_ParseTuple(args, "iiIiiiiii",
        &width, &height, &bgcolor, &loop_count, &minimize_size,
        &kmin, &kmax, &allow_mixed, &verbose)) {
        return NULL;
    }

    // Setup and configure the encoder's options (these are animation-specific)
    if (!WebPAnimEncoderOptionsInit(&enc_options)) {
        PyErr_SetString(PyExc_RuntimeError, "failed to initialize encoder options");
        return NULL;
    }
    enc_options.anim_params.bgcolor = bgcolor;
    enc_options.anim_params.loop_count = loop_count;
    enc_options.minimize_size = minimize_size;
    enc_options.kmin = kmin;
    enc_options.kmax = kmax;
    enc_options.allow_mixed = allow_mixed;
    enc_options.verbose = verbose;

    // Validate canvas dimensions
    if (width <= 0 || height <= 0) {
        PyErr_SetString(PyExc_ValueError, "invalid canvas dimensions");
        return NULL;
    }

    // Create a new animation encoder and picture frame
    encp = PyObject_New(WebPAnimEncoderObject, &WebPAnimEncoder_Type);
    if (encp) {
        if (WebPPictureInit(&(encp->frame))) {
            enc = WebPAnimEncoderNew(width, height, &enc_options);
            if (enc) {
                encp->enc = enc;
                return (PyObject*) encp;
            }
            WebPPictureFree(&(encp->frame));
        }
        PyObject_Del(encp);
    }
    PyErr_SetString(PyExc_RuntimeError, "could not create encoder object");
    return NULL;
}
Example #11
static gboolean
export_webp (GeglOperation       *operation,
             GeglBuffer          *input,
             const GeglRectangle *result,
             GOutputStream       *stream)
{
  GeglProperties *o = GEGL_PROPERTIES (operation);
  WebPConfig config;
  WebPPicture picture;
  const Babl *format;
  gint status;

  g_return_val_if_fail (stream != NULL, FALSE);

  if (!WebPConfigInit (&config) || !WebPPictureInit (&picture))
    {
      g_warning ("could not initialize WebP encoder");
      return FALSE;
    }

  if (!WebPConfigPreset (&config, WEBP_PRESET_DEFAULT, o->quality))
    {
      g_warning("could not load WebP default preset");
      return FALSE;
    }

  picture.width = result->width;
  picture.height = result->height;

  format = babl_format ("R'G'B'A u8");

  if (!WebPValidateConfig (&config))
    {
      g_warning ("WebP encoder configuration is invalid");
      return FALSE;
    }

  picture.writer = write_to_stream;
  picture.custom_ptr = stream;

  if (save_RGBA (&picture, input, result, format))
    {
      g_warning ("could not pass pixels data to WebP encoder");
      return FALSE;
    }

  status = WebPEncode (&config, &picture);

  WebPPictureFree (&picture);

  return status ? TRUE : FALSE;
}
Example #12
static int
TWebPSetupDecode(TIFF* tif)
{
  static const char module[] = "WebPSetupDecode";
  uint16 nBitsPerSample = tif->tif_dir.td_bitspersample;
  uint16 sampleFormat = tif->tif_dir.td_sampleformat;

  WebPState* sp = DecoderState(tif);
  assert(sp != NULL);

  sp->nSamples = tif->tif_dir.td_samplesperpixel;

  /* check band count */
  if ( sp->nSamples != 3
#if WEBP_ENCODER_ABI_VERSION >= 0x0100
    && sp->nSamples != 4
#endif
  )
  {
    TIFFErrorExt(tif->tif_clientdata, module,
      "WEBP driver doesn't support %d bands. Must be 3 (RGB) "
  #if WEBP_ENCODER_ABI_VERSION >= 0x0100
      "or 4 (RGBA) "
  #endif
    "bands.",
    sp->nSamples );
    return 0;
  }

  /* check bits per sample and data type */
  if ((nBitsPerSample != 8) || (sampleFormat != 1)) {
    TIFFErrorExt(tif->tif_clientdata, module,
                "WEBP driver requires 8 bit unsigned data");
    return 0;
  }
  
  /* if we were last encoding, terminate this mode */
  if (sp->state & LSTATE_INIT_ENCODE) {
      WebPPictureFree(&sp->sPicture);
      if (sp->pBuffer != NULL) {
        _TIFFfree(sp->pBuffer);
        sp->pBuffer = NULL;
      }
      sp->buffer_offset = 0;
      sp->state = 0;
  }

  sp->state |= LSTATE_INIT_DECODE;

  return 1;
}
Example #13
static int EncodeLossless(const uint8_t* const data, int width, int height,
                          int effort_level,  // in [0..6] range
                          VP8BitWriter* const bw,
                          WebPAuxStats* const stats) {
  int ok = 0;
  WebPConfig config;
  WebPPicture picture;
  VP8LBitWriter tmp_bw;

  WebPPictureInit(&picture);
  picture.width = width;
  picture.height = height;
  picture.use_argb = 1;
  picture.stats = stats;
  if (!WebPPictureAlloc(&picture)) return 0;

  // Transfer the alpha values to the green channel.
  {
    int i, j;
    uint32_t* dst = picture.argb;
    const uint8_t* src = data;
    for (j = 0; j < picture.height; ++j) {
      for (i = 0; i < picture.width; ++i) {
        dst[i] = (src[i] << 8) | 0xff000000u;
      }
      src += width;
      dst += picture.argb_stride;
    }
  }

  WebPConfigInit(&config);
  config.lossless = 1;
  config.method = effort_level;  // impact is very small
  // Set moderate default quality setting for alpha. Higher qualities (80 and
  // above) could be very slow.
  config.quality = 10.f + 15.f * effort_level;
  if (config.quality > 100.f) config.quality = 100.f;

  ok = VP8LBitWriterInit(&tmp_bw, (width * height) >> 3);
  ok = ok && (VP8LEncodeStream(&config, &picture, &tmp_bw) == VP8_ENC_OK);
  WebPPictureFree(&picture);
  if (ok) {
    const uint8_t* const data = VP8LBitWriterFinish(&tmp_bw);
    const size_t data_size = VP8LBitWriterNumBytes(&tmp_bw);
    VP8BitWriterAppend(bw, data, data_size);
  }
  VP8LBitWriterDestroy(&tmp_bw);
  return ok && !bw->error_;
}
Example #14
static int EncodeLossless(const uint8_t* const data, int width, int height,
                          int effort_level,  // in [0..6] range
                          VP8LBitWriter* const bw,
                          WebPAuxStats* const stats) {
  int ok = 0;
  WebPConfig config;
  WebPPicture picture;

  WebPPictureInit(&picture);
  picture.width = width;
  picture.height = height;
  picture.use_argb = 1;
  picture.stats = stats;
  if (!WebPPictureAlloc(&picture)) return 0;

  // Transfer the alpha values to the green channel.
  {
    int i, j;
    uint32_t* dst = picture.argb;
    const uint8_t* src = data;
    for (j = 0; j < picture.height; ++j) {
      for (i = 0; i < picture.width; ++i) {
        dst[i] = src[i] << 8;  // we leave A/R/B channels zero'd.
      }
      src += width;
      dst += picture.argb_stride;
    }
  }

  WebPConfigInit(&config);
  config.lossless = 1;
  config.method = effort_level;  // impact is very small
  // Set a low default quality for encoding alpha. Ensure that Alpha quality at
  // lower methods (3 and below) is less than the threshold for triggering
  // costly 'BackwardReferencesTraceBackwards'.
  config.quality = 8.f * effort_level;
  assert(config.quality >= 0 && config.quality <= 100.f);

  ok = (VP8LEncodeStream(&config, &picture, bw) == VP8_ENC_OK);
  WebPPictureFree(&picture);
  ok = ok && !bw->error_;
  if (!ok) {
    VP8LBitWriterDestroy(bw);
    return 0;
  }
  return 1;
}
Example #15
static int EncodeLossless(const uint8_t* const data, int width, int height,
                          int effort_level,  // in [0..6] range
                          VP8LBitWriter* const bw,
                          WebPAuxStats* const stats) {
  int ok = 0;
  WebPConfig config;
  WebPPicture picture;

  WebPPictureInit(&picture);
  picture.width = width;
  picture.height = height;
  picture.use_argb = 1;
  picture.stats = stats;
  if (!WebPPictureAlloc(&picture)) return 0;

  // Transfer the alpha values to the green channel.
  WebPDispatchAlphaToGreen(data, width, picture.width, picture.height,
                           picture.argb, picture.argb_stride);

  WebPConfigInit(&config);
  config.lossless = 1;
  // Enable exact, or it would alter RGB values of transparent alpha, which is
  // normally OK but not here since we are not encoding the input image but an
  // internal encoding-related image containing necessary exact information in
  // RGB channels.
  config.exact = 1;
  config.method = effort_level;  // impact is very small
  // Set a low default quality for encoding alpha. Ensure that Alpha quality at
  // lower methods (3 and below) is less than the threshold for triggering
  // costly 'BackwardReferencesTraceBackwards'.
  config.quality = 8.f * effort_level;
  assert(config.quality >= 0 && config.quality <= 100.f);

  // TODO(urvang): Temporary fix to avoid generating images that trigger
  // a decoder bug related to alpha with color cache.
  // See: https://code.google.com/p/webp/issues/detail?id=239
  // Need to re-enable this later.
  ok = (VP8LEncodeStream(&config, &picture, bw, 0 /*use_cache*/) == VP8_ENC_OK);
  WebPPictureFree(&picture);
  ok = ok && !bw->error_;
  if (!ok) {
    VP8LBitWriterWipeOut(bw);
    return 0;
  }
  return 1;
}
Example #16
static int libwebp_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                                const AVFrame *frame, int *got_packet)
{
    LibWebPContext *s  = avctx->priv_data;
    WebPPicture *pic = NULL;
    AVFrame *alt_frame = NULL;
    WebPMemoryWriter mw = { 0 };

    int ret = ff_libwebp_get_frame(avctx, s, frame, &alt_frame, &pic);
    if (ret < 0)
        goto end;

    WebPMemoryWriterInit(&mw);
    pic->custom_ptr = &mw;
    pic->writer     = WebPMemoryWrite;

    ret = WebPEncode(&s->config, pic);
    if (!ret) {
        av_log(avctx, AV_LOG_ERROR, "WebPEncode() failed with error: %d\n",
               pic->error_code);
        ret = ff_libwebp_error_to_averror(pic->error_code);
        goto end;
    }

    ret = ff_alloc_packet(pkt, mw.size);
    if (ret < 0)
        goto end;
    memcpy(pkt->data, mw.mem, mw.size);

    pkt->flags |= AV_PKT_FLAG_KEY;
    *got_packet = 1;

end:
#if (WEBP_ENCODER_ABI_VERSION > 0x0203)
    WebPMemoryWriterClear(&mw);
#else
    free(mw.mem); /* must use free() according to libwebp documentation */
#endif
    WebPPictureFree(pic);
    av_freep(&pic);
    av_frame_free(&alt_frame);

    return ret;
}
Example #17
bool
WebpOutput::close()
{
    if (! m_file)
        return true;   // already closed

    bool ok = true;
    if (m_spec.tile_width) {
        // We've been emulating tiles; now dump as scanlines.
        ASSERT (m_uncompressed_image.size());
        ok &= write_scanlines (m_spec.y, m_spec.y+m_spec.height, 0,
                               m_spec.format, &m_uncompressed_image[0]);
        std::vector<uint8_t>().swap (m_uncompressed_image);
    }

    WebPPictureFree(&m_webp_picture);
    fclose(m_file);
    m_file = NULL;
    return true;
}
Example #18
static gboolean
_cairo_surface_write_as_webp (cairo_surface_t  *image,
			      char            **buffer,
			      gsize            *buffer_size,
			      char            **keys,
			      char            **values,
			      GError          **error)
{
	gboolean       lossless;
	int            quality;
	int            method;
	WebPConfig     config;
	CairoWebpData *cairo_webp_data;
	WebPPicture    pic;

	lossless = TRUE;
	quality = 75;
	method = 4;

	if (keys && *keys) {
		char **kiter = keys;
		char **viter = values;

		while (*kiter) {
			if (strcmp (*kiter, "lossless") == 0) {
				if (*viter == NULL) {
					g_set_error (error,
						     G_IO_ERROR,
						     G_IO_ERROR_INVALID_DATA,
						     "Must specify a value for the 'lossless' option");
					return FALSE;
				}

				lossless = atoi (*viter);

				if (lossless < 0 || lossless > 1) {
					g_set_error (error,
						     G_IO_ERROR,
						     G_IO_ERROR_INVALID_DATA,
						     "Invalid value set for the 'lossless' option of the WebP saver");
					return FALSE;
				}
			}
			else if (strcmp (*kiter, "quality") == 0) {
				if (*viter == NULL) {
					g_set_error (error,
						     G_IO_ERROR,
						     G_IO_ERROR_INVALID_DATA,
						     "Must specify a quality value to the WebP saver");
					return FALSE;
				}

				quality = atoi (*viter);

				if (quality < 0 || quality > 100) {
					g_set_error (error,
						     G_IO_ERROR,
						     G_IO_ERROR_INVALID_DATA,
						     "Unsupported quality value passed to the WebP saver");
					return FALSE;
				}
			}
			else if (strcmp (*kiter, "method") == 0) {
				if (*viter == NULL) {
					g_set_error (error,
						     G_IO_ERROR,
						     G_IO_ERROR_INVALID_DATA,
						     "Must specify a method value to the WebP saver");
					return FALSE;
				}

				method = atoi (*viter);

				if (method < 0 || method > 6) {
					g_set_error (error,
						     G_IO_ERROR,
						     G_IO_ERROR_INVALID_DATA,
						     "Unsupported method value passed to the WebP saver");
					return FALSE;
				}
			}
			else {
				g_warning ("Bad option name '%s' passed to the WebP saver", *kiter);
				return FALSE;
			}

			++kiter;
			++viter;
		}
	}

	if (! WebPConfigInit (&config)) {
		g_set_error (error,
			     G_IO_ERROR,
			     G_IO_ERROR_INVALID_DATA,
			     "Version error");
		return FALSE;
	}

	config.lossless = lossless;
	config.quality = quality;
	config.method = method;

	if (! WebPValidateConfig (&config)) {
		g_set_error (error,
			     G_IO_ERROR,
			     G_IO_ERROR_INVALID_DATA,
			     "Config error");
		return FALSE;
	}

	cairo_webp_data = g_new0 (CairoWebpData, 1);
	cairo_webp_data->error = error;
	cairo_webp_data->buffer_data = gth_buffer_data_new ();
	cairo_webp_data->success = FALSE;

	WebPPictureInit (&pic);
	pic.width = cairo_image_surface_get_width (image);
	pic.height = cairo_image_surface_get_height (image);
	pic.writer = cairo_webp_writer_func;
	pic.custom_ptr = cairo_webp_data;
	pic.use_argb = TRUE;

	if (_WebPPictureImportCairoSurface (&pic, image)) {
		int ok = WebPEncode (&config, &pic);
		WebPPictureFree (&pic);

		if (cairo_webp_data->success && ! ok) {
			g_set_error (cairo_webp_data->error,
				     G_IO_ERROR,
				     G_IO_ERROR_INVALID_DATA,
				     "Encoding error: %d", pic.error_code);
			cairo_webp_data->success = FALSE;
		}
	}
	else {
		g_set_error (cairo_webp_data->error,
			     G_IO_ERROR,
			     G_IO_ERROR_INVALID_DATA,
			     "Memory error");
		cairo_webp_data->success = FALSE;
	}

	if (cairo_webp_data->success)
		gth_buffer_data_get (cairo_webp_data->buffer_data, buffer, buffer_size);

	_cairo_webp_data_destroy (cairo_webp_data);

	return TRUE;
}
Example #19
// Optimize the image frame for WebP and encode it.
static int OptimizeAndEncodeFrame(
    const WebPConfig* const config, const WebPFrameRect* const gif_rect,
    WebPPicture* const curr, WebPPicture* const prev_canvas,
    WebPPicture* const curr_canvas, WebPPicture* const sub_image,
    WebPMuxFrameInfo* const info, WebPFrameCache* const cache,
    int is_first_frame /* 1 for the very first frame of the animation */) {
  WebPFrameRect rect = *gif_rect;

  // Snap to even offsets (and adjust dimensions if needed).
  rect.width += (rect.x_offset & 1);
  rect.height += (rect.y_offset & 1);
  rect.x_offset &= ~1;
  rect.y_offset &= ~1;

  if (!WebPPictureView(curr, rect.x_offset, rect.y_offset,
                       rect.width, rect.height, sub_image)) {
    return 0;
  }
  info->x_offset = rect.x_offset;
  info->y_offset = rect.y_offset;

  if (is_first_frame || WebPUtilIsKeyFrame(curr, &rect, prev_canvas)) {
    // Add this as a key frame.
    if (!WebPFrameCacheAddFrame(cache, config, NULL, NULL, info, sub_image)) {
      return 0;
    }
    // Update prev_canvas by simply copying from 'curr'.
    WebPUtilCopyPixels(curr, prev_canvas);
  } else {
    if (!config->lossless) {
      // For lossy compression, it's better to replace transparent pixels of
      // 'curr' with actual RGB values, whenever possible.
      WebPUtilReduceTransparency(prev_canvas, &rect, curr);
      WebPUtilFlattenSimilarBlocks(prev_canvas, &rect, curr);
    }
    if (!WebPFrameCacheShouldTryKeyFrame(cache)) {
      // Add this as a frame rectangle.
      if (!WebPFrameCacheAddFrame(cache, config, info, sub_image, NULL, NULL)) {
        return 0;
      }
      // Update prev_canvas by blending 'curr' into it.
      WebPUtilBlendPixels(curr, gif_rect, prev_canvas);
    } else {
      WebPPicture full_image;
      WebPMuxFrameInfo full_image_info;
      int ok;

      // Convert to a key frame.
      WebPUtilCopyPixels(curr, curr_canvas);
      WebPUtilConvertToKeyFrame(prev_canvas, &rect, curr_canvas);
      if (!WebPPictureView(curr_canvas, rect.x_offset, rect.y_offset,
                           rect.width, rect.height, &full_image)) {
        return 0;
      }
      full_image_info = *info;
      full_image_info.x_offset = rect.x_offset;
      full_image_info.y_offset = rect.y_offset;

      // Add both variants to cache: frame rectangle and key frame.
      ok = WebPFrameCacheAddFrame(cache, config, info, sub_image,
                                  &full_image_info, &full_image);
      WebPPictureFree(&full_image);
      if (!ok) return 0;

      // Update prev_canvas by simply copying from 'curr_canvas'.
      WebPUtilCopyPixels(curr_canvas, prev_canvas);
    }
  }
  return 1;
}
Example #20
int main(int argc, const char *argv[]) {
  WebPPicture pic1, pic2;
  int ret = 1;
  float disto[5];
  size_t size1 = 0, size2 = 0;
  int type = 0;
  int c;
  int help = 0;
  int keep_alpha = 0;
  int scale = 0;
  int use_gray = 0;
  const char* name1 = NULL;
  const char* name2 = NULL;
  const char* output = NULL;

  if (!WebPPictureInit(&pic1) || !WebPPictureInit(&pic2)) {
    fprintf(stderr, "Can't init pictures\n");
    return 1;
  }

  for (c = 1; c < argc; ++c) {
    if (!strcmp(argv[c], "-ssim")) {
      type = 1;
    } else if (!strcmp(argv[c], "-psnr")) {
      type = 0;
    } else if (!strcmp(argv[c], "-alpha")) {
      keep_alpha = 1;
    } else if (!strcmp(argv[c], "-scale")) {
      scale = 1;
    } else if (!strcmp(argv[c], "-gray")) {
      use_gray = 1;
    } else if (!strcmp(argv[c], "-h")) {
      help = 1;
      ret = 0;
    } else if (!strcmp(argv[c], "-o")) {
      if (++c == argc) {
        fprintf(stderr, "missing file name after %s option.\n", argv[c - 1]);
        goto End;
      }
      output = argv[c];
    } else if (name1 == NULL) {
      name1 = argv[c];
    } else {
      name2 = argv[c];
    }
  }
  if (help || name1 == NULL || name2 == NULL) {
    if (!help) {
      fprintf(stderr, "Error: missing arguments.\n");
    }
    Help();
    goto End;
  }
  if ((size1 = ReadPicture(name1, &pic1, 1)) == 0) {
    goto End;
  }
  if ((size2 = ReadPicture(name2, &pic2, 1)) == 0) {
    goto End;
  }
  if (!keep_alpha) {
    WebPBlendAlpha(&pic1, 0x00000000);
    WebPBlendAlpha(&pic2, 0x00000000);
  }

  if (!WebPPictureDistortion(&pic1, &pic2, type, disto)) {
    fprintf(stderr, "Error while computing the distortion.\n");
    goto End;
  }
  printf("%u %.2f    %.2f %.2f %.2f %.2f\n",
         (unsigned int)size1, disto[4],
         disto[0], disto[1], disto[2], disto[3]);

  if (output != NULL) {
    uint8_t* data = NULL;
    size_t data_size = 0;
    if (pic1.use_argb != pic2.use_argb) {
      fprintf(stderr, "Pictures are not in the same argb format. "
                      "Can't save the difference map.\n");
      goto End;
    }
    if (pic1.use_argb) {
      int n;
      fprintf(stderr, "max differences per channel: ");
      for (n = 0; n < 3; ++n) {    // skip the alpha channel
        const int range = (type == 1) ?
          SSIMScaleChannel((uint8_t*)pic1.argb + n, pic1.argb_stride * 4,
                           (const uint8_t*)pic2.argb + n, pic2.argb_stride * 4,
                           4, pic1.width, pic1.height, scale) :
          DiffScaleChannel((uint8_t*)pic1.argb + n, pic1.argb_stride * 4,
                           (const uint8_t*)pic2.argb + n, pic2.argb_stride * 4,
                           4, pic1.width, pic1.height, scale);
        if (range < 0) fprintf(stderr, "\nError computing diff map\n");
        fprintf(stderr, "[%d]", range);
      }
      fprintf(stderr, "\n");
      if (use_gray) ConvertToGray(&pic1);
    } else {
      fprintf(stderr, "Can only compute the difference map in ARGB format.\n");
      goto End;
    }
    data_size = WebPEncodeLosslessBGRA((const uint8_t*)pic1.argb,
                                       pic1.width, pic1.height,
                                       pic1.argb_stride * 4,
                                       &data);
    if (data_size == 0) {
      fprintf(stderr, "Error during lossless encoding.\n");
      goto End;
    }
    ret = ImgIoUtilWriteFile(output, data, data_size) ? 0 : 1;
    WebPFree(data);
    if (ret) goto End;
  }
  ret = 0;

 End:
  WebPPictureFree(&pic1);
  WebPPictureFree(&pic2);
  return ret;
}
Example #21
/**
Encode a FIBITMAP to a WebP image
@param hmem Memory output stream, containing on return the encoded image
@param dib The FIBITMAP to encode
@param flags FreeImage save flags
@return Returns TRUE if successful, returns FALSE otherwise
*/
static BOOL
EncodeImage(FIMEMORY *hmem, FIBITMAP *dib, int flags) {
	WebPPicture picture;	// Input buffer
	WebPConfig config;		// Coding parameters

	BOOL bIsFlipped = FALSE;

	try {
		const unsigned width = FreeImage_GetWidth(dib);
		const unsigned height = FreeImage_GetHeight(dib);
		const unsigned bpp = FreeImage_GetBPP(dib);
		const unsigned pitch = FreeImage_GetPitch(dib);

		// check image type
		FREE_IMAGE_TYPE image_type = FreeImage_GetImageType(dib);

		if( !((image_type == FIT_BITMAP) && ((bpp == 24) || (bpp == 32))) )  {
			throw FI_MSG_ERROR_UNSUPPORTED_FORMAT;
		}

		// check format limits
		if(MAX(width, height) > WEBP_MAX_DIMENSION) {
			FreeImage_OutputMessageProc(s_format_id, "Unsupported image size: width x height = %d x %d", width, height);
			return FALSE;
		}

		// Initialize output I/O
		if(WebPPictureInit(&picture) == 1) {
			picture.writer = WebP_MemoryWriter;
			picture.custom_ptr = hmem;
			picture.width = (int)width;
			picture.height = (int)height;
		} else {
			throw "Couldn't initialize WebPPicture";
		}

		// --- Set encoding parameters ---

		// Initialize encoding parameters to default values
		WebPConfigInit(&config);

		// quality/speed trade-off (0=fast, 6=slower-better)
		config.method = 6;

		if((flags & WEBP_LOSSLESS) == WEBP_LOSSLESS) {
			// lossless encoding
			config.lossless = 1;
			picture.use_argb = 1;
		} else if((flags & 0x7F) > 0) {
			// lossy encoding
			config.lossless = 0;
			// quality is between 1 (smallest file) and 100 (biggest) - default to 75
			config.quality = (float)(flags & 0x7F);
			if(config.quality > 100) {
				config.quality = 100;
			}
		}

		// validate encoding parameters
		if(WebPValidateConfig(&config) == 0) {
			throw "Failed to initialize encoder";
		}

		// --- Perform encoding ---
		
		// Invert dib scanlines
		bIsFlipped = FreeImage_FlipVertical(dib);


		// convert dib buffer to output stream

		const BYTE *bits = FreeImage_GetBits(dib);

#if FREEIMAGE_COLORORDER == FREEIMAGE_COLORORDER_BGR
		switch(bpp) {
			case 24:
				WebPPictureImportBGR(&picture, bits, pitch);
				break;
			case 32:
				WebPPictureImportBGRA(&picture, bits, pitch);
				break;
		}
#else
		switch(bpp) {
			case 24:
				WebPPictureImportRGB(&picture, bits, pitch);
				break;
			case 32:
				WebPPictureImportRGBA(&picture, bits, pitch);
				break;
		}

#endif // FREEIMAGE_COLORORDER == FREEIMAGE_COLORORDER_BGR

		if(!WebPEncode(&config, &picture)) {
			throw "Failed to encode image";
		}

		WebPPictureFree(&picture);

		if(bIsFlipped) {
			// invert dib scanlines
			FreeImage_FlipVertical(dib);
		}

		return TRUE;

	} catch (const char* text) {

		WebPPictureFree(&picture);

		if(bIsFlipped) {
			// invert dib scanlines
			FreeImage_FlipVertical(dib);
		}

		if(NULL != text) {
			FreeImage_OutputMessageProc(s_format_id, text);
		}
	}

	return FALSE;
}
Example #22
status_t
WebPTranslator::_TranslateFromBits(BPositionIO* stream, BMessage* ioExtension,
		uint32 outType, BPositionIO* target)
{
	if (!outType)
		outType = WEBP_IMAGE_FORMAT;
	if (outType != WEBP_IMAGE_FORMAT)
		return B_NO_TRANSLATOR;

	TranslatorBitmap bitsHeader;
	status_t status;

	status = identify_bits_header(stream, NULL, &bitsHeader);
	if (status != B_OK)
		return status;

	if (bitsHeader.colors == B_CMAP8) {
		// TODO: support any colorspace via an intermediate colorspace conversion
		printf("Error! Colorspace not supported\n");
		return B_NO_TRANSLATOR;
	}

	int32 bitsBytesPerPixel = 0;
	switch (bitsHeader.colors) {
		case B_RGB32:
		case B_RGB32_BIG:
		case B_RGBA32:
		case B_RGBA32_BIG:
		case B_CMY32:
		case B_CMYA32:
		case B_CMYK32:
			bitsBytesPerPixel = 4;
			break;

		case B_RGB24:
		case B_RGB24_BIG:
		case B_CMY24:
			bitsBytesPerPixel = 3;
			break;

		case B_RGB16:
		case B_RGB16_BIG:
		case B_RGBA15:
		case B_RGBA15_BIG:
		case B_RGB15:
		case B_RGB15_BIG:
			bitsBytesPerPixel = 2;
			break;

		case B_CMAP8:
		case B_GRAY8:
			bitsBytesPerPixel = 1;
			break;

		default:
			return B_ERROR;
	}

	if (bitsBytesPerPixel < 3) {
		// TODO support
		return B_NO_TRANSLATOR;
	}

	WebPPicture picture;
	WebPConfig config;

	if (!WebPPictureInit(&picture) || !WebPConfigInit(&config)) {
		printf("Error! Version mismatch!\n");
  		return B_ERROR;
	}

	WebPPreset preset = (WebPPreset)fSettings->SetGetInt32(WEBP_SETTING_PRESET);
	config.quality = (float)fSettings->SetGetInt32(WEBP_SETTING_QUALITY);

	if (!WebPConfigPreset(&config, (WebPPreset)preset, config.quality)) {
		printf("Error! Could initialize configuration with preset.");
		return B_ERROR;
	}

	config.method = fSettings->SetGetInt32(WEBP_SETTING_METHOD);
	config.preprocessing = fSettings->SetGetBool(WEBP_SETTING_PREPROCESSING);

	if (!WebPValidateConfig(&config)) {
		printf("Error! Invalid configuration.\n");
 		return B_ERROR;
	}

	picture.width = bitsHeader.bounds.IntegerWidth() + 1;
	picture.height = bitsHeader.bounds.IntegerHeight() + 1;

	int stride = bitsHeader.rowBytes;
	int bitsSize = picture.height * stride;
	uint8* bits = (uint8*)malloc(bitsSize);
	if (bits == NULL)
		return B_NO_MEMORY;

	if (stream->Read(bits, bitsSize) != bitsSize) {
		free(bits);
		return B_IO_ERROR;
	}

	if (!WebPPictureImportBGRA(&picture, bits, stride)) {
		printf("Error! WebPEncode() failed!\n");
		free(bits);
		return B_ERROR;
	}
	free(bits);

	picture.writer = _EncodedWriter;
	picture.custom_ptr = target;
	picture.stats = NULL;

	if (!WebPEncode(&config, &picture)) {
		printf("Error! WebPEncode() failed!\n");
		status = B_NO_TRANSLATOR;
	} else
		status = B_OK;

	WebPPictureFree(&picture);
	return status;
}
Example #23
int webp_encode(const char *in_file, const char *out_file, const FfiWebpEncodeConfig *encode_config) {
  int return_value = -1;
  FILE *out = NULL;
  int keep_alpha = 1;
  WebPPicture picture;
  WebPConfig config;
  // Initialize the picture and config first: WebPPictureInit()/WebPConfigInit()
  // reset every field to its default, so they must run before any option is set.
  if (!WebPPictureInit(&picture) ||
      !WebPConfigInit(&config)) {
    //fprintf(stderr, "Error! Version mismatch!\n");
    return 1;
  }
  // OPTIONS BEGIN
  if (encode_config->lossless == 0 || encode_config->lossless == 1){
    config.lossless = encode_config->lossless;
  }
  if (encode_config->quality >= 0 && encode_config->quality <= 100){
    config.quality = encode_config->quality;
  }
  if (encode_config->method >= 0 && encode_config->method <= 6){
    config.method = encode_config->method;
  }
  if (encode_config->target_size > 0){
    config.target_size = encode_config->target_size;
  }
  if (encode_config->target_PSNR > 0){
    config.target_PSNR = encode_config->target_PSNR;
  }
  if (encode_config->segments >= 0 && encode_config->segments <= 4){
    config.segments = encode_config->segments;
  }
  if (encode_config->sns_strength >= 0 && encode_config->sns_strength <= 100){
    config.sns_strength = encode_config->sns_strength;
  }
  if (encode_config->filter_strength >= 0 && encode_config->filter_strength <= 100){
    config.filter_strength = encode_config->filter_strength;
  }
  if (encode_config->filter_sharpness >= 0 && encode_config->filter_sharpness <= 7){
    config.filter_sharpness = encode_config->filter_sharpness;
  }
  if (encode_config->filter_type == 0 || encode_config->filter_type == 1){
    config.filter_type = encode_config->filter_type;
  }
  if (encode_config->autofilter == 0 || encode_config->autofilter == 1){
    config.autofilter = encode_config->autofilter;
  }
  if (encode_config->alpha_compression == 0 || encode_config->alpha_compression == 1){
    config.alpha_compression = encode_config->alpha_compression;
  }
  if (encode_config->alpha_filtering >= 0 && encode_config->alpha_filtering <= 2){
    config.alpha_filtering = encode_config->alpha_filtering;
  }
  if (encode_config->alpha_quality >= 0 && encode_config->alpha_quality <= 100){
    config.alpha_quality = encode_config->alpha_quality;
  }
  if (encode_config->pass >= 0 && encode_config->pass <= 10){
    config.pass = encode_config->pass;
  }
  if (encode_config->show_compressed >= 0){
    config.show_compressed = encode_config->show_compressed;
  }
  if (encode_config->preprocessing == 0 || encode_config->preprocessing == 1){
    config.preprocessing = encode_config->preprocessing;
  }
  if (encode_config->partitions >= 0 && encode_config->partitions <= 3){
    config.partitions = encode_config->partitions;
  }
  if (encode_config->partition_limit >= 0 && encode_config->partition_limit <= 100){
    config.partition_limit = encode_config->partition_limit;
  }
  if ((encode_config->width | encode_config->height) > 0){
    picture.width = encode_config->width;
    picture.height = encode_config->height;
  }
  // OPTIONS END
  
  if (!WebPValidateConfig(&config)) {
    //fprintf(stderr, "Error! Invalid configuration.\n");
    return_value = 2;
    goto Error;
  }
  
  if (!UtilReadPicture(in_file, &picture, keep_alpha)) {
    //fprintf(stderr, "Error! Cannot read input picture file '%s'\n", in_file);
    return_value = 3;
    goto Error;
  }
  
  out = fopen(out_file, "wb");
  if (out == NULL) {
    //fprintf(stderr, "Error! Cannot open output file '%s'\n", out_file);
    return_value = 4;
    goto Error;
  }
  picture.writer = EncodeWriter;
  picture.custom_ptr = (void*)out;
  
  if ((encode_config->crop_w | encode_config->crop_h) > 0){
    if (!WebPPictureView(&picture, encode_config->crop_x, encode_config->crop_y, encode_config->crop_w, encode_config->crop_h, &picture)) {
      //fprintf(stderr, "Error! Cannot crop picture\n");
      return_value = 5;
      goto Error;
    }
  }
  
  if ((encode_config->resize_w | encode_config->resize_h) > 0) {
    if (!WebPPictureRescale(&picture, encode_config->resize_w, encode_config->resize_h)) {
      //fprintf(stderr, "Error! Cannot resize picture\n");
      return_value = 6;
      goto Error;
    }
  }
  
  if (picture.extra_info_type > 0) {
    AllocExtraInfo(&picture);
  }
  
  if (!WebPEncode(&config, &picture)) {
    //fprintf(stderr, "Error! Cannot encode picture as WebP\n");
    return_value = 7;
    goto Error;
  }
  return_value = 0;
  
Error:
  free(picture.extra_info);
  WebPPictureFree(&picture);
  if (out != NULL) {
    fclose(out);
  }

  return return_value;
}
Example #24
GDALDataset *
WEBPDataset::CreateCopy( const char * pszFilename, GDALDataset *poSrcDS,
                        int bStrict, char ** papszOptions,
                        GDALProgressFunc pfnProgress, void * pProgressData )

{
    int  nBands = poSrcDS->GetRasterCount();
    int  nXSize = poSrcDS->GetRasterXSize();
    int  nYSize = poSrcDS->GetRasterYSize();

/* -------------------------------------------------------------------- */
/*      WEBP library initialization                                     */
/* -------------------------------------------------------------------- */

    WebPPicture sPicture;
    if (!WebPPictureInit(&sPicture))
    {
        CPLError(CE_Failure, CPLE_AppDefined, "WebPPictureInit() failed");
        return NULL;
    }

/* -------------------------------------------------------------------- */
/*      Some rudimentary checks                                         */
/* -------------------------------------------------------------------- */

    if( nXSize > 16383 || nYSize > 16383 )
    {
        CPLError( CE_Failure, CPLE_NotSupported,
                  "WEBP maximum image dimensions are 16383 x 16383.");

        return NULL;
    }

    if( nBands != 3 )
    {
        CPLError( CE_Failure, CPLE_NotSupported,
                  "WEBP driver doesn't support %d bands. Must be 3 (RGB) bands.",
                  nBands );

        return NULL;
    }

    GDALDataType eDT = poSrcDS->GetRasterBand(1)->GetRasterDataType();

    if( eDT != GDT_Byte )
    {
        CPLError( (bStrict) ? CE_Failure : CE_Warning, CPLE_NotSupported,
                  "WEBP driver doesn't support data type %s. "
                  "Only eight bit byte bands supported.",
                  GDALGetDataTypeName(
                      poSrcDS->GetRasterBand(1)->GetRasterDataType()) );

        if (bStrict)
            return NULL;
    }

/* -------------------------------------------------------------------- */
/*      What options has the user selected?                             */
/* -------------------------------------------------------------------- */
    float fQuality = 75.0f;
    const char* pszQUALITY = CSLFetchNameValue(papszOptions, "QUALITY"); 
    if( pszQUALITY != NULL )
    {
        fQuality = (float) atof(pszQUALITY);
        if( fQuality < 0.0f || fQuality > 100.0f )
        {
            CPLError( CE_Failure, CPLE_IllegalArg,
                      "%s=%s is not a legal value.", "QUALITY", pszQUALITY);
            return NULL;
        }
    }

    WebPPreset nPreset = WEBP_PRESET_DEFAULT;
    const char* pszPRESET = CSLFetchNameValueDef(papszOptions, "PRESET", "DEFAULT");
    if (EQUAL(pszPRESET, "DEFAULT"))
        nPreset = WEBP_PRESET_DEFAULT;
    else if (EQUAL(pszPRESET, "PICTURE"))
        nPreset = WEBP_PRESET_PICTURE;
    else if (EQUAL(pszPRESET, "PHOTO"))
        nPreset = WEBP_PRESET_PHOTO;
    else if (EQUAL(pszPRESET, "PICTURE"))
        nPreset = WEBP_PRESET_PICTURE;
    else if (EQUAL(pszPRESET, "DRAWING"))
        nPreset = WEBP_PRESET_DRAWING;
    else if (EQUAL(pszPRESET, "ICON"))
        nPreset = WEBP_PRESET_ICON;
    else if (EQUAL(pszPRESET, "TEXT"))
        nPreset = WEBP_PRESET_TEXT;
    else
    {
        CPLError( CE_Failure, CPLE_IllegalArg,
                  "%s=%s is not a legal value.", "PRESET", pszPRESET );
        return NULL;
    }

    WebPConfig sConfig;
    if (!WebPConfigInitInternal(&sConfig, nPreset, fQuality, WEBP_ENCODER_ABI_VERSION))
    {
        CPLError(CE_Failure, CPLE_AppDefined, "WebPConfigInit() failed");
        return NULL;
    }


#define FETCH_AND_SET_OPTION_INT(name, fieldname, minval, maxval) \
{ \
    const char* pszVal = CSLFetchNameValue(papszOptions, name); \
    if (pszVal != NULL) \
    { \
        sConfig.fieldname = atoi(pszVal); \
        if (sConfig.fieldname < minval || sConfig.fieldname > maxval) \
        { \
            CPLError( CE_Failure, CPLE_IllegalArg, \
                      "%s=%s is not a legal value.", name, pszVal ); \
            return NULL; \
        } \
    } \
}

    FETCH_AND_SET_OPTION_INT("TARGETSIZE", target_size, 0, INT_MAX);

    const char* pszPSNR = CSLFetchNameValue(papszOptions, "PSNR");
    if (pszPSNR)
    {
        sConfig.target_PSNR = atof(pszPSNR);
        if (sConfig.target_PSNR < 0)
        {
            CPLError( CE_Failure, CPLE_IllegalArg,
                      "PSNR=%s is not a legal value.", pszPSNR );
            return NULL;
        }
    }

    FETCH_AND_SET_OPTION_INT("METHOD", method, 0, 6);
    FETCH_AND_SET_OPTION_INT("SEGMENTS", segments, 1, 4);
    FETCH_AND_SET_OPTION_INT("SNS_STRENGTH", sns_strength, 0, 100);
    FETCH_AND_SET_OPTION_INT("FILTER_STRENGTH", filter_strength, 0, 100);
    FETCH_AND_SET_OPTION_INT("FILTER_SHARPNESS", filter_sharpness, 0, 7);
    FETCH_AND_SET_OPTION_INT("FILTER_TYPE", filter_type, 0, 1);
    FETCH_AND_SET_OPTION_INT("AUTOFILTER", autofilter, 0, 1);
    FETCH_AND_SET_OPTION_INT("PASS", pass, 1, 10);
    FETCH_AND_SET_OPTION_INT("PREPROCESSING", preprocessing, 0, 1);
    FETCH_AND_SET_OPTION_INT("PARTITIONS", partitions, 0, 3);

    if (!WebPValidateConfig(&sConfig))
    {
        CPLError(CE_Failure, CPLE_AppDefined, "WebPValidateConfig() failed");
        return NULL;
    }

/* -------------------------------------------------------------------- */
/*      Allocate memory                                                 */
/* -------------------------------------------------------------------- */
    GByte   *pabyBuffer;

    pabyBuffer = (GByte *) VSIMalloc( 3 * nXSize * nYSize );
    if (pabyBuffer == NULL)
    {
        return NULL;
    }

/* -------------------------------------------------------------------- */
/*      Create the dataset.                                             */
/* -------------------------------------------------------------------- */
    VSILFILE    *fpImage;

    fpImage = VSIFOpenL( pszFilename, "wb" );
    if( fpImage == NULL )
    {
        CPLError( CE_Failure, CPLE_OpenFailed,
                  "Unable to create WEBP file %s.\n",
                  pszFilename );
        VSIFree(pabyBuffer);
        return NULL;
    }

/* -------------------------------------------------------------------- */
/*      WEBP library settings                                           */
/* -------------------------------------------------------------------- */

    sPicture.colorspace = 0;
    sPicture.width = nXSize;
    sPicture.height = nYSize;
    sPicture.writer = WEBPDatasetWriter;
    sPicture.custom_ptr = fpImage;
    if (!WebPPictureAlloc(&sPicture))
    {
        CPLError(CE_Failure, CPLE_AppDefined, "WebPPictureAlloc() failed");
        VSIFree(pabyBuffer);
        VSIFCloseL( fpImage );
        return NULL;
    }

/* -------------------------------------------------------------------- */
/*      Acquire source imagery.                                         */
/* -------------------------------------------------------------------- */
    CPLErr      eErr = CE_None;

    eErr = poSrcDS->RasterIO( GF_Read, 0, 0, nXSize, nYSize,
                              pabyBuffer, nXSize, nYSize, GDT_Byte,
                              3, NULL,
                              3, 3 * nXSize, 1 );

/* -------------------------------------------------------------------- */
/*      Import and write to file                                        */
/* -------------------------------------------------------------------- */
    if (eErr == CE_None &&
        !WebPPictureImportRGB(&sPicture, pabyBuffer, 3 * nXSize))
    {
        CPLError(CE_Failure, CPLE_AppDefined, "WebPPictureImportRGB() failed");
        eErr = CE_Failure;
    }

    if (eErr == CE_None && !WebPEncode(&sConfig, &sPicture))
    {
        CPLError(CE_Failure, CPLE_AppDefined, "WebPEncode() failed");
        eErr = CE_Failure;
    }

/* -------------------------------------------------------------------- */
/*      Cleanup and close.                                              */
/* -------------------------------------------------------------------- */
    CPLFree( pabyBuffer );

    WebPPictureFree(&sPicture);

    VSIFCloseL( fpImage );

    if( eErr != CE_None )
    {
        VSIUnlink( pszFilename );
        return NULL;
    }

/* -------------------------------------------------------------------- */
/*      Re-open dataset, and copy any auxiliary PAM information.        */
/* -------------------------------------------------------------------- */
    GDALOpenInfo oOpenInfo(pszFilename, GA_ReadOnly);

    /* If outputting to stdout, we can't reopen it, so we'll return */
    /* a fake dataset to make the caller happy */
    CPLPushErrorHandler(CPLQuietErrorHandler);
    WEBPDataset *poDS = (WEBPDataset*) WEBPDataset::Open( &oOpenInfo );
    CPLPopErrorHandler();
    if( poDS )
    {
        poDS->CloneInfo( poSrcDS, GCIF_PAM_DEFAULT );
        return poDS;
    }

    return NULL;
}
Example #25
int
encodeWEBP(Vbitmap *vbitmap, Ychannel *channelout, YmagineFormatOptions *options)
{
  int rc = YMAGINE_ERROR;

#if HAVE_WEBP
  WebPConfig config;
  WebPPicture picture;
  WebPPreset preset = WEBP_PRESET_PHOTO; // One of DEFAULT, PICTURE, PHOTO, DRAWING, ICON or TEXT
  int width;
  int height;
  int pitch;
  int quality;
  const unsigned char *pixels;
  int colormode;

  /*
   * List of encoding options for WebP config
   *   int lossless;           // Lossless encoding (0=lossy(default), 1=lossless).
   *   float quality;          // between 0 (smallest file) and 100 (biggest)
   *   int method;             // quality/speed trade-off (0=fast, 6=slower-better)
   *
   *   WebPImageHint image_hint;  // Hint for image type (lossless only for now).
   *
   *   // Parameters related to lossy compression only:
   *   int target_size;        // if non-zero, set the desired target size in bytes.
   *                           // Takes precedence over the 'compression' parameter.
   *   float target_PSNR;      // if non-zero, specifies the minimal distortion to
   *                           // try to achieve. Takes precedence over target_size.
   *   int segments;           // maximum number of segments to use, in [1..4]
   *   int sns_strength;       // Spatial Noise Shaping. 0=off, 100=maximum.
   *   int filter_strength;    // range: [0 = off .. 100 = strongest]
   *   int filter_sharpness;   // range: [0 = off .. 7 = least sharp]
   *   int filter_type;        // filtering type: 0 = simple, 1 = strong (only used
   *                           // if filter_strength > 0 or autofilter > 0)
   *   int autofilter;         // Auto adjust filter's strength [0 = off, 1 = on]
   *   int alpha_compression;  // Algorithm for encoding the alpha plane (0 = none,
   *                           // 1 = compressed with WebP lossless). Default is 1.
   *   int alpha_filtering;    // Predictive filtering method for alpha plane.
   *                           //  0: none, 1: fast, 2: best. Default is 1.
   *   int alpha_quality;      // Between 0 (smallest size) and 100 (lossless).
   *                           // Default is 100.
   *   int pass;               // number of entropy-analysis passes (in [1..10]).
   *
   *   int show_compressed;    // if true, export the compressed picture back.
   *                           // In-loop filtering is not applied.
   *   int preprocessing;      // preprocessing filter (0=none, 1=segment-smooth)
   *   int partitions;         // log2(number of token partitions) in [0..3]
   *                           // Default is set to 0 for easier progressive decoding.
   *   int partition_limit;    // quality degradation allowed to fit the 512k limit on
   *                           // prediction modes coding (0: no degradation,
   *                           // 100: maximum possible degradation).
   */

  colormode = VbitmapColormode(vbitmap);

  if (colormode != VBITMAP_COLOR_RGBA && colormode != VBITMAP_COLOR_RGB) {
    ALOGD("currently only support RGB, RGBA webp encoding");
    return rc;
  }

  quality = YmagineFormatOptions_normalizeQuality(options);

  if (!WebPConfigPreset(&config, preset, (float) quality)) {
    return YMAGINE_ERROR;   // version error
  }

  if (options && options->accuracy >= 0) {
    int method = options->accuracy / 15;
    if (method > 6) {
      method = 6;
    }
    config.method = method;
  }

  if (!WebPValidateConfig(&config)) {
    // parameter ranges verification failed
    return YMAGINE_ERROR;
  }

  rc = VbitmapLock(vbitmap);
  if (rc < 0) {
    ALOGE("AndroidBitmap_lockPixels() failed");
    return YMAGINE_ERROR;
  }

  width = VbitmapWidth(vbitmap);
  height = VbitmapHeight(vbitmap);
  pitch = VbitmapPitch(vbitmap);
  pixels = VbitmapBuffer(vbitmap);

  if (WebPPictureInit(&picture)) {
    picture.use_argb = 1;
    picture.width = width;
    picture.height = height;
    picture.writer = WebPYchannelWrite;
    picture.custom_ptr = channelout;

    if (colormode == VBITMAP_COLOR_RGBA) {
      WebPPictureImportRGBA(&picture, pixels, pitch);
    } else {
      WebPPictureImportRGB(&picture, pixels, pitch);
    }

    WebPEncode(&config, &picture);

    WebPPictureFree(&picture);
  }

  VbitmapUnlock(vbitmap);
  rc = YMAGINE_OK;
#endif

  return rc;
}
Example #26
bool QWebpHandler::write(const QImage &image)
{
    if (image.isNull()) {
        qWarning() << "source image is null.";
        return false;
    }

    QImage srcImage = image;
#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN
    if (srcImage.format() != QImage::Format_ARGB32)
        srcImage = srcImage.convertToFormat(QImage::Format_ARGB32);
#else /* Q_BIG_ENDIAN */
    if (srcImage.format() != QImage::Format_RGBA8888)
        srcImage = srcImage.convertToFormat(QImage::Format_RGBA8888);
#endif

    WebPPicture picture;
    WebPConfig config;

    if (!WebPPictureInit(&picture) || !WebPConfigInit(&config)) {
        qWarning() << "failed to init webp picture and config";
        return false;
    }

    picture.width = srcImage.width();
    picture.height = srcImage.height();
    picture.use_argb = 1;
#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN
    if (!WebPPictureImportBGRA(&picture, srcImage.bits(), srcImage.bytesPerLine())) {
#else /* Q_BIG_ENDIAN */
    if (!WebPPictureImportRGBA(&picture, srcImage.bits(), srcImage.bytesPerLine())) {
#endif
        qWarning() << "failed to import image data to webp picture.";

        WebPPictureFree(&picture);
        return false;
    }

    config.lossless = m_lossless;
    config.quality = m_quality;
    picture.writer = pictureWriter;
    picture.custom_ptr = device();

    if (!WebPEncode(&config, &picture)) {
        qWarning() << "failed to encode webp picture, error code: " << picture.error_code;
        WebPPictureFree(&picture);
        return false;
    }

    WebPPictureFree(&picture);

    return true;
}

QVariant QWebpHandler::option(ImageOption option) const
{
    if (!supportsOption(option) || !ensureScanned())
        return QVariant();

    switch (option) {
    case Quality:
        return m_quality;
    case Size:
        return QSize(m_width, m_height);
    case Animation:
        return (m_flags & ANIMATION_FLAG) == ANIMATION_FLAG;
    default:
        return QVariant();
    }
}

void QWebpHandler::setOption(ImageOption option, const QVariant &value)
{
    switch (option) {
    case Quality:
        m_quality = qBound(0, value.toInt(), 100);
        m_lossless = (m_quality >= 100);
        return;
    default:
        break;
    }
    return QImageIOHandler::setOption(option, value);
}

bool QWebpHandler::supportsOption(ImageOption option) const
{
    return option == Quality
        || option == Size
        || option == Animation;
}

QByteArray QWebpHandler::name() const
{
    return QByteArrayLiteral("webp");
}

int QWebpHandler::imageCount() const
{
    if (!ensureScanned())
        return 0;

    if ((m_flags & ANIMATION_FLAG) == 0)
        return 1;

    return m_frameCount;
}

int QWebpHandler::currentImageNumber() const
{
    if (!ensureScanned())
        return 0;

    return m_iter.frame_num;
}

QRect QWebpHandler::currentImageRect() const
{
    if (!ensureScanned())
        return QRect();

    return QRect(m_iter.x_offset, m_iter.y_offset, m_iter.width, m_iter.height);
}

bool QWebpHandler::jumpToImage(int imageNumber)
{
    if (!ensureScanned())
        return false;

    WebPDemuxReleaseIterator(&m_iter);
    return WebPDemuxGetFrame(m_demuxer, imageNumber, &m_iter);
}

bool QWebpHandler::jumpToNextImage()
{
    if (!ensureScanned())
        return false;

    return WebPDemuxNextFrame(&m_iter);
}

int QWebpHandler::loopCount() const
{
    if (!ensureScanned() || (m_flags & ANIMATION_FLAG) == 0)
        return 0;

    return m_loop;
}

int QWebpHandler::nextImageDelay() const
{
    if (!ensureScanned())
        return 0;

    return m_iter.duration;
}
Example #27
GDALDataset *
WEBPDataset::CreateCopy( const char * pszFilename, GDALDataset *poSrcDS,
                        int bStrict, char ** papszOptions,
                        GDALProgressFunc pfnProgress, void * pProgressData )

{
    int  nBands = poSrcDS->GetRasterCount();
    int  nXSize = poSrcDS->GetRasterXSize();
    int  nYSize = poSrcDS->GetRasterYSize();

/* -------------------------------------------------------------------- */
/*      WEBP library initialization                                     */
/* -------------------------------------------------------------------- */

    WebPPicture sPicture;
    if (!WebPPictureInit(&sPicture))
    {
        CPLError(CE_Failure, CPLE_AppDefined, "WebPPictureInit() failed");
        return NULL;
    }

/* -------------------------------------------------------------------- */
/*      Some rudimentary checks                                         */
/* -------------------------------------------------------------------- */

    if( nXSize > 16383 || nYSize > 16383 )
    {
        CPLError( CE_Failure, CPLE_NotSupported,
                  "WEBP maximum image dimensions are 16383 x 16383.");

        return NULL;
    }

    if( nBands != 3
#if WEBP_ENCODER_ABI_VERSION >= 0x0100
        && nBands != 4
#endif
        )
    {
        CPLError( CE_Failure, CPLE_NotSupported,
                  "WEBP driver doesn't support %d bands. Must be 3 (RGB) "
#if WEBP_ENCODER_ABI_VERSION >= 0x0100
                  "or 4 (RGBA) "
#endif
                  "bands.",
                  nBands );

        return NULL;
    }

    GDALDataType eDT = poSrcDS->GetRasterBand(1)->GetRasterDataType();

    if( eDT != GDT_Byte )
    {
        CPLError( (bStrict) ? CE_Failure : CE_Warning, CPLE_NotSupported,
                  "WEBP driver doesn't support data type %s. "
                  "Only eight bit byte bands supported.",
                  GDALGetDataTypeName(
                      poSrcDS->GetRasterBand(1)->GetRasterDataType()) );

        if (bStrict)
            return NULL;
    }

/* -------------------------------------------------------------------- */
/*      What options has the user selected?                             */
/* -------------------------------------------------------------------- */
    float fQuality = 75.0f;
    const char* pszQUALITY = CSLFetchNameValue(papszOptions, "QUALITY"); 
    if( pszQUALITY != NULL )
    {
        fQuality = (float) CPLAtof(pszQUALITY);
        if( fQuality < 0.0f || fQuality > 100.0f )
        {
            CPLError( CE_Failure, CPLE_IllegalArg,
                      "%s=%s is not a legal value.", "QUALITY", pszQUALITY);
            return NULL;
        }
    }

    WebPPreset nPreset = WEBP_PRESET_DEFAULT;
    const char* pszPRESET = CSLFetchNameValueDef(papszOptions, "PRESET", "DEFAULT");
    if (EQUAL(pszPRESET, "DEFAULT"))
        nPreset = WEBP_PRESET_DEFAULT;
    else if (EQUAL(pszPRESET, "PICTURE"))
        nPreset = WEBP_PRESET_PICTURE;
    else if (EQUAL(pszPRESET, "PHOTO"))
        nPreset = WEBP_PRESET_PHOTO;
    else if (EQUAL(pszPRESET, "PICTURE"))
        nPreset = WEBP_PRESET_PICTURE;
    else if (EQUAL(pszPRESET, "DRAWING"))
        nPreset = WEBP_PRESET_DRAWING;
    else if (EQUAL(pszPRESET, "ICON"))
        nPreset = WEBP_PRESET_ICON;
    else if (EQUAL(pszPRESET, "TEXT"))
        nPreset = WEBP_PRESET_TEXT;
    else
    {
        CPLError( CE_Failure, CPLE_IllegalArg,
                  "%s=%s is not a legal value.", "PRESET", pszPRESET );
        return NULL;
    }

    WebPConfig sConfig;
    if (!WebPConfigInitInternal(&sConfig, nPreset, fQuality, WEBP_ENCODER_ABI_VERSION))
    {
        CPLError(CE_Failure, CPLE_AppDefined, "WebPConfigInit() failed");
        return NULL;
    }


#define FETCH_AND_SET_OPTION_INT(name, fieldname, minval, maxval) \
{ \
    const char* pszVal = CSLFetchNameValue(papszOptions, name); \
    if (pszVal != NULL) \
    { \
        sConfig.fieldname = atoi(pszVal); \
        if (sConfig.fieldname < minval || sConfig.fieldname > maxval) \
        { \
            CPLError( CE_Failure, CPLE_IllegalArg, \
                      "%s=%s is not a legal value.", name, pszVal ); \
            return NULL; \
        } \
    } \
}

    FETCH_AND_SET_OPTION_INT("TARGETSIZE", target_size, 0, INT_MAX);

    const char* pszPSNR = CSLFetchNameValue(papszOptions, "PSNR");
    if (pszPSNR)
    {
        sConfig.target_PSNR = CPLAtof(pszPSNR);
        if (sConfig.target_PSNR < 0)
        {
            CPLError( CE_Failure, CPLE_IllegalArg,
                      "PSNR=%s is not a legal value.", pszPSNR );
            return NULL;
        }
    }

    FETCH_AND_SET_OPTION_INT("METHOD", method, 0, 6);
    FETCH_AND_SET_OPTION_INT("SEGMENTS", segments, 1, 4);
    FETCH_AND_SET_OPTION_INT("SNS_STRENGTH", sns_strength, 0, 100);
    FETCH_AND_SET_OPTION_INT("FILTER_STRENGTH", filter_strength, 0, 100);
    FETCH_AND_SET_OPTION_INT("FILTER_SHARPNESS", filter_sharpness, 0, 7);
    FETCH_AND_SET_OPTION_INT("FILTER_TYPE", filter_type, 0, 1);
    FETCH_AND_SET_OPTION_INT("AUTOFILTER", autofilter, 0, 1);
    FETCH_AND_SET_OPTION_INT("PASS", pass, 1, 10);
    FETCH_AND_SET_OPTION_INT("PREPROCESSING", preprocessing, 0, 1);
    FETCH_AND_SET_OPTION_INT("PARTITIONS", partitions, 0, 3);
#if WEBP_ENCODER_ABI_VERSION >= 0x0002
    FETCH_AND_SET_OPTION_INT("PARTITION_LIMIT", partition_limit, 0, 100);
#endif
#if WEBP_ENCODER_ABI_VERSION >= 0x0100
    sConfig.lossless = CSLFetchBoolean(papszOptions, "LOSSLESS", FALSE);
    if (sConfig.lossless)
        sPicture.use_argb = 1;
#endif

    if (!WebPValidateConfig(&sConfig))
    {
        CPLError(CE_Failure, CPLE_AppDefined, "WebPValidateConfig() failed");
        return NULL;
    }

/* -------------------------------------------------------------------- */
/*      Allocate memory                                                 */
/* -------------------------------------------------------------------- */
    GByte   *pabyBuffer;

    pabyBuffer = (GByte *) VSIMalloc( nBands * nXSize * nYSize );
    if (pabyBuffer == NULL)
    {
        return NULL;
    }

/* -------------------------------------------------------------------- */
/*      Create the dataset.                                             */
/* -------------------------------------------------------------------- */
    VSILFILE    *fpImage;

    fpImage = VSIFOpenL( pszFilename, "wb" );
    if( fpImage == NULL )
    {
        CPLError( CE_Failure, CPLE_OpenFailed,
                  "Unable to create WEBP file %s.\n",
                  pszFilename );
        VSIFree(pabyBuffer);
        return NULL;
    }

    WebPUserData sUserData;
    sUserData.fp = fpImage;
    sUserData.pfnProgress = pfnProgress ? pfnProgress : GDALDummyProgress;
    sUserData.pProgressData = pProgressData;

/* -------------------------------------------------------------------- */
/*      WEBP library settings                                           */
/* -------------------------------------------------------------------- */

    sPicture.width = nXSize;
    sPicture.height = nYSize;
    sPicture.writer = WEBPDatasetWriter;
    sPicture.custom_ptr = &sUserData;
#if WEBP_ENCODER_ABI_VERSION >= 0x0100
    sPicture.progress_hook = WEBPDatasetProgressHook;
#endif
    if (!WebPPictureAlloc(&sPicture))
    {
        CPLError(CE_Failure, CPLE_AppDefined, "WebPPictureAlloc() failed");
        VSIFree(pabyBuffer);
        VSIFCloseL( fpImage );
        return NULL;
    }

/* -------------------------------------------------------------------- */
/*      Acquire source imagery.                                         */
/* -------------------------------------------------------------------- */
    CPLErr      eErr = CE_None;

    eErr = poSrcDS->RasterIO( GF_Read, 0, 0, nXSize, nYSize,
                              pabyBuffer, nXSize, nYSize, GDT_Byte,
                              nBands, NULL,
                              nBands, nBands * nXSize, 1, NULL );

/* -------------------------------------------------------------------- */
/*      Import and write to file                                        */
/* -------------------------------------------------------------------- */
#if WEBP_ENCODER_ABI_VERSION >= 0x0100
    if (eErr == CE_None && nBands == 4)
    {
        if (!WebPPictureImportRGBA(&sPicture, pabyBuffer, nBands * nXSize))
        {
            CPLError(CE_Failure, CPLE_AppDefined, "WebPPictureImportRGBA() failed");
            eErr = CE_Failure;
        }
    }
    else
#endif
    if (eErr == CE_None &&
        !WebPPictureImportRGB(&sPicture, pabyBuffer, nBands * nXSize))
    {
        CPLError(CE_Failure, CPLE_AppDefined, "WebPPictureImportRGB() failed");
        eErr = CE_Failure;
    }

    if (eErr == CE_None && !WebPEncode(&sConfig, &sPicture))
    {
        const char* pszErrorMsg = NULL;
#if WEBP_ENCODER_ABI_VERSION >= 0x0100
        switch(sPicture.error_code)
        {
            case VP8_ENC_ERROR_OUT_OF_MEMORY: pszErrorMsg = "Out of memory"; break;
            case VP8_ENC_ERROR_BITSTREAM_OUT_OF_MEMORY: pszErrorMsg = "Out of memory while flushing bits"; break;
            case VP8_ENC_ERROR_NULL_PARAMETER: pszErrorMsg = "A pointer parameter is NULL"; break;
            case VP8_ENC_ERROR_INVALID_CONFIGURATION: pszErrorMsg = "Configuration is invalid"; break;
            case VP8_ENC_ERROR_BAD_DIMENSION: pszErrorMsg = "Picture has invalid width/height"; break;
            case VP8_ENC_ERROR_PARTITION0_OVERFLOW: pszErrorMsg = "Partition is bigger than 512k. Try using less SEGMENTS, or increase PARTITION_LIMIT value"; break;
            case VP8_ENC_ERROR_PARTITION_OVERFLOW: pszErrorMsg = "Partition is bigger than 16M"; break;
            case VP8_ENC_ERROR_BAD_WRITE: pszErrorMsg = "Error while flushing bytes"; break;
            case VP8_ENC_ERROR_FILE_TOO_BIG: pszErrorMsg = "File is bigger than 4G"; break;
            case VP8_ENC_ERROR_USER_ABORT: pszErrorMsg = "User interrupted"; break;
            default: break;
        }
#endif
        if (pszErrorMsg)
            CPLError(CE_Failure, CPLE_AppDefined, "WebPEncode() failed : %s", pszErrorMsg);
        else
            CPLError(CE_Failure, CPLE_AppDefined, "WebPEncode() failed");
        eErr = CE_Failure;
    }

/* -------------------------------------------------------------------- */
/*      Cleanup and close.                                              */
/* -------------------------------------------------------------------- */
    CPLFree( pabyBuffer );

    WebPPictureFree(&sPicture);

    VSIFCloseL( fpImage );

    if( eErr != CE_None )
    {
        VSIUnlink( pszFilename );
        return NULL;
    }

/* -------------------------------------------------------------------- */
/*      Re-open dataset, and copy any auxiliary pam information.         */
/* -------------------------------------------------------------------- */
    GDALOpenInfo oOpenInfo(pszFilename, GA_ReadOnly);

    /* If outputting to stdout, we can't reopen it, so we'll return */
    /* a fake dataset to make the caller happy */
    CPLPushErrorHandler(CPLQuietErrorHandler);
    WEBPDataset *poDS = (WEBPDataset*) WEBPDataset::Open( &oOpenInfo );
    CPLPopErrorHandler();
    if( poDS )
    {
        poDS->CloneInfo( poSrcDS, GCIF_PAM_DEFAULT );
        return poDS;
    }

    return NULL;
}
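
CreateCopy() above is normally reached through the generic GDAL copy entry points rather than called directly. A sketch using the GDAL C API, with placeholder file names and the same creation options the function parses above:

// Minimal sketch: drive the WEBP driver's CreateCopy() through the GDAL C API.
// "input.tif" / "output.webp" are placeholder names.
#include "gdal.h"
#include "cpl_string.h"

int main()
{
    GDALAllRegister();
    GDALDatasetH hSrc = GDALOpen("input.tif", GA_ReadOnly);
    if (hSrc == NULL)
        return 1;

    char **papszOpts = NULL;
    papszOpts = CSLSetNameValue(papszOpts, "QUALITY", "85");   // parsed as fQuality above
    papszOpts = CSLSetNameValue(papszOpts, "PRESET", "PHOTO"); // parsed as nPreset above

    GDALDriverH hDrv = GDALGetDriverByName("WEBP");
    GDALDatasetH hDst = GDALCreateCopy(hDrv, "output.webp", hSrc,
                                       FALSE, papszOpts, GDALTermProgress, NULL);
    CSLDestroy(papszOpts);
    if (hDst != NULL)
        GDALClose(hDst);
    GDALClose(hSrc);
    return hDst != NULL ? 0 : 1;
}
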
Exemplo n.º 28
0
/**
 * Convert bitmap object from Android code to WebP format. Save to file.
 */
jint Java_com_tbliss_android_seniorproject_webpconv_WebPConv_doConvJniGraphics2( JNIEnv* env,
																				 jobject javaThis,
																				 jobject jbitmap,
																				 jfloat jqualityFactor,
																				 jstring filename) 
{
	int ret = 0;
	float cqualityFactor;
	int stride, width, height;
	void* pixels; // pointer to address of bitmap
	int outputSize = 0;
	AndroidBitmapInfo bitmapInfo;
	uint8_t* outputPointer;
	uint8_t* cbitmapPointer;
	WebPConfig config;
	WebPMemoryWriter wrt;
	size_t dataWritten = 0;
	int bytesWritten = 0;
	FILE* fileout = NULL;
	const char* fname	= (*env)->GetStringUTFChars(env, filename, NULL);
	
	// Get Bitmap info (height/width/stride)
	if ((ret = AndroidBitmap_getInfo(env, jbitmap, &bitmapInfo)) < 0){
		LOGV("Could not get Bitmap info. error=%d", ret);
		return 0;
	}
	width = (int)bitmapInfo.width;
	height = (int)bitmapInfo.height;
	stride = (int)bitmapInfo.stride; // width * 4
	LOGV("Bitmap: width=%d, height=%d, stride=%d, format=%d", width, height, stride, bitmapInfo.format);
	
	// Lock Bitmap pixels
	if ((ret = AndroidBitmap_lockPixels(env, jbitmap, &pixels)) < 0) {
		LOGV("AndroidBitmap_lockPixels() failed ! error=%d", ret);
		return 0;
	}

	cbitmapPointer = (uint8_t*)pixels;
	outputSize = stride * height;
	LOGV("output_size: %d", outputSize);
	outputPointer = (uint8_t*) malloc(outputSize);
	cqualityFactor = (float)jqualityFactor;
	
	// Setup a config
    if (!WebPConfigPreset(&config, WEBP_PRESET_PICTURE, cqualityFactor)) {
	    LOGV("WebPConfigPreset failed");
        return 0;   // version error
    }
    
    // ... additional tuning
	config.method = 1;
	LOGV("config.method = %d", config.method);
    
    if (WebPValidateConfig(&config) != 1) {
	    LOGV("Error with config");
	}

    // Setup the input data
    WebPPicture pic;
    if (!WebPPictureInit(&pic)) {
	    LOGV("WebPPictureInit failed");
        return 0;  // version error
    }

    pic.width = width;
    pic.height = height;
  	
    if (!WebPPictureImportRGBA(&pic, cbitmapPointer, stride)) {
		LOGV("WebPPictureImportRGB failed");
		return 0;
	}
	
    // Set up a byte-output write method. WebPMemoryWriter, for instance.
    pic.writer = MyMemoryWriter;
    pic.custom_ptr = &wrt;
    //InitMemoryWriter(&wrt);
	wrt.mem = &outputPointer;
	wrt.size = &dataWritten;
	wrt.max_size = outputSize;
	
    // Compress!
    ret = WebPEncode(&config, &pic);   // ok = 0 => error occurred!
	if (!ret) {
	    LOGV("ret == 0, WebPEncode fail");
	}
  
    WebPPictureFree(&pic);  // must be called independently of the 'ok' result.

	// Write to phone
	fileout = fopen(fname, "wb");
	if(!fileout){
		LOGV("cannot open output file %s", fname);
		return 0;
	}
	bytesWritten = fwrite(outputPointer, 1, dataWritten, fileout);
	LOGV("bytesWritten: %d", bytesWritten);
	
	// Close output file and unlock Bitmap pixels
	fclose(fileout);
	AndroidBitmap_unlockPixels(env, jbitmap);
	free(outputPointer);
	(*env)->ReleaseStringUTFChars(env, filename, fname);
	
	return dataWritten;
}
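
The example above fills the WebPMemoryWriter fields by hand and routes them through a custom MyMemoryWriter callback that is not shown. Current libwebp versions ship a memory writer that manages its own buffer; a standalone sketch of the same encode-to-file step under that assumption (helper name and behaviour are illustrative, not the example's actual code):

// Standalone sketch: encode an RGBA buffer to a file using libwebp's
// built-in WebPMemoryWriter / WebPMemoryWrite instead of a hand-rolled writer.
#include <stdio.h>
#include <stdlib.h>
#include "webp/encode.h"

static int encode_rgba_to_file(const uint8_t *rgba, int width, int height,
                               int stride, float quality, const char *path)
{
    WebPConfig config;
    WebPPicture pic;
    WebPMemoryWriter writer;
    int ok = 0;

    if (!WebPConfigPreset(&config, WEBP_PRESET_PICTURE, quality) || !WebPPictureInit(&pic))
        return 0;  // version mismatch

    pic.width = width;
    pic.height = height;
    WebPMemoryWriterInit(&writer);       // libwebp-managed growable output buffer
    pic.writer = WebPMemoryWrite;
    pic.custom_ptr = &writer;

    if (WebPPictureImportRGBA(&pic, rgba, stride) && WebPEncode(&config, &pic)) {
        FILE *f = fopen(path, "wb");
        if (f != NULL) {
            ok = fwrite(writer.mem, 1, writer.size, f) == writer.size;
            fclose(f);
        }
    }
    WebPPictureFree(&pic);               // required regardless of the encode result
    free(writer.mem);                    // or WebPMemoryWriterClear() on newer libwebp
    return ok;
}
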
static GstFlowReturn
gst_webp_enc_handle_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstWebpEnc *enc = GST_WEBP_ENC (encoder);
  GstBuffer *out_buffer = NULL;
  GstVideoFrame vframe;

  GST_LOG_OBJECT (enc, "got new frame");

  gst_webp_set_picture_params (enc);

  if (!gst_video_frame_map (&vframe, &enc->input_state->info,
          frame->input_buffer, GST_MAP_READ))
    return GST_FLOW_ERROR;

  if (!enc->use_argb) {
    enc->webp_picture.y = GST_VIDEO_FRAME_COMP_DATA (&vframe, 0);
    enc->webp_picture.u = GST_VIDEO_FRAME_COMP_DATA (&vframe, 1);
    enc->webp_picture.v = GST_VIDEO_FRAME_COMP_DATA (&vframe, 2);

    enc->webp_picture.y_stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, 0);
    enc->webp_picture.uv_stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, 1);

  } else {
    switch (enc->rgb_format) {
      case GST_VIDEO_FORMAT_RGB:
        WebPPictureImportRGB (&enc->webp_picture,
            GST_VIDEO_FRAME_COMP_DATA (&vframe, 0),
            GST_VIDEO_FRAME_COMP_STRIDE (&vframe, 0));
        break;
      case GST_VIDEO_FORMAT_RGBA:
        WebPPictureImportRGBA (&enc->webp_picture,
            GST_VIDEO_FRAME_COMP_DATA (&vframe, 0),
            GST_VIDEO_FRAME_COMP_STRIDE (&vframe, 0));
        break;
      default:
        break;
    }
  }

  if (WebPEncode (&enc->webp_config, &enc->webp_picture)) {
    WebPPictureFree (&enc->webp_picture);

    out_buffer = gst_buffer_new_allocate (NULL, enc->webp_writer.size, NULL);
    if (!out_buffer) {
      GST_ERROR_OBJECT (enc, "Failed to create output buffer");
      return GST_FLOW_ERROR;
    }
    gst_buffer_fill (out_buffer, 0, enc->webp_writer.mem,
        enc->webp_writer.size);
    free (enc->webp_writer.mem);
  } else {
    GST_ERROR_OBJECT (enc, "Failed to encode WebPPicture");
    return GST_FLOW_ERROR;
  }

  gst_video_frame_unmap (&vframe);
  frame->output_buffer = out_buffer;
  return gst_video_encoder_finish_frame (encoder, frame);
}
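
handle_frame() above consumes enc->webp_writer.mem and enc->webp_writer.size after WebPEncode(), so the memory writer and picture must be prepared beforehand; in this element that presumably happens in gst_webp_set_picture_params(), which is not part of this excerpt. A hedged sketch of that setup (the element's real function may differ):

/* Hedged sketch of the per-frame setup handle_frame() relies on; the real
 * gst_webp_set_picture_params() in the element may differ. */
static void
gst_webp_set_picture_params (GstWebpEnc * enc)
{
  WebPPictureInit (&enc->webp_picture);
  enc->webp_picture.use_argb = enc->use_argb;
  enc->webp_picture.width = GST_VIDEO_INFO_WIDTH (&enc->input_state->info);
  enc->webp_picture.height = GST_VIDEO_INFO_HEIGHT (&enc->input_state->info);

  /* Collect the bitstream in memory; handle_frame() copies webp_writer.mem
   * into a GstBuffer and then free()s it. */
  WebPMemoryWriterInit (&enc->webp_writer);
  enc->webp_picture.writer = WebPMemoryWrite;
  enc->webp_picture.custom_ptr = &enc->webp_writer;
}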