Example #1
static GstSample *
_gst_sample_copy (GstSample * sample)
{
  GstSample *copy;

  copy = gst_sample_new (sample->buffer, sample->caps, &sample->segment,
      (sample->info) ? gst_structure_copy (sample->info) : NULL);

  return copy;
}
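This is the copy function registered for the GstSample mini-object, so
applications reach it through the public gst_sample_copy() wrapper. A minimal
usage sketch, assuming a sample obtained elsewhere (e.g. from an appsink):

GstSample *copy;

/* gst_sample_copy() dispatches to the copy function above */
copy = gst_sample_copy (sample);
gst_sample_unref (sample);
/* ... work with 'copy' ... */
gst_sample_unref (copy);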
Example #2
GstSample *
gst_app_sink_pull_sample (GstAppSink * appsink)
{
  GstSample *sample = NULL;
  GstBuffer *buffer;
  GstAppSinkPrivate *priv;

  g_return_val_if_fail (GST_IS_APP_SINK (appsink), NULL);

  priv = appsink->priv;

  g_mutex_lock (&priv->mutex);

  while (TRUE) {
    GST_DEBUG_OBJECT (appsink, "trying to grab a buffer");
    if (!priv->started)
      goto not_started;

    if (priv->num_buffers > 0)
      break;

    if (priv->is_eos)
      goto eos;

    /* nothing to return, wait */
    GST_DEBUG_OBJECT (appsink, "waiting for a buffer");
    g_cond_wait (&priv->cond, &priv->mutex);
  }
  buffer = dequeue_buffer (appsink);
  GST_DEBUG_OBJECT (appsink, "we have a buffer %p", buffer);
  sample = gst_sample_new (buffer, priv->last_caps, &priv->last_segment, NULL);
  gst_buffer_unref (buffer);

  g_cond_signal (&priv->cond);
  g_mutex_unlock (&priv->mutex);

  return sample;

  /* special conditions */
eos:
  {
    GST_DEBUG_OBJECT (appsink, "we are EOS, return NULL");
    g_mutex_unlock (&priv->mutex);
    return NULL;
  }
not_started:
  {
    GST_DEBUG_OBJECT (appsink, "we are stopped, return NULL");
    g_mutex_unlock (&priv->mutex);
    return NULL;
  }
}
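A typical consumer loop for the function above, sketched under the assumption
that appsink points at a GstAppSink in a playing pipeline; the NULL return
covers both the eos and not_started paths:

/* Sketch: drain samples until EOS or shutdown. */
while (TRUE) {
  GstSample *sample = gst_app_sink_pull_sample (appsink);
  GstBuffer *buf;

  if (sample == NULL)
    break;                      /* EOS or the appsink was stopped */

  /* The sample keeps references to buffer, caps and segment;
   * gst_sample_get_buffer() only borrows the buffer. */
  buf = gst_sample_get_buffer (sample);
  g_print ("got buffer of %" G_GSIZE_FORMAT " bytes\n",
      gst_buffer_get_size (buf));

  gst_sample_unref (sample);
}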
Example #3
static void
got_buf_cb (GstElement * sink, GstBuffer * new_buf, GstPad * pad,
    GstSample ** p_old_sample)
{
  GstCaps *caps;

  caps = gst_pad_get_current_caps (pad);

  if (*p_old_sample)
    gst_sample_unref (*p_old_sample);
  *p_old_sample = gst_sample_new (new_buf, caps, NULL, NULL);

  gst_caps_unref (caps);
}
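This callback signature matches fakesink's "handoff" signal, with the extra
user-data pointer used to hold the most recent sample. A hookup sketch,
assuming sink is a fakesink created by the caller:

GstSample *last_sample = NULL;

/* fakesink only emits "handoff" when signal-handoffs is enabled */
g_object_set (sink, "signal-handoffs", TRUE, NULL);
g_signal_connect (sink, "handoff", G_CALLBACK (got_buf_cb), &last_sample);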
Example #4
/**
 * gst_app_sink_pull_preroll:
 * @appsink: a #GstAppSink
 *
 * Get the last preroll sample in @appsink. This was the sample that caused the
 * appsink to preroll in the PAUSED state. This sample can be pulled many times
 * and remains available to the application even after EOS.
 *
 * This function is typically used when dealing with a pipeline in the PAUSED
 * state. Calling this function after doing a seek will give the sample right
 * after the seek position.
 *
 * Note that the preroll sample will also be returned as the first sample
 * when calling gst_app_sink_pull_sample().
 *
 * If an EOS event was received before any buffers, this function returns
 * %NULL. Use gst_app_sink_is_eos() to check for the EOS condition.
 *
 * This function blocks until a preroll sample or EOS is received or the appsink
 * element is set to the READY/NULL state.
 *
 * Returns: (transfer full): a #GstSample or %NULL when the appsink is stopped or EOS.
 *          Call gst_sample_unref() after usage.
 */
GstSample *
gst_app_sink_pull_preroll (GstAppSink * appsink)
{
  GstSample *sample = NULL;
  GstAppSinkPrivate *priv;

  g_return_val_if_fail (GST_IS_APP_SINK (appsink), NULL);

  priv = appsink->priv;

  g_mutex_lock (&priv->mutex);

  while (TRUE) {
    GST_DEBUG_OBJECT (appsink, "trying to grab a buffer");
    if (!priv->started)
      goto not_started;

    if (priv->preroll != NULL)
      break;

    if (priv->is_eos)
      goto eos;

    /* nothing to return, wait */
    GST_DEBUG_OBJECT (appsink, "waiting for the preroll buffer");
    g_cond_wait (&priv->cond, &priv->mutex);
  }
  sample =
      gst_sample_new (priv->preroll, priv->preroll_caps, &priv->preroll_segment,
      NULL);
  GST_DEBUG_OBJECT (appsink, "we have the preroll sample %p", sample);
  g_mutex_unlock (&priv->mutex);

  return sample;

  /* special conditions */
eos:
  {
    GST_DEBUG_OBJECT (appsink, "we are EOS, return NULL");
    g_mutex_unlock (&priv->mutex);
    return NULL;
  }
not_started:
  {
    GST_DEBUG_OBJECT (appsink, "we are stopped, return NULL");
    g_mutex_unlock (&priv->mutex);
    return NULL;
  }
}
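A caller-side sketch for the function above: bring a pipeline to PAUSED, wait
for the state change to complete, then inspect the preroll sample (pipeline
and appsink are assumed to be set up elsewhere):

GstSample *preroll;

gst_element_set_state (pipeline, GST_STATE_PAUSED);
gst_element_get_state (pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);

preroll = gst_app_sink_pull_preroll (GST_APP_SINK (appsink));
if (preroll != NULL) {
  gchar *str;

  /* e.g. inspect the caps the prerolled buffer was negotiated with */
  str = gst_caps_to_string (gst_sample_get_caps (preroll));
  g_print ("prerolled with caps %s\n", str);
  g_free (str);
  gst_sample_unref (preroll);
}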
Example #5
/**
 * gst_wrapper_camera_bin_src_imgsrc_probe:
 *
 * Buffer probe called before sending each buffer to the image queue.
 */
static GstPadProbeReturn
gst_wrapper_camera_bin_src_imgsrc_probe (GstPad * pad, GstPadProbeInfo * info,
    gpointer data)
{
  GstWrapperCameraBinSrc *self = GST_WRAPPER_CAMERA_BIN_SRC (data);
  GstBaseCameraSrc *camerasrc = GST_BASE_CAMERA_SRC (data);
  GstBuffer *buffer = GST_BUFFER (info->data);
  GstPadProbeReturn ret = GST_PAD_PROBE_DROP;

  GST_LOG_OBJECT (self, "Image probe, mode %d, capture count %d bufsize: %"
      G_GSIZE_FORMAT, camerasrc->mode, self->image_capture_count,
      gst_buffer_get_size (buffer));

  g_mutex_lock (&camerasrc->capturing_mutex);
  if (self->image_capture_count > 0) {
    GstSample *sample;
    GstCaps *caps;
    ret = GST_PAD_PROBE_OK;
    self->image_capture_count--;

    /* post preview */
    /* TODO This can likely be optimized if the viewfinder caps is the same as
     * the preview caps, avoiding another scaling of the same buffer. */
    GST_DEBUG_OBJECT (self, "Posting preview for image");
    caps = gst_pad_get_current_caps (pad);
    sample = gst_sample_new (buffer, caps, NULL, NULL);
    gst_base_camera_src_post_preview (camerasrc, sample);
    gst_caps_unref (caps);
    gst_sample_unref (sample);

    if (self->image_capture_count == 0) {
      gst_base_camera_src_finish_capture (camerasrc);
    }
  }
  g_mutex_unlock (&camerasrc->capturing_mutex);
  return ret;
}
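The probe above is installed with gst_pad_add_probe() as a buffer probe; a
sketch, where the target pad (self->imgsrc here) is an assumption about the
element's internals:

/* Sketch: attach the image-capture probe; returns an id for later removal. */
gulong probe_id = gst_pad_add_probe (self->imgsrc, GST_PAD_PROBE_TYPE_BUFFER,
    gst_wrapper_camera_bin_src_imgsrc_probe, self, NULL);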
Example #6
/**
 * gst_tag_image_data_to_image_sample:
 * @image_data: the (encoded) image
 * @image_data_len: the length of the encoded image data at @image_data
 * @image_type: type of the image, or #GST_TAG_IMAGE_TYPE_UNDEFINED. Pass
 *     #GST_TAG_IMAGE_TYPE_NONE if no image type should be set at all (e.g.
 *     for preview images)
 *
 * Helper function for tag-reading plugins to create a #GstSample suitable to
 * add to a #GstTagList as an image tag (such as #GST_TAG_IMAGE or
 * #GST_TAG_PREVIEW_IMAGE) from the encoded image data and an (optional) image
 * type.
 *
 * Background: cover art and other images in tags are usually stored as a
 * blob of binary image data, often accompanied by a MIME type or some other
 * content type string (e.g. 'png', 'jpeg', 'jpg'). Sometimes there is also an
 * 'image type' to indicate what kind of image this is (e.g. front cover,
 * back cover, artist, etc.). The image data may also be a URI to the image
 * rather than the image itself.
 *
 * In GStreamer, image tags are #GstSample<!-- -->s containing the raw image
 * data, with the sample caps describing the content type of the image
 * (e.g. image/jpeg, image/png, text/uri-list). The sample info may contain
 * an additional 'image-type' field of #GST_TYPE_TAG_IMAGE_TYPE to describe
 * the type of image (front cover, back cover etc.). #GST_TAG_PREVIEW_IMAGE
 * tags should not carry an image type, their type is already indicated via
 * the special tag name.
 *
 * This function will do various checks and typefind the encoded image
 * data (we can't trust the declared mime type).
 *
 * Returns: a newly-allocated image sample for use in tag lists, or NULL
 */
GstSample *
gst_tag_image_data_to_image_sample (const guint8 * image_data,
    guint image_data_len, GstTagImageType image_type)
{
  const gchar *name;
  GstBuffer *image;
  GstSample *sample;
  GstCaps *caps;
  GstMapInfo info;
  GstStructure *image_info = NULL;

  g_return_val_if_fail (image_data != NULL, NULL);
  g_return_val_if_fail (image_data_len > 0, NULL);
  g_return_val_if_fail (gst_tag_image_type_is_valid (image_type), NULL);

  GST_DEBUG ("image data len: %u bytes", image_data_len);

  /* allocate space for a NUL terminator for a URI too */
  image = gst_buffer_new_and_alloc (image_data_len + 1);
  if (image == NULL)
    goto alloc_failed;

  gst_buffer_map (image, &info, GST_MAP_WRITE);
  memcpy (info.data, image_data, image_data_len);
  info.data[image_data_len] = '\0';
  gst_buffer_unmap (image, &info);

  /* Find GStreamer media type, can't trust declared type */
  caps = gst_type_find_helper_for_buffer (NULL, image, NULL);

  if (caps == NULL)
    goto no_type;

  GST_DEBUG ("Found GStreamer media type: %" GST_PTR_FORMAT, caps);

  /* sanity check: make sure typefound/declared caps are either URI or image */
  name = gst_structure_get_name (gst_caps_get_structure (caps, 0));

  if (!g_str_has_prefix (name, "image/") &&
      !g_str_has_prefix (name, "video/") &&
      !g_str_equal (name, "text/uri-list")) {
    GST_DEBUG ("Unexpected image type '%s', ignoring image frame", name);
    goto error;
  }

  /* Decrease size by 1 if we don't have a URI list
   * to keep the original size of the image
   */
  if (!g_str_equal (name, "text/uri-list"))
    gst_buffer_set_size (image, image_data_len);

  if (image_type != GST_TAG_IMAGE_TYPE_NONE) {
    GST_LOG ("Setting image type: %d", image_type);
    image_info = gst_structure_new ("GstTagImageInfo",
        "image-type", GST_TYPE_TAG_IMAGE_TYPE, image_type, NULL);
  }
  sample = gst_sample_new (image, caps, NULL, image_info);
  gst_buffer_unref (image);
  gst_caps_unref (caps);

  return sample;

/* ERRORS */
no_type:
  {
    GST_DEBUG ("Could not determine GStreamer media type, ignoring image");
    /* fall through */
  }
error:
  {
    if (image)
      gst_buffer_unref (image);
    if (caps)
      gst_caps_unref (caps);
    return NULL;
  }
alloc_failed:
  {
    GST_WARNING ("failed to allocate buffer of %u bytes for image",
        image_data_len);
    return NULL;
  }

}
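A caller-side sketch for the helper above, turning encoded cover-art bytes
into a tag list entry; img_data and img_len are hypothetical (e.g. read from
an ID3v2 APIC frame):

GstSample *sample;

sample = gst_tag_image_data_to_image_sample (img_data, img_len,
    GST_TAG_IMAGE_TYPE_FRONT_COVER);
if (sample != NULL) {
  GstTagList *tags = gst_tag_list_new_empty ();

  /* GST_TAG_IMAGE is a GstSample-typed tag, so the sample goes in directly */
  gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_IMAGE, sample, NULL);
  gst_sample_unref (sample);
  /* ... merge or post 'tags', then gst_tag_list_unref (tags) ... */
}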
Example #7
/**
 * gst_wrapper_camera_bin_src_vidsrc_probe:
 *
 * Buffer probe called before sending each buffer to the video queue.
 */
static GstPadProbeReturn
gst_wrapper_camera_bin_src_vidsrc_probe (GstPad * pad, GstPadProbeInfo * info,
    gpointer data)
{
  GstWrapperCameraBinSrc *self = GST_WRAPPER_CAMERA_BIN_SRC (data);
  GstBaseCameraSrc *camerasrc = GST_BASE_CAMERA_SRC_CAST (self);
  GstPadProbeReturn ret = GST_PAD_PROBE_DROP;
  GstBuffer *buffer = GST_BUFFER (info->data);

  GST_LOG_OBJECT (self, "Video probe, mode %d, capture status %d",
      camerasrc->mode, self->video_rec_status);

  /* TODO do we want to lock for every buffer? */
  /*
   * Note that we can use gst_pad_push_event here because we are a buffer
   * probe.
   */
  /* TODO shouldn't access this directly */
  g_mutex_lock (&camerasrc->capturing_mutex);
  if (self->video_rec_status == GST_VIDEO_RECORDING_STATUS_DONE) {
    /* NOP */
  } else if (self->video_rec_status == GST_VIDEO_RECORDING_STATUS_STARTING) {
    GstClockTime ts;
    GstSegment segment;
    GstCaps *caps;
    GstSample *sample;

    GST_DEBUG_OBJECT (self, "Starting video recording");
    self->video_rec_status = GST_VIDEO_RECORDING_STATUS_RUNNING;

    ts = GST_BUFFER_TIMESTAMP (buffer);
    if (!GST_CLOCK_TIME_IS_VALID (ts))
      ts = 0;
    gst_segment_init (&segment, GST_FORMAT_TIME);
    segment.start = ts;
    gst_pad_push_event (self->vidsrc, gst_event_new_segment (&segment));

    /* post preview */
    GST_DEBUG_OBJECT (self, "Posting preview for video");
    caps = gst_pad_get_current_caps (pad);
    sample = gst_sample_new (buffer, caps, NULL, NULL);
    gst_base_camera_src_post_preview (camerasrc, sample);
    gst_caps_unref (caps);
    gst_sample_unref (sample);

    ret = GST_PAD_PROBE_OK;
  } else if (self->video_rec_status == GST_VIDEO_RECORDING_STATUS_FINISHING) {
    GstPad *peer;

    /* send eos */
    GST_DEBUG_OBJECT (self, "Finishing video recording, pushing eos");

    peer = gst_pad_get_peer (self->vidsrc);

    if (peer) {
      /* send to the peer as we don't want our pads with eos flag */
      gst_pad_send_event (peer, gst_event_new_eos ());
      gst_object_unref (peer);
    } else {
      GST_WARNING_OBJECT (camerasrc, "No peer pad for vidsrc");
    }
    self->video_rec_status = GST_VIDEO_RECORDING_STATUS_DONE;
    gst_base_camera_src_finish_capture (camerasrc);
  } else {
    ret = GST_PAD_PROBE_OK;
  }
  g_mutex_unlock (&camerasrc->capturing_mutex);
  return ret;
}
Example #8
static GstFlowReturn
gst_overlay_composition_sink_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
{
  GstOverlayComposition *self = GST_OVERLAY_COMPOSITION (parent);
  GstVideoOverlayComposition *compo = NULL;
  GstVideoOverlayCompositionMeta *upstream_compo_meta;

  if (gst_pad_check_reconfigure (self->srcpad)) {
    if (!gst_overlay_composition_negotiate (self, NULL)) {
      gst_pad_mark_reconfigure (self->srcpad);
      gst_buffer_unref (buffer);
      GST_OBJECT_LOCK (self->srcpad);
      if (GST_PAD_IS_FLUSHING (self->srcpad)) {
        GST_OBJECT_UNLOCK (self->srcpad);
        return GST_FLOW_FLUSHING;
      }
      GST_OBJECT_UNLOCK (self->srcpad);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }

  if (!self->sample) {
    self->sample = gst_sample_new (buffer, self->caps, &self->segment, NULL);
  } else {
    self->sample = gst_sample_make_writable (self->sample);
    gst_sample_set_buffer (self->sample, buffer);
    gst_sample_set_caps (self->sample, self->caps);
    gst_sample_set_segment (self->sample, &self->segment);
  }

  g_signal_emit (self, overlay_composition_signals[SIGNAL_DRAW], 0,
      self->sample, &compo);

  /* Don't store the buffer in the sample any longer, otherwise it will not
   * be writable below as we have one reference in the sample and one in
   * this function.
   *
   * If the sample is not writable itself, then the application kept a
   * reference to it.
   */
  if (gst_sample_is_writable (self->sample)) {
    gst_sample_set_buffer (self->sample, NULL);
  }

  if (!compo) {
    GST_DEBUG_OBJECT (self->sinkpad,
        "Application did not provide an overlay composition");
    return gst_pad_push (self->srcpad, buffer);
  }

  /* If upstream attached a meta, we can safely add our own things
   * in it. Upstream must've checked that downstream supports it */
  upstream_compo_meta = gst_buffer_get_video_overlay_composition_meta (buffer);
  if (upstream_compo_meta) {
    GstVideoOverlayComposition *merged_compo =
        gst_video_overlay_composition_copy (upstream_compo_meta->overlay);
    guint i, n;

    GST_DEBUG_OBJECT (self->sinkpad,
        "Appending to upstream overlay composition");

    n = gst_video_overlay_composition_n_rectangles (compo);
    for (i = 0; i < n; i++) {
      GstVideoOverlayRectangle *rect =
          gst_video_overlay_composition_get_rectangle (compo, i);
      gst_video_overlay_composition_add_rectangle (merged_compo, rect);
    }

    gst_video_overlay_composition_unref (compo);
    gst_video_overlay_composition_unref (upstream_compo_meta->overlay);
    upstream_compo_meta->overlay = merged_compo;
  } else if (self->attach_compo_to_buffer) {
    GST_DEBUG_OBJECT (self->sinkpad, "Attaching as meta");

    buffer = gst_buffer_make_writable (buffer);
    gst_buffer_add_video_overlay_composition_meta (buffer, compo);
    gst_video_overlay_composition_unref (compo);
  } else {
    GstVideoFrame frame;

    buffer = gst_buffer_make_writable (buffer);
    if (!gst_video_frame_map (&frame, &self->info, buffer, GST_MAP_READWRITE)) {
      gst_video_overlay_composition_unref (compo);
      goto map_failed;
    }

    gst_video_overlay_composition_blend (compo, &frame);

    gst_video_frame_unmap (&frame);
    gst_video_overlay_composition_unref (compo);
  }

  return gst_pad_push (self->srcpad, buffer);

map_failed:
  {
    GST_ERROR_OBJECT (self->sinkpad, "Failed to map buffer");
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }
}
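The chain function above hands the sample to a "draw" signal handler and
expects a GstVideoOverlayComposition (or NULL) back. A handler sketch under
that assumption; the rendering is reduced to a solid 16x16 block and the
names are hypothetical:

static GstVideoOverlayComposition *
on_draw (GstElement * overlay, GstSample * sample, gpointer user_data)
{
  const guint w = 16, h = 16;
  GstBuffer *pixels;
  GstVideoOverlayRectangle *rect;
  GstVideoOverlayComposition *compo;

  /* 4 bytes per pixel; a real handler would render text/graphics here */
  pixels = gst_buffer_new_and_alloc (w * h * 4);
  gst_buffer_memset (pixels, 0, 0x80, w * h * 4);
  gst_buffer_add_video_meta (pixels, GST_VIDEO_FRAME_FLAG_NONE,
      GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_RGB, w, h);

  rect = gst_video_overlay_rectangle_new_raw (pixels, 0, 0, w, h,
      GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
  compo = gst_video_overlay_composition_new (rect);

  gst_video_overlay_rectangle_unref (rect);
  gst_buffer_unref (pixels);

  return compo;
}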
Example #9
static GstFlowReturn
gst_pngdec_caps_create_and_set (GstPngDec * pngdec)
{
  GstFlowReturn ret = GST_FLOW_OK;
  gint bpc = 0, color_type;
  png_uint_32 width, height;
  GstVideoFormat format = GST_VIDEO_FORMAT_UNKNOWN;

  g_return_val_if_fail (GST_IS_PNGDEC (pngdec), GST_FLOW_ERROR);

  /* Get bits per channel */
  bpc = png_get_bit_depth (pngdec->png, pngdec->info);

  /* Get Color type */
  color_type = png_get_color_type (pngdec->png, pngdec->info);

  /* Add alpha channel if 16-bit depth, but not for GRAY images */
  if ((bpc > 8) && (color_type != PNG_COLOR_TYPE_GRAY)) {
    png_set_add_alpha (pngdec->png, 0xffff, PNG_FILLER_BEFORE);
    png_set_swap (pngdec->png);
  }
#if 0
  /* We used to have this HACK to reverse the outgoing bytes, but the problem
   * that originally required the hack seems to have been in videoconvert's
   * RGBA descriptions. It doesn't seem needed now that's fixed, but might
   * still be needed on big-endian systems, I'm not sure. J.S. 6/7/2007 */
  if (color_type == PNG_COLOR_TYPE_RGB_ALPHA)
    png_set_bgr (pngdec->png);
#endif

  /* Gray scale with alpha channel converted to RGB */
  if (color_type == PNG_COLOR_TYPE_GRAY_ALPHA) {
    GST_LOG_OBJECT (pngdec,
        "converting grayscale png with alpha channel to RGB");
    png_set_gray_to_rgb (pngdec->png);
  }

  /* Grayscale upscaled to 8 bits */
  if ((color_type == PNG_COLOR_TYPE_GRAY_ALPHA) ||
      (color_type == PNG_COLOR_TYPE_GRAY)) {
    if (bpc < 8) {              /* Convert to 8 bits */
      GST_LOG_OBJECT (pngdec, "converting grayscale image to 8 bits");
#if PNG_LIBPNG_VER < 10400
      png_set_gray_1_2_4_to_8 (pngdec->png);
#else
      png_set_expand_gray_1_2_4_to_8 (pngdec->png);
#endif
    }
  }

  /* Palette converted to RGB */
  if (color_type == PNG_COLOR_TYPE_PALETTE) {
    GST_LOG_OBJECT (pngdec, "converting palette png to RGB");
    png_set_palette_to_rgb (pngdec->png);
  }

  png_set_interlace_handling (pngdec->png);

  /* Update the info structure */
  png_read_update_info (pngdec->png, pngdec->info);

  /* Get IHDR header again after transformation settings */
  png_get_IHDR (pngdec->png, pngdec->info, &width, &height,
      &bpc, &pngdec->color_type, NULL, NULL, NULL);

  GST_LOG_OBJECT (pngdec, "this is a %dx%d PNG image", (gint) width,
      (gint) height);

  switch (pngdec->color_type) {
    case PNG_COLOR_TYPE_RGB:
      GST_LOG_OBJECT (pngdec, "we have no alpha channel, depth is 24 bits");
      if (bpc == 8)
        format = GST_VIDEO_FORMAT_RGB;
      break;
    case PNG_COLOR_TYPE_RGB_ALPHA:
      GST_LOG_OBJECT (pngdec,
          "we have an alpha channel, depth is 32 or 64 bits");
      if (bpc == 8)
        format = GST_VIDEO_FORMAT_RGBA;
      else if (bpc == 16)
        format = GST_VIDEO_FORMAT_ARGB64;
      break;
    case PNG_COLOR_TYPE_GRAY:
      GST_LOG_OBJECT (pngdec,
          "We have an gray image, depth is 8 or 16 (be) bits");
      if (bpc == 8)
        format = GST_VIDEO_FORMAT_GRAY8;
      else if (bpc == 16)
        format = GST_VIDEO_FORMAT_GRAY16_BE;
      break;
    default:
      break;
  }

  if (format == GST_VIDEO_FORMAT_UNKNOWN) {
    GST_ELEMENT_ERROR (pngdec, STREAM, NOT_IMPLEMENTED, (NULL),
        ("pngdec does not support this color type"));
    ret = GST_FLOW_NOT_SUPPORTED;
    goto beach;
  }

  /* Check if output state changed */
  if (pngdec->output_state) {
    GstVideoInfo *info = &pngdec->output_state->info;

    if (width == GST_VIDEO_INFO_WIDTH (info) &&
        height == GST_VIDEO_INFO_HEIGHT (info) &&
        GST_VIDEO_INFO_FORMAT (info) == format) {
      goto beach;
    }
    gst_video_codec_state_unref (pngdec->output_state);
  }
#ifdef HAVE_LIBPNG_1_5
  if ((pngdec->color_type & PNG_COLOR_MASK_COLOR)
      && !(pngdec->color_type & PNG_COLOR_MASK_PALETTE)
      && png_get_valid (pngdec->png, pngdec->info, PNG_INFO_iCCP)) {
    png_charp icc_name;
    png_bytep icc_profile;
    int icc_compression_type;
    png_uint_32 icc_proflen = 0;
    png_uint_32 ret = png_get_iCCP (pngdec->png, pngdec->info, &icc_name,
        &icc_compression_type, &icc_profile, &icc_proflen);

    if ((ret & PNG_INFO_iCCP)) {
      gpointer gst_icc_prof = g_memdup (icc_profile, icc_proflen);
      GstBuffer *tagbuffer = NULL;
      GstSample *tagsample = NULL;
      GstTagList *taglist = NULL;
      GstStructure *info = NULL;
      GstCaps *caps;

      GST_DEBUG_OBJECT (pngdec, "extracted ICC profile '%s' length=%i",
          icc_name, (guint32) icc_proflen);

      tagbuffer = gst_buffer_new_wrapped (gst_icc_prof, icc_proflen);

      caps = gst_caps_new_empty_simple ("application/vnd.iccprofile");
      info = gst_structure_new_empty ("application/vnd.iccprofile");

      if (icc_name)
        gst_structure_set (info, "icc-name", G_TYPE_STRING, icc_name, NULL);

      tagsample = gst_sample_new (tagbuffer, caps, NULL, info);

      gst_buffer_unref (tagbuffer);
      gst_caps_unref (caps);

      taglist = gst_tag_list_new_empty ();
      gst_tag_list_add (taglist, GST_TAG_MERGE_APPEND, GST_TAG_ATTACHMENT,
          tagsample, NULL);
      gst_sample_unref (tagsample);

      gst_video_decoder_merge_tags (GST_VIDEO_DECODER (pngdec), taglist,
          GST_TAG_MERGE_APPEND);
      gst_tag_list_unref (taglist);
    }
  }
#endif

  pngdec->output_state =
      gst_video_decoder_set_output_state (GST_VIDEO_DECODER (pngdec), format,
      width, height, pngdec->input_state);
  gst_video_decoder_negotiate (GST_VIDEO_DECODER (pngdec));
  GST_DEBUG ("Final %d %d", GST_VIDEO_INFO_WIDTH (&pngdec->output_state->info),
      GST_VIDEO_INFO_HEIGHT (&pngdec->output_state->info));

beach:
  return ret;
}
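On the receiving side, the sample attached above can be recovered from the
tag list; a sketch assuming taglist was taken from a TAG message or event:

GstSample *icc = NULL;

if (gst_tag_list_get_sample (taglist, GST_TAG_ATTACHMENT, &icc)) {
  GstBuffer *profile = gst_sample_get_buffer (icc);
  const GstStructure *info = gst_sample_get_info (icc);

  if (info != NULL && gst_structure_has_field (info, "icc-name"))
    g_print ("icc-name: %s\n", gst_structure_get_string (info, "icc-name"));
  g_print ("profile is %" G_GSIZE_FORMAT " bytes\n",
      gst_buffer_get_size (profile));
  gst_sample_unref (icc);
}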
Example #10
static GstFlowReturn
gst_skor_sink_transform_frame_ip (GstVideoFilter * vfilter, GstVideoFrame * frame)
{
  GstSkorSink *sink = GST_SKORSINK (vfilter);
  gpointer data;
  gint stride, height;
  zbar_image_t *image;
  const zbar_symbol_t *symbol;
  int n;

  image = zbar_image_create ();

  /* all formats we support start with an 8-bit Y plane. zbar doesn't need
   * to know about the chroma plane(s) */
  data = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  zbar_image_set_format (image, GST_MAKE_FOURCC ('Y', '8', '0', '0'));
  zbar_image_set_size (image, stride, height);
  zbar_image_set_data (image, (gpointer) data, stride * height, NULL);

  /* scan the image for barcodes */
  n = zbar_scan_image (sink->scanner, image);
  if (G_UNLIKELY (n == -1)) {
    GST_WARNING_OBJECT (sink, "Error trying to scan frame. Skipping");
    goto out;
  }
  if (n == 0)
    goto out;

  /* extract results */
  symbol = zbar_image_first_symbol (image);
  for (; symbol; symbol = zbar_symbol_next (symbol)) {
    zbar_symbol_type_t typ = zbar_symbol_get_type (symbol);
    const char *data = zbar_symbol_get_data (symbol);
    gint quality = zbar_symbol_get_quality (symbol);

    GST_DEBUG_OBJECT (sink, "decoded %s symbol \"%s\" at quality %d",
        zbar_get_symbol_name (typ), data, quality);

    if (sink->cache && zbar_symbol_get_count (symbol) != 0)
      continue;

    if (sink->data_consumer)
      sink->data_consumer (data);

    if (sink->message) {
      GstMessage *m;
      GstStructure *s;
      GstSample *sample;
      GstCaps *sample_caps;

      s = gst_structure_new ("barcode",
          "timestamp", G_TYPE_UINT64, GST_BUFFER_TIMESTAMP (frame->buffer),
          "type", G_TYPE_STRING, zbar_get_symbol_name (typ),
          "symbol", G_TYPE_STRING, data, "quality", G_TYPE_INT, quality, NULL);

      if (sink->attach_frame) {
        /* create a sample from image */
        sample_caps = gst_video_info_to_caps (&frame->info);
        sample = gst_sample_new (frame->buffer, sample_caps, NULL, NULL);
        gst_caps_unref (sample_caps);
        gst_structure_set (s, "frame", GST_TYPE_SAMPLE, sample, NULL);
        gst_sample_unref (sample);
      }

      m = gst_message_new_element (GST_OBJECT (sink), s);
      gst_element_post_message (GST_ELEMENT (sink), m);

    } else if (sink->attach_frame)
      GST_WARNING_OBJECT (sink,
          "attach-frame=true has no effect if message=false");
  }

out:
  /* clean up */
  zbar_image_scanner_recycle_image (sink->scanner, image);
  zbar_image_destroy (image);

  return GST_FLOW_OK;
}
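The "barcode" message posted above is consumed from the pipeline bus; a
bus-watch sketch that also pulls out the optional frame sample:

static gboolean
on_bus_message (GstBus * bus, GstMessage * msg, gpointer user_data)
{
  const GstStructure *s;

  if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ELEMENT) {
    s = gst_message_get_structure (msg);

    if (gst_structure_has_name (s, "barcode")) {
      GstSample *frame = NULL;

      g_print ("barcode: %s\n", gst_structure_get_string (s, "symbol"));
      /* only present when attach-frame=true; gst_structure_get() refs it */
      if (gst_structure_get (s, "frame", GST_TYPE_SAMPLE, &frame, NULL))
        gst_sample_unref (frame);
    }
  }
  return TRUE;                  /* keep the bus watch installed */
}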