static void
gst_droidcamsrc_dev_prepare_buffer (GstDroidCamSrcDev * dev, GstBuffer * buffer,
    DroidMediaRect rect, int width, int height, GstVideoFormat format)
{
  GstDroidCamSrc *src = GST_DROIDCAMSRC (GST_PAD_PARENT (dev->imgsrc->pad));
  GstVideoCropMeta *crop;

  GST_LOG_OBJECT (src, "prepare buffer %" GST_PTR_FORMAT, buffer);

  gst_droidcamsrc_timestamp (src, buffer);

  crop = gst_buffer_add_video_crop_meta (buffer);
  crop->x = rect.left;
  crop->y = rect.top;
  crop->width = rect.right - rect.left;
  crop->height = rect.bottom - rect.top;

  gst_buffer_add_gst_buffer_orientation_meta (buffer,
      dev->info->orientation, dev->info->direction);

  gst_buffer_add_video_meta (buffer, GST_VIDEO_FRAME_FLAG_NONE,
      format, width, height);

  GST_LOG_OBJECT (src, "preview info: w=%d, h=%d, crop: x=%d, y=%d, w=%d, h=%d",
      width, height, crop->x, crop->y, crop->width, crop->height);
}
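A minimal sketch of the consumer side (illustrative only; example_read_crop is not part of the element): downstream code can query the crop rectangle attached above and restrict its processing to the valid region.

static void
example_read_crop (GstBuffer * buffer)
{
  GstVideoCropMeta *crop = gst_buffer_get_video_crop_meta (buffer);

  if (crop != NULL) {
    /* only the crop rectangle contains valid pixels */
    g_print ("valid region: %ux%u @ (%u, %u)\n",
        crop->width, crop->height, crop->x, crop->y);
  }
}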
static GstBuffer *
create_overlay_buffer (void)
{
  GZlibDecompressor *decompress;
  GConverterResult decomp_res;
  guchar *gzipped_pixdata, *pixdata;
  gsize gzipped_size, bytes_read, pixdata_size;
  GstBuffer *logo_pixels;
  guint w, h, stride;

  gzipped_pixdata = g_base64_decode (gzipped_pixdata_base64, &gzipped_size);
  g_assert (gzipped_pixdata != NULL);

  pixdata = g_malloc (64 * 1024);

  decompress = g_zlib_decompressor_new (G_ZLIB_COMPRESSOR_FORMAT_GZIP);
  decomp_res = g_converter_convert (G_CONVERTER (decompress),
      gzipped_pixdata, gzipped_size, pixdata, 64 * 1024,
      G_CONVERTER_INPUT_AT_END, &bytes_read, &pixdata_size, NULL);
  g_assert (decomp_res == G_CONVERTER_FINISHED);
  g_assert (bytes_read == gzipped_size);
  g_free (gzipped_pixdata);
  g_object_unref (decompress);

  /* 0: Pixbuf magic (0x47646b50) */
  g_assert (GST_READ_UINT32_BE (pixdata) == 0x47646b50);

  /* 4: length incl. header */
  /* 8: pixdata_type */
  /* 12: rowstride (900) */
  stride = GST_READ_UINT32_BE (pixdata + 12);
  /* 16: width (225) */
  w = GST_READ_UINT32_BE (pixdata + 16);
  /* 20: height (57) */
  h = GST_READ_UINT32_BE (pixdata + 20);
  /* 24: pixel_data */
  GST_LOG ("%dx%d @ %d", w, h, stride);
  /* we assume that the last line also has padding at the end */
  g_assert (pixdata_size - 24 >= h * stride);

  logo_pixels = gst_buffer_new_and_alloc (h * stride);
  gst_buffer_fill (logo_pixels, 0, pixdata + 24, h * stride);
  gst_buffer_add_video_meta (logo_pixels, GST_VIDEO_FRAME_FLAG_NONE,
      GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_RGB, w, h);

  g_free (pixdata);

  return logo_pixels;
}
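Hypothetical usage of the helper above (example_make_logo_composition and the (16, 16) render position are assumptions for illustration): wrap the logo buffer into an overlay rectangle and composition that can later be blended onto video frames.

static GstVideoOverlayComposition *
example_make_logo_composition (void)
{
  GstBuffer *logo = create_overlay_buffer ();
  GstVideoMeta *vmeta = gst_buffer_get_video_meta (logo);
  GstVideoOverlayRectangle *rect;
  GstVideoOverlayComposition *comp;

  /* render the logo at (16, 16) at its native size */
  rect = gst_video_overlay_rectangle_new_raw (logo, 16, 16,
      vmeta->width, vmeta->height, GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
  comp = gst_video_overlay_composition_new (rect);

  gst_video_overlay_rectangle_unref (rect);
  gst_buffer_unref (logo);

  return comp;
}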
/* Buffer-pool alloc function: creates a GstBuffer backed by VDPAU video memory */
static GstFlowReturn
gst_vdp_video_buffer_pool_alloc (GstBufferPool * pool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstVdpVideoBufferPool *vdppool = GST_VDP_VIDEO_BUFFER_POOL_CAST (pool);
  GstVideoInfo *info;
  GstBuffer *buf;
  GstMemory *vdp_mem;

  info = &vdppool->info;

  if (!(buf = gst_buffer_new ()))
    goto no_buffer;

  if (!(vdp_mem = gst_vdp_video_memory_alloc (vdppool->device, info)))
    goto mem_create_failed;

  gst_buffer_append_memory (buf, vdp_mem);

  if (vdppool->add_videometa) {
    GstVideoMeta *vmeta;

    GST_DEBUG_OBJECT (pool, "adding GstVideoMeta");
    /* these are just the defaults for now */
    vmeta = gst_buffer_add_video_meta (buf, GST_VIDEO_FRAME_FLAG_NONE,
        GST_VIDEO_INFO_FORMAT (info), GST_VIDEO_INFO_WIDTH (info),
        GST_VIDEO_INFO_HEIGHT (info));
    vmeta->map = gst_vdp_video_memory_map;
    vmeta->unmap = gst_vdp_video_memory_unmap;
  }

  *buffer = buf;

  return GST_FLOW_OK;

  /* ERROR */
no_buffer:
  {
    GST_WARNING_OBJECT (pool, "can't create image");
    return GST_FLOW_ERROR;
  }

mem_create_failed:
  {
    GST_WARNING_OBJECT (pool, "Could create GstVdpVideo Memory");
    return GST_FLOW_ERROR;
  }
}
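A sketch of why the map/unmap hooks matter (example_map_vdp_buffer is illustrative, not from the original file): a generic gst_video_frame_map() on such a buffer is routed through gst_vdp_video_memory_map() via the GstVideoMeta, so callers need no VDPAU-specific code.

static void
example_map_vdp_buffer (GstBuffer * buf, GstVideoInfo * info)
{
  GstVideoFrame frame;

  if (gst_video_frame_map (&frame, info, buf, GST_MAP_READ)) {
    /* plane pointers were filled in by the meta's map function */
    g_print ("plane 0 data at %p\n", GST_VIDEO_FRAME_PLANE_DATA (&frame, 0));
    gst_video_frame_unmap (&frame);
  }
}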
/* The produced buffer contains only metadata, no memory blocks - the IPU sink does not need anything more
 * TODO: add some logic to wrap the framebuffer memory block, including map/unmap code etc. */
GstBuffer* gst_imx_ipu_blitter_wrap_framebuffer(GstImxIpuBlitter *ipu_blitter, int framebuffer_fd, guint x, guint y, guint width, guint height)
{
	guint fb_width, fb_height;
	GstVideoFormat fb_format;
	GstBuffer *buffer;
	GstImxPhysMemMeta *phys_mem_meta;
	struct fb_var_screeninfo fb_var;
	struct fb_fix_screeninfo fb_fix;

	if (ioctl(framebuffer_fd, FBIOGET_FSCREENINFO, &fb_fix) == -1)
	{
		GST_ERROR_OBJECT(ipu_blitter, "could not open get fixed screen info: %s", strerror(errno));
		return NULL;
	}

	if (ioctl(framebuffer_fd, FBIOGET_VSCREENINFO, &fb_var) == -1)
	{
		GST_ERROR_OBJECT(ipu_blitter, "could not open get variable screen info: %s", strerror(errno));
		return NULL;
	}

	fb_width = fb_var.xres;
	fb_height = fb_var.yres;
	fb_format = gst_imx_ipu_blitter_get_format_from_fb(ipu_blitter, &fb_var, &fb_fix);

	GST_DEBUG_OBJECT(ipu_blitter, "framebuffer resolution is %u x %u", fb_width, fb_height);

	buffer = gst_buffer_new();
	gst_buffer_add_video_meta(buffer, GST_VIDEO_FRAME_FLAG_NONE, fb_format, fb_width, fb_height);

	if ((width != 0) && (height != 0))
	{
		GstVideoCropMeta *video_crop_meta;
		
		video_crop_meta = gst_buffer_add_video_crop_meta(buffer);
		video_crop_meta->x = x;
		video_crop_meta->y = y;
		video_crop_meta->width = width;
		video_crop_meta->height = height;
	}

	phys_mem_meta = GST_IMX_PHYS_MEM_META_ADD(buffer);
	phys_mem_meta->phys_addr = (guintptr)(fb_fix.smem_start);

	return buffer;
}
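One way the TODO above might be approached, sketched under the assumption that the framebuffer can simply be mmap()ed; a real implementation would also have to manage the mapping's lifetime (munmap() in a destroy notify, or a custom GstAllocator).

#include <sys/mman.h>

static gboolean example_wrap_fb_memory(GstBuffer *buffer, int framebuffer_fd, struct fb_fix_screeninfo const *fb_fix)
{
	gpointer mapped = mmap(NULL, fb_fix->smem_len, PROT_READ | PROT_WRITE, MAP_SHARED, framebuffer_fd, 0);

	if (mapped == MAP_FAILED)
		return FALSE;

	/* note: nothing munmap()s here; a destroy notify or a custom
	 * allocator would be needed to release the mapping with the buffer */
	gst_buffer_append_memory(buffer,
		gst_memory_new_wrapped(0, mapped, fb_fix->smem_len, 0, fb_fix->smem_len, NULL, NULL));

	return TRUE;
}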
/* Allocate the output buffer and copy the decoded image data into it,
 * cropping by hand if downstream does not support crop metadata */
static GstFlowReturn
theora_handle_image (GstTheoraDec * dec, th_ycbcr_buffer buf,
    GstVideoCodecFrame * frame)
{
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (dec);
  gint width, height, stride;
  GstFlowReturn result;
  gint i, comp;
  guint8 *dest, *src;
  GstVideoFrame vframe;
  gint pic_width, pic_height;
  gint offset_x, offset_y;

  result = gst_video_decoder_allocate_output_frame (decoder, frame);

  if (G_UNLIKELY (result != GST_FLOW_OK)) {
    GST_DEBUG_OBJECT (dec, "could not get buffer, reason: %s",
        gst_flow_get_name (result));
    return result;
  }

  if (!dec->can_crop) {
    /* we need to crop the hard way */
    offset_x = dec->info.pic_x;
    offset_y = dec->info.pic_y;
    pic_width = dec->info.pic_width;
    pic_height = dec->info.pic_height;
    /* Ensure correct offsets in chroma for formats that need it
     * by rounding the offset. libtheora will add proper pixels,
     * so no need to handle them ourselves. */
    if (offset_x & 1 && dec->info.pixel_fmt != TH_PF_444)
      offset_x--;
    if (offset_y & 1 && dec->info.pixel_fmt == TH_PF_420)
      offset_y--;
  } else {
    /* copy the whole frame */
    offset_x = 0;
    offset_y = 0;
    pic_width = dec->info.frame_width;
    pic_height = dec->info.frame_height;

    if (dec->info.pic_width != dec->info.frame_width ||
        dec->info.pic_height != dec->info.frame_height ||
        dec->info.pic_x != 0 || dec->info.pic_y != 0) {
      GstVideoMeta *vmeta;
      GstVideoCropMeta *cmeta;

      vmeta = gst_buffer_get_video_meta (frame->output_buffer);
      /* If the buffer pool didn't add the meta already
       * we add it ourselves here */
      if (!vmeta)
        vmeta = gst_buffer_add_video_meta (frame->output_buffer,
            GST_VIDEO_FRAME_FLAG_NONE,
            dec->output_state->info.finfo->format,
            dec->info.frame_width, dec->info.frame_height);

      /* Just to be sure that the buffer pool doesn't do something
       * completely weird and we would crash later
       */
      g_assert (vmeta->format == dec->output_state->info.finfo->format);
      g_assert (vmeta->width == dec->info.frame_width);
      g_assert (vmeta->height == dec->info.frame_height);

      cmeta = gst_buffer_add_video_crop_meta (frame->output_buffer);

      /* we can do things slightly more efficient when we know that
       * downstream understands clipping */
      cmeta->x = dec->info.pic_x;
      cmeta->y = dec->info.pic_y;
      cmeta->width = dec->info.pic_width;
      cmeta->height = dec->info.pic_height;
    }
  }

  /* if only libtheora would allow us to give it a destination frame */
  GST_CAT_TRACE_OBJECT (GST_CAT_PERFORMANCE, dec,
      "doing unavoidable video frame copy");

  if (G_UNLIKELY (!gst_video_frame_map (&vframe, &dec->output_state->info,
              frame->output_buffer, GST_MAP_WRITE)))
    goto invalid_frame;

  for (comp = 0; comp < 3; comp++) {
    width =
        GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (vframe.info.finfo, comp, pic_width);
    height =
        GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (vframe.info.finfo, comp,
        pic_height);
    stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, comp);
    dest = GST_VIDEO_FRAME_COMP_DATA (&vframe, comp);

    src = buf[comp].data;
    src += ((height == pic_height) ? offset_y : offset_y / 2)
        * buf[comp].stride;
    src += (width == pic_width) ? offset_x : offset_x / 2;

    for (i = 0; i < height; i++) {
      memcpy (dest, src, width);

      dest += stride;
      src += buf[comp].stride;
    }
  }
  gst_video_frame_unmap (&vframe);

  return GST_FLOW_OK;
invalid_frame:
  {
    GST_DEBUG_OBJECT (dec, "could not map video frame");
    return GST_FLOW_ERROR;
  }
}
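For context, a sketch of how a flag like dec->can_crop is typically derived (an assumption about the usual negotiation pattern, not necessarily this element's exact code): downstream must advertise GstVideoCropMeta support in the ALLOCATION query.

static gboolean
example_downstream_supports_crop (GstQuery * allocation_query)
{
  return gst_query_find_allocation_meta (allocation_query,
      GST_VIDEO_CROP_META_API_TYPE, NULL);
}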
/* Takes ownership of pixbuf; call with OBJECT_LOCK */
static void
gst_gdk_pixbuf_overlay_set_pixbuf (GstGdkPixbufOverlay * overlay,
    GdkPixbuf * pixbuf)
{
  GstVideoMeta *video_meta;
  guint8 *pixels, *p;
  gint width, height, stride, w, h, plane;

  if (!gdk_pixbuf_get_has_alpha (pixbuf)) {
    GdkPixbuf *alpha_pixbuf;

    /* FIXME: we could do this much more efficiently ourselves below, but
     * we're lazy for now */
    /* FIXME: perhaps expose substitute_color via properties */
    alpha_pixbuf = gdk_pixbuf_add_alpha (pixbuf, FALSE, 0, 0, 0);
    g_object_unref (pixbuf);
    pixbuf = alpha_pixbuf;
  }

  width = gdk_pixbuf_get_width (pixbuf);
  height = gdk_pixbuf_get_height (pixbuf);
  stride = gdk_pixbuf_get_rowstride (pixbuf);
  pixels = gdk_pixbuf_get_pixels (pixbuf);

  /* the memory layout in GdkPixbuf is R-G-B-A, we want:
   *  - B-G-R-A on little-endian platforms
   *  - A-R-G-B on big-endian platforms
   */
  for (h = 0; h < height; ++h) {
    p = pixels + (h * stride);
    for (w = 0; w < width; ++w) {
      guint8 tmp;

      /* R-G-B-A ==> B-G-R-A */
      tmp = p[0];
      p[0] = p[2];
      p[2] = tmp;

      if (G_BYTE_ORDER == G_BIG_ENDIAN) {
        /* B-G-R-A ==> A-R-G-B */
        /* we can probably assume sane alignment */
        *((guint32 *) p) = GUINT32_SWAP_LE_BE (*((guint32 *) p));
      }

      p += 4;
    }
  }
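  /* Worked example of the swizzle above (illustrative): a pixel with
   * R=0x11 G=0x22 B=0x33 A=0x44 is stored by GdkPixbuf as the bytes
   * 11 22 33 44. After the byte swap it reads 33 22 11 44 (B-G-R-A);
   * on big-endian the extra 32-bit swap turns that into 44 11 22 33
   * (A-R-G-B), which is what GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_RGB
   * denotes on that platform. */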

  /* assume we have row padding even for the last row */
  /* transfer ownership of pixbuf to the buffer */
  overlay->pixels = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
      pixels, height * stride, 0, height * stride, pixbuf,
      (GDestroyNotify) g_object_unref);

  video_meta = gst_buffer_add_video_meta (overlay->pixels,
      GST_VIDEO_FRAME_FLAG_NONE, GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_RGB,
      width, height);

  for (plane = 0; plane < video_meta->n_planes; ++plane)
    video_meta->stride[plane] = stride;

  overlay->update_composition = TRUE;

  GST_INFO_OBJECT (overlay, "Updated pixbuf, %d x %d", width, height);
}
static gboolean gst_imx_blitter_video_sink_open_framebuffer_device(GstImxBlitterVideoSink *blitter_video_sink)
{
	/* must be called with lock held */

	gboolean ret = TRUE;
	int fd;
	guint fb_width, fb_height;
	GstVideoFormat fb_format;
	GstBuffer *buffer;
	GstImxPhysMemMeta *phys_mem_meta;
	struct fb_var_screeninfo fb_var;
	struct fb_fix_screeninfo fb_fix;

	g_assert(blitter_video_sink != NULL);
	g_assert(blitter_video_sink->framebuffer_name != NULL);


	/* Close any currently open framebuffer first */
	if (blitter_video_sink->framebuffer_fd != -1)
		gst_imx_blitter_video_sink_close_framebuffer_device(blitter_video_sink);

	GST_INFO_OBJECT(blitter_video_sink, "opening framebuffer %s", blitter_video_sink->framebuffer_name);


	/* Open framebuffer and get its variable and fixed information */

	fd = open(blitter_video_sink->framebuffer_name, O_RDWR, 0);
	if (fd < 0)
	{
		GST_ELEMENT_ERROR(blitter_video_sink, RESOURCE, OPEN_READ_WRITE, ("could not open %s: %s", blitter_video_sink->framebuffer_name, strerror(errno)), (NULL));
		return FALSE;
	}

	if (ioctl(fd, FBIOGET_FSCREENINFO, &fb_fix) == -1)
	{
		close(fd);
		GST_ERROR_OBJECT(blitter_video_sink, "could not get fixed screen info: %s", strerror(errno));
		return FALSE;
	}

	if (ioctl(fd, FBIOGET_VSCREENINFO, &fb_var) == -1)
	{
		close(fd);
		GST_ERROR_OBJECT(blitter_video_sink, "could not get variable screen info: %s", strerror(errno));
		return FALSE;
	}


	/* Copy FD, variable and fixed screen information structs.
	 * These are also needed during the vsync setup below */
	blitter_video_sink->framebuffer_fd = fd;
	blitter_video_sink->fb_var = fb_var;
	blitter_video_sink->fb_fix = fb_fix;


	/* Set up vsync (vsync is done via page flipping) */
	if (blitter_video_sink->use_vsync)
	{
		/* Check how many pages can currently be used. If this number is
		 * less than 3, reconfigure the framebuffer to allow for 3 pages.
		 * See the explanation at the set_property PROP_USE_VSYNC block
		 * for the reason why three pages are expected instead of two. */

		guint cur_num_pages = fb_var.yres_virtual / fb_var.yres;
		if (cur_num_pages < 3)
		{
			GST_INFO_OBJECT(
				blitter_video_sink,
				"framebuffer configuration:  resolution is %u x %u , virtual %u x %u => need to reconfigure virtual height",
				fb_var.xres, fb_var.yres,
				fb_var.xres_virtual, fb_var.yres_virtual
			);
			if (!gst_imx_blitter_video_sink_reconfigure_fb(blitter_video_sink, 3))
			{
				GST_ERROR_OBJECT(blitter_video_sink, "could not reconfigure framebuffer");
				close(fd);
				blitter_video_sink->framebuffer_fd = -1;
				return FALSE;
			}
		}
		else
		{
			GST_INFO_OBJECT(
				blitter_video_sink,
				"framebuffer configuration:  resolution is %u x %u , virtual %u x %u => don't need to reconfigure virtual height",
				fb_var.xres, fb_var.yres,
				fb_var.xres_virtual, fb_var.yres_virtual
			);
		}

		/* Fetch fixed screen info again in case it changed after the FB reconfiguration */
		if (ioctl(fd, FBIOGET_FSCREENINFO, &fb_fix) == -1)
		{
			GST_ERROR_OBJECT(blitter_video_sink, "could not open get fixed screen info: %s", strerror(errno));
			close(fd);
			blitter_video_sink->framebuffer_fd = -1;
			return FALSE;
		}

		/* Update the fixed screen info copy */
		blitter_video_sink->fb_fix = fb_fix;
	}


	/* Get width, height, format for framebuffer */
	fb_width = fb_var.xres;
	fb_height = fb_var.yres;
	fb_format = gst_imx_blitter_video_sink_get_format_from_fb(blitter_video_sink, &fb_var, &fb_fix);
	if (fb_format == GST_VIDEO_FORMAT_UNKNOWN)
	{
		GST_ELEMENT_ERROR(blitter_video_sink, RESOURCE, OPEN_READ_WRITE, ("framebuffer has unknown format"), (NULL));
		close(fd);
		blitter_video_sink->framebuffer_fd = -1;
		return FALSE;
	}


	/* Construct framebuffer and add meta to it
	 * Note: not adding any GstMemory blocks, since none are needed */
	buffer = gst_buffer_new();
	gst_buffer_add_video_meta(buffer, GST_VIDEO_FRAME_FLAG_NONE, fb_format, fb_width, fb_height);
	phys_mem_meta = GST_IMX_PHYS_MEM_META_ADD(buffer);
	phys_mem_meta->phys_addr = (gst_imx_phys_addr_t)(fb_fix.smem_start);


	/* Set up framebuffer related information */
	blitter_video_sink->framebuffer = buffer;
	blitter_video_sink->framebuffer_fd = fd;
	blitter_video_sink->framebuffer_region.x1 = 0;
	blitter_video_sink->framebuffer_region.y1 = 0;
	blitter_video_sink->framebuffer_region.x2 = fb_width;
	blitter_video_sink->framebuffer_region.y2 = fb_height;
	GST_INFO_OBJECT(blitter_video_sink, "framebuffer FD is %d", blitter_video_sink->framebuffer_fd);


	/* Create videoinfo structure for the framebuffer */
	gst_video_info_set_format(&(blitter_video_sink->output_video_info), fb_format, fb_width, fb_height);


	/* New framebuffer means the canvas most likely changed -> update */
	blitter_video_sink->canvas_needs_update = TRUE;


	/* If a blitter is present, set its output video info
	 * and output frame, since these two items changed */
	if (blitter_video_sink->blitter != NULL)
	{
		ret = ret && gst_imx_blitter_set_output_video_info(blitter_video_sink->blitter, &(blitter_video_sink->output_video_info));
		ret = ret && gst_imx_blitter_set_output_frame(blitter_video_sink->blitter, blitter_video_sink->framebuffer);
		ret = ret && gst_imx_blitter_set_num_output_pages(blitter_video_sink->blitter, blitter_video_sink->use_vsync ? 3 : 1);
	}

	if (!ret)
	{
		close(fd);
		blitter_video_sink->framebuffer_fd = -1;
	}

	return ret;
}
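A sketch of the page flipping that this vsync setup enables (an assumption based on the generic fbdev API; example_flip_to_page is not part of the sink): one of the three pages is made visible by panning the virtual y offset.

static gboolean example_flip_to_page(int fd, struct fb_var_screeninfo *fb_var, guint page)
{
	fb_var->yoffset = page * fb_var->yres;
	return ioctl(fd, FBIOPAN_DISPLAY, fb_var) == 0;
}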
static GstVideoOverlayComposition *
gst_dvbsub_overlay_subs_to_comp (GstDVBSubOverlay * overlay,
    DVBSubtitles * subs)
{
  GstVideoOverlayComposition *comp = NULL;
  GstVideoOverlayRectangle *rect;
  gint width, height, dw, dh, wx, wy;
  gint i;

  g_return_val_if_fail (subs != NULL && subs->num_rects > 0, NULL);

  width = GST_VIDEO_INFO_WIDTH (&overlay->info);
  height = GST_VIDEO_INFO_HEIGHT (&overlay->info);

  dw = subs->display_def.display_width;
  dh = subs->display_def.display_height;

  GST_LOG_OBJECT (overlay,
      "converting %d rectangles for display %dx%d -> video %dx%d",
      subs->num_rects, dw, dh, width, height);

  if (subs->display_def.window_flag) {
    wx = subs->display_def.window_x;
    wy = subs->display_def.window_y;
    GST_LOG_OBJECT (overlay, "display window %dx%d @ (%d, %d)",
        subs->display_def.window_width, subs->display_def.window_height,
        wx, wy);
  } else {
    wx = 0;
    wy = 0;
  }

  for (i = 0; i < subs->num_rects; i++) {
    DVBSubtitleRect *srect = &subs->rects[i];
    GstBuffer *buf;
    gint w, h;
    guint8 *in_data;
    guint32 *palette, *data;
    gint rx, ry, rw, rh, stride;
    gint k, l;
    GstMapInfo map;

    GST_LOG_OBJECT (overlay, "rectangle %d: %dx%d @ (%d, %d)", i,
        srect->w, srect->h, srect->x, srect->y);

    w = srect->w;
    h = srect->h;

    buf = gst_buffer_new_and_alloc (w * h * 4);
    gst_buffer_map (buf, &map, GST_MAP_WRITE);
    data = (guint32 *) map.data;
    in_data = srect->pict.data;
    palette = srect->pict.palette;
    stride = srect->pict.rowstride;
    for (k = 0; k < h; k++) {
      for (l = 0; l < w; l++) {
        guint32 ayuv;

        ayuv = palette[*in_data];
        GST_WRITE_UINT32_BE (data, ayuv);
        in_data++;
        data++;
      }
      in_data += stride - w;
    }
    gst_buffer_unmap (buf, &map);

    /* this assumes the subtitle rectangle coordinates are relative to the
     * window (if there is one) within a display of the specified
     * dimensions; coordinates relative to that display are then scaled to
     * the actual dimensions of the video we are dealing with here. */
    rx = gst_util_uint64_scale (wx + srect->x, width, dw);
    ry = gst_util_uint64_scale (wy + srect->y, height, dh);
    rw = gst_util_uint64_scale (srect->w, width, dw);
    rh = gst_util_uint64_scale (srect->h, height, dh);

    GST_LOG_OBJECT (overlay, "rectangle %d rendered: %dx%d @ (%d, %d)", i,
        rw, rh, rx, ry);

    gst_buffer_add_video_meta (buf, GST_VIDEO_FRAME_FLAG_NONE,
        GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_YUV, w, h);
    rect = gst_video_overlay_rectangle_new_raw (buf, rx, ry, rw, rh, 0);
    g_assert (rect);
    if (comp) {
      gst_video_overlay_composition_add_rectangle (comp, rect);
    } else {
      comp = gst_video_overlay_composition_new (rect);
    }
    gst_video_overlay_rectangle_unref (rect);
    gst_buffer_unref (buf);
  }

  return comp;
}
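A minimal sketch of consuming the returned composition (example_blend_subs is hypothetical; it assumes a raw video buffer matching the given GstVideoInfo): blend the rectangles in place onto a mapped frame.

static gboolean
example_blend_subs (GstVideoOverlayComposition * comp, GstVideoInfo * info,
    GstBuffer * video_buf)
{
  GstVideoFrame frame;
  gboolean ret;

  if (!gst_video_frame_map (&frame, info, video_buf, GST_MAP_READWRITE))
    return FALSE;

  ret = gst_video_overlay_composition_blend (comp, &frame);
  gst_video_frame_unmap (&frame);

  return ret;
}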
static GstFlowReturn
gst_omx_buffer_pool_alloc_buffer (GstBufferPool * bpool,
    GstBuffer ** buffer, GstBufferPoolAcquireParams * params)
{
  GstOMXBufferPool *pool = GST_OMX_BUFFER_POOL (bpool);
  GstBuffer *buf;
  GstOMXBuffer *omx_buf;

  g_return_val_if_fail (pool->allocating, GST_FLOW_ERROR);

  omx_buf = g_ptr_array_index (pool->port->buffers, pool->current_buffer_index);
  g_return_val_if_fail (omx_buf != NULL, GST_FLOW_ERROR);

  if (pool->other_pool) {
    guint i, n;

    buf = g_ptr_array_index (pool->buffers, pool->current_buffer_index);
    g_assert (pool->other_pool == buf->pool);
    gst_object_replace ((GstObject **) & buf->pool, NULL);

    n = gst_buffer_n_memory (buf);
    for (i = 0; i < n; i++) {
      GstMemory *mem = gst_buffer_peek_memory (buf, i);

      /* FIXME: We don't allow sharing because we need to know
       * when the memory becomes unused and can only then put
       * it back to the pool. Which is done in the pool's release
       * function
       */
      GST_MINI_OBJECT_FLAG_SET (mem, GST_MEMORY_FLAG_NO_SHARE);
    }

    if (pool->add_videometa) {
      GstVideoMeta *meta;

      meta = gst_buffer_get_video_meta (buf);
      if (!meta) {
        gst_buffer_add_video_meta (buf, GST_VIDEO_FRAME_FLAG_NONE,
            GST_VIDEO_INFO_FORMAT (&pool->video_info),
            GST_VIDEO_INFO_WIDTH (&pool->video_info),
            GST_VIDEO_INFO_HEIGHT (&pool->video_info));
      }
    }

    pool->need_copy = FALSE;
  } else {
    GstMemory *mem;
    const guint nstride = pool->port->port_def.format.video.nStride;
    const guint nslice = pool->port->port_def.format.video.nSliceHeight;
    gsize offset[GST_VIDEO_MAX_PLANES] = { 0, };
    gint stride[GST_VIDEO_MAX_PLANES] = { nstride, 0, };

    mem = gst_omx_memory_allocator_alloc (pool->allocator, 0, omx_buf);
    buf = gst_buffer_new ();
    gst_buffer_append_memory (buf, mem);
    g_ptr_array_add (pool->buffers, buf);

    switch (GST_VIDEO_INFO_FORMAT (&pool->video_info)) {
      case GST_VIDEO_FORMAT_ABGR:
      case GST_VIDEO_FORMAT_ARGB:
      case GST_VIDEO_FORMAT_RGB16:
      case GST_VIDEO_FORMAT_BGR16:
      case GST_VIDEO_FORMAT_YUY2:
      case GST_VIDEO_FORMAT_UYVY:
      case GST_VIDEO_FORMAT_YVYU:
      case GST_VIDEO_FORMAT_GRAY8:
        break;
      case GST_VIDEO_FORMAT_I420:
        stride[1] = nstride / 2;
        offset[1] = offset[0] + stride[0] * nslice;
        stride[2] = nstride / 2;
        offset[2] = offset[1] + (stride[1] * nslice / 2);
        break;
      case GST_VIDEO_FORMAT_NV12:
      case GST_VIDEO_FORMAT_NV16:
        stride[1] = nstride;
        offset[1] = offset[0] + stride[0] * nslice;
        break;
      default:
        g_assert_not_reached ();
        break;
    }

    if (pool->add_videometa) {
      pool->need_copy = FALSE;
    } else {
      GstVideoInfo info;
      gboolean need_copy = FALSE;
      gint i;

      gst_video_info_init (&info);
      gst_video_info_set_format (&info,
          GST_VIDEO_INFO_FORMAT (&pool->video_info),
          GST_VIDEO_INFO_WIDTH (&pool->video_info),
          GST_VIDEO_INFO_HEIGHT (&pool->video_info));

      for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&pool->video_info); i++) {
        if (info.stride[i] != stride[i] || info.offset[i] != offset[i]) {
          need_copy = TRUE;
          break;
        }
      }

      pool->need_copy = need_copy;
    }

    if (pool->need_copy || pool->add_videometa) {
      /* We always add the videometa. It's the job of the user
       * to copy the buffer if pool->need_copy is TRUE
       */
      gst_buffer_add_video_meta_full (buf, GST_VIDEO_FRAME_FLAG_NONE,
          GST_VIDEO_INFO_FORMAT (&pool->video_info),
          GST_VIDEO_INFO_WIDTH (&pool->video_info),
          GST_VIDEO_INFO_HEIGHT (&pool->video_info),
          GST_VIDEO_INFO_N_PLANES (&pool->video_info), offset, stride);
    }
  }

  gst_mini_object_set_qdata (GST_MINI_OBJECT_CAST (buf),
      gst_omx_buffer_data_quark, omx_buf, NULL);

  *buffer = buf;

  pool->current_buffer_index++;

  return GST_FLOW_OK;
}
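A sketch of what pool->need_copy implies for the pool's user (example_copy_omx_frame is hypothetical and assumes both buffers share the same GstVideoInfo): mapping the OMX buffer picks up the video meta added above, so gst_video_frame_copy() can resolve the differing strides and offsets.

static gboolean
example_copy_omx_frame (GstVideoInfo * info, GstBuffer * omx_buf,
    GstBuffer * dest_buf)
{
  GstVideoFrame src, dest;
  gboolean ret = FALSE;

  /* the source mapping honours the non-default strides/offsets from the
   * video meta; the destination falls back to the default layout */
  if (!gst_video_frame_map (&src, info, omx_buf, GST_MAP_READ))
    return FALSE;

  if (gst_video_frame_map (&dest, info, dest_buf, GST_MAP_WRITE)) {
    ret = gst_video_frame_copy (&dest, &src);
    gst_video_frame_unmap (&dest);
  }

  gst_video_frame_unmap (&src);
  return ret;
}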