static gboolean
plugin_update_sinkpad_info_from_buffer (GstVaapiPluginBase * plugin,
    GstBuffer * buf)
{
  GstVideoInfo *const vip = &plugin->sinkpad_info;
  GstVideoMeta *vmeta;
  guint i;

  vmeta = gst_buffer_get_video_meta (buf);
  if (!vmeta)
    return TRUE;

  if (GST_VIDEO_INFO_FORMAT (vip) != vmeta->format ||
      GST_VIDEO_INFO_WIDTH (vip) != vmeta->width ||
      GST_VIDEO_INFO_HEIGHT (vip) != vmeta->height ||
      GST_VIDEO_INFO_N_PLANES (vip) != vmeta->n_planes)
    return FALSE;

  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (vip); ++i) {
    GST_VIDEO_INFO_PLANE_OFFSET (vip, i) = vmeta->offset[i];
    GST_VIDEO_INFO_PLANE_STRIDE (vip, i) = vmeta->stride[i];
  }
  GST_VIDEO_INFO_SIZE (vip) = gst_buffer_get_size (buf);
  return TRUE;
}
Example #2
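/* Creates a 64x64 RGBx buffer cleared to zero (black), attaches a matching
 * GstVideoMeta and records the buffer's physical address for the IPU. */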
static void gst_imx_ipu_blitter_init_dummy_black_buffer(GstImxIpuBlitter *ipu_blitter)
{
	GstVideoInfo video_info;

	gst_video_info_init(&video_info);
	gst_video_info_set_format(&video_info, GST_VIDEO_FORMAT_RGBx, 64, 64);

	ipu_blitter->dummy_black_buffer = gst_buffer_new_allocate(ipu_blitter->allocator, GST_VIDEO_INFO_SIZE(&video_info), NULL);
	gst_buffer_memset(ipu_blitter->dummy_black_buffer, 0, 0, GST_VIDEO_INFO_SIZE(&video_info));

	gst_buffer_add_video_meta_full(
		ipu_blitter->dummy_black_buffer,
		GST_VIDEO_FRAME_FLAG_NONE,
		GST_VIDEO_INFO_FORMAT(&video_info),
		GST_VIDEO_INFO_WIDTH(&video_info),
		GST_VIDEO_INFO_HEIGHT(&video_info),
		GST_VIDEO_INFO_N_PLANES(&video_info),
		&(GST_VIDEO_INFO_PLANE_OFFSET(&video_info, 0)),
		&(GST_VIDEO_INFO_PLANE_STRIDE(&video_info, 0))
	);

	{
		GstImxPhysMemory *imx_phys_mem_mem = (GstImxPhysMemory *)gst_buffer_peek_memory(ipu_blitter->dummy_black_buffer, 0);
		GstImxPhysMemMeta *phys_mem_meta = (GstImxPhysMemMeta *)GST_IMX_PHYS_MEM_META_ADD(ipu_blitter->dummy_black_buffer);

		phys_mem_meta->phys_addr = imx_phys_mem_mem->phys_addr;
	}
}
Example #3
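/* Validates that the plane strides match the extrapolated strides and that
 * the planes are packed back to back without extra padding between them. */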
static gboolean
gst_wl_shm_validate_video_info (const GstVideoInfo * vinfo)
{
  gint height = GST_VIDEO_INFO_HEIGHT (vinfo);
  gint base_stride = GST_VIDEO_INFO_PLANE_STRIDE (vinfo, 0);
  gsize base_offs = GST_VIDEO_INFO_PLANE_OFFSET (vinfo, 0);
  gint i;
  gsize offs = 0;

  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (vinfo); i++) {
    guint32 estride;

    /* Overwrite the video info's stride and offset using the pitch calculated
     * by the kms driver. */
    estride = gst_wl_shm_extrapolate_stride (vinfo->finfo, i, base_stride);

    if (estride != GST_VIDEO_INFO_PLANE_STRIDE (vinfo, i))
      return FALSE;

    if (GST_VIDEO_INFO_PLANE_OFFSET (vinfo, i) - base_offs != offs)
      return FALSE;

    /* Note that we cannot negotiate special padding between planes,
     * hence using the display height here. */
    offs +=
        estride * GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (vinfo->finfo, i, height);
  }

  if (vinfo->size < offs)
    return FALSE;

  return TRUE;
}
Example #4
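/* Allocates a buffer of the configured size and, when requested, attaches a
 * GstVideoMeta describing the negotiated plane offsets and strides. */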
static GstFlowReturn
video_buffer_pool_alloc (GstBufferPool * pool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstVideoBufferPool *vpool = GST_VIDEO_BUFFER_POOL_CAST (pool);
  GstVideoBufferPoolPrivate *priv = vpool->priv;
  GstVideoInfo *info;

  info = &priv->info;

  GST_DEBUG_OBJECT (pool, "alloc %" G_GSIZE_FORMAT, info->size);

  *buffer =
      gst_buffer_new_allocate (priv->allocator, info->size, &priv->params);
  if (*buffer == NULL)
    goto no_memory;

  if (priv->add_videometa) {
    GST_DEBUG_OBJECT (pool, "adding GstVideoMeta");

    gst_buffer_add_video_meta_full (*buffer, GST_VIDEO_FRAME_FLAG_NONE,
        GST_VIDEO_INFO_FORMAT (info),
        GST_VIDEO_INFO_WIDTH (info), GST_VIDEO_INFO_HEIGHT (info),
        GST_VIDEO_INFO_N_PLANES (info), info->offset, info->stride);
  }

  return GST_FLOW_OK;

  /* ERROR */
no_memory:
  {
    GST_WARNING_OBJECT (pool, "can't create memory");
    return GST_FLOW_ERROR;
  }
}
Example #5
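/* Wraps the caller's texture as a GstGLMemory on first use and downloads its
 * content into the per-plane data pointers. */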
static gboolean
_gst_gl_download_perform_with_data_unlocked (GstGLDownload * download,
    GLuint texture_id, gpointer data[GST_VIDEO_MAX_PLANES])
{
  guint i;

  g_return_val_if_fail (download != NULL, FALSE);
  g_return_val_if_fail (texture_id > 0, FALSE);
  g_return_val_if_fail (GST_VIDEO_INFO_FORMAT (&download->info) !=
      GST_VIDEO_FORMAT_UNKNOWN
      && GST_VIDEO_INFO_FORMAT (&download->info) != GST_VIDEO_FORMAT_ENCODED,
      FALSE);

  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&download->info); i++) {
    g_return_val_if_fail (data[i] != NULL, FALSE);
  }

  if (!download->priv->in_tex[0])
    download->priv->in_tex[0] =
        gst_gl_memory_wrapped_texture (download->context, texture_id,
        GST_VIDEO_GL_TEXTURE_TYPE_RGBA, GST_VIDEO_INFO_WIDTH (&download->info),
        GST_VIDEO_INFO_HEIGHT (&download->info), NULL, NULL);

  download->priv->in_tex[0]->tex_id = texture_id;

  return _do_download (download, texture_id, data);
}
Example #6
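/* Size of one plane: the gap to the next plane's offset, or, for the last
 * plane, the remaining data up to the end of the buffer. */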
static inline gsize
_get_plane_data_size (GstVideoInfo * info, guint plane)
{
  if (GST_VIDEO_INFO_N_PLANES (info) == plane + 1)
    return info->offset[0] + info->size - info->offset[plane];

  return info->offset[plane + 1] - info->offset[plane];
}
Example #7
static gsize
_video_info_plane_size (GstVideoInfo * info, guint plane)
{
  if (GST_VIDEO_INFO_N_PLANES (info) == plane + 1)
    return info->offset[0] + info->size - info->offset[plane];

  return info->offset[plane + 1] - info->offset[plane];
}
Example #8
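/* Strips any per-frame padding via a sub-buffer copy, sets interlacing flags
 * and (re)attaches a GstVideoMeta with the configured plane layout. */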
static gboolean
gst_raw_video_parse_process (GstRawBaseParse * raw_base_parse,
    GstRawBaseParseConfig config, GstBuffer * in_data,
    G_GNUC_UNUSED gsize total_num_in_bytes,
    G_GNUC_UNUSED gsize num_valid_in_bytes, GstBuffer ** processed_data)
{
  GstRawVideoParse *raw_video_parse = GST_RAW_VIDEO_PARSE (raw_base_parse);
  GstRawVideoParseConfig *config_ptr =
      gst_raw_video_parse_get_config_ptr (raw_video_parse, config);
  guint frame_flags = 0;
  GstVideoInfo *video_info = &(config_ptr->info);
  GstVideoMeta *videometa;
  GstBuffer *out_data;

  /* In case of extra padding bytes, get a subbuffer without the padding bytes.
   * Otherwise, just add the video meta. */
  if (GST_VIDEO_INFO_SIZE (video_info) < config_ptr->frame_stride) {
    *processed_data = out_data =
        gst_buffer_copy_region (in_data,
        GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
        GST_BUFFER_COPY_MEMORY, 0, GST_VIDEO_INFO_SIZE (video_info));
  } else {
    out_data = in_data;
    *processed_data = NULL;
  }

  if (config_ptr->interlaced) {
    GST_BUFFER_FLAG_SET (out_data, GST_VIDEO_BUFFER_FLAG_INTERLACED);
    frame_flags |= GST_VIDEO_FRAME_FLAG_INTERLACED;

    if (config_ptr->top_field_first) {
      GST_BUFFER_FLAG_SET (out_data, GST_VIDEO_BUFFER_FLAG_TFF);
      frame_flags |= GST_VIDEO_FRAME_FLAG_TFF;
    } else
      GST_BUFFER_FLAG_UNSET (out_data, GST_VIDEO_BUFFER_FLAG_TFF);
  }

  /* Remove any existing videometa - it will be replaced by the new videometa
   * from here */
  while ((videometa = gst_buffer_get_video_meta (out_data))) {
    GST_LOG_OBJECT (raw_base_parse, "removing existing videometa from buffer");
    gst_buffer_remove_meta (out_data, (GstMeta *) videometa);
  }

  gst_buffer_add_video_meta_full (out_data,
      frame_flags,
      config_ptr->format,
      config_ptr->width,
      config_ptr->height,
      GST_VIDEO_INFO_N_PLANES (video_info),
      config_ptr->plane_offsets, config_ptr->plane_strides);


  return TRUE;
}
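/* Fills the alignment with per-plane stride masks derived from the lowest
 * set bit of each plane's stride. */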
static void
fill_video_alignment (GstVaapiVideoBufferPool * pool, GstVideoAlignment * align)
{
  GstVideoInfo *const vip = &pool->priv->vmeta_vinfo;
  guint i;

  gst_video_alignment_reset (align);
  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (vip); i++)
    align->stride_align[i] =
        (1U << g_bit_nth_lsf (GST_VIDEO_INFO_PLANE_STRIDE (vip, i), 0)) - 1;
}
Example #10
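/* Initializes the video info for the given format and size, then overrides
 * the default plane offsets and strides with the supplied values. */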
static void
fill_video_info (GstVideoInfo * vip, GstVideoFormat format, guint width,
                 guint height, gsize offset[GST_VIDEO_MAX_PLANES],
                 gint stride[GST_VIDEO_MAX_PLANES])
{
    guint i;

    gst_video_info_set_format (vip, format, width, height);
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (vip); i++) {
        GST_VIDEO_INFO_PLANE_OFFSET (vip, i) = offset[i];
        GST_VIDEO_INFO_PLANE_STRIDE (vip, i) = stride[i];
    }
}
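/* Wraps the per-plane input textures into a temporary buffer, runs the GL
 * color converter and maps the resulting RGBA buffer to obtain the output
 * texture id. */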
static gboolean
_upload_memory (GstGLUpload * upload)
{
    guint in_width, in_height;
    guint in_texture[GST_VIDEO_MAX_PLANES];
    GstBuffer *inbuf;
    GstVideoFrame out_frame;
    GstVideoInfo out_info;
    gint i;

    in_width = GST_VIDEO_INFO_WIDTH (&upload->in_info);
    in_height = GST_VIDEO_INFO_HEIGHT (&upload->in_info);

    if (!upload->initted) {
        if (!_init_upload (upload)) {
            return FALSE;
        }
    }

    inbuf = gst_buffer_new ();
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&upload->in_info); i++) {
        in_texture[i] = upload->in_tex[i]->tex_id;
        gst_buffer_append_memory (inbuf,
                                  gst_memory_ref ((GstMemory *) upload->in_tex[i]));
    }

    GST_TRACE ("uploading with textures:%u,%u,%u dimensions:%ux%u",
               in_texture[0], in_texture[1], in_texture[2], in_width, in_height);

    upload->priv->outbuf = gst_gl_color_convert_perform (upload->convert, inbuf);
    gst_buffer_unref (inbuf);

    gst_video_info_set_format (&out_info, GST_VIDEO_FORMAT_RGBA, in_width,
                               in_height);
    if (!gst_video_frame_map (&out_frame, &out_info, upload->priv->outbuf,
                              GST_MAP_READ | GST_MAP_GL)) {
        gst_buffer_unref (upload->priv->outbuf);
        upload->priv->outbuf = NULL;
        return FALSE;
    }

    upload->out_tex->tex_id = *(guint *) out_frame.data[0];

    gst_video_frame_unmap (&out_frame);
    upload->priv->released = FALSE;

    return TRUE;
}
Example #12
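/* Runs the GL color converter on the wrapped input texture and reads each
 * output plane back into the caller-provided pointers by temporarily
 * redirecting the GstGLMemory data pointers. */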
static gboolean
_do_download (GstGLDownload * download, guint texture_id,
    gpointer data[GST_VIDEO_MAX_PLANES])
{
  guint out_width, out_height;
  GstBuffer *inbuf, *outbuf;
  GstMapInfo map_info;
  gboolean ret = TRUE;
  gint i;

  out_width = GST_VIDEO_INFO_WIDTH (&download->info);
  out_height = GST_VIDEO_INFO_HEIGHT (&download->info);

  if (!download->initted) {
    if (!_init_download (download))
      return FALSE;
  }

  GST_TRACE ("doing download of texture:%u (%ux%u)",
      download->priv->in_tex[0]->tex_id, out_width, out_height);

  inbuf = gst_buffer_new ();
  gst_buffer_append_memory (inbuf,
      gst_memory_ref ((GstMemory *) download->priv->in_tex[0]));

  outbuf = gst_gl_color_convert_perform (download->convert, inbuf);
  if (!outbuf)
    return FALSE;

  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&download->info); i++) {
    GstMemory *out_mem = gst_buffer_peek_memory (outbuf, i);
    gpointer temp_data = ((GstGLMemory *) out_mem)->data;
    ((GstGLMemory *) out_mem)->data = data[i];

    if (!gst_memory_map (out_mem, &map_info, GST_MAP_READ)) {
      GST_ERROR_OBJECT (download, "Failed to map memory");
      ret = FALSE;
    }
    gst_memory_unmap (out_mem, &map_info);
    ((GstGLMemory *) out_mem)->data = temp_data;
  }

  gst_buffer_unref (inbuf);
  gst_buffer_unref (outbuf);

  return ret;
}
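// Maps the current GStreamer sample and uploads its pixels into a
// BitmapTexture, using GstVideoGLTextureUploadMeta when available and a
// plain video frame map otherwise.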
PassRefPtr<BitmapTexture> MediaPlayerPrivateGStreamerBase::updateTexture(TextureMapper* textureMapper)
{
    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return nullptr;

    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    if (!caps)
        return nullptr;

    GstVideoInfo videoInfo;
    gst_video_info_init(&videoInfo);
    if (!gst_video_info_from_caps(&videoInfo, caps))
        return nullptr;

    IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    RefPtr<BitmapTexture> texture = textureMapper->acquireTextureFromPool(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());

#if GST_CHECK_VERSION(1, 1, 0)
    GstVideoGLTextureUploadMeta* meta;
    if ((meta = gst_buffer_get_video_gl_texture_upload_meta(buffer))) {
        if (meta->n_textures == 1) { // BGRx & BGRA formats use only one texture.
            const BitmapTextureGL* textureGL = static_cast<const BitmapTextureGL*>(texture.get());
            guint ids[4] = { textureGL->id(), 0, 0, 0 };

            if (gst_video_gl_texture_upload_meta_upload(meta, ids))
                return texture;
        }
    }
#endif

    // Right now the TextureMapper only supports chromas with one plane
    ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);

    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, GST_MAP_READ))
        return nullptr;

    int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0);
    const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0);
    texture->updateContents(srcData, WebCore::IntRect(WebCore::IntPoint(0, 0), size), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData);
    gst_video_frame_unmap(&videoFrame);

    return texture;
}
/* This function handles GstXImageBuffer creation depending on XShm availability */
static GstFlowReturn
xvimage_buffer_pool_alloc (GstBufferPool * pool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstXvImageBufferPool *xvpool = GST_XVIMAGE_BUFFER_POOL_CAST (pool);
  GstVideoInfo *info;
  GstBuffer *xvimage;
  GstMemory *mem;
  GError *err = NULL;

  info = &xvpool->info;

  xvimage = gst_buffer_new ();

  mem = gst_xvimage_allocator_alloc (xvpool->allocator, xvpool->im_format,
      info, xvpool->padded_width, xvpool->padded_height, &xvpool->crop, &err);

  if (mem == NULL) {
    gst_buffer_unref (xvimage);
    goto no_buffer;
  }
  gst_buffer_append_memory (xvimage, mem);

  if (xvpool->add_metavideo) {
    GST_DEBUG_OBJECT (pool, "adding GstVideoMeta");
    gst_buffer_add_video_meta_full (xvimage, GST_VIDEO_FRAME_FLAG_NONE,
        GST_VIDEO_INFO_FORMAT (info), GST_VIDEO_INFO_WIDTH (info),
        GST_VIDEO_INFO_HEIGHT (info), GST_VIDEO_INFO_N_PLANES (info),
        info->offset, info->stride);
  }

  *buffer = xvimage;

  return GST_FLOW_OK;

  /* ERROR */
no_buffer:
  {
    GST_WARNING_OBJECT (pool, "can't create image: %s", err->message);
    g_clear_error (&err);
    return GST_FLOW_ERROR;
  }
}
Example #15
/* This function handles GstXImageBuffer creation depending on XShm availability */
static GstFlowReturn
ximage_buffer_pool_alloc (GstBufferPool * pool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstXImageBufferPool *xpool = GST_XIMAGE_BUFFER_POOL_CAST (pool);
  GstXImageBufferPoolPrivate *priv = xpool->priv;
  GstVideoInfo *info;
  GstBuffer *ximage;
  GstMemory *mem;

  info = &priv->info;

  ximage = gst_buffer_new ();
  mem = ximage_memory_alloc (xpool);
  if (mem == NULL) {
    gst_buffer_unref (ximage);
    goto no_buffer;
  }
  gst_buffer_append_memory (ximage, mem);

  if (priv->add_metavideo) {
    GST_DEBUG_OBJECT (pool, "adding GstVideoMeta");
    /* these are just the defaults for now */
    gst_buffer_add_video_meta_full (ximage, GST_VIDEO_FRAME_FLAG_NONE,
        GST_VIDEO_INFO_FORMAT (info), GST_VIDEO_INFO_WIDTH (info),
        GST_VIDEO_INFO_HEIGHT (info), GST_VIDEO_INFO_N_PLANES (info),
        info->offset, info->stride);
  }
  *buffer = ximage;

  return GST_FLOW_OK;

  /* ERROR */
no_buffer:
  {
    GST_WARNING_OBJECT (pool, "can't create image");
    return GST_FLOW_ERROR;
  }
}
Example #16
/* This function handles GstXImageBuffer creation depending on XShm availability */
static GstFlowReturn
xvimage_buffer_pool_alloc (GstBufferPool * pool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstXvImageBufferPool *xvpool = GST_XVIMAGE_BUFFER_POOL_CAST (pool);
  GstXvImageBufferPoolPrivate *priv = xvpool->priv;
  GstVideoInfo *info;
  GstBuffer *xvimage;
  GstXvImageMeta *meta;

  info = &priv->info;

  xvimage = gst_buffer_new ();
  meta = gst_buffer_add_xvimage_meta (xvimage, xvpool);
  if (meta == NULL) {
    gst_buffer_unref (xvimage);
    goto no_buffer;
  }

  if (priv->add_metavideo) {
    GST_DEBUG_OBJECT (pool, "adding GstVideoMeta");
    gst_buffer_add_video_meta_full (xvimage, GST_VIDEO_FRAME_FLAG_NONE,
        GST_VIDEO_INFO_FORMAT (info), GST_VIDEO_INFO_WIDTH (info),
        GST_VIDEO_INFO_HEIGHT (info), GST_VIDEO_INFO_N_PLANES (info),
        info->offset, info->stride);
  }

  *buffer = xvimage;

  return GST_FLOW_OK;

  /* ERROR */
no_buffer:
  {
    GST_WARNING_OBJECT (pool, "can't create image");
    return GST_FLOW_ERROR;
  }
}
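/* Refreshes the video info from a GstVaapiImage: resets format and size,
 * copies per-plane offsets and pitches, and rejects images whose planes lie
 * outside the image data. */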
static gboolean
gst_video_info_update_from_image (GstVideoInfo * vip, GstVaapiImage * image)
{
  GstVideoFormat format;
  const guchar *data;
  guint i, num_planes, data_size, width, height;

  /* Reset format from image */
  format = gst_vaapi_image_get_format (image);
  gst_vaapi_image_get_size (image, &width, &height);
  gst_video_info_set_format (vip, format, width, height);

  num_planes = gst_vaapi_image_get_plane_count (image);
  g_return_val_if_fail (num_planes == GST_VIDEO_INFO_N_PLANES (vip), FALSE);

  /* Determine the base data pointer */
  data = get_image_data (image);
  g_return_val_if_fail (data != NULL, FALSE);
  data_size = gst_vaapi_image_get_data_size (image);

  /* Check that we don't have disjoint planes */
  for (i = 0; i < num_planes; i++) {
    const guchar *const plane = gst_vaapi_image_get_plane (image, i);
    if (plane - data > data_size)
      return FALSE;
  }

  /* Update GstVideoInfo structure */
  for (i = 0; i < num_planes; i++) {
    const guchar *const plane = gst_vaapi_image_get_plane (image, i);
    GST_VIDEO_INFO_PLANE_OFFSET (vip, i) = plane - data;
    GST_VIDEO_INFO_PLANE_STRIDE (vip, i) = gst_vaapi_image_get_pitch (image, i);
  }
  GST_VIDEO_INFO_SIZE (vip) = data_size;
  return TRUE;
}
Example #18
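/* Allocates a buffer backed by the VLC picture plane allocator and, when
 * enabled, attaches a GstVideoMeta with the pool's plane layout. */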
static GstFlowReturn gst_vlc_video_pool_alloc_buffer( GstBufferPool *p_pool,
        GstBuffer **p_buffer, GstBufferPoolAcquireParams *p_params)
{
    GstVlcVideoPool *p_vpool = GST_VLC_VIDEO_POOL_CAST( p_pool );
    GstVideoInfo *p_info = &p_vpool->info;

    *p_buffer = gst_buffer_new( );

    if( !gst_vlc_picture_plane_allocator_alloc( p_vpool->p_allocator,
                *p_buffer ))
        return GST_FLOW_EOS;

    if( p_vpool->b_add_metavideo )
    {
        msg_Dbg( p_vpool->p_dec, "meta video enabled" );
        gst_buffer_add_video_meta_full( *p_buffer, GST_VIDEO_FRAME_FLAG_NONE,
                GST_VIDEO_INFO_FORMAT( p_info ), GST_VIDEO_INFO_WIDTH( p_info ),
                GST_VIDEO_INFO_HEIGHT( p_info ),
                GST_VIDEO_INFO_N_PLANES( p_info ),
                p_info->offset, p_info->stride );
    }

    return GST_FLOW_OK;
}
Example #19
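/* Allocates an RGBA EGLImage-backed buffer, either directly from the EGL
 * image allocator or by wrapping a newly created GL texture, and attaches a
 * GstVideoMeta for the single-plane layout. */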
static GstBuffer *
gst_egl_allocate_eglimage (APP_STATE_T * ctx,
    GstAllocator * allocator, GstVideoFormat format, gint width, gint height)
{
  GstEGLGLESImageData *data = NULL;
  GstBuffer *buffer;
  GstVideoInfo info;
  gint i;
  gint stride[3];
  gsize offset[3];
  GstMemory *mem[3] = { NULL, NULL, NULL };
  guint n_mem;
  GstMemoryFlags flags = 0;

  memset (stride, 0, sizeof (stride));
  memset (offset, 0, sizeof (offset));

  if (!gst_egl_image_memory_is_mappable ())
    flags |= GST_MEMORY_FLAG_NOT_MAPPABLE;
  /* See https://bugzilla.gnome.org/show_bug.cgi?id=695203 */
  flags |= GST_MEMORY_FLAG_NO_SHARE;

  gst_video_info_set_format (&info, format, width, height);

  GST_DEBUG ("Allocating EGL Image format %s width %d height %d",
      gst_video_format_to_string (format), width, height);
  switch (format) {
    case GST_VIDEO_FORMAT_RGBA:{
      gsize size;
      EGLImageKHR image;

      mem[0] =
          gst_egl_image_allocator_alloc (allocator, ctx->gst_display,
          GST_VIDEO_GL_TEXTURE_TYPE_RGBA, GST_VIDEO_INFO_WIDTH (&info),
          GST_VIDEO_INFO_HEIGHT (&info), &size);

      if (mem[0]) {
        stride[0] = size / GST_VIDEO_INFO_HEIGHT (&info);
        n_mem = 1;
        GST_MINI_OBJECT_FLAG_SET (mem[0], GST_MEMORY_FLAG_NO_SHARE);
      } else {
        data = g_slice_new0 (GstEGLGLESImageData);

        stride[0] = GST_ROUND_UP_4 (GST_VIDEO_INFO_WIDTH (&info) * 4);
        size = stride[0] * GST_VIDEO_INFO_HEIGHT (&info);

        glGenTextures (1, &data->texture);
        if (got_gl_error ("glGenTextures"))
          goto mem_error;

        glBindTexture (GL_TEXTURE_2D, data->texture);
        if (got_gl_error ("glBindTexture"))
          goto mem_error;

        /* Set 2D resizing params */
        glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

        /* If these are not set the texture image unit will return
         * (R, G, B, A) = black on glTexImage2D for non-POT width/height
         * frames. For a deeper explanation take a look at the OpenGL ES
         * documentation for glTexParameter */
        glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        if (got_gl_error ("glTexParameteri"))
          goto mem_error;

        glTexImage2D (GL_TEXTURE_2D, 0, GL_RGBA,
            GST_VIDEO_INFO_WIDTH (&info),
            GST_VIDEO_INFO_HEIGHT (&info), 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
        if (got_gl_error ("glTexImage2D"))
          goto mem_error;

        image =
            eglCreateImageKHR (gst_egl_display_get (ctx->gst_display),
            ctx->context, EGL_GL_TEXTURE_2D_KHR,
            (EGLClientBuffer) (guintptr) data->texture, NULL);
        if (got_egl_error ("eglCreateImageKHR"))
          goto mem_error;

        mem[0] =
            gst_egl_image_allocator_wrap (allocator, ctx->gst_display,
            image, GST_VIDEO_GL_TEXTURE_TYPE_RGBA, flags, size, data, NULL);

        n_mem = 1;
      }
    }
      break;
    default:
      goto mem_error;
      break;
  }

  buffer = gst_buffer_new ();
  gst_buffer_add_video_meta_full (buffer, 0, format, width, height,
      GST_VIDEO_INFO_N_PLANES (&info), offset, stride);

  /* n_mem could be reused for planar colorspaces, for now it's == 1 for RGBA */
  for (i = 0; i < n_mem; i++)
    gst_buffer_append_memory (buffer, mem[i]);

  return buffer;
mem_error:
  {
    GST_ERROR ("Failed to create EGLImage");

    if (data)
      gst_egl_gles_image_data_free (data);

    if (mem[0])
      gst_memory_unref (mem[0]);

    return NULL;
  }

}
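/* Creates a VA surface wrapping an external buffer handle, passing the video
 * info's plane strides and offsets through a VASurfaceAttribExternalBuffers
 * descriptor. */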
static gboolean
gst_vaapi_surface_create_from_buffer_proxy (GstVaapiSurface * surface,
    GstVaapiBufferProxy * proxy, const GstVideoInfo * vip)
{
#if VA_CHECK_VERSION (0,36,0)
  GstVaapiDisplay *const display = GST_VAAPI_OBJECT_DISPLAY (surface);
  GstVideoFormat format;
  VASurfaceID surface_id;
  VAStatus status;
  guint chroma_type, va_chroma_format;
  const VAImageFormat *va_format;
  VASurfaceAttrib attribs[2], *attrib;
  VASurfaceAttribExternalBuffers extbuf;
  unsigned long extbuf_handle;
  guint i, width, height;

  format = GST_VIDEO_INFO_FORMAT (vip);
  width = GST_VIDEO_INFO_WIDTH (vip);
  height = GST_VIDEO_INFO_HEIGHT (vip);

  gst_vaapi_buffer_proxy_replace (&surface->extbuf_proxy, proxy);

  va_format = gst_vaapi_video_format_to_va_format (format);
  if (!va_format)
    goto error_unsupported_format;

  chroma_type = gst_vaapi_video_format_get_chroma_type (format);
  if (!chroma_type)
    goto error_unsupported_format;

  va_chroma_format = from_GstVaapiChromaType (chroma_type);
  if (!va_chroma_format)
    goto error_unsupported_format;

  extbuf_handle = GST_VAAPI_BUFFER_PROXY_HANDLE (proxy);
  extbuf.pixel_format = va_format->fourcc;
  extbuf.width = width;
  extbuf.height = height;
  extbuf.data_size = GST_VAAPI_BUFFER_PROXY_SIZE (proxy);
  extbuf.num_planes = GST_VIDEO_INFO_N_PLANES (vip);
  for (i = 0; i < extbuf.num_planes; i++) {
    extbuf.pitches[i] = GST_VIDEO_INFO_PLANE_STRIDE (vip, i);
    extbuf.offsets[i] = GST_VIDEO_INFO_PLANE_OFFSET (vip, i);
  }
  extbuf.buffers = &extbuf_handle;
  extbuf.num_buffers = 1;
  extbuf.flags = 0;
  extbuf.private_data = NULL;

  attrib = attribs;
  attrib->type = VASurfaceAttribExternalBufferDescriptor;
  attrib->flags = VA_SURFACE_ATTRIB_SETTABLE;
  attrib->value.type = VAGenericValueTypePointer;
  attrib->value.value.p = &extbuf;
  attrib++;
  attrib->type = VASurfaceAttribMemoryType;
  attrib->flags = VA_SURFACE_ATTRIB_SETTABLE;
  attrib->value.type = VAGenericValueTypeInteger;
  attrib->value.value.i =
      from_GstVaapiBufferMemoryType (GST_VAAPI_BUFFER_PROXY_TYPE (proxy));
  attrib++;

  GST_VAAPI_DISPLAY_LOCK (display);
  status = vaCreateSurfaces (GST_VAAPI_DISPLAY_VADISPLAY (display),
      va_chroma_format, width, height, &surface_id, 1, attribs,
      attrib - attribs);
  GST_VAAPI_DISPLAY_UNLOCK (display);
  if (!vaapi_check_status (status, "vaCreateSurfaces()"))
    return FALSE;

  surface->format = format;
  surface->chroma_type = chroma_type;
  surface->width = width;
  surface->height = height;

  GST_DEBUG ("surface %" GST_VAAPI_ID_FORMAT, GST_VAAPI_ID_ARGS (surface_id));
  GST_VAAPI_OBJECT_ID (surface) = surface_id;
  return TRUE;

  /* ERRORS */
error_unsupported_format:
  GST_ERROR ("unsupported format %s",
      gst_vaapi_video_format_to_string (format));
  return FALSE;
#else
  return FALSE;
#endif
}
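/* Creates a VA surface with an explicit pixel format; when linear storage or
 * fixed strides/offsets are requested, an external buffer descriptor built
 * from the video info is attached as well. */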
static gboolean
gst_vaapi_surface_create_full (GstVaapiSurface * surface,
    const GstVideoInfo * vip, guint flags)
{
#if VA_CHECK_VERSION(0,34,0)
  GstVaapiDisplay *const display = GST_VAAPI_OBJECT_DISPLAY (surface);
  const GstVideoFormat format = GST_VIDEO_INFO_FORMAT (vip);
  VASurfaceID surface_id;
  VAStatus status;
  guint chroma_type, va_chroma_format, i;
  const VAImageFormat *va_format;
  VASurfaceAttrib attribs[3], *attrib;
  VASurfaceAttribExternalBuffers extbuf;
  gboolean extbuf_needed = FALSE;

  va_format = gst_vaapi_video_format_to_va_format (format);
  if (!va_format)
    goto error_unsupported_format;

  chroma_type = gst_vaapi_video_format_get_chroma_type (format);
  if (!chroma_type)
    goto error_unsupported_format;

  va_chroma_format = from_GstVaapiChromaType (chroma_type);
  if (!va_chroma_format)
    goto error_unsupported_format;

  memset (&extbuf, 0, sizeof (extbuf));
  extbuf.pixel_format = va_format->fourcc;
  extbuf.width = GST_VIDEO_INFO_WIDTH (vip);
  extbuf.height = GST_VIDEO_INFO_HEIGHT (vip);
  extbuf_needed = !!(flags & GST_VAAPI_SURFACE_ALLOC_FLAG_LINEAR_STORAGE);

  extbuf.num_planes = GST_VIDEO_INFO_N_PLANES (vip);
  if (flags & GST_VAAPI_SURFACE_ALLOC_FLAG_FIXED_STRIDES) {
    for (i = 0; i < extbuf.num_planes; i++)
      extbuf.pitches[i] = GST_VIDEO_INFO_PLANE_STRIDE (vip, i);
    extbuf_needed = TRUE;
  }
  if (flags & GST_VAAPI_SURFACE_ALLOC_FLAG_FIXED_OFFSETS) {
    for (i = 0; i < extbuf.num_planes; i++)
      extbuf.offsets[i] = GST_VIDEO_INFO_PLANE_OFFSET (vip, i);
    extbuf_needed = TRUE;
  }

  attrib = attribs;
  attrib->flags = VA_SURFACE_ATTRIB_SETTABLE;
  attrib->type = VASurfaceAttribPixelFormat;
  attrib->value.type = VAGenericValueTypeInteger;
  attrib->value.value.i = va_format->fourcc;
  attrib++;

  if (extbuf_needed) {
    attrib->flags = VA_SURFACE_ATTRIB_SETTABLE;
    attrib->type = VASurfaceAttribMemoryType;
    attrib->value.type = VAGenericValueTypeInteger;
    attrib->value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_VA;
    attrib++;

    attrib->flags = VA_SURFACE_ATTRIB_SETTABLE;
    attrib->type = VASurfaceAttribExternalBufferDescriptor;
    attrib->value.type = VAGenericValueTypePointer;
    attrib->value.value.p = &extbuf;
    attrib++;
  }

  GST_VAAPI_DISPLAY_LOCK (display);
  status = vaCreateSurfaces (GST_VAAPI_DISPLAY_VADISPLAY (display),
      va_chroma_format, extbuf.width, extbuf.height, &surface_id, 1,
      attribs, attrib - attribs);
  GST_VAAPI_DISPLAY_UNLOCK (display);
  if (!vaapi_check_status (status, "vaCreateSurfaces()"))
    return FALSE;

  surface->format = format;
  surface->chroma_type = chroma_type;
  surface->width = extbuf.width;
  surface->height = extbuf.height;

  GST_DEBUG ("surface %" GST_VAAPI_ID_FORMAT, GST_VAAPI_ID_ARGS (surface_id));
  GST_VAAPI_OBJECT_ID (surface) = surface_id;
  return TRUE;

  /* ERRORS */
error_unsupported_format:
  GST_ERROR ("unsupported format %s",
      gst_vaapi_video_format_to_string (format));
  return FALSE;
#else
  return FALSE;
#endif
}
Example #22
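/* Uploads a video frame into a Vivante direct texture: physically contiguous
 * buffers are mapped with glTexDirectVIVMap, everything else is copied plane
 * by plane into the storage returned by glTexDirectVIV. */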
static gboolean gst_imx_egl_viv_sink_gles2_renderer_fill_texture(GstImxEglVivSinkGLES2Renderer *renderer, GstBuffer *buffer)
{
	GstVideoMeta *video_meta;
	GstMapInfo map_info;
	guint num_extra_lines, stride[3], offset[3], is_phys_buf;
	GstImxPhysMemMeta *phys_mem_meta;
	GstVideoFormat fmt;
	GLenum gl_format;
	GLuint w, h, total_w, total_h;
	
	phys_mem_meta = NULL;
	fmt = renderer->video_info.finfo->format;

	gl_format = gst_imx_egl_viv_sink_gles2_renderer_get_viv_format(fmt);
	w = renderer->video_info.width;
	h = renderer->video_info.height;

	phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(buffer);
	is_phys_buf = (phys_mem_meta != NULL) && (phys_mem_meta->phys_addr != 0);

	/* Get the stride and number of extra lines */
	video_meta = gst_buffer_get_video_meta(buffer);
	if (video_meta != NULL)
	{
		for (guint i = 0; i < MIN(video_meta->n_planes, 3); ++i)
		{
			stride[i] = video_meta->stride[i];
			offset[i] = video_meta->offset[i];
		}
	}
	else
	{
		for (guint i = 0; i < MIN(GST_VIDEO_INFO_N_PLANES(&(renderer->video_info)), 3); ++i)
		{
			stride[i] = GST_VIDEO_INFO_PLANE_STRIDE(&(renderer->video_info), i);
			offset[i] = GST_VIDEO_INFO_PLANE_OFFSET(&(renderer->video_info), i);
		}
	}

	num_extra_lines = is_phys_buf ? (phys_mem_meta->padding / stride[0]) : 0;

	/* stride is in bytes, we need pixels */
	total_w = stride[0] / gst_imx_egl_viv_sink_gles2_renderer_bpp(fmt);
	total_h = h + num_extra_lines;

	GST_LOG("w/h: %d/%d total_w/h: %d/%d", w, h, total_w, total_h);

	glUniform2f(renderer->uv_scale_uloc, (float)w / (float)total_w, (float)h / (float)total_h);

	/* Only update texture if the video frame actually changed */
	if ((renderer->viv_planes[0] == NULL) || (renderer->video_info_updated))
	{
		GST_LOG("video frame did change");

		if (is_phys_buf)
		{
			GLvoid *virt_addr;
			GLuint phys_addr;

			phys_addr = (GLuint)(phys_mem_meta->phys_addr);

			GST_LOG("mapping physical address 0x%x of video frame in buffer %p into VIV texture", phys_addr, (gpointer)buffer);

			gst_buffer_map(buffer, &map_info, GST_MAP_READ);
			virt_addr = map_info.data;

			/* Just set to make sure the == NULL check above is false */
			renderer->viv_planes[0] = virt_addr;

			glTexDirectVIVMap(
				GL_TEXTURE_2D,
				total_w, total_h,
				gl_format,
				(GLvoid **)(&virt_addr), &phys_addr
			);

			gst_buffer_unmap(buffer, &map_info);
			GST_LOG("done showing frame in buffer %p with virtual address %p physical address 0x%x", (gpointer)buffer, virt_addr, phys_addr);

			if (!gst_imx_egl_viv_sink_gles2_renderer_check_gl_error("render", "glTexDirectVIVMap"))
				return FALSE;
		}
		else
		{
			glTexDirectVIV(
				GL_TEXTURE_2D,
				total_w, total_h,
				gl_format,
				(GLvoid **) &(renderer->viv_planes)
			);
			if (!gst_imx_egl_viv_sink_gles2_renderer_check_gl_error("render", "glTexDirectVIV"))
				return FALSE;

			GST_LOG("copying pixels into VIV direct texture buffer");

			gst_buffer_map(buffer, &map_info, GST_MAP_READ);
			switch (fmt)
			{
				case GST_VIDEO_FORMAT_I420:
				case GST_VIDEO_FORMAT_YV12:
					memcpy(renderer->viv_planes[0], map_info.data + offset[0], stride[0] * total_h);
					memcpy(renderer->viv_planes[1], map_info.data + offset[1], stride[1] * total_h / 2);
					memcpy(renderer->viv_planes[2], map_info.data + offset[2], stride[2] * total_h / 2);
					break;
				case GST_VIDEO_FORMAT_NV12:
				case GST_VIDEO_FORMAT_NV21:
					memcpy(renderer->viv_planes[0], map_info.data + offset[0], stride[0] * total_h);
					memcpy(renderer->viv_planes[1], map_info.data + offset[1], stride[1] * total_h / 2);
					break;
				default:
					memcpy(renderer->viv_planes[0], map_info.data, stride[0] * total_h);
			}
			gst_buffer_unmap(buffer, &map_info);

		}

		glTexDirectInvalidateVIV(GL_TEXTURE_2D);
		if (!gst_imx_egl_viv_sink_gles2_renderer_check_gl_error("render", "glTexDirectInvalidateVIV"))
			return FALSE;

		renderer->video_info_updated = FALSE;
	}
	else
	{
		GST_LOG("video frame did not change - not doing anything");
	}

	return TRUE;
}
Example #23
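/* Allocates one pool buffer: a plain allocation in RW mode, or a QUERYBUF'd
 * and mmap'ed buffer in MMAP mode with an optional GstVideoMeta for raw
 * video. */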
static GstFlowReturn
gst_v4l2_buffer_pool_alloc_buffer (GstBufferPool * bpool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (bpool);
  GstBuffer *newbuf;
  GstV4l2Meta *meta;
  GstV4l2Object *obj;
  GstVideoInfo *info;
  guint index;

  obj = pool->obj;
  info = &obj->info;

  switch (obj->mode) {
    case GST_V4L2_IO_RW:
    {
      newbuf =
          gst_buffer_new_allocate (pool->allocator, pool->size, &pool->params);
      break;
    }
    case GST_V4L2_IO_MMAP:
    {
      newbuf = gst_buffer_new ();
      meta = GST_V4L2_META_ADD (newbuf);

      index = pool->num_allocated;

      GST_LOG_OBJECT (pool, "creating buffer %u, %p", index, newbuf);

      meta->vbuffer.index = index;
      meta->vbuffer.type = obj->type;
      meta->vbuffer.memory = V4L2_MEMORY_MMAP;

      if (v4l2_ioctl (pool->video_fd, VIDIOC_QUERYBUF, &meta->vbuffer) < 0)
        goto querybuf_failed;

      GST_LOG_OBJECT (pool, "  index:     %u", meta->vbuffer.index);
      GST_LOG_OBJECT (pool, "  type:      %d", meta->vbuffer.type);
      GST_LOG_OBJECT (pool, "  bytesused: %u", meta->vbuffer.bytesused);
      GST_LOG_OBJECT (pool, "  flags:     %08x", meta->vbuffer.flags);
      GST_LOG_OBJECT (pool, "  field:     %d", meta->vbuffer.field);
      GST_LOG_OBJECT (pool, "  memory:    %d", meta->vbuffer.memory);
      if (meta->vbuffer.memory == V4L2_MEMORY_MMAP)
        GST_LOG_OBJECT (pool, "  MMAP offset:  %u", meta->vbuffer.m.offset);
      GST_LOG_OBJECT (pool, "  length:    %u", meta->vbuffer.length);
      GST_LOG_OBJECT (pool, "  input:     %u", meta->vbuffer.input);

      meta->mem = v4l2_mmap (0, meta->vbuffer.length,
          PROT_READ | PROT_WRITE, MAP_SHARED, pool->video_fd,
          meta->vbuffer.m.offset);
      if (meta->mem == MAP_FAILED)
        goto mmap_failed;

      gst_buffer_append_memory (newbuf,
          gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
              meta->mem, meta->vbuffer.length, 0, meta->vbuffer.length, NULL,
              NULL));

      /* add metadata to raw video buffers */
      if (pool->add_videometa && info->finfo) {
        gsize offset[GST_VIDEO_MAX_PLANES];
        gint stride[GST_VIDEO_MAX_PLANES];

        offset[0] = 0;
        stride[0] = obj->bytesperline;

        GST_DEBUG_OBJECT (pool, "adding video meta, stride %d", stride[0]);
        gst_buffer_add_video_meta_full (newbuf, GST_VIDEO_FRAME_FLAG_NONE,
            GST_VIDEO_INFO_FORMAT (info), GST_VIDEO_INFO_WIDTH (info),
            GST_VIDEO_INFO_HEIGHT (info), GST_VIDEO_INFO_N_PLANES (info),
            offset, stride);
      }
      break;
    }
    case GST_V4L2_IO_USERPTR:
    default:
      g_assert_not_reached ();
      break;
  }

  pool->num_allocated++;

  *buffer = newbuf;

  return GST_FLOW_OK;

  /* ERRORS */
querybuf_failed:
  {
    gint errnosave = errno;

    GST_WARNING ("Failed QUERYBUF: %s", g_strerror (errnosave));
    gst_buffer_unref (newbuf);
    errno = errnosave;
    return GST_FLOW_ERROR;
  }
mmap_failed:
  {
    gint errnosave = errno;

    GST_WARNING ("Failed to mmap: %s", g_strerror (errnosave));
    gst_buffer_unref (newbuf);
    errno = errnosave;
    return GST_FLOW_ERROR;
  }
}
Example #24
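/* Parses the pool configuration (caps, allocator, meta options, video
 * alignment) and recomputes the buffer size from the per-plane sizes. */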
static gboolean
gst_gl_buffer_pool_set_config (GstBufferPool * pool, GstStructure * config)
{
  GstGLBufferPool *glpool = GST_GL_BUFFER_POOL_CAST (pool);
  GstGLBufferPoolPrivate *priv = glpool->priv;
  GstVideoInfo info;
  GstCaps *caps = NULL;
  guint min_buffers, max_buffers;
  GstAllocator *allocator = NULL;
  GstAllocationParams alloc_params;
  gboolean reset = TRUE;
  gint p;

  if (!gst_buffer_pool_config_get_params (config, &caps, NULL, &min_buffers,
          &max_buffers))
    goto wrong_config;

  if (caps == NULL)
    goto no_caps;

  /* now parse the caps from the config */
  if (!gst_video_info_from_caps (&info, caps))
    goto wrong_caps;

  GST_LOG_OBJECT (pool, "%dx%d, caps %" GST_PTR_FORMAT, info.width, info.height,
      caps);

  if (!gst_buffer_pool_config_get_allocator (config, &allocator, &alloc_params))
    goto wrong_config;

  if (priv->allocator)
    gst_object_unref (priv->allocator);

  if (!allocator) {
    gst_gl_memory_init ();
    priv->allocator = gst_allocator_find (GST_GL_MEMORY_ALLOCATOR);
  } else {
    priv->allocator = gst_object_ref (allocator);
  }

  priv->params = alloc_params;

  priv->im_format = GST_VIDEO_INFO_FORMAT (&info);
  if (priv->im_format == -1)
    goto unknown_format;

  if (priv->caps)
    reset = !gst_caps_is_equal (priv->caps, caps);

  gst_caps_replace (&priv->caps, caps);
  priv->info = info;

  priv->add_videometa = gst_buffer_pool_config_has_option (config,
      GST_BUFFER_POOL_OPTION_VIDEO_META);
  priv->add_uploadmeta = gst_buffer_pool_config_has_option (config,
      GST_BUFFER_POOL_OPTION_VIDEO_GL_TEXTURE_UPLOAD_META);
  priv->add_glsyncmeta = gst_buffer_pool_config_has_option (config,
      GST_BUFFER_POOL_OPTION_GL_SYNC_META);

#if GST_GL_HAVE_PLATFORM_EGL
  g_assert (priv->allocator != NULL);
  priv->want_eglimage =
      (g_strcmp0 (priv->allocator->mem_type, GST_EGL_IMAGE_MEMORY_TYPE) == 0);
#else
  priv->want_eglimage = FALSE;
#endif

  if (gst_buffer_pool_config_has_option (config,
          GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT)) {

    priv->add_videometa = TRUE;

    gst_buffer_pool_config_get_video_alignment (config, &priv->valign);
    gst_video_info_align (&priv->info, &priv->valign);

    gst_buffer_pool_config_set_video_alignment (config, &priv->valign);
  } else {
    gst_video_alignment_reset (&priv->valign);
  }

  if (reset) {
    if (glpool->upload)
      gst_object_unref (glpool->upload);

    glpool->upload = gst_gl_upload_meta_new (glpool->context);
  }

  /* Recalculate the size as we don't add padding between planes. */
  priv->info.size = 0;
  for (p = 0; p < GST_VIDEO_INFO_N_PLANES (&priv->info); p++) {
    priv->info.size +=
        gst_gl_get_plane_data_size (&priv->info, &priv->valign, p);
  }

  gst_buffer_pool_config_set_params (config, caps, priv->info.size,
      min_buffers, max_buffers);

  return GST_BUFFER_POOL_CLASS (parent_class)->set_config (pool, config);

  /* ERRORS */
wrong_config:
  {
    GST_WARNING_OBJECT (pool, "invalid config");
    return FALSE;
  }
no_caps:
  {
    GST_WARNING_OBJECT (pool, "no caps in config");
    return FALSE;
  }
wrong_caps:
  {
    GST_WARNING_OBJECT (pool,
        "failed getting geometry from caps %" GST_PTR_FORMAT, caps);
    return FALSE;
  }
unknown_format:
  {
    GST_WARNING_OBJECT (glpool, "failed to get format from caps %"
        GST_PTR_FORMAT, caps);
    GST_ELEMENT_ERROR (glpool, RESOURCE, WRITE,
        ("Failed to create output image buffer of %dx%d pixels",
            priv->info.width, priv->info.height),
        ("Invalid input caps %" GST_PTR_FORMAT, caps));
    return FALSE;
  }
}
Example #25
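/* Test helper: wraps each input frame's planes as GL memory, converts it to
 * every other test format and compares the mapped output planes against the
 * expected data. */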
static void
check_conversion (TestFrame * frames, guint size)
{
  gint i, j, k, l;
  gint ref_count = 0;

  for (i = 0; i < size; i++) {
    GstBuffer *inbuf;
    GstVideoInfo in_info;
    gint in_width = frames[i].width;
    gint in_height = frames[i].height;
    GstVideoFormat in_v_format = frames[i].v_format;
    gchar *in_data[GST_VIDEO_MAX_PLANES] = { 0 };
    GstGLMemory *in_mem[GST_VIDEO_MAX_PLANES] = { 0 };
    GstVideoFrame in_frame;
    GstCaps *in_caps;

    gst_video_info_set_format (&in_info, in_v_format, in_width, in_height);
    in_caps = gst_video_info_to_caps (&in_info);
    gst_caps_set_features (in_caps, 0,
        gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY));

    for (j = 0; j < GST_VIDEO_INFO_N_PLANES (&in_info); j++) {
      in_data[j] = frames[i].data[j];
    }

    /* create GL buffer */
    ref_count += GST_VIDEO_INFO_N_PLANES (&in_info);
    inbuf = gst_buffer_new ();
    fail_unless (gst_gl_memory_setup_wrapped (context, GST_GL_TEXTURE_TARGET_2D,
            &in_info, NULL, (gpointer *) in_data, in_mem, &ref_count,
            _frame_unref));

    for (j = 0; j < GST_VIDEO_INFO_N_PLANES (&in_info); j++) {
      gst_buffer_append_memory (inbuf, (GstMemory *) in_mem[j]);
    }

    fail_unless (gst_video_frame_map (&in_frame, &in_info, inbuf,
            GST_MAP_READ));

    /* sanity check that the correct values were wrapped */
    for (j = 0; j < GST_VIDEO_INFO_N_PLANES (&in_info); j++) {
      for (k = 0; k < _video_info_plane_size (&in_info, j); k++) {
        if (in_data[j][k] != IGNORE_MAGIC)
          fail_unless (((gchar *) in_frame.data[j])[k] == in_data[j][k]);
      }
    }

    for (j = 0; j < size; j++) {
      GstBuffer *outbuf;
      GstVideoInfo out_info;
      gint out_width = frames[j].width;
      gint out_height = frames[j].height;
      GstVideoFormat out_v_format = frames[j].v_format;
      gchar *out_data[GST_VIDEO_MAX_PLANES] = { 0 };
      GstVideoFrame out_frame;
      GstCaps *out_caps;

      gst_video_info_set_format (&out_info, out_v_format, out_width,
          out_height);
      out_caps = gst_video_info_to_caps (&out_info);
      gst_caps_set_features (out_caps, 0,
          gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY));

      for (k = 0; k < GST_VIDEO_INFO_N_PLANES (&out_info); k++) {
        out_data[k] = frames[j].data[k];
      }

      gst_gl_color_convert_set_caps (convert, in_caps, out_caps);

      /* convert the data */
      outbuf = gst_gl_color_convert_perform (convert, inbuf);
      if (outbuf == NULL) {
        const gchar *in_str = gst_video_format_to_string (in_v_format);
        const gchar *out_str = gst_video_format_to_string (out_v_format);
        GST_WARNING ("failed to convert from %s to %s", in_str, out_str);
      }

      fail_unless (gst_video_frame_map (&out_frame, &out_info, outbuf,
              GST_MAP_READ));

      /* check that the converted values are correct */
      for (k = 0; k < GST_VIDEO_INFO_N_PLANES (&out_info); k++) {
        for (l = 0; l < _video_info_plane_size (&out_info, k); l++) {
          gchar out_pixel = ((gchar *) out_frame.data[k])[l];
          if (out_data[k][l] != IGNORE_MAGIC && out_pixel != IGNORE_MAGIC)
            fail_unless (out_pixel == out_data[k][l]);
          /* FIXME: check alpha clobbering */
        }
      }

      gst_caps_unref (out_caps);
      gst_video_frame_unmap (&out_frame);
      gst_buffer_unref (outbuf);
    }

    gst_caps_unref (in_caps);
    gst_video_frame_unmap (&in_frame);
    gst_buffer_unref (inbuf);

    fail_unless_equals_int (ref_count, 0);
  }
}
Example #26
/* called with the object lock held */
static gboolean
gst_gl_stereo_mix_process_frames (GstGLStereoMix * mixer)
{
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (mixer);
  GstBuffer *converted_buffer, *inbuf;
  GstVideoInfo *out_info = &vagg->info;
#ifndef G_DISABLE_ASSERT
  gint n;
#endif
  gint v, views;
  gint valid_views = 0;
  GList *walk;

  inbuf = gst_buffer_new ();
  walk = GST_ELEMENT (mixer)->sinkpads;
  while (walk) {
    GstGLStereoMixPad *pad = walk->data;
    GstMemory *in_mem;

    GST_LOG_OBJECT (mixer, "Handling frame %d", valid_views);

    if (!pad || !pad->current_buffer) {
      GST_DEBUG ("skipping texture, null frame");
      walk = g_list_next (walk);
      continue;
    }

    in_mem = gst_buffer_get_memory (pad->current_buffer, 0);

    GST_LOG_OBJECT (mixer,
        "Appending memory %" GST_PTR_FORMAT " to intermediate buffer", in_mem);
    /* Appending the memory to a 2nd buffer locks it
     * exclusive a 2nd time, which will mark it for
     * copy-on-write. The ref will keep the memory
     * alive but we add a parent_buffer_meta to also
     * prevent the input buffer from returning to any buffer
     * pool it might belong to
     */
    gst_buffer_append_memory (inbuf, in_mem);
    /* Use parent buffer meta to keep input buffer alive */
    gst_buffer_add_parent_buffer_meta (inbuf, pad->current_buffer);

    valid_views++;
    walk = g_list_next (walk);
  }

  if (mixer->mix_info.views != valid_views) {
    GST_WARNING_OBJECT (mixer, "Not enough input views to process");
    return FALSE;
  }

  if (GST_VIDEO_INFO_MULTIVIEW_MODE (out_info) ==
      GST_VIDEO_MULTIVIEW_MODE_SEPARATED)
    views = out_info->views;
  else
    views = 1;

  if (gst_gl_view_convert_submit_input_buffer (mixer->viewconvert,
          FALSE, inbuf) != GST_FLOW_OK)
    return FALSE;

  /* Clear any existing buffers, just in case */
  gst_buffer_replace (&mixer->primary_out, NULL);
  gst_buffer_replace (&mixer->auxilliary_out, NULL);

  if (gst_gl_view_convert_get_output (mixer->viewconvert,
          &mixer->primary_out) != GST_FLOW_OK)
    return FALSE;

  if (GST_VIDEO_INFO_MULTIVIEW_MODE (out_info) ==
      GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME) {
    if (gst_gl_view_convert_get_output (mixer->viewconvert,
            &mixer->auxilliary_out) != GST_FLOW_OK)
      return FALSE;
  }

  if (mixer->primary_out == NULL)
    return FALSE;

  converted_buffer = mixer->primary_out;

#ifndef G_DISABLE_ASSERT
  n = gst_buffer_n_memory (converted_buffer);
  g_assert (n == GST_VIDEO_INFO_N_PLANES (out_info) * views);
#endif

  for (v = 0; v < views; v++) {
    gst_buffer_add_video_meta_full (converted_buffer, v,
        GST_VIDEO_INFO_FORMAT (out_info),
        GST_VIDEO_INFO_WIDTH (out_info),
        GST_VIDEO_INFO_HEIGHT (out_info),
        GST_VIDEO_INFO_N_PLANES (out_info), out_info->offset, out_info->stride);
    if (mixer->auxilliary_out) {
      gst_buffer_add_video_meta_full (mixer->auxilliary_out, v,
          GST_VIDEO_INFO_FORMAT (out_info),
          GST_VIDEO_INFO_WIDTH (out_info),
          GST_VIDEO_INFO_HEIGHT (out_info),
          GST_VIDEO_INFO_N_PLANES (out_info), out_info->offset,
          out_info->stride);
    }
  }

  return TRUE;
}
Example #27
/**
 * gst_video_info_align:
 * @info: a #GstVideoInfo
 * @align: alignment parameters
 *
 * Adjust the offset and stride fields in @info so that the padding and
 * stride alignment in @align is respected.
 *
 * Extra padding will be added to the right side when stride alignment padding
 * is required and @align will be updated with the new padding values.
 */
void
gst_video_info_align (GstVideoInfo * info, GstVideoAlignment * align)
{
  const GstVideoFormatInfo *vinfo = info->finfo;
  gint width, height;
  gint padded_width, padded_height;
  gint i, n_planes;
  gboolean aligned;

  width = GST_VIDEO_INFO_WIDTH (info);
  height = GST_VIDEO_INFO_HEIGHT (info);

  GST_LOG ("padding %u-%ux%u-%u", align->padding_top,
      align->padding_left, align->padding_right, align->padding_bottom);

  n_planes = GST_VIDEO_INFO_N_PLANES (info);

  if (GST_VIDEO_FORMAT_INFO_HAS_PALETTE (vinfo))
    n_planes--;

  /* first make sure the left padding does not cause alignment problems later */
  do {
    GST_LOG ("left padding %u", align->padding_left);
    aligned = TRUE;
    for (i = 0; i < n_planes; i++) {
      gint hedge;

      /* this is the number of pixels to add as left padding */
      hedge = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (vinfo, i, align->padding_left);
      hedge *= GST_VIDEO_FORMAT_INFO_PSTRIDE (vinfo, i);

      GST_LOG ("plane %d, padding %d, alignment %u", i, hedge,
          align->stride_align[i]);
      aligned &= (hedge & align->stride_align[i]) == 0;
    }
    if (aligned)
      break;

    GST_LOG ("unaligned padding, increasing padding");
    /* increase padded_width */
    align->padding_left += align->padding_left & ~(align->padding_left - 1);
  } while (!aligned);

  /* add the padding */
  padded_width = width + align->padding_left + align->padding_right;
  padded_height = height + align->padding_top + align->padding_bottom;

  do {
    GST_LOG ("padded dimension %u-%u", padded_width, padded_height);

    info->width = padded_width;
    info->height = padded_height;
    fill_planes (info);

    /* check alignment */
    aligned = TRUE;
    for (i = 0; i < n_planes; i++) {
      GST_LOG ("plane %d, stride %d, alignment %u", i, info->stride[i],
          align->stride_align[i]);
      aligned &= (info->stride[i] & align->stride_align[i]) == 0;
    }
    if (aligned)
      break;

    GST_LOG ("unaligned strides, increasing dimension");
    /* increase padded_width */
    padded_width += padded_width & ~(padded_width - 1);
  } while (!aligned);

  align->padding_right = padded_width - width - align->padding_left;

  info->width = width;
  info->height = height;

  for (i = 0; i < n_planes; i++) {
    gint vedge, hedge, comp;

    /* Find the component for this plane. FIXME: we assume the plane number and
     * component number are the same for now; for scaling the dimensions this is
     * currently true for all formats, but it might not be when adding new
     * formats. We might need to add a plane subsampling in the format info to
     * make this more generic or maybe use a plane -> component mapping. */
    comp = i;

    hedge =
        GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (vinfo, comp, align->padding_left);
    vedge =
        GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (vinfo, comp, align->padding_top);

    GST_DEBUG ("plane %d: comp: %d, hedge %d vedge %d align %d stride %d", i,
        comp, hedge, vedge, align->stride_align[i], info->stride[i]);

    info->offset[i] += (vedge * info->stride[i]) +
        (hedge * GST_VIDEO_FORMAT_INFO_PSTRIDE (vinfo, comp));
  }
}
Example #28
static gboolean
gst_gl_buffer_pool_set_config (GstBufferPool * pool, GstStructure * config)
{
  GstGLBufferPool *glpool = GST_GL_BUFFER_POOL_CAST (pool);
  GstGLBufferPoolPrivate *priv = glpool->priv;
  GstVideoInfo info;
  GstCaps *caps = NULL;
  guint min_buffers, max_buffers;
  guint max_align, n;
  GstAllocator *allocator = NULL;
  GstAllocationParams alloc_params;
  GstGLTextureTarget tex_target;
  gboolean ret = TRUE;
  gint p;

  if (!gst_buffer_pool_config_get_params (config, &caps, NULL, &min_buffers,
          &max_buffers))
    goto wrong_config;

  if (caps == NULL)
    goto no_caps;

  /* now parse the caps from the config */
  if (!gst_video_info_from_caps (&info, caps))
    goto wrong_caps;

  GST_LOG_OBJECT (pool, "%dx%d, caps %" GST_PTR_FORMAT, info.width, info.height,
      caps);

  if (!gst_buffer_pool_config_get_allocator (config, &allocator, &alloc_params))
    goto wrong_config;

  gst_caps_replace (&priv->caps, caps);

  if (priv->allocator)
    gst_object_unref (priv->allocator);

  if (allocator) {
    if (!GST_IS_GL_MEMORY_ALLOCATOR (allocator)) {
      gst_object_unref (allocator);
      goto wrong_allocator;
    } else {
      priv->allocator = gst_object_ref (allocator);
    }
  } else {
    priv->allocator =
        GST_ALLOCATOR (gst_gl_memory_allocator_get_default (glpool->context));
    g_assert (priv->allocator);
  }

  priv->add_videometa = gst_buffer_pool_config_has_option (config,
      GST_BUFFER_POOL_OPTION_VIDEO_META);
  priv->add_glsyncmeta = gst_buffer_pool_config_has_option (config,
      GST_BUFFER_POOL_OPTION_GL_SYNC_META);

  if (priv->gl_params)
    gst_gl_allocation_params_free ((GstGLAllocationParams *) priv->gl_params);
  priv->gl_params = (GstGLVideoAllocationParams *)
      gst_buffer_pool_config_get_gl_allocation_params (config);
  if (!priv->gl_params)
    priv->gl_params = gst_gl_video_allocation_params_new (glpool->context,
        &alloc_params, &info, -1, NULL, 0, 0);

  max_align = alloc_params.align;

  if (gst_buffer_pool_config_has_option (config,
          GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT)) {
    priv->add_videometa = TRUE;

    gst_buffer_pool_config_get_video_alignment (config,
        priv->gl_params->valign);

    for (n = 0; n < GST_VIDEO_MAX_PLANES; ++n)
      max_align |= priv->gl_params->valign->stride_align[n];

    for (n = 0; n < GST_VIDEO_MAX_PLANES; ++n)
      priv->gl_params->valign->stride_align[n] = max_align;

    gst_video_info_align (priv->gl_params->v_info, priv->gl_params->valign);

    gst_buffer_pool_config_set_video_alignment (config,
        priv->gl_params->valign);
  }

  if (alloc_params.align < max_align) {
    GST_WARNING_OBJECT (pool, "allocation params alignment %u is smaller "
        "than the max specified video stride alignment %u, fixing",
        (guint) alloc_params.align, max_align);

    alloc_params.align = max_align;
    gst_buffer_pool_config_set_allocator (config, allocator, &alloc_params);
    if (priv->gl_params->parent.alloc_params)
      gst_allocation_params_free (priv->gl_params->parent.alloc_params);
    priv->gl_params->parent.alloc_params =
        gst_allocation_params_copy (&alloc_params);
  }

  {
    GstStructure *s = gst_caps_get_structure (caps, 0);
    const gchar *target_str = gst_structure_get_string (s, "texture-target");
    gboolean multiple_texture_targets = FALSE;

    tex_target = priv->gl_params->target;
    if (target_str)
      tex_target = gst_gl_texture_target_from_string (target_str);

    if (gst_buffer_pool_config_has_option (config,
            GST_BUFFER_POOL_OPTION_GL_TEXTURE_TARGET_2D)) {
      if (tex_target && tex_target != GST_GL_TEXTURE_TARGET_2D)
        multiple_texture_targets = TRUE;
      tex_target = GST_GL_TEXTURE_TARGET_2D;
    }
    if (gst_buffer_pool_config_has_option (config,
            GST_BUFFER_POOL_OPTION_GL_TEXTURE_TARGET_RECTANGLE)) {
      if (tex_target && tex_target != GST_GL_TEXTURE_TARGET_RECTANGLE)
        multiple_texture_targets = TRUE;
      tex_target = GST_GL_TEXTURE_TARGET_RECTANGLE;
    }
    if (gst_buffer_pool_config_has_option (config,
            GST_BUFFER_POOL_OPTION_GL_TEXTURE_TARGET_EXTERNAL_OES)) {
      if (tex_target && tex_target != GST_GL_TEXTURE_TARGET_EXTERNAL_OES)
        multiple_texture_targets = TRUE;
      tex_target = GST_GL_TEXTURE_TARGET_EXTERNAL_OES;
    }

    if (!tex_target)
      tex_target = GST_GL_TEXTURE_TARGET_2D;

    if (multiple_texture_targets) {
      GST_WARNING_OBJECT (pool, "Multiple texture targets configured either "
          "through caps or buffer pool options");
      ret = FALSE;
    }

    priv->gl_params->target = tex_target;
  }

  /* Recalculate the size and offset as we don't add padding between planes. */
  priv->gl_params->v_info->size = 0;
  for (p = 0; p < GST_VIDEO_INFO_N_PLANES (priv->gl_params->v_info); p++) {
    priv->gl_params->v_info->offset[p] = priv->gl_params->v_info->size;
    priv->gl_params->v_info->size +=
        gst_gl_get_plane_data_size (priv->gl_params->v_info,
        priv->gl_params->valign, p);
  }

  gst_buffer_pool_config_set_params (config, caps,
      priv->gl_params->v_info->size, min_buffers, max_buffers);

  return GST_BUFFER_POOL_CLASS (parent_class)->set_config (pool, config) && ret;

  /* ERRORS */
wrong_config:
  {
    GST_WARNING_OBJECT (pool, "invalid config");
    return FALSE;
  }
no_caps:
  {
    GST_WARNING_OBJECT (pool, "no caps in config");
    return FALSE;
  }
wrong_caps:
  {
    GST_WARNING_OBJECT (pool,
        "failed getting geometry from caps %" GST_PTR_FORMAT, caps);
    return FALSE;
  }
wrong_allocator:
  {
    GST_WARNING_OBJECT (pool, "Incorrect allocator type for this pool");
    return FALSE;
  }
}
Example #29
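/* Exports a dmabuf-backed buffer to the VA surface behind an msdk surface,
 * first updating the plane layout from any attached GstVideoMeta. */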
static gboolean
import_dmabuf_to_msdk_surface (GstMsdkVPP * thiz, GstBuffer * buf,
    MsdkSurface * msdk_surface)
{
  GstMemory *mem = NULL;
  GstVideoInfo vinfo;
  GstVideoMeta *vmeta;
  GstMsdkMemoryID *msdk_mid = NULL;
  mfxFrameSurface1 *mfx_surface = NULL;
  gint fd, i;

  mem = gst_buffer_peek_memory (buf, 0);
  fd = gst_dmabuf_memory_get_fd (mem);
  if (fd < 0)
    return FALSE;

  vinfo = thiz->sinkpad_info;

  /* Update offset/stride/size if there is VideoMeta attached to
   * the buffer */
  vmeta = gst_buffer_get_video_meta (buf);
  if (vmeta) {
    if (GST_VIDEO_INFO_FORMAT (&vinfo) != vmeta->format ||
        GST_VIDEO_INFO_WIDTH (&vinfo) != vmeta->width ||
        GST_VIDEO_INFO_HEIGHT (&vinfo) != vmeta->height ||
        GST_VIDEO_INFO_N_PLANES (&vinfo) != vmeta->n_planes) {
      GST_ERROR_OBJECT (thiz, "VideoMeta attached to buffer is not matching "
          "the negotiated width/height/format");
      return FALSE;
    }
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&vinfo); ++i) {
      GST_VIDEO_INFO_PLANE_OFFSET (&vinfo, i) = vmeta->offset[i];
      GST_VIDEO_INFO_PLANE_STRIDE (&vinfo, i) = vmeta->stride[i];
    }
    GST_VIDEO_INFO_SIZE (&vinfo) = gst_buffer_get_size (buf);
  }

  /* Upstream neither accepted the msdk pool nor the msdk buffer size restrictions.
   * Current media-driver and GMMLib will fail due to strict memory size restrictions.
   * Ideally, media-driver should accept whatever memory comes from other drivers
   * in case of dmabuf-import and this is how the intel-vaapi-driver works.
   * For now, in order to avoid any crash we check the buffer size and fallback
   * to copy frame method.
   *
   * See this: https://github.com/intel/media-driver/issues/169
   * */
  if (GST_VIDEO_INFO_SIZE (&vinfo) <
      GST_VIDEO_INFO_SIZE (&thiz->sinkpad_buffer_pool_info))
    return FALSE;

  mfx_surface = msdk_surface->surface;
  msdk_mid = (GstMsdkMemoryID *) mfx_surface->Data.MemId;

  /* release the internal memory storage of associated mfxSurface */
  gst_msdk_replace_mfx_memid (thiz->context, mfx_surface, VA_INVALID_ID);

  /* export dmabuf to vasurface */
  if (!gst_msdk_export_dmabuf_to_vasurface (thiz->context, &vinfo, fd,
          msdk_mid->surface))
    return FALSE;

  return TRUE;
}
Example #30
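/* Queries and mmaps a V4L2 capture buffer, records its physical address in a
 * GstImxPhysMemMeta and optionally attaches a GstVideoMeta. */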
static GstFlowReturn gst_imx_v4l2_buffer_pool_alloc_buffer(GstBufferPool *bpool, GstBuffer **buffer, G_GNUC_UNUSED GstBufferPoolAcquireParams *params)
{
	GstImxV4l2BufferPool *pool = GST_IMX_V4L2_BUFFER_POOL(bpool);
	GstBuffer *buf;
	GstImxV4l2Meta *meta;
	GstImxPhysMemMeta *phys_mem_meta;
	GstVideoInfo *info;

	buf = gst_buffer_new();
	if (buf == NULL)
	{
		GST_ERROR_OBJECT(pool, "could not create new buffer");
		return GST_FLOW_ERROR;
	}

	GST_DEBUG_OBJECT(pool, "alloc %u %p", pool->num_allocated, (gpointer)buf);

	meta = GST_IMX_V4L2_META_ADD(buf);
	meta->vbuffer.index = pool->num_allocated;
	meta->vbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	meta->vbuffer.memory = V4L2_MEMORY_MMAP;

	if (ioctl(GST_IMX_FD_OBJECT_GET_FD(pool->fd_obj_v4l), VIDIOC_QUERYBUF, &meta->vbuffer) < 0)
	{
		GST_ERROR_OBJECT(pool, "VIDIOC_QUERYBUF error: %s",
				g_strerror(errno));
		gst_buffer_unref(buf);
		return GST_FLOW_ERROR;
	}

	meta->mem = mmap(NULL, meta->vbuffer.length,
			PROT_READ | PROT_WRITE, MAP_SHARED, GST_IMX_FD_OBJECT_GET_FD(pool->fd_obj_v4l),
			meta->vbuffer.m.offset);
	g_assert(meta->mem != MAP_FAILED);

	/* Need to query twice to get the physical address */
	if (ioctl(GST_IMX_FD_OBJECT_GET_FD(pool->fd_obj_v4l), VIDIOC_QUERYBUF, &meta->vbuffer) < 0)
	{
		GST_ERROR_OBJECT(pool, "VIDIOC_QUERYBUF for physical address error: %s",
				g_strerror(errno));
		gst_buffer_unref(buf);
		return GST_FLOW_ERROR;
	}

	phys_mem_meta = GST_IMX_PHYS_MEM_META_ADD(buf);
	phys_mem_meta->phys_addr = meta->vbuffer.m.offset;

	/* Safeguard to catch data loss if in any future i.MX version the types do not match */
	g_assert(meta->vbuffer.m.offset == (__u32)(phys_mem_meta->phys_addr));

	if (pool->add_videometa)
	{
		info = &pool->video_info;

		gst_buffer_add_video_meta_full(
				buf,
				GST_VIDEO_FRAME_FLAG_NONE,
				GST_VIDEO_INFO_FORMAT(info),
				GST_VIDEO_INFO_WIDTH(info),
				GST_VIDEO_INFO_HEIGHT(info),
				GST_VIDEO_INFO_N_PLANES(info),
				info->offset,
				info->stride
				);
	}

#ifdef HAVE_VIV_UPLOAD
	if (pool->add_vivuploadmeta)
	{
		gst_imx_buffer_add_vivante_gl_texture_upload_meta(buf);
	}
#endif

	pool->num_allocated++;

	*buffer = buf;

	return GST_FLOW_OK;
}