Example no. 1
static gboolean
gst_yuv_to_rgb_set_info (GstVideoFilter * filter,
    GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
    GstVideoInfo * out_info)
{
  GstYuvToRgb *yuvtorgb = GST_YUVTORGB_CAST (filter);

  if (in_info->width != out_info->width || in_info->height != out_info->height
      || in_info->fps_n != out_info->fps_n || in_info->fps_d != out_info->fps_d)
    goto format_mismatch;

  /* if present, these must match too */
  if (in_info->par_n != out_info->par_n || in_info->par_d != out_info->par_d)
    goto format_mismatch;

  /* if present, these must match too */
  if (in_info->interlace_mode != out_info->interlace_mode)
    goto format_mismatch;

  GST_DEBUG ("reconfigured %d %d", GST_VIDEO_INFO_FORMAT (in_info),
      GST_VIDEO_INFO_FORMAT (out_info));

  return TRUE;

  /* ERRORS */
format_mismatch:
  {
    GST_ERROR_OBJECT (yuvtorgb, "input and output formats do not match");
    return FALSE;
  }
}
Example no. 2
static void
_RGB_to_GRAY (GstGLColorConvert * convert)
{
  struct ConvertInfo *info = &convert->priv->convert_info;
  GstVideoFormat in_format = GST_VIDEO_INFO_FORMAT (&convert->in_info);
  const gchar *in_format_str = gst_video_format_to_string (in_format);
  gchar *pixel_order = _RGB_pixel_order (in_format_str, "rgba");
  gchar *alpha = NULL;

  info->in_n_textures = 1;
  info->out_n_textures = 1;
  info->shader_tex_names[0] = "tex";

  if (_is_RGBx (in_format))
    alpha = g_strdup_printf ("t.%c = 1.0;", pixel_order[3]);

  switch (GST_VIDEO_INFO_FORMAT (&convert->out_info)) {
    case GST_VIDEO_FORMAT_GRAY8:
      info->frag_prog = g_strdup_printf (frag_REORDER, alpha ? alpha : "",
          pixel_order[0], pixel_order[0], pixel_order[0], pixel_order[3]);
      break;
    default:
      break;
  }

  g_free (alpha);
  g_free (pixel_order);
}
Example no. 3
static gboolean
_gst_gl_download_perform_with_data_unlocked (GstGLDownload * download,
    GLuint texture_id, gpointer data[GST_VIDEO_MAX_PLANES])
{
  guint i;

  g_return_val_if_fail (download != NULL, FALSE);
  g_return_val_if_fail (texture_id > 0, FALSE);
  g_return_val_if_fail (GST_VIDEO_INFO_FORMAT (&download->info) !=
      GST_VIDEO_FORMAT_UNKNOWN
      && GST_VIDEO_INFO_FORMAT (&download->info) != GST_VIDEO_FORMAT_ENCODED,
      FALSE);

  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&download->info); i++) {
    g_return_val_if_fail (data[i] != NULL, FALSE);
  }

  if (!download->priv->in_tex[0])
    download->priv->in_tex[0] =
        gst_gl_memory_wrapped_texture (download->context, texture_id,
        GST_VIDEO_GL_TEXTURE_TYPE_RGBA, GST_VIDEO_INFO_WIDTH (&download->info),
        GST_VIDEO_INFO_HEIGHT (&download->info), NULL, NULL);

  download->priv->in_tex[0]->tex_id = texture_id;

  return _do_download (download, texture_id, data);
}
Example no. 4
static gboolean
gst_mfxpostproc_update_src_caps (GstMfxPostproc * vpp, GstCaps * caps,
    gboolean * caps_changed_ptr)
{
  GST_INFO_OBJECT (vpp, "new src caps = %" GST_PTR_FORMAT, caps);

  if (!video_info_update (caps, &vpp->srcpad_info, caps_changed_ptr))
    return FALSE;

  if (GST_VIDEO_INFO_FORMAT (&vpp->sinkpad_info) !=
      GST_VIDEO_INFO_FORMAT (&vpp->srcpad_info))
    vpp->flags |= GST_MFX_POSTPROC_FLAG_FORMAT;

  if ((vpp->width || vpp->height) &&
      vpp->width != GST_VIDEO_INFO_WIDTH (&vpp->sinkpad_info) &&
      vpp->height != GST_VIDEO_INFO_HEIGHT (&vpp->sinkpad_info))
    vpp->flags |= GST_MFX_POSTPROC_FLAG_SIZE;

  if (vpp->fps_n && gst_util_fraction_compare(
        GST_VIDEO_INFO_FPS_N (&vpp->srcpad_info),
        GST_VIDEO_INFO_FPS_D (&vpp->srcpad_info),
        GST_VIDEO_INFO_FPS_N (&vpp->sinkpad_info),
        GST_VIDEO_INFO_FPS_D (&vpp->sinkpad_info)))
    vpp->flags |= GST_MFX_POSTPROC_FLAG_FRC;

  return TRUE;
}
Example no. 5
static void
_RGB_to_RGB (GstGLColorConvert * convert)
{
  struct ConvertInfo *info = &convert->priv->convert_info;
  GstVideoFormat in_format = GST_VIDEO_INFO_FORMAT (&convert->in_info);
  const gchar *in_format_str = gst_video_format_to_string (in_format);
  GstVideoFormat out_format = GST_VIDEO_INFO_FORMAT (&convert->out_info);
  const gchar *out_format_str = gst_video_format_to_string (out_format);
  gchar *pixel_order = _RGB_pixel_order (in_format_str, out_format_str);
  gchar *alpha = NULL;

  info->in_n_textures = 1;
  info->out_n_textures = 1;
  if (_is_RGBx (in_format)) {
    int i;
    char input_alpha_channel = 'a';
    for (i = 0; i < GST_VIDEO_MAX_PLANES; i++) {
      if (in_format_str[i] == 'X' || in_format_str[i] == 'x') {
        input_alpha_channel = _index_to_shader_swizzle (i);
        break;
      }
    }
    alpha = g_strdup_printf ("t.%c = 1.0;", input_alpha_channel);
  }
  info->frag_prog = g_strdup_printf (frag_REORDER, alpha ? alpha : "",
      pixel_order[0], pixel_order[1], pixel_order[2], pixel_order[3]);
  info->shader_tex_names[0] = "tex";

  g_free (alpha);
  g_free (pixel_order);
}
Example no. 6
static void
_gst_gl_color_convert_set_format_unlocked (GstGLColorConvert * convert,
    GstVideoInfo * in_info, GstVideoInfo * out_info)
{
  g_return_if_fail (convert != NULL);
  g_return_if_fail (in_info);
  g_return_if_fail (out_info);
  g_return_if_fail (GST_VIDEO_INFO_FORMAT (in_info) !=
      GST_VIDEO_FORMAT_UNKNOWN);
  g_return_if_fail (GST_VIDEO_INFO_FORMAT (in_info) !=
      GST_VIDEO_FORMAT_ENCODED);
  g_return_if_fail (GST_VIDEO_INFO_FORMAT (out_info) !=
      GST_VIDEO_FORMAT_UNKNOWN);
  g_return_if_fail (GST_VIDEO_INFO_FORMAT (out_info) !=
      GST_VIDEO_FORMAT_ENCODED);

  if (gst_video_info_is_equal (&convert->in_info, in_info) &&
      gst_video_info_is_equal (&convert->out_info, out_info))
    return;

  gst_gl_color_convert_reset (convert);
  convert->in_info = *in_info;
  convert->out_info = *out_info;
  convert->initted = FALSE;
}
Example no. 7
static void
gst_msdkvpp_set_passthrough (GstMsdkVPP * thiz)
{
  gboolean passthrough = TRUE;

  /* no passthrough if any of the filter algorithms is enabled */
  if (thiz->flags)
    passthrough = FALSE;

  /* vpp could be needed in some specific circumstances, e.g. when the
   * input surface is dmabuf and the output must be videomemory. So far
   * the underlying iHD driver doesn't seem to support dmabuf mapping,
   * so we could explicitly ask msdkvpp to provide non-dmabuf videomemory
   * surfaces as output through capsfilters */
  if (thiz->need_vpp)
    passthrough = FALSE;

  /* no passthrough if there is a change in output width, height or format */
  if (GST_VIDEO_INFO_WIDTH (&thiz->sinkpad_info) !=
      GST_VIDEO_INFO_WIDTH (&thiz->srcpad_info)
      || GST_VIDEO_INFO_HEIGHT (&thiz->sinkpad_info) !=
      GST_VIDEO_INFO_HEIGHT (&thiz->srcpad_info)
      || GST_VIDEO_INFO_FORMAT (&thiz->sinkpad_info) !=
      GST_VIDEO_INFO_FORMAT (&thiz->srcpad_info))
    passthrough = FALSE;

  gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (thiz), passthrough);
}
Example no. 8
gboolean gst_imx_pxp_video_transform_are_video_infos_equal(G_GNUC_UNUSED GstImxBlitterVideoTransform *blitter_video_transform, GstVideoInfo const *in_info, GstVideoInfo const *out_info)
{
	return
		(GST_VIDEO_INFO_WIDTH(in_info) == GST_VIDEO_INFO_WIDTH(out_info)) &&
		(GST_VIDEO_INFO_HEIGHT(in_info) == GST_VIDEO_INFO_HEIGHT(out_info)) &&
		(GST_VIDEO_INFO_FORMAT(in_info) == GST_VIDEO_INFO_FORMAT(out_info))
		;
}
Example no. 9
static void
gst_video_balance_semiplanar_yuv (GstVideoBalance * videobalance,
    GstVideoFrame * frame)
{
  gint x, y;
  guint8 *ydata;
  guint8 *uvdata;
  gint ystride, uvstride;
  gint width, height;
  gint width2, height2;
  guint8 *tabley = videobalance->tabley;
  guint8 **tableu = videobalance->tableu;
  guint8 **tablev = videobalance->tablev;
  gint upos, vpos;

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  ydata = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  ystride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);

  for (y = 0; y < height; y++) {
    guint8 *yptr;

    yptr = ydata + y * ystride;
    for (x = 0; x < width; x++) {
      *yptr = tabley[*yptr];
      yptr++;
    }
  }

  width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
  height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);

  uvdata = GST_VIDEO_FRAME_PLANE_DATA (frame, 1);
  uvstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 1);

  upos = GST_VIDEO_INFO_FORMAT (&frame->info) == GST_VIDEO_FORMAT_NV12 ? 0 : 1;
  vpos = GST_VIDEO_INFO_FORMAT (&frame->info) == GST_VIDEO_FORMAT_NV12 ? 1 : 0;

  for (y = 0; y < height2; y++) {
    guint8 *uvptr;
    guint8 u1, v1;

    uvptr = uvdata + y * uvstride;

    for (x = 0; x < width2; x++) {
      u1 = uvptr[upos];
      v1 = uvptr[vpos];

      uvptr[upos] = tableu[u1][v1];
      uvptr[vpos] = tablev[u1][v1];
      uvptr += 2;
    }
  }
}
Example no. 10
static gboolean
video_info_changed (GstVideoInfo * old_vip, GstVideoInfo * new_vip)
{
  if (GST_VIDEO_INFO_FORMAT (old_vip) != GST_VIDEO_INFO_FORMAT (new_vip))
    return TRUE;
  if (GST_VIDEO_INFO_WIDTH (old_vip) != GST_VIDEO_INFO_WIDTH (new_vip))
    return TRUE;
  if (GST_VIDEO_INFO_HEIGHT (old_vip) != GST_VIDEO_INFO_HEIGHT (new_vip))
    return TRUE;
  return FALSE;
}
Example no. 11
static gboolean
gst_video_convert_set_info (GstVideoFilter * filter,
    GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
    GstVideoInfo * out_info)
{
  GstVideoConvert *space;

  space = GST_VIDEO_CONVERT_CAST (filter);

  if (space->convert) {
    gst_video_converter_free (space->convert);
    space->convert = NULL;
  }

  /* these must match */
  if (in_info->width != out_info->width || in_info->height != out_info->height
      || in_info->fps_n != out_info->fps_n || in_info->fps_d != out_info->fps_d)
    goto format_mismatch;

  /* if present, these must match too */
  if (in_info->par_n != out_info->par_n || in_info->par_d != out_info->par_d)
    goto format_mismatch;

  /* if present, these must match too */
  if (in_info->interlace_mode != out_info->interlace_mode)
    goto format_mismatch;


  space->convert = gst_video_converter_new (in_info, out_info,
      gst_structure_new ("GstVideoConvertConfig",
          GST_VIDEO_CONVERTER_OPT_DITHER_METHOD, GST_TYPE_VIDEO_DITHER_METHOD,
          space->dither,
          GST_VIDEO_CONVERTER_OPT_DITHER_QUANTIZATION, G_TYPE_UINT,
          space->dither_quantization, NULL));
  if (space->convert == NULL)
    goto no_convert;

  GST_DEBUG ("reconfigured %d %d", GST_VIDEO_INFO_FORMAT (in_info),
      GST_VIDEO_INFO_FORMAT (out_info));

  return TRUE;

  /* ERRORS */
format_mismatch:
  {
    GST_ERROR_OBJECT (space, "input and output formats do not match");
    return FALSE;
  }
no_convert:
  {
    GST_ERROR_OBJECT (space, "could not create converter");
    return FALSE;
  }
}
Example no. 12
static gboolean
gst_pngenc_set_format (GstVideoEncoder * encoder, GstVideoCodecState * state)
{
  GstPngEnc *pngenc;
  gboolean ret = TRUE;
  GstVideoInfo *info;
  GstVideoCodecState *output_state;

  pngenc = GST_PNGENC (encoder);
  info = &state->info;

  switch (GST_VIDEO_INFO_FORMAT (info)) {
    case GST_VIDEO_FORMAT_RGBA:
      pngenc->png_color_type = PNG_COLOR_TYPE_RGBA;
      break;
    case GST_VIDEO_FORMAT_RGB:
      pngenc->png_color_type = PNG_COLOR_TYPE_RGB;
      break;
    case GST_VIDEO_FORMAT_GRAY8:
    case GST_VIDEO_FORMAT_GRAY16_BE:
      pngenc->png_color_type = PNG_COLOR_TYPE_GRAY;
      break;
    default:
      ret = FALSE;
      goto done;
  }

  switch (GST_VIDEO_INFO_FORMAT (info)) {
    case GST_VIDEO_FORMAT_GRAY16_BE:
      pngenc->depth = 16;
      break;
    default:                   /* GST_VIDEO_FORMAT_RGBA, RGB and GRAY8 */
      pngenc->depth = 8;
      break;
  }

  if (pngenc->input_state)
    gst_video_codec_state_unref (pngenc->input_state);
  pngenc->input_state = gst_video_codec_state_ref (state);

  output_state =
      gst_video_encoder_set_output_state (encoder,
      gst_caps_new_empty_simple ("image/png"), state);
  gst_video_codec_state_unref (output_state);

done:

  return ret;
}
Example no. 13
/**
 * gst_video_info_is_equal:
 * @info: a #GstVideoInfo
 * @other: a #GstVideoInfo
 *
 * Compares two #GstVideoInfo and returns whether they are equal or not
 *
 * Returns: %TRUE if @info and @other are equal, else %FALSE.
 */
gboolean
gst_video_info_is_equal (const GstVideoInfo * info, const GstVideoInfo * other)
{
  gint i;

  if (GST_VIDEO_INFO_FORMAT (info) != GST_VIDEO_INFO_FORMAT (other))
    return FALSE;
  if (GST_VIDEO_INFO_INTERLACE_MODE (info) !=
      GST_VIDEO_INFO_INTERLACE_MODE (other))
    return FALSE;
  if (GST_VIDEO_INFO_FLAGS (info) != GST_VIDEO_INFO_FLAGS (other))
    return FALSE;
  if (GST_VIDEO_INFO_WIDTH (info) != GST_VIDEO_INFO_WIDTH (other))
    return FALSE;
  if (GST_VIDEO_INFO_HEIGHT (info) != GST_VIDEO_INFO_HEIGHT (other))
    return FALSE;
  if (GST_VIDEO_INFO_SIZE (info) != GST_VIDEO_INFO_SIZE (other))
    return FALSE;
  if (GST_VIDEO_INFO_PAR_N (info) != GST_VIDEO_INFO_PAR_N (other))
    return FALSE;
  if (GST_VIDEO_INFO_PAR_D (info) != GST_VIDEO_INFO_PAR_D (other))
    return FALSE;
  if (GST_VIDEO_INFO_FPS_N (info) != GST_VIDEO_INFO_FPS_N (other))
    return FALSE;
  if (GST_VIDEO_INFO_FPS_D (info) != GST_VIDEO_INFO_FPS_D (other))
    return FALSE;
  if (!gst_video_colorimetry_is_equal (&GST_VIDEO_INFO_COLORIMETRY (info),
          &GST_VIDEO_INFO_COLORIMETRY (other)))
    return FALSE;
  if (GST_VIDEO_INFO_CHROMA_SITE (info) != GST_VIDEO_INFO_CHROMA_SITE (other))
    return FALSE;
  if (GST_VIDEO_INFO_MULTIVIEW_MODE (info) !=
      GST_VIDEO_INFO_MULTIVIEW_MODE (other))
    return FALSE;
  if (GST_VIDEO_INFO_MULTIVIEW_FLAGS (info) !=
      GST_VIDEO_INFO_MULTIVIEW_FLAGS (other))
    return FALSE;
  if (GST_VIDEO_INFO_VIEWS (info) != GST_VIDEO_INFO_VIEWS (other))
    return FALSE;

  for (i = 0; i < info->finfo->n_planes; i++) {
    if (info->stride[i] != other->stride[i])
      return FALSE;
    if (info->offset[i] != other->offset[i])
      return FALSE;
  }

  return TRUE;
}
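A minimal usage sketch for the comparison above, assuming gst_init() has already been called; the helper name and the caps strings passed in are purely illustrative, while gst_caps_from_string(), gst_video_info_from_caps() and gst_video_info_is_equal() are the regular GStreamer APIs:

#include <gst/video/video.h>

/* Parse two caps strings into GstVideoInfo and compare them with
 * gst_video_info_is_equal(). Returns FALSE on any parse failure. */
static gboolean
infos_match (const gchar * caps_str_a, const gchar * caps_str_b)
{
  GstCaps *caps_a = gst_caps_from_string (caps_str_a);
  GstCaps *caps_b = gst_caps_from_string (caps_str_b);
  GstVideoInfo info_a, info_b;
  gboolean equal = FALSE;

  if (caps_a && caps_b &&
      gst_video_info_from_caps (&info_a, caps_a) &&
      gst_video_info_from_caps (&info_b, caps_b))
    equal = gst_video_info_is_equal (&info_a, &info_b);

  if (caps_a)
    gst_caps_unref (caps_a);
  if (caps_b)
    gst_caps_unref (caps_b);
  return equal;
}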
Example no. 14
struct wl_buffer *
gst_wl_shm_memory_construct_wl_buffer (GstMemory * mem, GstWlDisplay * display,
    const GstVideoInfo * info)
{
  GstWlShmMemory *shm_mem = (GstWlShmMemory *) mem;
  gint width, height, stride;
  gsize size;
  enum wl_shm_format format;
  struct wl_shm_pool *wl_pool;
  struct wl_buffer *wbuffer;

  width = GST_VIDEO_INFO_WIDTH (info);
  height = GST_VIDEO_INFO_HEIGHT (info);
  stride = GST_VIDEO_INFO_PLANE_STRIDE (info, 0);
  size = GST_VIDEO_INFO_SIZE (info);
  format = gst_video_format_to_wl_shm_format (GST_VIDEO_INFO_FORMAT (info));

  g_return_val_if_fail (gst_is_wl_shm_memory (mem), NULL);
  g_return_val_if_fail (size <= mem->size, NULL);
  g_return_val_if_fail (shm_mem->fd != -1, NULL);

  GST_DEBUG_OBJECT (mem->allocator, "Creating wl_buffer of size %"
      G_GSSIZE_FORMAT " (%d x %d, stride %d), format %s", size, width, height,
      stride, gst_wl_shm_format_to_string (format));

  wl_pool = wl_shm_create_pool (display->shm, shm_mem->fd, mem->size);
  wbuffer = wl_shm_pool_create_buffer (wl_pool, 0, width, height, stride,
      format);

  close (shm_mem->fd);
  shm_mem->fd = -1;
  wl_shm_pool_destroy (wl_pool);

  return wbuffer;
}
Example no. 15
static GstFlowReturn
gst_wayland_buffer_pool_alloc (GstBufferPool * pool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstWaylandBufferPool *self = GST_WAYLAND_BUFFER_POOL_CAST (pool);
  gint width, height, stride;
  gsize size;
  enum wl_shm_format format;
  gint offset;
  void *data;
  GstWlMeta *meta;

  width = GST_VIDEO_INFO_WIDTH (&self->info);
  height = GST_VIDEO_INFO_HEIGHT (&self->info);
  stride = GST_VIDEO_INFO_PLANE_STRIDE (&self->info, 0);
  size = GST_VIDEO_INFO_SIZE (&self->info);
  format =
      gst_video_format_to_wayland_format (GST_VIDEO_INFO_FORMAT (&self->info));

  GST_DEBUG_OBJECT (self, "Allocating buffer of size %" G_GSSIZE_FORMAT
      " (%d x %d, stride %d), format %s", size, width, height, stride,
      gst_wayland_format_to_string (format));

  /* try to reserve another memory block from the shm pool */
  if (self->used + size > self->size)
    goto no_buffer;

  offset = self->used;
  self->used += size;
  data = ((gchar *) self->data) + offset;

  /* create buffer and its metadata object */
  *buffer = gst_buffer_new ();
  meta = (GstWlMeta *) gst_buffer_add_meta (*buffer, GST_WL_META_INFO, NULL);
  meta->pool = self;
  meta->wbuffer = wl_shm_pool_create_buffer (self->wl_pool, offset,
      width, height, stride, format);
  meta->used_by_compositor = FALSE;

  /* configure listening to wl_buffer.release */
  g_mutex_lock (&self->buffers_map_mutex);
  g_hash_table_insert (self->buffers_map, meta->wbuffer, *buffer);
  g_mutex_unlock (&self->buffers_map_mutex);

  wl_buffer_add_listener (meta->wbuffer, &buffer_listener, self);

  /* add the allocated memory on the GstBuffer */
  gst_buffer_append_memory (*buffer,
      gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE, data,
          size, 0, size, NULL, NULL));

  return GST_FLOW_OK;

  /* ERROR */
no_buffer:
  {
    GST_WARNING_OBJECT (pool, "can't create buffer");
    return GST_FLOW_ERROR;
  }
}
Example no. 16
static GstFlowReturn
video_buffer_pool_alloc (GstBufferPool * pool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstVideoBufferPool *vpool = GST_VIDEO_BUFFER_POOL_CAST (pool);
  GstVideoBufferPoolPrivate *priv = vpool->priv;
  GstVideoInfo *info;

  info = &priv->info;

  GST_DEBUG_OBJECT (pool, "alloc %" G_GSIZE_FORMAT, info->size);

  *buffer =
      gst_buffer_new_allocate (priv->allocator, info->size, &priv->params);
  if (*buffer == NULL)
    goto no_memory;

  if (priv->add_videometa) {
    GST_DEBUG_OBJECT (pool, "adding GstVideoMeta");

    gst_buffer_add_video_meta_full (*buffer, GST_VIDEO_FRAME_FLAG_NONE,
        GST_VIDEO_INFO_FORMAT (info),
        GST_VIDEO_INFO_WIDTH (info), GST_VIDEO_INFO_HEIGHT (info),
        GST_VIDEO_INFO_N_PLANES (info), info->offset, info->stride);
  }

  return GST_FLOW_OK;

  /* ERROR */
no_memory:
  {
    GST_WARNING_OBJECT (pool, "can't create memory");
    return GST_FLOW_ERROR;
  }
}
Example no. 17
/* check whether the decoded surface size has changed */
static gboolean
is_surface_resolution_changed (GstVaapiDecode * decode,
    GstVaapiSurface * surface)
{
  GstVideoInfo *vinfo = &decode->decoded_info;
  GstVideoFormat surface_format;
  guint surface_width, surface_height;

  g_return_val_if_fail (surface != NULL, FALSE);

  gst_vaapi_surface_get_size (surface, &surface_width, &surface_height);

  if (GST_VIDEO_INFO_WIDTH (vinfo) == surface_width
      && GST_VIDEO_INFO_HEIGHT (vinfo) == surface_height)
    return FALSE;

  /* call gst_vaapi_surface_get_format() only if necessary, since it
   * executes vaDeriveImage in the background. This will usually get
   * executed only once */
  surface_format = GST_VIDEO_INFO_FORMAT (vinfo);
  if (surface_format == GST_VIDEO_FORMAT_UNKNOWN) {
    surface_format = gst_vaapi_surface_get_format (surface);

    /* if the VA context delivers a currently unrecognized format
     * (ICM3, e.g.), we can assume NV12 "safely" */
    if (surface_format == GST_VIDEO_FORMAT_UNKNOWN
        || surface_format == GST_VIDEO_FORMAT_ENCODED)
      surface_format = GST_VIDEO_FORMAT_NV12;
  }

  gst_video_info_set_format (vinfo, surface_format, surface_width,
      surface_height);

  return TRUE;
}
Example no. 18
static gboolean
plugin_update_sinkpad_info_from_buffer (GstVaapiPluginBase * plugin,
    GstBuffer * buf)
{
  GstVideoInfo *const vip = &plugin->sinkpad_info;
  GstVideoMeta *vmeta;
  guint i;

  vmeta = gst_buffer_get_video_meta (buf);
  if (!vmeta)
    return TRUE;

  if (GST_VIDEO_INFO_FORMAT (vip) != vmeta->format ||
      GST_VIDEO_INFO_WIDTH (vip) != vmeta->width ||
      GST_VIDEO_INFO_HEIGHT (vip) != vmeta->height ||
      GST_VIDEO_INFO_N_PLANES (vip) != vmeta->n_planes)
    return FALSE;

  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (vip); ++i) {
    GST_VIDEO_INFO_PLANE_OFFSET (vip, i) = vmeta->offset[i];
    GST_VIDEO_INFO_PLANE_STRIDE (vip, i) = vmeta->stride[i];
  }
  GST_VIDEO_INFO_SIZE (vip) = gst_buffer_get_size (buf);
  return TRUE;
}
Example no. 19
gboolean
gst_niimaqsrc_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
{
  GstNiImaqSrc *src = GST_NIIMAQSRC (bsrc);
  gboolean res = TRUE;
  int depth, ncomps;
  GstVideoInfo vinfo;

  res = gst_video_info_from_caps (&vinfo, caps);
  if (!res) {
    GST_WARNING_OBJECT (src, "Unable to parse video info from caps");
    return res;
  }
  src->format = GST_VIDEO_INFO_FORMAT (&vinfo);
  src->width = GST_VIDEO_INFO_WIDTH (&vinfo);
  src->height = GST_VIDEO_INFO_HEIGHT (&vinfo);

  /* this will handle byte alignment (i.e. row multiple of 4 bytes) */
  src->framesize = GST_VIDEO_INFO_SIZE (&vinfo);

  gst_base_src_set_blocksize (bsrc, src->framesize);

  ncomps = GST_VIDEO_INFO_N_COMPONENTS (&vinfo);
  depth = GST_VIDEO_INFO_COMP_DEPTH (&vinfo, 0);

  /* use this so NI can give us proper byte alignment */
  src->rowpixels =
      GST_VIDEO_INFO_COMP_STRIDE (&vinfo, 0) / (ncomps * depth / 8);

  GST_LOG_OBJECT (src, "Caps set, framesize=%d, rowpixels=%d",
      src->framesize, src->rowpixels);

  return res;
}
Example no. 20
bool getVideoSizeAndFormatFromCaps(GstCaps* caps, WebCore::IntSize& size, GstVideoFormat& format, int& pixelAspectRatioNumerator, int& pixelAspectRatioDenominator, int& stride)
{
#ifdef GST_API_VERSION_1
    GstVideoInfo info;
    if (!gst_video_info_from_caps(&info, caps))
        return false;

    format = GST_VIDEO_INFO_FORMAT(&info);
    size.setWidth(GST_VIDEO_INFO_WIDTH(&info));
    size.setHeight(GST_VIDEO_INFO_HEIGHT(&info));
    pixelAspectRatioNumerator = GST_VIDEO_INFO_PAR_N(&info);
    pixelAspectRatioDenominator = GST_VIDEO_INFO_PAR_D(&info);
    stride = GST_VIDEO_INFO_PLANE_STRIDE(&info, 0);
#else
    gint width, height;
    if (!GST_IS_CAPS(caps) || !gst_caps_is_fixed(caps)
        || !gst_video_format_parse_caps(caps, &format, &width, &height)
        || !gst_video_parse_caps_pixel_aspect_ratio(caps, &pixelAspectRatioNumerator,
                                                    &pixelAspectRatioDenominator))
        return false;
    size.setWidth(width);
    size.setHeight(height);
    stride = size.width() * 4;
#endif

    return true;
}
Example no. 21
/**
 * ensure_sinkpad_buffer_pool:
 * @plugin: a #GstVaapiPluginBase
 * @caps: the initial #GstCaps for the resulting buffer pool
 *
 * Makes sure the sink pad video buffer pool is created with the
 * appropriate @caps.
 *
 * Returns: %TRUE if successful, %FALSE otherwise.
 */
static gboolean
ensure_sinkpad_buffer_pool (GstVaapiPluginBase * plugin, GstCaps * caps)
{
  GstBufferPool *pool;
  GstCaps *pool_caps;
  GstStructure *config;
  GstVideoInfo vi;
  gboolean need_pool;

  if (!gst_vaapi_plugin_base_ensure_display (plugin))
    return FALSE;

  if (plugin->sinkpad_buffer_pool) {
    config = gst_buffer_pool_get_config (plugin->sinkpad_buffer_pool);
    gst_buffer_pool_config_get_params (config, &pool_caps, NULL, NULL, NULL);
    need_pool = !gst_caps_is_equal (caps, pool_caps);
    gst_structure_free (config);
    if (!need_pool)
      return TRUE;
    g_clear_object (&plugin->sinkpad_buffer_pool);
    plugin->sinkpad_buffer_size = 0;
  }

  pool = gst_vaapi_video_buffer_pool_new (plugin->display);
  if (!pool)
    goto error_create_pool;

  gst_video_info_init (&vi);
  gst_video_info_from_caps (&vi, caps);
  if (GST_VIDEO_INFO_FORMAT (&vi) == GST_VIDEO_FORMAT_ENCODED) {
    GST_DEBUG ("assume video buffer pool format is NV12");
    gst_video_info_set_format (&vi, GST_VIDEO_FORMAT_NV12,
        GST_VIDEO_INFO_WIDTH (&vi), GST_VIDEO_INFO_HEIGHT (&vi));
  }
  plugin->sinkpad_buffer_size = vi.size;

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, caps,
      plugin->sinkpad_buffer_size, 0, 0);
  gst_buffer_pool_config_add_option (config,
      GST_BUFFER_POOL_OPTION_VAAPI_VIDEO_META);
  gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
  if (!gst_buffer_pool_set_config (pool, config))
    goto error_pool_config;
  plugin->sinkpad_buffer_pool = pool;
  return TRUE;

  /* ERRORS */
error_create_pool:
  {
    GST_ERROR ("failed to create buffer pool");
    return FALSE;
  }
error_pool_config:
  {
    GST_ERROR ("failed to reset buffer pool config");
    gst_object_unref (pool);
    return FALSE;
  }
}
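A hedged sketch of how a sink-pad caps handler might drive the helper above; the caller name is hypothetical, and only the plugin->sinkpad_info field is taken from the surrounding code:

/* Hypothetical caller: refresh sinkpad_info from the new caps and let
 * ensure_sinkpad_buffer_pool() decide whether the pool must be re-created
 * (it keeps the existing pool when the caps did not change). */
static gboolean
plugin_update_sink_caps (GstVaapiPluginBase * plugin, GstCaps * caps)
{
  if (!gst_video_info_from_caps (&plugin->sinkpad_info, caps))
    return FALSE;

  return ensure_sinkpad_buffer_pool (plugin, caps);
}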
Example no. 22
static gboolean
_init_download (GstGLDownload * download)
{
  GstVideoFormat v_format;
  guint out_width, out_height;
  GstVideoInfo in_info;

  v_format = GST_VIDEO_INFO_FORMAT (&download->info);
  out_width = GST_VIDEO_INFO_WIDTH (&download->info);
  out_height = GST_VIDEO_INFO_HEIGHT (&download->info);

  if (download->initted)
    return TRUE;

  GST_TRACE ("initializing texture download for format %s",
      gst_video_format_to_string (v_format));

  if (USING_GLES2 (download->context) && !USING_GLES3 (download->context)) {
    /* GL_RGBA is the only officially supported texture format in GLES2 */
    if (v_format == GST_VIDEO_FORMAT_RGB || v_format == GST_VIDEO_FORMAT_BGR) {
      gst_gl_context_set_error (download->context, "Cannot download RGB "
          "textures in GLES2");
      return FALSE;
    }
  }

  gst_video_info_set_format (&in_info, GST_VIDEO_FORMAT_RGBA, out_width,
      out_height);

  gst_gl_color_convert_set_format (download->convert, &in_info,
      &download->info);

  return TRUE;
}
Example no. 23
static gboolean
gst_phoenixsrc_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
{
  GstPhoenixSrc *src = GST_PHOENIX_SRC (bsrc);
  GstVideoInfo vinfo;
  GstStructure *s = gst_caps_get_structure (caps, 0);

  GST_DEBUG_OBJECT (src, "The caps being set are %" GST_PTR_FORMAT, caps);

  gst_video_info_from_caps (&vinfo, caps);

  if (g_str_equal ("video/x-bayer", gst_structure_get_name (s))) {
    gint width;
    const gchar *format;
    gst_structure_get_int (s, "width", &width);
    gst_structure_get_int (s, "height", &src->height);
    format = gst_structure_get_string (s, "format");
    if (g_str_has_suffix (format, "16"))
      src->gst_stride = GST_ROUND_UP_4 (width * 2);
    else
      src->gst_stride = GST_ROUND_UP_4 (width);
  } else if (GST_VIDEO_INFO_FORMAT (&vinfo) != GST_VIDEO_FORMAT_UNKNOWN) {
    src->gst_stride = GST_VIDEO_INFO_COMP_STRIDE (&vinfo, 0);
    src->height = vinfo.height;
  } else {
    goto unsupported_caps;
  }

  return TRUE;

unsupported_caps:
  GST_ERROR_OBJECT (src, "Unsupported caps: %" GST_PTR_FORMAT, caps);
  return FALSE;
}
Example no. 24
static gboolean
ensure_image (GstVaapiVideoMemory * mem)
{
  if (!mem->image && mem->use_direct_rendering) {
    mem->image = gst_vaapi_surface_derive_image (mem->surface);
    if (!mem->image) {
      GST_WARNING ("failed to derive image, fallbacking to copy");
      mem->use_direct_rendering = FALSE;
    } else if (gst_vaapi_surface_get_format (mem->surface) !=
        GST_VIDEO_INFO_FORMAT (mem->image_info)) {
      gst_vaapi_object_replace (&mem->image, NULL);
      mem->use_direct_rendering = FALSE;
    }
  }

  if (!mem->image) {
    GstVaapiVideoAllocator *const allocator =
        GST_VAAPI_VIDEO_ALLOCATOR_CAST (GST_MEMORY_CAST (mem)->allocator);

    mem->image = gst_vaapi_video_pool_get_object (allocator->image_pool);
    if (!mem->image)
      return FALSE;
  }
  gst_vaapi_video_meta_set_image (mem->meta, mem->image);
  return TRUE;
}
Example no. 25
static inline void
allocator_configure_image_info (GstVaapiDisplay * display,
    GstVaapiVideoAllocator * allocator)
{
  GstVaapiImage *image = NULL;
  const GstVideoInfo *vinfo;

  if (allocator->has_direct_rendering) {
    allocator->image_info = allocator->surface_info;
    return;
  }

  vinfo = &allocator->video_info;

  if (GST_VIDEO_INFO_FORMAT (vinfo) == GST_VIDEO_FORMAT_ENCODED)
    gst_video_info_set_format (&allocator->image_info, GST_VIDEO_FORMAT_I420,
        GST_VIDEO_INFO_WIDTH (vinfo), GST_VIDEO_INFO_HEIGHT (vinfo));
  else
    allocator->image_info = *vinfo;

  image = new_image (display, &allocator->image_info);
  if (!image)
    goto bail;
  if (!gst_vaapi_image_map (image))
    goto bail;

  gst_video_info_update_from_image (&allocator->image_info, image);
  gst_vaapi_image_unmap (image);

bail:
  if (image)
    gst_vaapi_object_unref (image);
}
Example no. 26
static void gst_imx_ipu_blitter_init_dummy_black_buffer(GstImxIpuBlitter *ipu_blitter)
{
	GstVideoInfo video_info;

	gst_video_info_init(&video_info);
	gst_video_info_set_format(&video_info, GST_VIDEO_FORMAT_RGBx, 64, 64);

	ipu_blitter->dummy_black_buffer = gst_buffer_new_allocate(ipu_blitter->allocator, GST_VIDEO_INFO_SIZE(&video_info), NULL);
	gst_buffer_memset(ipu_blitter->dummy_black_buffer, 0, 0, GST_VIDEO_INFO_SIZE(&video_info));

	gst_buffer_add_video_meta_full(
		ipu_blitter->dummy_black_buffer,
		GST_VIDEO_FRAME_FLAG_NONE,
		GST_VIDEO_INFO_FORMAT(&video_info),
		GST_VIDEO_INFO_WIDTH(&video_info),
		GST_VIDEO_INFO_HEIGHT(&video_info),
		GST_VIDEO_INFO_N_PLANES(&video_info),
		&(GST_VIDEO_INFO_PLANE_OFFSET(&video_info, 0)),
		&(GST_VIDEO_INFO_PLANE_STRIDE(&video_info, 0))
	);

	{
		GstImxPhysMemory *imx_phys_mem_mem = (GstImxPhysMemory *)gst_buffer_peek_memory(ipu_blitter->dummy_black_buffer, 0);
		GstImxPhysMemMeta *phys_mem_meta = (GstImxPhysMemMeta *)GST_IMX_PHYS_MEM_META_ADD(ipu_blitter->dummy_black_buffer);

		phys_mem_meta->phys_addr = imx_phys_mem_mem->phys_addr;
	}
}
Example no. 27
static void
_gen_texture_full (GstGLContext * context, GenTextureFull * data)
{
  const GstGLFuncs *gl = context->gl_vtable;
  GstVideoGLTextureType tex_type;
  GstVideoFormat v_format;
  GLint glinternalformat = 0;
  GLenum glformat = 0;
  GLenum gltype = 0;

  gl->GenTextures (1, &data->result);
  gl->BindTexture (GL_TEXTURE_2D, data->result);

  v_format = GST_VIDEO_INFO_FORMAT (data->info);
  tex_type = gst_gl_texture_type_from_format (context, v_format, data->comp);
  glformat = gst_gl_format_from_gl_texture_type (tex_type);
  gltype = GL_UNSIGNED_BYTE;
  if (v_format == GST_VIDEO_FORMAT_RGB16)
    gltype = GL_UNSIGNED_SHORT_5_6_5;
  glinternalformat = gst_gl_sized_gl_format_from_gl_format_type (context,
      glformat, gltype);

  gl->TexImage2D (GL_TEXTURE_2D, 0, glinternalformat,
      GST_VIDEO_INFO_COMP_WIDTH (data->info, data->comp),
      GST_VIDEO_INFO_COMP_HEIGHT (data->info, data->comp), 0, glformat, gltype,
      NULL);

  gl->TexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  gl->TexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  gl->TexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  gl->TexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
Example no. 28
static inline gboolean
is_chroma_type_supported (GstVaapiEncoder * encoder)
{
  GstVaapiContextInfo *const cip = &encoder->context_info;
  const GstVideoFormat fmt =
    GST_VIDEO_INFO_FORMAT (GST_VAAPI_ENCODER_VIDEO_INFO (encoder));
  guint format = 0;

  if (fmt == GST_VIDEO_FORMAT_ENCODED)
    return TRUE;

  if (cip->chroma_type != GST_VAAPI_CHROMA_TYPE_YUV420 &&
      cip->chroma_type != GST_VAAPI_CHROMA_TYPE_YUV422)
    goto unsupported;

  if (!get_config_attribute (encoder, VAConfigAttribRTFormat, &format))
    return FALSE;

  if (!(format & from_GstVaapiChromaType (cip->chroma_type)))
    goto unsupported;

  return TRUE;

unsupported:
  {
    GST_ERROR ("We only support YUV 4:2:0 and YUV 4:2:2 for encoding. "
        "Please try to use vaapipostproc to convert the input format.");
    return FALSE;
  }
}
Example no. 29
static gboolean
gst_chroma_hold_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
  GstChromaHold *self = GST_CHROMA_HOLD (vfilter);

  GST_CHROMA_HOLD_LOCK (self);

  GST_DEBUG_OBJECT (self,
      "Setting caps %" GST_PTR_FORMAT " -> %" GST_PTR_FORMAT, incaps, outcaps);

  self->format = GST_VIDEO_INFO_FORMAT (in_info);
  self->width = GST_VIDEO_INFO_WIDTH (in_info);
  self->height = GST_VIDEO_INFO_HEIGHT (in_info);

  if (!gst_chroma_hold_set_process_function (self)) {
    GST_WARNING_OBJECT (self, "No processing function for this caps");
    GST_CHROMA_HOLD_UNLOCK (self);
    return FALSE;
  }

  GST_CHROMA_HOLD_UNLOCK (self);

  return TRUE;
}
Example no. 30
static VkFormat
_vk_format_from_video_info (GstVideoInfo * v_info)
{
  switch (GST_VIDEO_INFO_FORMAT (v_info)) {
    case GST_VIDEO_FORMAT_RGBA:
      if (GST_VIDEO_INFO_COLORIMETRY (v_info).transfer ==
          GST_VIDEO_TRANSFER_SRGB)
        return VK_FORMAT_R8G8B8A8_SRGB;
      else
        return VK_FORMAT_R8G8B8A8_UNORM;
    case GST_VIDEO_FORMAT_RGB:
      if (GST_VIDEO_INFO_COLORIMETRY (v_info).transfer ==
          GST_VIDEO_TRANSFER_SRGB)
        return VK_FORMAT_R8G8B8_SRGB;
      else
        return VK_FORMAT_R8G8B8_UNORM;
    case GST_VIDEO_FORMAT_BGRA:
      if (GST_VIDEO_INFO_COLORIMETRY (v_info).transfer ==
          GST_VIDEO_TRANSFER_SRGB)
        return VK_FORMAT_B8G8R8A8_SRGB;
      else
        return VK_FORMAT_B8G8R8A8_UNORM;
    case GST_VIDEO_FORMAT_BGR:
      if (GST_VIDEO_INFO_COLORIMETRY (v_info).transfer ==
          GST_VIDEO_TRANSFER_SRGB)
        return VK_FORMAT_B8G8R8_SRGB;
      else
        return VK_FORMAT_B8G8R8_UNORM;
    default:
      return VK_FORMAT_UNDEFINED;
  }
}