/**
 * gst_vaapi_plugin_base_decide_allocation:
 * @plugin: a #GstVaapiPluginBase
 * @query: the allocation query to parse
 * @feature: the desired #GstVaapiCapsFeature, or zero to find the
 *   preferred one
 *
 * Decides allocation parameters for the downstream elements.
 *
 * Returns: %TRUE if successful, %FALSE otherwise.
 */
gboolean
gst_vaapi_plugin_base_decide_allocation (GstVaapiPluginBase * plugin,
    GstQuery * query, guint feature)
{
  GstCaps *caps = NULL;
  GstBufferPool *pool;
  GstStructure *config;
  GstVideoInfo vi;
  guint size, min, max;
  gboolean update_pool = FALSE;
  gboolean has_video_meta = FALSE;
  gboolean has_video_alignment = FALSE;
#if (USE_GLX || USE_EGL)
  gboolean has_texture_upload_meta = FALSE;
  guint idx;
#endif

  g_return_val_if_fail (plugin->display != NULL, FALSE);

  gst_query_parse_allocation (query, &caps, NULL);

  /* We don't need any GL context beyond this point unless it is
     explicitly requested through GstVideoGLTextureUploadMeta */
  gst_object_replace (&plugin->gl_context, NULL);

  if (!caps)
    goto error_no_caps;

  if (!feature)
    feature =
        gst_vaapi_find_preferred_caps_feature (plugin->srcpad,
        GST_VIDEO_FORMAT_ENCODED, NULL);

  has_video_meta = gst_query_find_allocation_meta (query,
      GST_VIDEO_META_API_TYPE, NULL);

#if (USE_GLX || USE_EGL)
  has_texture_upload_meta = gst_query_find_allocation_meta (query,
      GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, &idx) &&
      (feature == GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META);

#if USE_GST_GL_HELPERS
  if (has_texture_upload_meta) {
    const GstStructure *params;
    GstObject *gl_context;

    gst_query_parse_nth_allocation_meta (query, idx, &params);
    if (params) {
      if (gst_structure_get (params, "gst.gl.GstGLContext", GST_GL_TYPE_CONTEXT,
              &gl_context, NULL) && gl_context) {
        gst_vaapi_plugin_base_set_gl_context (plugin, gl_context);
        gst_object_unref (gl_context);
      }
    }
  }
#endif
#endif

  /* Make sure the display we pass down to the buffer pool is actually
     the expected one, especially when the downstream element requires
     a GLX or EGL display */
  if (!gst_vaapi_plugin_base_ensure_display (plugin))
    goto error_ensure_display;

  gst_video_info_init (&vi);
  gst_video_info_from_caps (&vi, caps);
  if (GST_VIDEO_INFO_FORMAT (&vi) == GST_VIDEO_FORMAT_ENCODED)
    gst_video_info_set_format (&vi, GST_VIDEO_FORMAT_I420,
        GST_VIDEO_INFO_WIDTH (&vi), GST_VIDEO_INFO_HEIGHT (&vi));

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
    update_pool = TRUE;
    size = MAX (size, vi.size);
    if (pool) {
      /* Check whether downstream element proposed a bufferpool but did
         not provide a correct propose_allocation() implementation */
      has_video_alignment = gst_buffer_pool_has_option (pool,
          GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT);
    }
  } else {
    pool = NULL;
    size = vi.size;
    min = max = 0;
  }

  /* GstVaapiVideoMeta is mandatory, and this implies VA surface memory */
  if (!pool || !gst_buffer_pool_has_option (pool,
          GST_BUFFER_POOL_OPTION_VAAPI_VIDEO_META)) {
    GST_INFO_OBJECT (plugin, "%s. Making a new pool", pool == NULL ? "No pool" :
        "Pool has no GstVaapiVideoMeta");
    if (pool)
      gst_object_unref (pool);
    pool = gst_vaapi_video_buffer_pool_new (plugin->display);
    if (!pool)
      goto error_create_pool;

    config = gst_buffer_pool_get_config (pool);
    gst_buffer_pool_config_set_params (config, caps, size, min, max);
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VAAPI_VIDEO_META);
    if (!gst_buffer_pool_set_config (pool, config))
      goto config_failed;
  }

  /* Check whether GstVideoMeta, or GstVideoAlignment, is needed (raw video) */
  if (has_video_meta) {
    if (!gst_vaapi_plugin_base_set_pool_config (pool,
            GST_BUFFER_POOL_OPTION_VIDEO_META))
      goto config_failed;
  } else if (has_video_alignment) {
    if (!gst_vaapi_plugin_base_set_pool_config (pool,
            GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT))
      goto config_failed;
  }

  /* GstVideoGLTextureUploadMeta (OpenGL) */
#if (USE_GLX || USE_EGL)
  if (has_texture_upload_meta) {
    if (!gst_vaapi_plugin_base_set_pool_config (pool,
            GST_BUFFER_POOL_OPTION_VIDEO_GL_TEXTURE_UPLOAD_META))
      goto config_failed;
  }
#endif

  if (update_pool)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  g_clear_object (&plugin->srcpad_buffer_pool);
  plugin->srcpad_buffer_pool = pool;
  return TRUE;

  /* ERRORS */
error_no_caps:
  {
    GST_ERROR_OBJECT (plugin, "no caps specified");
    return FALSE;
  }
error_ensure_display:
  {
    GST_ERROR_OBJECT (plugin, "failed to ensure display of type %d",
        plugin->display_type_req);
    return FALSE;
  }
error_create_pool:
  {
    GST_ERROR_OBJECT (plugin, "failed to create buffer pool");
    return FALSE;
  }
config_failed:
  {
    if (pool)
      gst_object_unref (pool);
    GST_ELEMENT_ERROR (plugin, RESOURCE, SETTINGS,
        ("Failed to configure the buffer pool"),
        ("Configuration is most likely invalid, please report this issue."));
    return FALSE;
  }
}
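/* A caller would typically forward its GstVideoDecoder::decide_allocation
 * vfunc to the helper above. Minimal sketch under stated assumptions: the
 * function name below is a hypothetical placeholder, and GST_VAAPI_PLUGIN_BASE()
 * is assumed to be the plugin-base cast macro. Passing 0 as the feature
 * argument lets the helper pick the preferred caps feature itself. */
static gboolean
my_vaapi_decode_decide_allocation (GstVideoDecoder * vdec, GstQuery * query)
{
  return gst_vaapi_plugin_base_decide_allocation (GST_VAAPI_PLUGIN_BASE (vdec),
      query, 0);
}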
static gboolean
gst_gl_mixer_decide_allocation (GstGLMixer * mix, GstQuery * query)
{
  GstGLMixerClass *mixer_class = GST_GL_MIXER_GET_CLASS (mix);
  GstBufferPool *pool = NULL;
  GstStructure *config;
  GstCaps *caps;
  guint min, max, size;
  gboolean update_pool;
  GError *error = NULL;
  guint idx;
  guint out_width, out_height;
  GstGLContext *other_context = NULL;
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (mix);

  gst_query_parse_allocation (query, &caps, NULL);

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

    update_pool = TRUE;
  } else {
    GstVideoInfo vinfo;

    gst_video_info_init (&vinfo);
    gst_video_info_from_caps (&vinfo, caps);
    size = vinfo.size;
    min = max = 0;
    update_pool = FALSE;
  }

  if (!gst_gl_ensure_display (mix, &mix->display))
    return FALSE;

  if (gst_query_find_allocation_meta (query,
          GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, &idx)) {
    GstGLContext *context;
    const GstStructure *upload_meta_params;
    gpointer handle;
    gchar *type;
    gchar *apis;

    gst_query_parse_nth_allocation_meta (query, idx, &upload_meta_params);
    if (upload_meta_params) {
      if (gst_structure_get (upload_meta_params, "gst.gl.GstGLContext",
              GST_GL_TYPE_CONTEXT, &context, NULL) && context) {
        GstGLContext *old = mix->context;

        mix->context = context;
        if (old)
          gst_object_unref (old);
      } else if (gst_structure_get (upload_meta_params, "gst.gl.context.handle",
              G_TYPE_POINTER, &handle, "gst.gl.context.type", G_TYPE_STRING,
              &type, "gst.gl.context.apis", G_TYPE_STRING, &apis, NULL)
          && handle) {
        GstGLPlatform platform = GST_GL_PLATFORM_NONE;
        GstGLAPI gl_apis;

        GST_DEBUG ("got GL context handle 0x%p with type %s and apis %s",
            handle, type, apis);

        platform = gst_gl_platform_from_string (type);
        gl_apis = gst_gl_api_from_string (apis);

        if (gl_apis && platform)
          other_context =
              gst_gl_context_new_wrapped (mix->display, (guintptr) handle,
              platform, gl_apis);
      }
    }
  }

  if (!mix->context) {
    mix->context = gst_gl_context_new (mix->display);
    if (!gst_gl_context_create (mix->context, other_context, &error))
      goto context_error;
  }

  out_width = GST_VIDEO_INFO_WIDTH (&vagg->info);
  out_height = GST_VIDEO_INFO_HEIGHT (&vagg->info);

  g_mutex_lock (&mix->priv->gl_resource_lock);
  mix->priv->gl_resource_ready = FALSE;
  if (mix->fbo) {
    gst_gl_context_del_fbo (mix->context, mix->fbo, mix->depthbuffer);
    mix->fbo = 0;
    mix->depthbuffer = 0;
  }

  if (!gst_gl_context_gen_fbo (mix->context, out_width, out_height,
          &mix->fbo, &mix->depthbuffer)) {
    g_cond_signal (&mix->priv->gl_resource_cond);
    g_mutex_unlock (&mix->priv->gl_resource_lock);
    goto context_error;
  }

  if (mix->out_tex_id)
    gst_gl_context_del_texture (mix->context, &mix->out_tex_id);
  gst_gl_context_gen_texture (mix->context, &mix->out_tex_id,
      GST_VIDEO_FORMAT_RGBA, out_width, out_height);

  if (mixer_class->set_caps)
    mixer_class->set_caps (mix, caps);

  mix->priv->gl_resource_ready = TRUE;
  g_cond_signal (&mix->priv->gl_resource_cond);
  g_mutex_unlock (&mix->priv->gl_resource_lock);

  if (!pool)
    pool = gst_gl_buffer_pool_new (mix->context);

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, caps, size, min, max);

  gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);

  gst_buffer_pool_set_config (pool, config);

  if (update_pool)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  gst_object_unref (pool);

  return TRUE;

context_error:
  {
    GST_ELEMENT_ERROR (mix, RESOURCE, NOT_FOUND, ("%s", error->message),
        (NULL));
    return FALSE;
  }
}
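/* The decide_allocation above parses metas and pool proposals that a
 * downstream element advertised in its propose_allocation. A minimal sketch
 * of such a downstream reply, assuming a hypothetical GstBaseSink subclass:
 * it advertises GstVideoMeta support and proposes a default video buffer
 * pool sized from the negotiated caps. All element names are placeholders. */
static gboolean
my_sink_propose_allocation (GstBaseSink * sink, GstQuery * query)
{
  GstCaps *caps;
  gboolean need_pool;
  GstVideoInfo info;

  gst_query_parse_allocation (query, &caps, &need_pool);
  if (!caps || !gst_video_info_from_caps (&info, caps))
    return FALSE;

  /* Let upstream attach GstVideoMeta instead of forcing tightly packed
   * system memory. */
  gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);

  if (need_pool) {
    GstBufferPool *pool = gst_video_buffer_pool_new ();
    GstStructure *config = gst_buffer_pool_get_config (pool);

    gst_buffer_pool_config_set_params (config, caps,
        GST_VIDEO_INFO_SIZE (&info), 2, 0);
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
    if (!gst_buffer_pool_set_config (pool, config)) {
      gst_object_unref (pool);
      return FALSE;
    }
    gst_query_add_allocation_pool (query, pool, GST_VIDEO_INFO_SIZE (&info),
        2, 0);
    gst_object_unref (pool);
  }
  return TRUE;
}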
static gboolean
gst_gl_filter_decide_allocation (GstBaseTransform * trans, GstQuery * query)
{
  GstGLFilter *filter = GST_GL_FILTER (trans);
  GstGLFilterClass *filter_class = GST_GL_FILTER_GET_CLASS (filter);
  GstBufferPool *pool = NULL;
  GstStructure *config;
  GstCaps *caps;
  guint min, max, size;
  gboolean update_pool;
  guint idx;
  GError *error = NULL;
  guint in_width, in_height, out_width, out_height;

  gst_query_parse_allocation (query, &caps, NULL);

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

    update_pool = TRUE;
  } else {
    GstVideoInfo vinfo;

    gst_video_info_init (&vinfo);
    gst_video_info_from_caps (&vinfo, caps);
    size = vinfo.size;
    min = max = 0;
    update_pool = FALSE;
  }

  if (!gst_gl_ensure_display (filter, &filter->display))
    return FALSE;

  if (gst_query_find_allocation_meta (query,
          GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, &idx)) {
    GstGLContext *context;
    const GstStructure *upload_meta_params;

    gst_query_parse_nth_allocation_meta (query, idx, &upload_meta_params);
    if (gst_structure_get (upload_meta_params, "gst.gl.GstGLContext",
            GST_GL_TYPE_CONTEXT, &context, NULL) && context) {
      GstGLContext *old = filter->context;

      filter->context = context;
      if (old)
        gst_object_unref (old);
    }
  }

  if (!filter->context) {
    filter->context = gst_gl_context_new (filter->display);
    if (!gst_gl_context_create (filter->context, filter->other_context, &error))
      goto context_error;
  }

  in_width = GST_VIDEO_INFO_WIDTH (&filter->in_info);
  in_height = GST_VIDEO_INFO_HEIGHT (&filter->in_info);
  out_width = GST_VIDEO_INFO_WIDTH (&filter->out_info);
  out_height = GST_VIDEO_INFO_HEIGHT (&filter->out_info);

  if (!filter->upload) {
    filter->upload = gst_gl_upload_new (filter->context);
    gst_gl_upload_init_format (filter->upload, filter->in_info,
        filter->out_info);
  }
  // blocking call, generate an FBO
  if (!gst_gl_context_gen_fbo (filter->context, out_width, out_height,
          &filter->fbo, &filter->depthbuffer))
    goto context_error;

  gst_gl_context_gen_texture (filter->context, &filter->in_tex_id,
      GST_VIDEO_FORMAT_RGBA, in_width, in_height);

  gst_gl_context_gen_texture (filter->context, &filter->out_tex_id,
      GST_VIDEO_FORMAT_RGBA, out_width, out_height);

  if (filter_class->display_init_cb != NULL) {
    gst_gl_context_thread_add (filter->context, gst_gl_filter_start_gl, filter);
  }

  if (filter_class->onInitFBO) {
    if (!filter_class->onInitFBO (filter))
      goto error;
  }

  if (!pool)
    pool = gst_gl_buffer_pool_new (filter->context);

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, caps, size, min, max);
  gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
  gst_buffer_pool_set_config (pool, config);

  if (update_pool)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  gst_object_unref (pool);

  return TRUE;

context_error:
  {
    GST_ELEMENT_ERROR (trans, RESOURCE, NOT_FOUND, ("%s", error->message),
        (NULL));
    return FALSE;
  }
error:
  {
    GST_ELEMENT_ERROR (trans, LIBRARY, INIT,
        ("Subclass failed to initialize."), (NULL));
    return FALSE;
  }
}
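/* For completeness, a sketch of how such a decide_allocation implementation
 * is installed in the element's class_init. The function name below is a
 * hypothetical placeholder; only the GstBaseTransformClass vfunc pointer is
 * real API. */
static void
my_gl_filter_class_init_sketch (GstGLFilterClass * klass)
{
  GstBaseTransformClass *trans_class = GST_BASE_TRANSFORM_CLASS (klass);

  /* decide_allocation runs on the srcpad side once caps are negotiated;
   * propose_allocation (not shown) would answer upstream queries. */
  trans_class->decide_allocation = gst_gl_filter_decide_allocation;
}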
/**
 * gst_vaapi_plugin_base_decide_allocation:
 * @plugin: a #GstVaapiPluginBase
 * @query: the allocation query to parse
 *
 * Decides allocation parameters for the downstream elements.
 *
 * Returns: %TRUE if successful, %FALSE otherwise.
 */
gboolean
gst_vaapi_plugin_base_decide_allocation (GstVaapiPluginBase * plugin,
    GstQuery * query)
{
  GstCaps *caps = NULL;
  GstBufferPool *pool;
  GstVideoInfo vi;
  guint size, min, max, pool_options;
  gboolean update_pool = FALSE, update_allocator = FALSE;
#if (USE_GLX || USE_EGL)
  guint idx;
#endif

  gst_query_parse_allocation (query, &caps, NULL);
  if (!caps)
    goto error_no_caps;

  pool_options = 0;
  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL))
    pool_options |= GST_VAAPI_VIDEO_BUFFER_POOL_OPTION_VIDEO_META;

#if (USE_GLX || USE_EGL)
  if (gst_query_find_allocation_meta (query,
          GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, &idx) &&
      gst_vaapi_caps_feature_contains (caps,
          GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META))
    pool_options |= GST_VAAPI_VIDEO_BUFFER_POOL_OPTION_GL_TEXTURE_UPLOAD;

#if USE_GST_GL_HELPERS
  if (!plugin->gl_context &&
      (pool_options & GST_VAAPI_VIDEO_BUFFER_POOL_OPTION_GL_TEXTURE_UPLOAD)) {
    const GstStructure *params;
    GstObject *gl_context;

    gst_query_parse_nth_allocation_meta (query, idx, &params);
    if (params) {
      if (gst_structure_get (params, "gst.gl.GstGLContext", GST_TYPE_GL_CONTEXT,
              &gl_context, NULL) && gl_context) {
        gst_vaapi_plugin_base_set_gl_context (plugin, gl_context);
        gst_object_unref (gl_context);
      }
    }
  }
#endif
#endif

  /* Make sure the display we pass down to the buffer pool is actually
     the expected one, especially when the downstream element requires
     a GLX or EGL display */
  if (!gst_vaapi_plugin_base_ensure_display (plugin))
    goto error_ensure_display;

  if (!gst_video_info_from_caps (&vi, caps))
    goto error_invalid_caps;
  gst_video_info_force_nv12_if_encoded (&vi);

  if (gst_query_get_n_allocation_params (query) > 0)
    update_allocator = TRUE;

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
    update_pool = TRUE;
    size = MAX (size, GST_VIDEO_INFO_SIZE (&vi));
    if (pool) {
      /* Check whether downstream element proposed a bufferpool but did
         not provide a correct propose_allocation() implementation */
      if (gst_buffer_pool_has_option (pool,
              GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT))
        pool_options |= GST_VAAPI_VIDEO_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT;

      /* GstVaapiVideoMeta is mandatory, and this implies VA surface memory */
      if (!gst_buffer_pool_has_option (pool,
              GST_BUFFER_POOL_OPTION_VAAPI_VIDEO_META)) {
        GST_INFO_OBJECT (plugin, "ignoring non-VAAPI pool: %" GST_PTR_FORMAT,
            pool);
        g_clear_object (&pool);
      }
    }
  } else {
    pool = NULL;
    size = GST_VIDEO_INFO_SIZE (&vi);
    min = max = 0;
  }

  if (!pool) {
    if (!ensure_srcpad_allocator (plugin, &vi, caps))
      goto error;
    pool = gst_vaapi_plugin_base_create_pool (plugin, caps, size, min, max,
        pool_options, plugin->srcpad_allocator);
    if (!pool)
      goto error;
  }

  if (update_pool)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  if (update_allocator)
    gst_query_set_nth_allocation_param (query, 0, plugin->srcpad_allocator,
        NULL);
  else
    gst_query_add_allocation_param (query, plugin->srcpad_allocator, NULL);

  g_clear_object (&plugin->srcpad_buffer_pool);
  plugin->srcpad_buffer_pool = pool;
  return TRUE;

  /* ERRORS */
error_no_caps:
  {
    GST_ERROR_OBJECT (plugin, "no caps specified");
    return FALSE;
  }
error_invalid_caps:
  {
    GST_ERROR_OBJECT (plugin, "invalid caps %" GST_PTR_FORMAT, caps);
    return FALSE;
  }
error_ensure_display:
  {
    GST_ERROR_OBJECT (plugin, "failed to ensure display of type %d",
        plugin->display_type_req);
    return FALSE;
  }
error:
  {
    /* error message already sent */
    return FALSE;
  }
}
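/* After a successful decide_allocation the negotiated pool is kept in
 * plugin->srcpad_buffer_pool. A minimal sketch of how a caller might pull
 * output buffers from it; the helper name is a hypothetical illustration,
 * not gstreamer-vaapi API, and error reporting is trimmed. */
static GstBuffer *
acquire_output_buffer_sketch (GstVaapiPluginBase * plugin)
{
  GstBuffer *buffer = NULL;

  if (!plugin->srcpad_buffer_pool)
    return NULL;

  /* The pool must be activated once before buffers can be acquired. */
  if (!gst_buffer_pool_set_active (plugin->srcpad_buffer_pool, TRUE))
    return NULL;

  if (gst_buffer_pool_acquire_buffer (plugin->srcpad_buffer_pool, &buffer,
          NULL) != GST_FLOW_OK)
    return NULL;

  return buffer;
}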
/* Based on gstbasetextoverlay.c */
static gboolean
gst_overlay_composition_negotiate (GstOverlayComposition * self, GstCaps * caps)
{
  gboolean upstream_has_meta = FALSE;
  gboolean caps_has_meta = FALSE;
  gboolean alloc_has_meta = FALSE;
  gboolean attach = FALSE;
  gboolean ret = TRUE;
  guint width, height;
  GstCapsFeatures *f;
  GstCaps *overlay_caps;
  GstQuery *query;
  guint alloc_index;

  GST_DEBUG_OBJECT (self, "performing negotiation");

  /* Clear any pending reconfigure to avoid negotiating twice */
  gst_pad_check_reconfigure (self->srcpad);

  self->window_width = self->window_height = 0;

  if (!caps)
    caps = gst_pad_get_current_caps (self->sinkpad);
  else
    gst_caps_ref (caps);

  if (!caps || gst_caps_is_empty (caps))
    goto no_format;

  /* Check if upstream caps have meta */
  if ((f = gst_caps_get_features (caps, 0))) {
    upstream_has_meta = gst_caps_features_contains (f,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);
  }

  /* Initialize dimensions */
  width = self->info.width;
  height = self->info.height;

  if (upstream_has_meta) {
    overlay_caps = gst_caps_ref (caps);
  } else {
    GstCaps *peercaps;

    /* BaseTransform requires caps for the allocation query to work */
    overlay_caps = gst_caps_copy (caps);
    f = gst_caps_get_features (overlay_caps, 0);
    gst_caps_features_add (f,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);

    /* Then check if downstream accepts overlay composition in caps */
    /* FIXME: We should probably check if downstream *prefers* the
     * overlay meta, and only enforce usage of it if we can't handle
     * the format ourselves and thus would have to drop the overlays.
     * Otherwise we should prefer what downstream wants here.
     */
    peercaps = gst_pad_peer_query_caps (self->srcpad, overlay_caps);
    caps_has_meta = !gst_caps_is_empty (peercaps);
    gst_caps_unref (peercaps);

    GST_DEBUG_OBJECT (self, "caps have overlay meta %d", caps_has_meta);
  }

  if (upstream_has_meta || caps_has_meta) {
    /* Send caps immediately, it's needed by GstBaseTransform to get a reply
     * from the allocation query */
    ret = gst_pad_set_caps (self->srcpad, overlay_caps);

    /* First check if the allocation query supports the composition meta */
    query = gst_query_new_allocation (overlay_caps, FALSE);

    if (!gst_pad_peer_query (self->srcpad, query)) {
      /* no problem, we use the query defaults */
      GST_DEBUG_OBJECT (self, "ALLOCATION query failed");

      /* In case we were flushing, mark reconfigure and fail this method,
       * will make it retry */
      if (GST_PAD_IS_FLUSHING (self->srcpad))
        ret = FALSE;
    }

    alloc_has_meta = gst_query_find_allocation_meta (query,
        GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, &alloc_index);

    GST_DEBUG_OBJECT (self, "sink alloc has overlay meta %d", alloc_has_meta);

    if (alloc_has_meta) {
      const GstStructure *params;

      gst_query_parse_nth_allocation_meta (query, alloc_index, &params);
      if (params) {
        if (gst_structure_get (params, "width", G_TYPE_UINT, &width,
                "height", G_TYPE_UINT, &height, NULL)) {
          GST_DEBUG_OBJECT (self, "received window size: %dx%d", width, height);
          g_assert (width != 0 && height != 0);
        }
      }
    }

    gst_query_unref (query);
  }

  /* Update render size if needed */
  self->window_width = width;
  self->window_height = height;

  /* For backward compatibility, we will prefer blitting if the downstream
   * allocation does not support the meta. Otherwise we will prefer
   * attaching, and will fail the negotiation in the unlikely case we are
   * forced to blit but the format isn't supported. */

  if (upstream_has_meta) {
    attach = TRUE;
  } else if (caps_has_meta) {
    if (alloc_has_meta) {
      attach = TRUE;
    } else {
      /* Don't attach unless we cannot handle the format */
      attach = !can_blend_caps (caps);
    }
  } else {
    ret = can_blend_caps (caps);
  }

  /* If we attach, then pick the overlay caps */
  if (attach) {
    GST_DEBUG_OBJECT (self, "Using caps %" GST_PTR_FORMAT, overlay_caps);
    /* Caps were already sent */
  } else if (ret) {
    GST_DEBUG_OBJECT (self, "Using caps %" GST_PTR_FORMAT, caps);
    ret = gst_pad_set_caps (self->srcpad, caps);
  }

  self->attach_compo_to_buffer = attach;

  if (!ret) {
    GST_DEBUG_OBJECT (self, "negotiation failed, schedule reconfigure");
    gst_pad_mark_reconfigure (self->srcpad);
  }

  g_signal_emit (self, overlay_composition_signals[SIGNAL_CAPS_CHANGED], 0,
      caps, self->window_width, self->window_height, NULL);

  gst_caps_unref (overlay_caps);
  gst_caps_unref (caps);

  return ret;

no_format:
  {
    if (caps)
      gst_caps_unref (caps);
    gst_pad_mark_reconfigure (self->srcpad);
    return FALSE;
  }
}
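/* Sketch of how the negotiated attach_compo_to_buffer flag is typically
 * consumed when pushing a frame: either attach the composition as a meta
 * (downstream supports it) or blend it into the mapped raw frame. The
 * helper name and the composition argument are hypothetical; only the
 * GstVideo calls are real API. */
static gboolean
render_overlay_sketch (GstOverlayComposition * self, GstBuffer * buffer,
    GstVideoOverlayComposition * composition)
{
  if (self->attach_compo_to_buffer) {
    /* Downstream advertised the meta: just attach, no pixel work needed. */
    gst_buffer_add_video_overlay_composition_meta (buffer, composition);
    return TRUE;
  } else {
    /* Fall back to blitting the overlay into the raw frame. */
    GstVideoFrame frame;
    gboolean ret;

    if (!gst_video_frame_map (&frame, &self->info, buffer, GST_MAP_READWRITE))
      return FALSE;
    ret = gst_video_overlay_composition_blend (composition, &frame);
    gst_video_frame_unmap (&frame);
    return ret;
  }
}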