static gboolean gst_openh264dec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
{
  GstVideoCodecState *state;
  GstBufferPool *pool;
  guint size, min, max;
  GstStructure *config;

  if (!GST_VIDEO_DECODER_CLASS (gst_openh264dec_parent_class)->decide_allocation (decoder,
          query))
    return FALSE;

  state = gst_video_decoder_get_output_state (decoder);

  gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

  config = gst_buffer_pool_get_config (pool);
  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
  }

  gst_buffer_pool_set_config (pool, config);

  gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);

  gst_object_unref (pool);
  gst_video_codec_state_unref (state);

  return TRUE;
}
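The decoder examples in this collection all follow the same basic skeleton: chain up so the base class negotiates a pool into the query, take pool 0, enable GstVideoMeta on its configuration when downstream offered it, write the pool back into the query and drop the references. A minimal sketch of that skeleton for a hypothetical decoder subclass (the my_dec naming and my_dec_parent_class are placeholders, not taken from any plugin shown here):

#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstvideodecoder.h>

/* assumes G_DEFINE_TYPE (MyDec, my_dec, GST_TYPE_VIDEO_DECODER), which
 * provides the my_dec_parent_class pointer used below (placeholder names) */
static gboolean
my_dec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
{
  GstBufferPool *pool;
  GstStructure *config;
  guint size, min, max;

  /* let GstVideoDecoder put a usable pool into the query first */
  if (!GST_VIDEO_DECODER_CLASS (my_dec_parent_class)->decide_allocation (decoder,
          query))
    return FALSE;

  gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

  /* enable GstVideoMeta on the pool when downstream can handle it */
  config = gst_buffer_pool_get_config (pool);
  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL))
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
  gst_buffer_pool_set_config (pool, config);

  gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  gst_object_unref (pool);

  return TRUE;
}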
Example #2
static gboolean
gst_pngdec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
{
  GstBufferPool *pool = NULL;
  GstStructure *config;

  if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
    return FALSE;

  if (gst_query_get_n_allocation_pools (query) > 0)
    gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);

  if (pool == NULL)
    return FALSE;

  config = gst_buffer_pool_get_config (pool);
  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
  }
  gst_buffer_pool_set_config (pool, config);
  gst_object_unref (pool);

  return TRUE;
}
Example #3
static gboolean gst_imx_v4l2src_decide_allocation(GstBaseSrc *bsrc,
		GstQuery *query)
{
	GstImxV4l2Src *v4l2src = GST_IMX_V4L2SRC(bsrc);
	struct v4l2_format fmt;
	GstBufferPool *pool;
	guint size, min, max;
	gboolean update;
	GstStructure *config;
	GstCaps *caps;

	gst_query_parse_allocation(query, &caps, NULL);

	/* Determine min and max */
	if (gst_query_get_n_allocation_pools(query) > 0)
	{
		gst_query_parse_nth_allocation_pool(query, 0, NULL, NULL,
				&min, &max);
		update = TRUE;
	}
	else
	{
		min = max = 0;
		update = FALSE;
	}

	if (min != 0)
		/* Need an extra buffer to capture while other buffers
		 * are downstream */
		min += 1;
	else
		min = v4l2src->queue_size;

	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (ioctl(GST_IMX_FD_OBJECT_GET_FD(v4l2src->fd_obj_v4l), VIDIOC_G_FMT, &fmt) < 0) {
		GST_ERROR_OBJECT(v4l2src, "VIDIOC_G_FMT failed");
		return FALSE;
	}

	size = fmt.fmt.pix.sizeimage;

	/* no repooling; leads to stream off situation due to pool start/stop */
	pool = gst_base_src_get_buffer_pool(bsrc);
	if (!pool) {
		pool = gst_imx_v4l2_buffer_pool_new(v4l2src->fd_obj_v4l);
		config = gst_buffer_pool_get_config(pool);
		gst_buffer_pool_config_set_params(config, caps, size, min, max);
		gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_VIDEO_META);
		gst_buffer_pool_set_config(pool, config);
	}

	if (update)
		gst_query_set_nth_allocation_pool(query, 0, pool, size, min, max);
	else
		gst_query_add_allocation_pool(query, pool, size, min, max);

	gst_object_unref(pool);

	return TRUE;
}
static gboolean
gst_gdk_pixbuf_dec_setup_pool (GstGdkPixbufDec * filter, GstVideoInfo * info)
{
  GstCaps *target;
  GstQuery *query;
  GstBufferPool *pool;
  GstStructure *config;
  guint size, min, max;

  target = gst_pad_get_current_caps (filter->srcpad);

  /* try to get a bufferpool now */
  /* find a pool for the negotiated caps now */
  query = gst_query_new_allocation (target, TRUE);

  if (!gst_pad_peer_query (filter->srcpad, query)) {
    /* not a problem, we use the query defaults */
    GST_DEBUG_OBJECT (filter, "ALLOCATION query failed");
  }

  if (gst_query_get_n_allocation_pools (query) > 0) {
    /* we got configuration from our peer, parse them */
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
  } else {
    pool = NULL;
    size = info->size;
    min = max = 0;
  }

  gst_query_unref (query);

  if (pool == NULL) {
    /* we did not get a pool, make one ourselves then */
    pool = gst_buffer_pool_new ();
  }

  /* and configure */
  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, target, size, min, max);
  gst_buffer_pool_set_config (pool, config);

  if (filter->pool) {
    gst_buffer_pool_set_active (filter->pool, FALSE);
    gst_object_unref (filter->pool);
  }
  filter->pool = pool;

  /* and activate */
  gst_buffer_pool_set_active (filter->pool, TRUE);

  gst_caps_unref (target);

  return TRUE;
}
static gboolean
gst_gl_base_mixer_do_bufferpool (GstGLBaseMixer * mix, GstCaps * outcaps)
{
    GstQuery *query;
    gboolean result = TRUE;
    GstBufferPool *pool = NULL;
    GstAllocator *allocator;
    GstAllocationParams params;
    GstAggregator *agg = GST_AGGREGATOR (mix);

    /* find a pool for the negotiated caps now */
    GST_DEBUG_OBJECT (mix, "doing allocation query");
    query = gst_query_new_allocation (outcaps, TRUE);
    if (!gst_pad_peer_query (agg->srcpad, query)) {
        /* not a problem, just debug a little */
        GST_DEBUG_OBJECT (mix, "peer ALLOCATION query failed");
    }

    GST_DEBUG_OBJECT (mix, "calling decide_allocation");
    result = gst_gl_base_mixer_decide_allocation (mix, query);

    GST_DEBUG_OBJECT (mix, "ALLOCATION (%d) params: %" GST_PTR_FORMAT, result,
                      query);

    if (!result)
        goto no_decide_allocation;

    /* we got configuration from our peer or the decide_allocation method,
     * parse them */
    if (gst_query_get_n_allocation_params (query) > 0) {
        gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
    } else {
        allocator = NULL;
        gst_allocation_params_init (&params);
    }

    if (gst_query_get_n_allocation_pools (query) > 0)
        gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);

    /* now store */
    result =
        gst_gl_base_mixer_set_allocation (mix, pool, allocator, &params, query);

    return result;

    /* Errors */
no_decide_allocation:
    {
        GST_WARNING_OBJECT (mix, "Failed to decide allocation");
        gst_query_unref (query);

        return result;
    }
}
Example #6
static GstFlowReturn
gst_rtp_vraw_depay_negotiate_pool (GstRtpVRawDepay * depay, GstCaps * caps,
    GstVideoInfo * info)
{
  GstQuery *query;
  GstBufferPool *pool = NULL;
  guint size, min, max;
  GstStructure *config;

  /* find a pool for the negotiated caps now */
  query = gst_query_new_allocation (caps, TRUE);

  if (!gst_pad_peer_query (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depay), query)) {
    /* not a problem, we use the defaults of query */
    GST_DEBUG_OBJECT (depay, "could not get downstream ALLOCATION hints");
  }

  if (gst_query_get_n_allocation_pools (query) > 0) {
    /* we got configuration from our peer, parse them */
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
  } else {
    GST_DEBUG_OBJECT (depay, "didn't get downstream pool hints");
    size = info->size;
    min = max = 0;
  }

  if (pool == NULL) {
    /* we did not get a pool, make one ourselves then */
    pool = gst_video_buffer_pool_new ();
  }

  if (depay->pool)
    gst_object_unref (depay->pool);
  depay->pool = pool;

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, caps, size, min, max);
  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    /* just set the metadata; if the pool can support it we will transparently
     * use it through the video info API. We could also check whether the pool
     * supports this metadata and only activate it then. */
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
  }

  gst_buffer_pool_set_config (pool, config);
  /* and activate */
  gst_buffer_pool_set_active (pool, TRUE);

  gst_query_unref (query);

  return GST_FLOW_OK;
}
Example #7
static void
gst_dvdec_negotiate_pool (GstDVDec * dec, GstCaps * caps, GstVideoInfo * info)
{
  GstQuery *query;
  GstBufferPool *pool;
  guint size, min, max;
  GstStructure *config;

  /* find a pool for the negotiated caps now */
  query = gst_query_new_allocation (caps, TRUE);

  if (!gst_pad_peer_query (dec->srcpad, query)) {
    GST_DEBUG_OBJECT (dec, "didn't get downstream ALLOCATION hints");
  }

  if (gst_query_get_n_allocation_pools (query) > 0) {
    /* we got configuration from our peer, parse them */
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
    size = MAX (size, info->size);
  } else {
    pool = NULL;
    size = info->size;
    min = max = 0;
  }

  if (pool == NULL) {
    /* we did not get a pool, make one ourselves then */
    pool = gst_video_buffer_pool_new ();
  }

  if (dec->pool) {
    gst_buffer_pool_set_active (dec->pool, FALSE);
    gst_object_unref (dec->pool);
  }
  dec->pool = pool;

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, caps, size, min, max);

  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    /* just set the option; if the pool can support it we will transparently
     * use it through the video info API. We could also check whether the pool
     * supports this option and only activate it then. */
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
  }
  gst_buffer_pool_set_config (pool, config);

  /* and activate */
  gst_buffer_pool_set_active (pool, TRUE);

  gst_query_unref (query);
}
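Once gst_dvdec_negotiate_pool() has stored and activated dec->pool, the decode path can acquire its output buffers from it. A hedged sketch of that use, assuming the GstDVDec structure from the example above; gst_dvdec_push_from_pool is a hypothetical helper, not part of the dvdec element, and the actual decoding is elided:

/* hypothetical helper, only to illustrate pulling from the negotiated pool */
static GstFlowReturn
gst_dvdec_push_from_pool (GstDVDec * dec)
{
  GstBuffer *outbuf = NULL;
  GstFlowReturn ret;

  /* take a buffer from the pool negotiated and activated above */
  ret = gst_buffer_pool_acquire_buffer (dec->pool, &outbuf, NULL);
  if (ret != GST_FLOW_OK)
    return ret;

  /* ... fill outbuf with decoded video here ... */

  return gst_pad_push (dec->srcpad, outbuf);
}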
static gboolean
gst_video_rate_propose_allocation (GstBaseTransform * trans,
    GstQuery * decide_query, GstQuery * query)
{
  GstBaseTransformClass *klass = GST_BASE_TRANSFORM_CLASS (parent_class);
  gboolean res;

  /* We should always be passthrough */
  g_return_val_if_fail (decide_query == NULL, FALSE);

  res = klass->propose_allocation (trans, NULL, query);

  if (res) {
    guint i = 0;
    guint n_allocation;
    guint down_min = 0;

    n_allocation = gst_query_get_n_allocation_pools (query);

    while (i < n_allocation) {
      GstBufferPool *pool = NULL;
      guint size, min, max;

      gst_query_parse_nth_allocation_pool (query, i, &pool, &size, &min, &max);

      if (min == max) {
        if (pool)
          gst_object_unref (pool);
        gst_query_remove_nth_allocation_pool (query, i);
        n_allocation--;
        down_min = MAX (min, down_min);
        continue;
      }

      gst_query_set_nth_allocation_pool (query, i, pool, size, min + 1, max);
      if (pool)
        gst_object_unref (pool);
      i++;
    }

    if (n_allocation == 0) {
      GstCaps *caps;
      GstVideoInfo info;

      gst_query_parse_allocation (query, &caps, NULL);
      gst_video_info_from_caps (&info, caps);

      gst_query_add_allocation_pool (query, NULL, info.size, down_min + 1, 0);
    }
  }

  return res;
}
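gst_video_rate_propose_allocation above handles the upstream-facing side of the same ALLOCATION query that the decide_allocation functions answer. For comparison, a minimal propose_allocation that merely advertises GstVideoMeta support before chaining up might look like the sketch below (my_filter and parent_class are placeholders, not code from videorate):

static gboolean
my_filter_propose_allocation (GstBaseTransform * trans,
    GstQuery * decide_query, GstQuery * query)
{
  /* tell upstream that buffers handed to this element may carry GstVideoMeta */
  gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);

  /* parent_class as set up by G_DEFINE_TYPE / a #define, as in the examples */
  return GST_BASE_TRANSFORM_CLASS (parent_class)->propose_allocation (trans,
      decide_query, query);
}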
static gboolean
theora_dec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
{
  GstTheoraDec *dec = GST_THEORA_DEC (decoder);
  GstVideoCodecState *state;
  GstBufferPool *pool;
  guint size, min, max;
  GstStructure *config;

  if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
          query))
    return FALSE;

  state = gst_video_decoder_get_output_state (decoder);

  gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

  dec->can_crop = FALSE;
  config = gst_buffer_pool_get_config (pool);
  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
    dec->can_crop =
        gst_query_find_allocation_meta (query, GST_VIDEO_CROP_META_API_TYPE,
        NULL);
  }

  if (dec->can_crop) {
    GstVideoInfo info = state->info;
    GstCaps *caps;

    /* Calculate uncropped size */
    gst_video_info_set_format (&info, info.finfo->format, dec->info.frame_width,
        dec->info.frame_height);
    size = MAX (size, info.size);
    caps = gst_video_info_to_caps (&info);
    gst_buffer_pool_config_set_params (config, caps, size, min, max);
    gst_caps_unref (caps);
  }

  gst_buffer_pool_set_config (pool, config);

  gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);

  gst_object_unref (pool);
  gst_video_codec_state_unref (state);

  return TRUE;
}
static gboolean
gst_openni2src_decide_allocation (GstBaseSrc * bsrc, GstQuery * query)
{
  GstBufferPool *pool;
  guint size, min, max;
  gboolean update;
  GstStructure *config;
  GstCaps *caps;
  GstVideoInfo info;

  gst_query_parse_allocation (query, &caps, NULL);
  gst_video_info_from_caps (&info, caps);

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
    update = TRUE;
  } else {
    pool = NULL;
    min = max = 0;
    size = info.size;
    update = FALSE;
  }

  GST_DEBUG_OBJECT (bsrc, "allocation: size:%u min:%u max:%u pool:%"
      GST_PTR_FORMAT " caps:%" GST_PTR_FORMAT, size, min, max, pool, caps);

  if (!pool)
    pool = gst_video_buffer_pool_new ();

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, caps, size, min, max);

  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    GST_DEBUG_OBJECT (pool, "activate Video Meta");
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
  }

  gst_buffer_pool_set_config (pool, config);

  if (update)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  gst_object_unref (pool);

  return GST_BASE_SRC_CLASS (parent_class)->decide_allocation (bsrc, query);
}
static gboolean
gst_video_test_src_decide_allocation (GstBaseSrc * bsrc, GstQuery * query)
{
  GstVideoTestSrc *videotestsrc;
  GstBufferPool *pool;
  gboolean update;
  guint size, min, max;
  GstStructure *config;

  videotestsrc = GST_VIDEO_TEST_SRC (bsrc);

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

    /* adjust size */
    size = MAX (size, videotestsrc->info.size);
    update = TRUE;
  } else {
    pool = NULL;
    size = videotestsrc->info.size;
    min = max = 0;
    update = FALSE;
  }

  /* no downstream pool, make our own */
  if (pool == NULL) {
    if (videotestsrc->bayer)
      pool = gst_buffer_pool_new ();
    else
      pool = gst_video_buffer_pool_new ();
  }

  config = gst_buffer_pool_get_config (pool);
  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
  }
  gst_buffer_pool_set_config (pool, config);

  if (update)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  if (pool)
    gst_object_unref (pool);

  return GST_BASE_SRC_CLASS (parent_class)->decide_allocation (bsrc, query);
}
Example #12
static gboolean
gst_mfc_dec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
{
  GstMFCDec *self = GST_MFC_DEC (decoder);
  GstBufferPool *pool;
  GstStructure *config;
  guint size, min, max;

  if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
          query))
    return FALSE;

  g_assert (gst_query_get_n_allocation_pools (query) > 0);
  gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
  g_assert (pool != NULL);

  self->has_cropping = FALSE;
  config = gst_buffer_pool_get_config (pool);
  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
    self->has_cropping =
        gst_query_find_allocation_meta (query, GST_VIDEO_CROP_META_API_TYPE,
        NULL);
  }

  if (self->has_cropping) {
    GstVideoInfo info;
    GstCaps *caps;

    /* Calculate uncropped size */
    gst_buffer_pool_config_get_params (config, &caps, &size, &min, &max);
    gst_video_info_from_caps (&info, caps);
    gst_video_info_set_format (&info, self->format, self->width, self->height);
    size = MAX (size, info.size);
    caps = gst_video_info_to_caps (&info);
    gst_buffer_pool_config_set_params (config, caps, size, min, max);
    gst_caps_unref (caps);
  }

  gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);

  gst_buffer_pool_set_config (pool, config);
  gst_object_unref (pool);

  return TRUE;
}
static gboolean
gst_rfb_src_decide_allocation (GstBaseSrc * bsrc, GstQuery * query)
{
  GstBufferPool *pool = NULL;
  guint size, min = 1, max = 0;
  GstStructure *config;
  GstCaps *caps;
  GstVideoInfo info;
  gboolean ret;

  gst_query_parse_allocation (query, &caps, NULL);

  if (!caps || !gst_video_info_from_caps (&info, caps))
    return FALSE;

  while (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

    /* TODO We restrict to the exact size as we don't support strides or
     * special padding */
    if (size == info.size)
      break;

    gst_query_remove_nth_allocation_pool (query, 0);
    gst_object_unref (pool);
    pool = NULL;
  }

  if (pool == NULL) {
    /* we did not get a pool, make one ourselves then */
    pool = gst_video_buffer_pool_new ();
    size = info.size;
    min = 1;
    max = 0;
    gst_query_add_allocation_pool (query, pool, size, min, max);
  }

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, caps, size, min, max);

  ret = gst_buffer_pool_set_config (pool, config);
  gst_object_unref (pool);

  return ret;
}
/* configure the allocation query that was answered downstream, we can configure
 * some properties on it. Only called when not in passthrough mode. */
static gboolean
gst_video_filter_decide_allocation (GstBaseTransform * trans, GstQuery * query)
{
  GstBufferPool *pool = NULL;
  GstStructure *config;
  guint min, max, size;
  gboolean update_pool;
  GstCaps *outcaps = NULL;

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

    update_pool = TRUE;
  } else {
    GstVideoInfo vinfo;

    gst_query_parse_allocation (query, &outcaps, NULL);
    gst_video_info_init (&vinfo);
    gst_video_info_from_caps (&vinfo, outcaps);
    size = vinfo.size;
    min = max = 0;
    update_pool = FALSE;
  }

  if (!pool)
    pool = gst_video_buffer_pool_new ();

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
  if (outcaps)
    gst_buffer_pool_config_set_params (config, outcaps, size, 0, 0);
  gst_buffer_pool_set_config (pool, config);

  if (update_pool)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  gst_object_unref (pool);

  return GST_BASE_TRANSFORM_CLASS (parent_class)->decide_allocation (trans,
      query);
}
/**
 * gst_vaapi_plugin_base_decide_allocation:
 * @plugin: a #GstVaapiPluginBase
 * @query: the allocation query to parse
 * @feature: the desired #GstVaapiCapsFeature, or zero to find the
 *   preferred one
 *
 * Decides allocation parameters for the downstream elements.
 *
 * Returns: %TRUE if successful, %FALSE otherwise.
 */
gboolean
gst_vaapi_plugin_base_decide_allocation (GstVaapiPluginBase * plugin,
    GstQuery * query, guint feature)
{
  GstCaps *caps = NULL;
  GstBufferPool *pool;
  GstStructure *config;
  GstVideoInfo vi;
  guint size, min, max;
  gboolean update_pool = FALSE;
  gboolean has_video_meta = FALSE;
  gboolean has_video_alignment = FALSE;
#if (USE_GLX || USE_EGL)
  gboolean has_texture_upload_meta = FALSE;
  guint idx;
#endif

  g_return_val_if_fail (plugin->display != NULL, FALSE);

  gst_query_parse_allocation (query, &caps, NULL);

  /* We don't need any GL context beyond this point unless it is explicitly
     requested through GstVideoGLTextureUploadMeta */
  gst_object_replace (&plugin->gl_context, NULL);

  if (!caps)
    goto error_no_caps;

  if (!feature)
    feature =
        gst_vaapi_find_preferred_caps_feature (plugin->srcpad,
        GST_VIDEO_FORMAT_ENCODED, NULL);

  has_video_meta = gst_query_find_allocation_meta (query,
      GST_VIDEO_META_API_TYPE, NULL);

#if (USE_GLX || USE_EGL)
  has_texture_upload_meta = gst_query_find_allocation_meta (query,
      GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, &idx) &&
      (feature == GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META);

#if USE_GST_GL_HELPERS
  if (has_texture_upload_meta) {
    const GstStructure *params;
    GstObject *gl_context;

    gst_query_parse_nth_allocation_meta (query, idx, &params);
    if (params) {
      if (gst_structure_get (params, "gst.gl.GstGLContext", GST_GL_TYPE_CONTEXT,
              &gl_context, NULL) && gl_context) {
        gst_vaapi_plugin_base_set_gl_context (plugin, gl_context);
        gst_object_unref (gl_context);
      }
    }
  }
#endif
#endif

  /* Make sure the display we pass down to the buffer pool is actually
     the expected one, especially when the downstream element requires
     a GLX or EGL display */
  if (!gst_vaapi_plugin_base_ensure_display (plugin))
    goto error_ensure_display;

  gst_video_info_init (&vi);
  gst_video_info_from_caps (&vi, caps);
  if (GST_VIDEO_INFO_FORMAT (&vi) == GST_VIDEO_FORMAT_ENCODED)
    gst_video_info_set_format (&vi, GST_VIDEO_FORMAT_I420,
        GST_VIDEO_INFO_WIDTH (&vi), GST_VIDEO_INFO_HEIGHT (&vi));

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
    update_pool = TRUE;
    size = MAX (size, vi.size);
    if (pool) {
      /* Check whether downstream element proposed a bufferpool but did
         not provide a correct propose_allocation() implementation */
      has_video_alignment = gst_buffer_pool_has_option (pool,
          GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT);
    }
  } else {
    pool = NULL;
    size = vi.size;
    min = max = 0;
  }

  /* GstVaapiVideoMeta is mandatory, and this implies VA surface memory */
  if (!pool || !gst_buffer_pool_has_option (pool,
          GST_BUFFER_POOL_OPTION_VAAPI_VIDEO_META)) {
    GST_INFO_OBJECT (plugin, "%s. Making a new pool", pool == NULL ? "No pool" :
        "Pool hasn't GstVaapiVideoMeta");
    if (pool)
      gst_object_unref (pool);
    pool = gst_vaapi_video_buffer_pool_new (plugin->display);
    if (!pool)
      goto error_create_pool;

    config = gst_buffer_pool_get_config (pool);
    gst_buffer_pool_config_set_params (config, caps, size, min, max);
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VAAPI_VIDEO_META);
    if (!gst_buffer_pool_set_config (pool, config))
      goto config_failed;
  }

  /* Check whether GstVideoMeta, or GstVideoAlignment, is needed (raw video) */
  if (has_video_meta) {
    if (!gst_vaapi_plugin_base_set_pool_config (pool,
            GST_BUFFER_POOL_OPTION_VIDEO_META))
      goto config_failed;
  } else if (has_video_alignment) {
    if (!gst_vaapi_plugin_base_set_pool_config (pool,
            GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT))
      goto config_failed;
  }

  /* GstVideoGLTextureUploadMeta (OpenGL) */
#if (USE_GLX || USE_EGL)
  if (has_texture_upload_meta) {
    if (!gst_vaapi_plugin_base_set_pool_config (pool,
            GST_BUFFER_POOL_OPTION_VIDEO_GL_TEXTURE_UPLOAD_META))
      goto config_failed;
  }
#endif

  if (update_pool)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  g_clear_object (&plugin->srcpad_buffer_pool);
  plugin->srcpad_buffer_pool = pool;
  return TRUE;

  /* ERRORS */
error_no_caps:
  {
    GST_ERROR_OBJECT (plugin, "no caps specified");
    return FALSE;
  }
error_ensure_display:
  {
    GST_ERROR_OBJECT (plugin, "failed to ensure display of type %d",
        plugin->display_type_req);
    return FALSE;
  }
error_create_pool:
  {
    GST_ERROR_OBJECT (plugin, "failed to create buffer pool");
    return FALSE;
  }
config_failed:
  {
    if (pool)
      gst_object_unref (pool);
    GST_ELEMENT_ERROR (plugin, RESOURCE, SETTINGS,
        ("Failed to configure the buffer pool"),
        ("Configuration is most likely invalid, please report this issue."));
    return FALSE;
  }
}
static gboolean
gst_monoscope_src_negotiate (GstMonoscope * monoscope)
{
  GstCaps *othercaps, *target;
  GstStructure *structure;
  GstCaps *templ;
  GstQuery *query;
  GstBufferPool *pool;
  GstStructure *config;
  guint size, min, max;

  templ = gst_pad_get_pad_template_caps (monoscope->srcpad);

  GST_DEBUG_OBJECT (monoscope, "performing negotiation");

  /* see what the peer can do */
  othercaps = gst_pad_peer_query_caps (monoscope->srcpad, NULL);
  if (othercaps) {
    target = gst_caps_intersect (othercaps, templ);
    gst_caps_unref (othercaps);
    gst_caps_unref (templ);

    if (gst_caps_is_empty (target))
      goto no_format;

    target = gst_caps_truncate (target);
  } else {
    target = templ;
  }

  target = gst_caps_make_writable (target);
  structure = gst_caps_get_structure (target, 0);
  gst_structure_fixate_field_nearest_int (structure, "width", 320);
  gst_structure_fixate_field_nearest_int (structure, "height", 240);
  gst_structure_fixate_field_nearest_fraction (structure, "framerate", 25, 1);

  gst_monoscope_src_setcaps (monoscope, target);

  /* try to get a bufferpool now */
  /* find a pool for the negotiated caps now */
  query = gst_query_new_allocation (target, TRUE);

  if (!gst_pad_peer_query (monoscope->srcpad, query)) {
    /* not a problem, we use the query defaults */
    GST_DEBUG_OBJECT (monoscope, "ALLOCATION query failed");
  }

  if (gst_query_get_n_allocation_pools (query) > 0) {
    /* we got configuration from our peer, parse them */
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
  } else {
    pool = NULL;
    size = monoscope->outsize;
    min = max = 0;
  }

  if (pool == NULL) {
    /* we did not get a pool, make one ourselves then */
    pool = gst_buffer_pool_new ();
  }

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, target, size, min, max);
  gst_buffer_pool_set_config (pool, config);

  if (monoscope->pool) {
    /* deactivate and release the previously negotiated pool */
    gst_buffer_pool_set_active (monoscope->pool, FALSE);
    gst_object_unref (monoscope->pool);
  }
  monoscope->pool = pool;

  /* and activate */
  gst_buffer_pool_set_active (pool, TRUE);

  gst_query_unref (query);
  gst_caps_unref (target);

  return TRUE;

no_format:
  {
    gst_caps_unref (target);
    return FALSE;
  }
}
Example #17
static gboolean
gst_gl_mixer_decide_allocation (GstGLMixer * mix, GstQuery * query)
{
  GstGLMixerClass *mixer_class = GST_GL_MIXER_GET_CLASS (mix);
  GstBufferPool *pool = NULL;
  GstStructure *config;
  GstCaps *caps;
  guint min, max, size;
  gboolean update_pool;
  GError *error = NULL;
  guint idx;
  guint out_width, out_height;
  GstGLContext *other_context = NULL;
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (mix);

  gst_query_parse_allocation (query, &caps, NULL);

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

    update_pool = TRUE;
  } else {
    GstVideoInfo vinfo;

    gst_video_info_init (&vinfo);
    gst_video_info_from_caps (&vinfo, caps);
    size = vinfo.size;
    min = max = 0;
    update_pool = FALSE;
  }

  if (!gst_gl_ensure_display (mix, &mix->display))
    return FALSE;

  if (gst_query_find_allocation_meta (query,
          GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, &idx)) {
    GstGLContext *context;
    const GstStructure *upload_meta_params;
    gpointer handle;
    gchar *type;
    gchar *apis;

    gst_query_parse_nth_allocation_meta (query, idx, &upload_meta_params);
    if (upload_meta_params) {
      if (gst_structure_get (upload_meta_params, "gst.gl.GstGLContext",
              GST_GL_TYPE_CONTEXT, &context, NULL) && context) {
        GstGLContext *old = mix->context;

        mix->context = context;
        if (old)
          gst_object_unref (old);
      } else if (gst_structure_get (upload_meta_params, "gst.gl.context.handle",
              G_TYPE_POINTER, &handle, "gst.gl.context.type", G_TYPE_STRING,
              &type, "gst.gl.context.apis", G_TYPE_STRING, &apis, NULL)
          && handle) {
        GstGLPlatform platform = GST_GL_PLATFORM_NONE;
        GstGLAPI gl_apis;

        GST_DEBUG ("got GL context handle 0x%p with type %s and apis %s",
            handle, type, apis);

        platform = gst_gl_platform_from_string (type);
        gl_apis = gst_gl_api_from_string (apis);

        if (gl_apis && platform)
          other_context =
              gst_gl_context_new_wrapped (mix->display, (guintptr) handle,
              platform, gl_apis);
      }
    }
  }

  if (!mix->context) {
    mix->context = gst_gl_context_new (mix->display);
    if (!gst_gl_context_create (mix->context, other_context, &error))
      goto context_error;
  }

  out_width = GST_VIDEO_INFO_WIDTH (&vagg->info);
  out_height = GST_VIDEO_INFO_HEIGHT (&vagg->info);

  g_mutex_lock (&mix->priv->gl_resource_lock);
  mix->priv->gl_resource_ready = FALSE;
  if (mix->fbo) {
    gst_gl_context_del_fbo (mix->context, mix->fbo, mix->depthbuffer);
    mix->fbo = 0;
    mix->depthbuffer = 0;
  }

  if (!gst_gl_context_gen_fbo (mix->context, out_width, out_height,
          &mix->fbo, &mix->depthbuffer)) {
    g_cond_signal (&mix->priv->gl_resource_cond);
    g_mutex_unlock (&mix->priv->gl_resource_lock);
    goto context_error;
  }

  if (mix->out_tex_id)
    gst_gl_context_del_texture (mix->context, &mix->out_tex_id);
  gst_gl_context_gen_texture (mix->context, &mix->out_tex_id,
      GST_VIDEO_FORMAT_RGBA, out_width, out_height);

  if (mixer_class->set_caps)
    mixer_class->set_caps (mix, caps);

  mix->priv->gl_resource_ready = TRUE;
  g_cond_signal (&mix->priv->gl_resource_cond);
  g_mutex_unlock (&mix->priv->gl_resource_lock);

  if (!pool)
    pool = gst_gl_buffer_pool_new (mix->context);

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, caps, size, min, max);

  gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);

  gst_buffer_pool_set_config (pool, config);

  if (update_pool)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  gst_object_unref (pool);

  return TRUE;

context_error:
  {
    GST_ELEMENT_ERROR (mix, RESOURCE, NOT_FOUND, ("%s", error->message),
        (NULL));
    return FALSE;
  }
}
/**
 * gst_vaapi_plugin_base_decide_allocation:
 * @plugin: a #GstVaapiPluginBase
 * @query: the allocation query to parse
 *
 * Decides allocation parameters for the downstream elements.
 *
 * Returns: %TRUE if successful, %FALSE otherwise.
 */
gboolean
gst_vaapi_plugin_base_decide_allocation (GstVaapiPluginBase * plugin,
    GstQuery * query)
{
  GstCaps *caps = NULL;
  GstBufferPool *pool;
  GstVideoInfo vi;
  guint size, min, max, pool_options;
  gboolean update_pool = FALSE, update_allocator = FALSE;
#if (USE_GLX || USE_EGL)
  guint idx;
#endif

  gst_query_parse_allocation (query, &caps, NULL);
  if (!caps)
    goto error_no_caps;

  pool_options = 0;
  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL))
    pool_options |= GST_VAAPI_VIDEO_BUFFER_POOL_OPTION_VIDEO_META;

#if (USE_GLX || USE_EGL)
  if (gst_query_find_allocation_meta (query,
          GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, &idx) &&
      gst_vaapi_caps_feature_contains (caps,
          GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META))
    pool_options |= GST_VAAPI_VIDEO_BUFFER_POOL_OPTION_GL_TEXTURE_UPLOAD;

#if USE_GST_GL_HELPERS
  if (!plugin->gl_context &&
      (pool_options & GST_VAAPI_VIDEO_BUFFER_POOL_OPTION_GL_TEXTURE_UPLOAD)) {
    const GstStructure *params;
    GstObject *gl_context;

    gst_query_parse_nth_allocation_meta (query, idx, &params);
    if (params) {
      if (gst_structure_get (params, "gst.gl.GstGLContext", GST_TYPE_GL_CONTEXT,
              &gl_context, NULL) && gl_context) {
        gst_vaapi_plugin_base_set_gl_context (plugin, gl_context);
        gst_object_unref (gl_context);
      }
    }
  }
#endif
#endif

  /* Make sure the display we pass down to the buffer pool is actually
     the expected one, especially when the downstream element requires
     a GLX or EGL display */
  if (!gst_vaapi_plugin_base_ensure_display (plugin))
    goto error_ensure_display;

  if (!gst_video_info_from_caps (&vi, caps))
    goto error_invalid_caps;
  gst_video_info_force_nv12_if_encoded (&vi);

  if (gst_query_get_n_allocation_params (query) > 0)
    update_allocator = TRUE;

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
    update_pool = TRUE;
    size = MAX (size, GST_VIDEO_INFO_SIZE (&vi));
    if (pool) {
      /* Check whether downstream element proposed a bufferpool but did
         not provide a correct propose_allocation() implementation */
      if (gst_buffer_pool_has_option (pool,
              GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT))
        pool_options |= GST_VAAPI_VIDEO_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT;

      /* GstVaapiVideoMeta is mandatory, and this implies VA surface memory */
      if (!gst_buffer_pool_has_option (pool,
              GST_BUFFER_POOL_OPTION_VAAPI_VIDEO_META)) {
        GST_INFO_OBJECT (plugin, "ignoring non-VAAPI pool: %" GST_PTR_FORMAT,
            pool);
        g_clear_object (&pool);
      }
    }
  } else {
    pool = NULL;
    size = GST_VIDEO_INFO_SIZE (&vi);
    min = max = 0;
  }

  if (!pool) {
    if (!ensure_srcpad_allocator (plugin, &vi, caps))
      goto error;
    pool = gst_vaapi_plugin_base_create_pool (plugin, caps, size, min, max,
        pool_options, plugin->srcpad_allocator);
    if (!pool)
      goto error;
  }

  if (update_pool)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  if (update_allocator)
    gst_query_set_nth_allocation_param (query, 0, plugin->srcpad_allocator,
        NULL);
  else
    gst_query_add_allocation_param (query, plugin->srcpad_allocator, NULL);

  g_clear_object (&plugin->srcpad_buffer_pool);
  plugin->srcpad_buffer_pool = pool;
  return TRUE;

  /* ERRORS */
error_no_caps:
  {
    GST_ERROR_OBJECT (plugin, "no caps specified");
    return FALSE;
  }
error_invalid_caps:
  {
    GST_ERROR_OBJECT (plugin, "invalid caps %" GST_PTR_FORMAT, caps);
    return FALSE;
  }
error_ensure_display:
  {
    GST_ERROR_OBJECT (plugin, "failed to ensure display of type %d",
        plugin->display_type_req);
    return FALSE;
  }
error:
  {
    /* error message already sent */
    return FALSE;
  }
}
static GstFlowReturn
gst_y4m_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstY4mDec *y4mdec;
  int n_avail;
  GstFlowReturn flow_ret = GST_FLOW_OK;
#define MAX_HEADER_LENGTH 80
  char header[MAX_HEADER_LENGTH];
  int i;
  int len;

  y4mdec = GST_Y4M_DEC (parent);

  GST_DEBUG_OBJECT (y4mdec, "chain");

  if (GST_BUFFER_IS_DISCONT (buffer)) {
    GST_DEBUG ("got discont");
    gst_adapter_clear (y4mdec->adapter);
  }

  gst_adapter_push (y4mdec->adapter, buffer);
  n_avail = gst_adapter_available (y4mdec->adapter);

  if (!y4mdec->have_header) {
    gboolean ret;
    GstCaps *caps;
    GstQuery *query;

    if (n_avail < MAX_HEADER_LENGTH)
      return GST_FLOW_OK;

    gst_adapter_copy (y4mdec->adapter, (guint8 *) header, 0, MAX_HEADER_LENGTH);

    header[MAX_HEADER_LENGTH - 1] = 0;
    for (i = 0; i < MAX_HEADER_LENGTH; i++) {
      if (header[i] == 0x0a)
        header[i] = 0;
    }

    ret = gst_y4m_dec_parse_header (y4mdec, header);
    if (!ret) {
      GST_ELEMENT_ERROR (y4mdec, STREAM, DECODE,
          ("Failed to parse YUV4MPEG header"), (NULL));
      return GST_FLOW_ERROR;
    }

    y4mdec->header_size = strlen (header) + 1;
    gst_adapter_flush (y4mdec->adapter, y4mdec->header_size);

    caps = gst_video_info_to_caps (&y4mdec->info);
    ret = gst_pad_set_caps (y4mdec->srcpad, caps);

    query = gst_query_new_allocation (caps, FALSE);
    y4mdec->video_meta = FALSE;

    if (y4mdec->pool) {
      gst_buffer_pool_set_active (y4mdec->pool, FALSE);
      gst_object_unref (y4mdec->pool);
    }
    y4mdec->pool = NULL;

    if (gst_pad_peer_query (y4mdec->srcpad, query)) {
      y4mdec->video_meta =
          gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);

      /* We only need a pool if we need to do stride conversion for downstream */
      if (!y4mdec->video_meta && memcmp (&y4mdec->info, &y4mdec->out_info,
              sizeof (y4mdec->info)) != 0) {
        GstBufferPool *pool = NULL;
        GstAllocator *allocator = NULL;
        GstAllocationParams params;
        GstStructure *config;
        guint size, min, max;

        if (gst_query_get_n_allocation_params (query) > 0) {
          gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
        } else {
          allocator = NULL;
          gst_allocation_params_init (&params);
        }

        if (gst_query_get_n_allocation_pools (query) > 0) {
          gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min,
              &max);
          size = MAX (size, y4mdec->out_info.size);
        } else {
          pool = NULL;
          size = y4mdec->out_info.size;
          min = max = 0;
        }

        if (pool == NULL) {
          pool = gst_video_buffer_pool_new ();
        }

        config = gst_buffer_pool_get_config (pool);
        gst_buffer_pool_config_set_params (config, caps, size, min, max);
        gst_buffer_pool_config_set_allocator (config, allocator, &params);
        gst_buffer_pool_set_config (pool, config);

        if (allocator)
          gst_object_unref (allocator);

        y4mdec->pool = pool;
      }
    } else if (memcmp (&y4mdec->info, &y4mdec->out_info,
            sizeof (y4mdec->info)) != 0) {
      GstBufferPool *pool;
      GstStructure *config;

      /* No pool, create our own if we need to do stride conversion */
      pool = gst_video_buffer_pool_new ();
      config = gst_buffer_pool_get_config (pool);
      gst_buffer_pool_config_set_params (config, caps, y4mdec->out_info.size, 0,
          0);
      gst_buffer_pool_set_config (pool, config);
      y4mdec->pool = pool;
    }
    if (y4mdec->pool) {
      gst_buffer_pool_set_active (y4mdec->pool, TRUE);
    }
    gst_query_unref (query);
    gst_caps_unref (caps);
    if (!ret) {
      GST_DEBUG_OBJECT (y4mdec, "Couldn't set caps on src pad");
      return GST_FLOW_ERROR;
    }

    y4mdec->have_header = TRUE;
  }

  if (y4mdec->have_new_segment) {
    GstEvent *event;
    GstClockTime start = gst_y4m_dec_bytes_to_timestamp (y4mdec,
        y4mdec->segment.start);
    GstClockTime stop = gst_y4m_dec_bytes_to_timestamp (y4mdec,
        y4mdec->segment.stop);
    GstClockTime time = gst_y4m_dec_bytes_to_timestamp (y4mdec,
        y4mdec->segment.time);
    GstSegment seg;

    gst_segment_init (&seg, GST_FORMAT_TIME);
    seg.start = start;
    seg.stop = stop;
    seg.time = time;
    event = gst_event_new_segment (&seg);

    gst_pad_push_event (y4mdec->srcpad, event);
    //gst_event_unref (event);

    y4mdec->have_new_segment = FALSE;
    y4mdec->frame_index = gst_y4m_dec_bytes_to_frames (y4mdec,
        y4mdec->segment.time);
    GST_DEBUG ("new frame_index %d", y4mdec->frame_index);

  }

  while (1) {
    n_avail = gst_adapter_available (y4mdec->adapter);
    if (n_avail < MAX_HEADER_LENGTH)
      break;

    gst_adapter_copy (y4mdec->adapter, (guint8 *) header, 0, MAX_HEADER_LENGTH);
    header[MAX_HEADER_LENGTH - 1] = 0;
    for (i = 0; i < MAX_HEADER_LENGTH; i++) {
      if (header[i] == 0x0a)
        header[i] = 0;
    }
    if (memcmp (header, "FRAME", 5) != 0) {
      GST_ELEMENT_ERROR (y4mdec, STREAM, DECODE,
          ("Failed to parse YUV4MPEG frame"), (NULL));
      flow_ret = GST_FLOW_ERROR;
      break;
    }

    len = strlen (header);
    if (n_avail < y4mdec->info.size + len + 1) {
      /* not enough data */
      GST_DEBUG ("not enough data for frame %d < %" G_GSIZE_FORMAT,
          n_avail, y4mdec->info.size + len + 1);
      break;
    }

    gst_adapter_flush (y4mdec->adapter, len + 1);

    buffer = gst_adapter_take_buffer (y4mdec->adapter, y4mdec->info.size);

    GST_BUFFER_TIMESTAMP (buffer) =
        gst_y4m_dec_frames_to_timestamp (y4mdec, y4mdec->frame_index);
    GST_BUFFER_DURATION (buffer) =
        gst_y4m_dec_frames_to_timestamp (y4mdec, y4mdec->frame_index + 1) -
        GST_BUFFER_TIMESTAMP (buffer);

    y4mdec->frame_index++;

    if (y4mdec->video_meta) {
      gst_buffer_add_video_meta_full (buffer, 0, y4mdec->info.finfo->format,
          y4mdec->info.width, y4mdec->info.height, y4mdec->info.finfo->n_planes,
          y4mdec->info.offset, y4mdec->info.stride);
    } else if (memcmp (&y4mdec->info, &y4mdec->out_info,
            sizeof (y4mdec->info)) != 0) {
      GstBuffer *outbuf;
      GstVideoFrame iframe, oframe;
      gint i, j;
      gint w, h, istride, ostride;
      guint8 *src, *dest;

      /* Allocate a new buffer and do stride conversion */
      g_assert (y4mdec->pool != NULL);

      flow_ret = gst_buffer_pool_acquire_buffer (y4mdec->pool, &outbuf, NULL);
      if (flow_ret != GST_FLOW_OK) {
        gst_buffer_unref (buffer);
        break;
      }

      gst_video_frame_map (&iframe, &y4mdec->info, buffer, GST_MAP_READ);
      gst_video_frame_map (&oframe, &y4mdec->out_info, outbuf, GST_MAP_WRITE);

      for (i = 0; i < 3; i++) {
        w = GST_VIDEO_FRAME_COMP_WIDTH (&iframe, i);
        h = GST_VIDEO_FRAME_COMP_HEIGHT (&iframe, i);
        istride = GST_VIDEO_FRAME_COMP_STRIDE (&iframe, i);
        ostride = GST_VIDEO_FRAME_COMP_STRIDE (&oframe, i);
        src = GST_VIDEO_FRAME_COMP_DATA (&iframe, i);
        dest = GST_VIDEO_FRAME_COMP_DATA (&oframe, i);

        for (j = 0; j < h; j++) {
          memcpy (dest, src, w);

          dest += ostride;
          src += istride;
        }
      }

      gst_video_frame_unmap (&iframe);
      gst_video_frame_unmap (&oframe);
      gst_buffer_copy_into (outbuf, buffer, GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
      gst_buffer_unref (buffer);
      buffer = outbuf;
    }

    flow_ret = gst_pad_push (y4mdec->srcpad, buffer);
    if (flow_ret != GST_FLOW_OK)
      break;
  }

  GST_DEBUG ("returning %d", flow_ret);

  return flow_ret;
}
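The per-component copy loop in gst_y4m_dec_chain above can also be written with gst_video_frame_copy(), which performs the stride-aware plane copies itself. A sketch of that alternative, reusing the buffer, outbuf and y4mdec variables from the function above and assuming info and out_info describe the same format and only differ in stride/padding:

      GstVideoFrame iframe, oframe;

      gst_video_frame_map (&iframe, &y4mdec->info, buffer, GST_MAP_READ);
      gst_video_frame_map (&oframe, &y4mdec->out_info, outbuf, GST_MAP_WRITE);

      /* copies every plane, honouring the differing strides of the two frames */
      gst_video_frame_copy (&oframe, &iframe);

      gst_video_frame_unmap (&iframe);
      gst_video_frame_unmap (&oframe);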
static gboolean
gst_gl_test_src_decide_allocation (GstBaseSrc * basesrc, GstQuery * query)
{
  GstGLTestSrc *src = GST_GL_TEST_SRC (basesrc);
  GstBufferPool *pool = NULL;
  GstStructure *config;
  GstCaps *caps;
  guint min, max, size;
  gboolean update_pool;
  GError *error = NULL;

  if (!gst_gl_ensure_element_data (src, &src->display, &src->other_context))
    return FALSE;

  gst_gl_display_filter_gl_api (src->display, SUPPORTED_GL_APIS);

  _find_local_gl_context (src);

  if (!src->context) {
    GST_OBJECT_LOCK (src->display);
    do {
      if (src->context) {
        gst_object_unref (src->context);
        src->context = NULL;
      }
      /* just get a GL context.  we don't care */
      src->context =
          gst_gl_display_get_gl_context_for_thread (src->display, NULL);
      if (!src->context) {
        if (!gst_gl_display_create_context (src->display, src->other_context,
                &src->context, &error)) {
          GST_OBJECT_UNLOCK (src->display);
          goto context_error;
        }
      }
    } while (!gst_gl_display_add_context (src->display, src->context));
    GST_OBJECT_UNLOCK (src->display);
  }

  if ((gst_gl_context_get_gl_api (src->context) & SUPPORTED_GL_APIS) == 0)
    goto unsupported_gl_api;

  gst_gl_context_thread_add (src->context,
      (GstGLContextThreadFunc) _src_generate_fbo_gl, src);
  if (!src->fbo)
    goto context_error;

  gst_query_parse_allocation (query, &caps, NULL);

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

    update_pool = TRUE;
  } else {
    GstVideoInfo vinfo;

    gst_video_info_init (&vinfo);
    gst_video_info_from_caps (&vinfo, caps);
    size = vinfo.size;
    min = max = 0;
    update_pool = FALSE;
  }

  if (!pool || !GST_IS_GL_BUFFER_POOL (pool)) {
    /* can't use this pool */
    if (pool)
      gst_object_unref (pool);
    pool = gst_gl_buffer_pool_new (src->context);
  }
  config = gst_buffer_pool_get_config (pool);

  gst_buffer_pool_config_set_params (config, caps, size, min, max);
  gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
  if (gst_query_find_allocation_meta (query, GST_GL_SYNC_META_API_TYPE, NULL))
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_GL_SYNC_META);
  gst_buffer_pool_config_add_option (config,
      GST_BUFFER_POOL_OPTION_VIDEO_GL_TEXTURE_UPLOAD_META);

  gst_buffer_pool_set_config (pool, config);

  if (update_pool)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  gst_gl_test_src_init_shader (src);

  gst_object_unref (pool);

  return TRUE;

unsupported_gl_api:
  {
    GstGLAPI gl_api = gst_gl_context_get_gl_api (src->context);
    gchar *gl_api_str = gst_gl_api_to_string (gl_api);
    gchar *supported_gl_api_str = gst_gl_api_to_string (SUPPORTED_GL_APIS);
    GST_ELEMENT_ERROR (src, RESOURCE, BUSY,
        ("GL API's not compatible context: %s supported: %s", gl_api_str,
            supported_gl_api_str), (NULL));

    g_free (supported_gl_api_str);
    g_free (gl_api_str);
    return FALSE;
  }
context_error:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, NOT_FOUND, ("%s", error->message),
        (NULL));
    g_clear_error (&error);
    if (src->context)
      gst_object_unref (src->context);
    src->context = NULL;
    return FALSE;
  }
}
static gboolean gst_imx_blitter_video_transform_decide_allocation(GstBaseTransform *transform, GstQuery *query)
{
	GstImxBlitterVideoTransform *blitter_video_transform = GST_IMX_BLITTER_VIDEO_TRANSFORM(transform);
	GstCaps *outcaps;
	GstBufferPool *pool = NULL;
	guint size, min = 0, max = 0;
	GstStructure *config;
	GstVideoInfo vinfo;
	gboolean update_pool;

	g_assert(blitter_video_transform->blitter != NULL);

	gst_query_parse_allocation(query, &outcaps, NULL);
	gst_video_info_init(&vinfo);
	gst_video_info_from_caps(&vinfo, outcaps);

	GST_DEBUG_OBJECT(blitter_video_transform, "num allocation pools: %d", gst_query_get_n_allocation_pools(query));

	/* Look for an allocator which can allocate physical memory buffers */
	if (gst_query_get_n_allocation_pools(query) > 0)
	{
		for (guint i = 0; i < gst_query_get_n_allocation_pools(query); ++i)
		{
			gst_query_parse_nth_allocation_pool(query, i, &pool, &size, &min, &max);
			if (gst_buffer_pool_has_option(pool, GST_BUFFER_POOL_OPTION_IMX_PHYS_MEM))
				break;

			/* parse_nth_allocation_pool() returned a reference to a pool that
			 * cannot allocate physically contiguous memory; release it before
			 * checking the next candidate */
			gst_object_unref(pool);
			pool = NULL;
		}

		size = MAX(size, vinfo.size);
		update_pool = TRUE;
	}
	else
	{
		pool = NULL;
		size = vinfo.size;
		min = max = 0;
		update_pool = FALSE;
	}

	/* Either no pool or no pool with the ability to allocate physical memory buffers
	 * has been found -> create a new pool */
	if ((pool == NULL) || !gst_buffer_pool_has_option(pool, GST_BUFFER_POOL_OPTION_IMX_PHYS_MEM))
	{
		if (pool == NULL)
			GST_DEBUG_OBJECT(blitter_video_transform, "no pool present; creating new pool");
		else
			GST_DEBUG_OBJECT(blitter_video_transform, "no pool supports physical memory buffers; creating new pool");
		pool = gst_imx_base_blitter_create_bufferpool(blitter_video_transform->blitter, outcaps, size, min, max, NULL, NULL);
	}
	else
	{
		config = gst_buffer_pool_get_config(pool);
		gst_buffer_pool_config_set_params(config, outcaps, size, min, max);
		gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_IMX_PHYS_MEM);
		gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_VIDEO_META);
		gst_buffer_pool_set_config(pool, config);
	}

	GST_DEBUG_OBJECT(
		blitter_video_transform,
		"pool config:  outcaps: %" GST_PTR_FORMAT "  size: %u  min buffers: %u  max buffers: %u",
		(gpointer)outcaps,
		size,
		min,
		max
	);

	if (update_pool)
		gst_query_set_nth_allocation_pool(query, 0, pool, size, min, max);
	else
		gst_query_add_allocation_pool(query, pool, size, min, max);

	if (pool != NULL)
		gst_object_unref(pool);

	return TRUE;
}
static gboolean
gst_mpeg2dec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
{
  GstMpeg2dec *dec = GST_MPEG2DEC (decoder);
  GstBufferPool *pool;
  guint size, min, max;
  GstStructure *config, *down_config = NULL;
  GstAllocator *allocator;
  GstAllocationParams params;
  gboolean update_allocator;
  gboolean has_videometa = FALSE;
  GstCaps *caps;

  /* Get rid of ancient pool */
  if (dec->downstream_pool) {
    gst_buffer_pool_set_active (dec->downstream_pool, FALSE);
    gst_object_unref (dec->downstream_pool);
    dec->downstream_pool = NULL;
  }

  /* Get negotiated allocation caps */
  gst_query_parse_allocation (query, &caps, NULL);

  /* Set allocation parameters to guarantee 16-byte aligned output buffers */
  if (gst_query_get_n_allocation_params (query) > 0) {
    gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
    update_allocator = TRUE;
  } else {
    allocator = NULL;
    gst_allocation_params_init (&params);
    update_allocator = FALSE;
  }

  params.align = MAX (params.align, 15);

  if (update_allocator)
    gst_query_set_nth_allocation_param (query, 0, allocator, &params);
  else
    gst_query_add_allocation_param (query, allocator, &params);

  /* Now chain up to the parent class to guarantee that we can
   * get a buffer pool from the query */
  if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
          query)) {
    if (allocator)
      gst_object_unref (allocator);
    return FALSE;
  }

  gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

  config = gst_buffer_pool_get_config (pool);
  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
    has_videometa = TRUE;
  }

  if (dec->need_alignment) {
    /* If downstream does not support video meta, we will have to copy; keep
     * the downstream pool to avoid double copying */
    if (!has_videometa) {
      dec->downstream_pool = pool;
      pool = NULL;
      down_config = config;
      config = NULL;
      min = 2;
      max = 0;
    }

    /* In case downstream support video meta, but the downstream pool does not
     * have alignment support, discard downstream pool and use video pool */
    else if (!gst_buffer_pool_has_option (pool,
            GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT)) {
      gst_object_unref (pool);
      pool = NULL;
      gst_structure_free (config);
      config = NULL;
    }

    if (!pool)
      pool = gst_mpeg2dec_create_generic_pool (allocator, &params, caps, size,
          min, max, &config);

    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT);
    gst_buffer_pool_config_set_video_alignment (config, &dec->valign);
  }

  if (allocator)
    gst_object_unref (allocator);

  /* If we are copying out, we'll need to setup and activate the other pool */
  if (dec->downstream_pool) {
    if (!gst_buffer_pool_set_config (dec->downstream_pool, down_config)) {
      down_config = gst_buffer_pool_get_config (dec->downstream_pool);
      if (!gst_buffer_pool_config_validate_params (down_config, caps, size, min,
              max)) {
        gst_structure_free (down_config);
        goto config_failed;
      }

      if (!gst_buffer_pool_set_config (dec->downstream_pool, down_config))
        goto config_failed;
    }

    if (!gst_buffer_pool_set_active (dec->downstream_pool, TRUE))
      goto activate_failed;
  }

  /* Now configure the pool, if the pool had made some changes, it will
   * return FALSE. Validate the changes ...*/
  if (!gst_buffer_pool_set_config (pool, config)) {
    config = gst_buffer_pool_get_config (pool);

    /* Check basic params */
    if (!gst_buffer_pool_config_validate_params (config, caps, size, min, max)) {
      gst_structure_free (config);
      goto config_failed;
    }

    /* If needed, check that resulting alignment is still valid */
    if (dec->need_alignment) {
      GstVideoAlignment valign;

      if (!gst_buffer_pool_config_get_video_alignment (config, &valign)) {
        gst_structure_free (config);
        goto config_failed;
      }

      if (valign.padding_left != 0 || valign.padding_top != 0
          || valign.padding_right < dec->valign.padding_right
          || valign.padding_bottom < dec->valign.padding_bottom) {
        gst_structure_free (config);
        goto config_failed;
      }
    }

    if (!gst_buffer_pool_set_config (pool, config))
      goto config_failed;
  }

  /* For external pools, we need to check strides */
  if (!GST_IS_VIDEO_BUFFER_POOL (pool) && has_videometa) {
    GstBuffer *buffer;
    const GstVideoFormatInfo *finfo;
    GstVideoMeta *vmeta;
    gint uv_stride;

    if (!gst_buffer_pool_set_active (pool, TRUE))
      goto activate_failed;

    if (gst_buffer_pool_acquire_buffer (pool, &buffer, NULL) != GST_FLOW_OK) {
      gst_buffer_pool_set_active (pool, FALSE);
      goto acquire_failed;
    }

    vmeta = gst_buffer_get_video_meta (buffer);
    finfo = gst_video_format_get_info (vmeta->format);

    /* Check that strides are compatible. In this case, we can scale the
     * stride directly since all the pixel strides for the formats we support
     * is 1 */
    uv_stride = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (finfo, 1, vmeta->stride[0]);
    if (uv_stride != vmeta->stride[1] || uv_stride != vmeta->stride[2]) {
      gst_buffer_pool_set_active (pool, FALSE);
      gst_object_unref (pool);

      pool = gst_mpeg2dec_create_generic_pool (allocator, &params, caps, size,
          min, max, &config);

      if (dec->need_alignment) {
        gst_buffer_pool_config_add_option (config,
            GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT);
        gst_buffer_pool_config_set_video_alignment (config, &dec->valign);
      }

      /* A generic pool doesn't fail on _set_config() */
      gst_buffer_pool_set_config (pool, config);
    }

    gst_buffer_unref (buffer);
  }

  gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  gst_object_unref (pool);

  return TRUE;

config_failed:
  gst_object_unref (pool);
  GST_ELEMENT_ERROR (dec, RESOURCE, SETTINGS,
      ("Failed to configure buffer pool"),
      ("Configuration is most likely invalid, please report this issue."));
  return FALSE;

activate_failed:
  gst_object_unref (pool);
  GST_ELEMENT_ERROR (dec, RESOURCE, SETTINGS,
      ("Failed to activate buffer pool"), (NULL));
  return FALSE;

acquire_failed:
  gst_object_unref (pool);
  GST_ELEMENT_ERROR (dec, RESOURCE, SETTINGS,
      ("Failed to acquire a buffer"), (NULL));
  return FALSE;
}
Example #23
static gboolean
gst_mpeg2dec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
{
  GstMpeg2dec *dec = GST_MPEG2DEC (decoder);
  GstVideoCodecState *state;
  GstBufferPool *pool;
  guint size, min, max;
  GstStructure *config;
  GstAllocator *allocator;
  GstAllocationParams params;
  gboolean update_allocator;

  /* Set allocation parameters to guarantee 16-byte aligned output buffers */
  if (gst_query_get_n_allocation_params (query) > 0) {
    gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
    update_allocator = TRUE;
  } else {
    allocator = NULL;
    gst_allocation_params_init (&params);
    update_allocator = FALSE;
  }

  params.align = MAX (params.align, 15);

  if (update_allocator)
    gst_query_set_nth_allocation_param (query, 0, allocator, &params);
  else
    gst_query_add_allocation_param (query, allocator, &params);
  if (allocator)
    gst_object_unref (allocator);

  /* Now chain up to the parent class to guarantee that we can
   * get a buffer pool from the query */
  if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
          query))
    return FALSE;

  state = gst_video_decoder_get_output_state (decoder);

  gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

  dec->has_cropping = FALSE;
  config = gst_buffer_pool_get_config (pool);
  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
    dec->has_cropping =
        gst_query_find_allocation_meta (query, GST_VIDEO_CROP_META_API_TYPE,
        NULL);
  }

  if (dec->has_cropping) {
    GstCaps *caps;

    /* Calculate uncropped size */
    size = MAX (size, dec->decoded_info.size);
    caps = gst_video_info_to_caps (&dec->decoded_info);
    gst_buffer_pool_config_set_params (config, caps, size, min, max);
    gst_caps_unref (caps);
  }

  gst_buffer_pool_set_config (pool, config);

  gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);

  gst_object_unref (pool);
  gst_video_codec_state_unref (state);

  return TRUE;
}
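
For dec->has_cropping to become TRUE in the example above, the downstream element has to advertise both GstVideoMeta and GstVideoCropMeta in its propose_allocation. A minimal sketch of that downstream side (the element and function names are purely illustrative):

/* Illustrative propose_allocation of a hypothetical downstream sink; it
 * declares that it can handle arbitrary strides and cropping rectangles. */
static gboolean
my_sink_propose_allocation (GstBaseSink * sink, GstQuery * query)
{
  gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
  gst_query_add_allocation_meta (query, GST_VIDEO_CROP_META_API_TYPE, NULL);

  return TRUE;
}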
Example #24
static gboolean
gst_v4l2src_decide_allocation (GstBaseSrc * bsrc, GstQuery * query)
{
  GstV4l2Src *src;
  GstV4l2Object *obj;
  GstBufferPool *pool;
  guint size, min, max;
  gboolean update;

  src = GST_V4L2SRC (bsrc);
  obj = src->v4l2object;

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
    update = TRUE;
  } else {
    pool = NULL;
    min = max = 0;
    size = 0;
    update = FALSE;
  }

  GST_DEBUG_OBJECT (src, "allocation: size:%u min:%u max:%u pool:%"
      GST_PTR_FORMAT, size, min, max, pool);

  if (min != 0) {
    /* if there is a min-buffers suggestion, use it. We add 1 because we need
     * one extra buffer to keep capturing while the other buffers are
     * downstream */
    min += 1;
  } else {
    min = 2;
  }

  /* select a pool */
  switch (obj->mode) {
    case GST_V4L2_IO_RW:
      if (pool == NULL) {
        /* no downstream pool, use our own then */
        GST_DEBUG_OBJECT (src,
            "read/write mode: no downstream pool, using our own");
        pool = GST_BUFFER_POOL_CAST (obj->pool);
        size = obj->sizeimage;
      } else {
        /* in READ/WRITE mode, prefer a downstream pool because our own pool
         * doesn't help much, we have to write to it as well */
        GST_DEBUG_OBJECT (src, "read/write mode: using downstream pool");
        /* use the biggest size; when we use our own pool we can't really use
         * any size other than what the hardware gives us, but for downstream
         * pools we can try */
        size = MAX (size, obj->sizeimage);
      }
      break;
    case GST_V4L2_IO_MMAP:
    case GST_V4L2_IO_USERPTR:
    case GST_V4L2_IO_DMABUF:
      /* in streaming mode, prefer our own pool */
      pool = GST_BUFFER_POOL_CAST (obj->pool);
      size = obj->sizeimage;
      GST_DEBUG_OBJECT (src,
          "streaming mode: using our own pool %" GST_PTR_FORMAT, pool);
      break;
    case GST_V4L2_IO_AUTO:
    default:
      GST_WARNING_OBJECT (src, "unhandled mode");
      break;
  }

  if (pool) {
    GstStructure *config;
    GstCaps *caps;

    config = gst_buffer_pool_get_config (pool);
    gst_buffer_pool_config_get_params (config, &caps, NULL, NULL, NULL);
    gst_buffer_pool_config_set_params (config, caps, size, min, max);

    /* if downstream supports video metadata, add this to the pool config */
    if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
      GST_DEBUG_OBJECT (pool, "activate Video Meta");
      gst_buffer_pool_config_add_option (config,
          GST_BUFFER_POOL_OPTION_VIDEO_META);
    }

    gst_buffer_pool_set_config (pool, config);
  }

  if (update)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  return GST_BASE_SRC_CLASS (parent_class)->decide_allocation (bsrc, query);
}
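
The gst_buffer_pool_set_config() call above is not checked for failure, although a pool is allowed to refuse or adjust the suggested values. A small sketch of a more defensive configuration step (an illustrative helper, not part of the original element):

/* Sketch: try to configure a pool and, if it adjusted the config, accept the
 * updated values as long as they still satisfy the request. */
static gboolean
try_configure_pool (GstBufferPool * pool, GstCaps * caps, guint size,
    guint min, guint max)
{
  GstStructure *config;

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, caps, size, min, max);

  if (gst_buffer_pool_set_config (pool, config))
    return TRUE;

  /* set_config() took ownership and updated the pool's config; re-check it */
  config = gst_buffer_pool_get_config (pool);
  if (!gst_buffer_pool_config_validate_params (config, caps, size, min, max)) {
    gst_structure_free (config);
    return FALSE;
  }

  return gst_buffer_pool_set_config (pool, config);
}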
Example #25
static gboolean
gst_msdkdec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
{
  GstMsdkDec *thiz = GST_MSDKDEC (decoder);
  GstBufferPool *pool = NULL;
  GstStructure *pool_config = NULL;
  GstCaps *pool_caps;
  guint size, min_buffers, max_buffers;

  if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
          query))
    return FALSE;

  /* Get the buffer pool config decided by the base class. The base
     class ensures that there will always be at least a 0th pool in
     the query. */
  gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
  pool_config = gst_buffer_pool_get_config (pool);

  /* Get the pool's caps and increase the min and max buffers by async_depth;
   * we will always have that many decode operations in flight */
  gst_buffer_pool_config_get_params (pool_config, &pool_caps, &size,
      &min_buffers, &max_buffers);
  min_buffers += thiz->async_depth;
  if (max_buffers)
    max_buffers += thiz->async_depth;

  /* increase min_buffers by 1 for smooth display in the render pipeline */
  min_buffers += 1;

  /* this will get updated with the msdk requirement */
  thiz->min_prealloc_buffers = min_buffers;

  if (_gst_caps_has_feature (pool_caps, GST_CAPS_FEATURE_MEMORY_DMABUF)) {
    GST_INFO_OBJECT (decoder, "This MSDK decoder uses DMABuf memory");
    thiz->use_video_memory = thiz->use_dmabuf = TRUE;
  }

  /* Initialize the MSDK decoder before the new bufferpool tries to allocate
   * each buffer, which requires the frame allocation information.
   * No effect if already initialized.
   */
  if (!gst_msdkdec_init_decoder (thiz)) {
    gst_structure_free (pool_config);
    gst_object_unref (pool);
    return FALSE;
  }

  /* get the updated min_buffers which account the msdk requirement too */
  min_buffers = thiz->min_prealloc_buffers;

  /* The decoder always uses its own pool, so we create a new pool if the msdk
   * APIs previously requested reallocation (do_realloc = TRUE) */
  if (thiz->do_realloc || !thiz->pool) {
    if (thiz->pool)
      gst_object_replace ((GstObject **) & thiz->pool, NULL);
    GST_INFO_OBJECT (decoder, "create new MSDK bufferpool");
    thiz->pool =
        gst_msdkdec_create_buffer_pool (thiz, &thiz->output_info, min_buffers);
    if (!thiz->pool)
      goto failed_to_create_pool;
  }

  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)
      && gst_buffer_pool_has_option (pool,
          GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT)) {
    GstStructure *config;
    GstAllocator *allocator;

    /* If downstream supports video meta and video alignment, we can replace
     * the downstream pool with our own msdk bufferpool and use that directly
     */
    /* Remove downstream's pool */
    gst_structure_free (pool_config);
    gst_object_unref (pool);

    pool = gst_object_ref (thiz->pool);

    /* Set the allocator of new msdk bufferpool */
    config = gst_buffer_pool_get_config (GST_BUFFER_POOL_CAST (pool));

    if (gst_buffer_pool_config_get_allocator (config, &allocator, NULL))
      gst_query_set_nth_allocation_param (query, 0, allocator, NULL);
    gst_structure_free (config);
  } else {
    /* Unfortunately, downstream doesn't have videometa or alignment support,
     * so we keep the msdk pool as a side-pool that will be decoded into and
     * then copied from.
     */
    GST_INFO_OBJECT (decoder, "Keep MSDK bufferpool as a side-pool");

    /* Update params to downstream's pool */
    gst_buffer_pool_config_set_params (pool_config, pool_caps, size,
        min_buffers, max_buffers);
    if (!gst_buffer_pool_set_config (pool, pool_config))
      goto error_set_config;
    gst_video_info_from_caps (&thiz->non_msdk_pool_info, pool_caps);

    /* update width and height with actual negotiated values */
    GST_VIDEO_INFO_WIDTH (&thiz->non_msdk_pool_info) =
        GST_VIDEO_INFO_WIDTH (&thiz->output_info);
    GST_VIDEO_INFO_HEIGHT (&thiz->non_msdk_pool_info) =
        GST_VIDEO_INFO_HEIGHT (&thiz->output_info);
  }

  gst_query_set_nth_allocation_pool (query, 0, pool, size, min_buffers,
      max_buffers);

  if (pool)
    gst_object_unref (pool);


  return TRUE;

failed_to_create_pool:
  GST_ERROR_OBJECT (decoder, "failed to create buffer pool");
  if (pool_config)
    gst_structure_free (pool_config);
  if (pool)
    gst_object_unref (pool);
  return FALSE;

error_set_config:
  GST_ERROR_OBJECT (decoder, "failed to set buffer pool config");
  if (pool)
    gst_object_unref (pool);
  return FALSE;
}
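
The helper _gst_caps_has_feature() referenced above is not shown in this excerpt. A plausible minimal implementation (an assumption about its shape, not the actual msdk code) simply walks the caps features:

/* Sketch: TRUE if any structure in @caps carries @feature; ANY features are
 * skipped because they match everything and tell us nothing specific. */
static gboolean
_gst_caps_has_feature (const GstCaps * caps, const gchar * feature)
{
  guint i;

  for (i = 0; i < gst_caps_get_size (caps); i++) {
    GstCapsFeatures *features = gst_caps_get_features (caps, i);

    if (gst_caps_features_is_any (features))
      continue;

    if (gst_caps_features_contains (features, feature))
      return TRUE;
  }

  return FALSE;
}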
Example #26
static gboolean
gst_goom_src_negotiate (GstGoom * goom)
{
  GstCaps *othercaps, *target;
  GstStructure *structure;
  GstCaps *templ;
  GstQuery *query;
  GstBufferPool *pool;
  GstStructure *config;
  guint size, min, max;

  templ = gst_pad_get_pad_template_caps (goom->srcpad);

  GST_DEBUG_OBJECT (goom, "performing negotiation");

  /* see what the peer can do */
  othercaps = gst_pad_peer_query_caps (goom->srcpad, NULL);
  if (othercaps) {
    target = gst_caps_intersect (othercaps, templ);
    gst_caps_unref (othercaps);
    gst_caps_unref (templ);

    if (gst_caps_is_empty (target))
      goto no_format;

    target = gst_caps_truncate (target);
  } else {
    target = templ;
  }

  structure = gst_caps_get_structure (target, 0);
  gst_structure_fixate_field_nearest_int (structure, "width", DEFAULT_WIDTH);
  gst_structure_fixate_field_nearest_int (structure, "height", DEFAULT_HEIGHT);
  gst_structure_fixate_field_nearest_fraction (structure, "framerate",
      DEFAULT_FPS_N, DEFAULT_FPS_D);

  gst_goom_src_setcaps (goom, target);

  /* try to get a bufferpool now */
  /* find a pool for the negotiated caps now */
  query = gst_query_new_allocation (target, TRUE);

  if (!gst_pad_peer_query (goom->srcpad, query)) {
    /* no problem, we use the query defaults */
    GST_DEBUG_OBJECT (goom, "ALLOCATION query failed");
  }

  if (gst_query_get_n_allocation_pools (query) > 0) {
    /* we got a configuration from our peer, parse it */
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
  } else {
    pool = NULL;
    size = goom->outsize;
    min = max = 0;
  }

  if (pool == NULL) {
    /* we did not get a pool, make one ourselves then */
    pool = gst_buffer_pool_new ();
  }

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, target, size, min, max);
  gst_buffer_pool_set_config (pool, config);

  if (goom->pool) {
    gst_buffer_pool_set_active (goom->pool, FALSE);
    gst_object_unref (goom->pool);
  }
  goom->pool = pool;

  /* and activate */
  gst_buffer_pool_set_active (pool, TRUE);

  gst_caps_unref (target);

  return TRUE;

no_format:
  {
    gst_caps_unref (target);
    return FALSE;
  }
}
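
goom->outsize, used as the fallback size above, is filled in elsewhere (presumably when the caps are set). A sketch of how that size would typically be derived from the fixated caps, assuming standard GstVideoInfo usage rather than the element's actual setcaps code:

/* Sketch: derive the output buffer size from fixated raw-video caps. */
static guint
output_size_from_caps (GstCaps * caps)
{
  GstVideoInfo info;

  if (!gst_video_info_from_caps (&info, caps))
    return 0;

  return (guint) GST_VIDEO_INFO_SIZE (&info);
}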
Example #27
static gboolean
gst_gl_filter_decide_allocation (GstBaseTransform * trans, GstQuery * query)
{
  GstGLFilter *filter = GST_GL_FILTER (trans);
  GstGLFilterClass *filter_class = GST_GL_FILTER_GET_CLASS (filter);
  GstBufferPool *pool = NULL;
  GstStructure *config;
  GstCaps *caps;
  guint min, max, size;
  gboolean update_pool;
  guint idx;
  GError *error = NULL;
  guint in_width, in_height, out_width, out_height;

  gst_query_parse_allocation (query, &caps, NULL);

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

    update_pool = TRUE;
  } else {
    GstVideoInfo vinfo;

    gst_video_info_init (&vinfo);
    gst_video_info_from_caps (&vinfo, caps);
    size = vinfo.size;
    min = max = 0;
    update_pool = FALSE;
  }

  if (!gst_gl_ensure_display (filter, &filter->display))
    return FALSE;

  if (gst_query_find_allocation_meta (query,
          GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, &idx)) {
    GstGLContext *context;
    const GstStructure *upload_meta_params;

    gst_query_parse_nth_allocation_meta (query, idx, &upload_meta_params);
    if (gst_structure_get (upload_meta_params, "gst.gl.GstGLContext",
            GST_GL_TYPE_CONTEXT, &context, NULL) && context) {
      GstGLContext *old = filter->context;

      filter->context = context;
      if (old)
        gst_object_unref (old);
    }
  }

  if (!filter->context) {
    filter->context = gst_gl_context_new (filter->display);
    if (!gst_gl_context_create (filter->context, filter->other_context, &error))
      goto context_error;
  }

  in_width = GST_VIDEO_INFO_WIDTH (&filter->in_info);
  in_height = GST_VIDEO_INFO_HEIGHT (&filter->in_info);
  out_width = GST_VIDEO_INFO_WIDTH (&filter->out_info);
  out_height = GST_VIDEO_INFO_HEIGHT (&filter->out_info);

  if (!filter->upload) {
    filter->upload = gst_gl_upload_new (filter->context);
    gst_gl_upload_init_format (filter->upload, filter->in_info,
        filter->out_info);
  }
  /* blocking call, generates an FBO */
  if (!gst_gl_context_gen_fbo (filter->context, out_width, out_height,
          &filter->fbo, &filter->depthbuffer))
    goto context_error;

  gst_gl_context_gen_texture (filter->context, &filter->in_tex_id,
      GST_VIDEO_FORMAT_RGBA, in_width, in_height);

  gst_gl_context_gen_texture (filter->context, &filter->out_tex_id,
      GST_VIDEO_FORMAT_RGBA, out_width, out_height);

  if (filter_class->display_init_cb != NULL) {
    gst_gl_context_thread_add (filter->context, gst_gl_filter_start_gl, filter);
  }

  if (filter_class->onInitFBO) {
    if (!filter_class->onInitFBO (filter))
      goto error;
  }

  if (!pool)
    pool = gst_gl_buffer_pool_new (filter->context);

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, caps, size, min, max);
  gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
  gst_buffer_pool_set_config (pool, config);

  if (update_pool)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  gst_object_unref (pool);

  return TRUE;

context_error:
  {
    /* error may be NULL when the FBO generation failed */
    GST_ELEMENT_ERROR (trans, RESOURCE, NOT_FOUND,
        ("%s", error ? error->message : "Failed to create GL resources"),
        (NULL));
    g_clear_error (&error);
    return FALSE;
  }
error:
  {
    GST_ELEMENT_ERROR (trans, LIBRARY, INIT,
        ("Subclass failed to initialize."), (NULL));
    return FALSE;
  }
}
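
The "gst.gl.GstGLContext" field parsed above has to be put into the allocation query by the peer element. A sketch of that side, reusing the same meta API and structure field as the example (the helper name is illustrative, and this assumes the same 1.x-era libgstgl API as the code above):

/* Sketch: advertise an existing GstGLContext through the
 * GstVideoGLTextureUploadMeta allocation meta so that
 * gst_gl_filter_decide_allocation() above can pick it up. */
static void
add_gl_upload_meta (GstQuery * query, GstGLContext * context)
{
  GstStructure *params;

  params = gst_structure_new ("GstVideoGLTextureUploadMeta",
      "gst.gl.GstGLContext", GST_GL_TYPE_CONTEXT, context, NULL);

  gst_query_add_allocation_meta (query,
      GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, params);

  /* the query keeps its own copy of the parameters */
  gst_structure_free (params);
}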