Code Example #1
/**
 * gst_vaapi_profile_get_caps:
 * @profile: a #GstVaapiProfile
 *
 * Converts a #GstVaapiProfile into the corresponding #GstCaps. If no
 * matching caps were found, an empty caps is returned.
 *
 * Return value: the newly allocated #GstCaps, or %NULL on allocation failure
 */
GstCaps *
gst_vaapi_profile_get_caps(GstVaapiProfile profile)
{
    const GstVaapiProfileMap *m;
    GstCaps *out_caps, *caps;

    out_caps = gst_caps_new_empty();
    if (!out_caps)
        return NULL;

    for (m = gst_vaapi_profiles; m->profile; m++) {
        if (m->profile != profile)
            continue;
        caps = gst_caps_from_string(m->media_str);
        if (!caps)
            continue;
        gst_caps_set_simple(
            caps,
            "profile", G_TYPE_STRING, m->profile_str,
            NULL
        );
        out_caps = gst_caps_merge(out_caps, caps);
    }
    return out_caps;
}
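
A rough usage sketch for the helper above (hypothetical caller; it assumes gst-vaapi's GstVaapiProfile values, e.g. GST_VAAPI_PROFILE_H264_MAIN, and an initialized GStreamer):

static void
dump_profile_caps (GstVaapiProfile profile)
{
    GstCaps *caps = gst_vaapi_profile_get_caps (profile);

    if (caps) {
        gchar *str = gst_caps_to_string (caps);

        /* print and release the newly allocated caps */
        g_print ("profile caps: %s\n", str);
        g_free (str);
        gst_caps_unref (caps);
    }
}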
Code Example #2
static GstCaps *
gst_visual_gl_getcaps (GstPad * pad)
{
  GstCaps *ret;
  GstVisualGL *visual = GST_VISUAL_GL (gst_pad_get_parent (pad));
  int depths;

  if (!visual->actor) {
    ret = gst_caps_copy (gst_pad_get_pad_template_caps (visual->srcpad));
    goto beach;
  }

  ret = gst_caps_new_empty ();
  depths = visual_actor_get_supported_depth (visual->actor);
  if (depths < 0) {
    /* FIXME: set an error */
    goto beach;
  }
  if ((depths & VISUAL_VIDEO_DEPTH_GL) == 0) {
    /* We don't handle non-GL plugins */
    goto beach;
  }

  GST_DEBUG_OBJECT (visual, "libvisual-gl plugin supports depths %u (0x%04x)",
      depths, depths);
  /* only do GL output */
  gst_caps_append (ret, gst_caps_from_string (GST_GL_VIDEO_CAPS));

beach:

  GST_DEBUG_OBJECT (visual, "returning caps %" GST_PTR_FORMAT, ret);
  gst_object_unref (visual);
  return ret;
}
Code Example #3
File: gstdspvpp.c  Project: kpykc/ardrone2_gstreamer
static gboolean
sink_setcaps(GstPad *pad,
	     GstCaps *caps)
{
	GstDspVpp *self;
	GstDspBase *base;
	GstCaps *out_caps;
	gboolean ret;

	self = GST_DSP_VPP(GST_PAD_PARENT(pad));
	base = GST_DSP_BASE(self);

	{
		gchar *str = gst_caps_to_string(caps);
		pr_info(self, "sink caps: %s", str);
		g_free(str);
	}

	base->codec = &td_vpp_codec;

	du_port_alloc_buffers(base->ports[0], 4);
	du_port_alloc_buffers(base->ports[1], 4);

	out_caps = gst_caps_new_empty();
	configure_caps(self, caps, out_caps);
	base->tmp_caps = out_caps;

	ret = gst_pad_set_caps(pad, caps);

	if (!ret)
		return FALSE;

	return TRUE;
}
Code Example #4
File: gstav_h263enc.c  Project: ceyusa/gst-av
static GstCaps *
generate_sink_template(void)
{
	GstCaps *caps, *templ;
	struct size {
		int width;
		int height;
	} sizes[] = {
		{ 352, 288 },
		{ 704, 576 },
		{ 176, 144 },
		{ 128, 96 },
	};

	caps = gst_caps_new_empty();
	templ = gst_caps_new_simple("video/x-raw-yuv",
			"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC('I', '4', '2', '0'),
			"pixel-aspect-ratio", GST_TYPE_FRACTION, 12, 11,
			NULL);

	for (unsigned i = 0; i < ARRAY_SIZE(sizes); i++) {
		GstCaps *tmp;
		tmp = gst_caps_copy(templ);
		gst_caps_set_simple(tmp,
				"width", G_TYPE_INT, sizes[i].width,
				"height", G_TYPE_INT, sizes[i].height,
				NULL);
		gst_caps_append(caps, tmp);
	}

	gst_caps_unref(templ);

	return caps;
}
Code Example #5
/*
 * cheese_camera_device_filter_caps:
 * @device: the #CheeseCameraDevice
 * @caps: the #GstCaps that the device supports
 * @formats: a %NULL-terminated array of strings naming the video media types
 * to keep
 *
 * Filter the supplied @caps with %CHEESE_MAXIMUM_RATE to only allow @formats
 * which can reach the desired framerate.
 *
 * Returns: the filtered #GstCaps
 */
static GstCaps *
cheese_camera_device_filter_caps (CheeseCameraDevice *device,
                                  GstCaps *caps,
                                  const gchar *const formats[])
{
  GstCaps *filter;
  GstCaps *allowed;
  gsize i;

  filter = gst_caps_new_empty ();

  for (i = 0; formats[i] != NULL; i++)
  {
    gst_caps_append (filter,
                     gst_caps_new_simple (formats[i],
                                          "framerate", GST_TYPE_FRACTION_RANGE,
                                          0, 1, CHEESE_MAXIMUM_RATE, 1,
                                          NULL));
  }

  allowed = gst_caps_intersect (caps, filter);

  GST_DEBUG ("Supported caps %" GST_PTR_FORMAT, caps);
  GST_DEBUG ("Filter caps %" GST_PTR_FORMAT, filter);
  GST_DEBUG ("Filtered caps %" GST_PTR_FORMAT, allowed);

  gst_caps_unref (filter);

  return allowed;
}
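
A minimal caller sketch for the filter above; example_usable_caps and the media type strings are assumptions, not part of the original Cheese code:

static GstCaps *
example_usable_caps (CheeseCameraDevice *device, GstCaps *supported)
{
  /* hypothetical, NULL-terminated list of media types to keep */
  const gchar *formats[] = { "video/x-raw-yuv", "video/x-raw-rgb", NULL };

  return cheese_camera_device_filter_caps (device, supported, formats);
}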
Code Example #6
static GstCaps *
gst_v4l2src_get_caps (GstBaseSrc * src, GstCaps * filter)
{
  GstV4l2Src *v4l2src;
  GstV4l2Object *obj;
  GstCaps *ret;
  GSList *walk;
  GSList *formats;

  v4l2src = GST_V4L2SRC (src);
  obj = v4l2src->v4l2object;

  if (!GST_V4L2_IS_OPEN (obj)) {
    return gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD (v4l2src));
  }

  if (v4l2src->probed_caps)
    return gst_caps_ref (v4l2src->probed_caps);

  formats = gst_v4l2_object_get_format_list (obj);

  ret = gst_caps_new_empty ();

  for (walk = formats; walk; walk = walk->next) {
    struct v4l2_fmtdesc *format;
    GstStructure *template;
Code Example #7
static GstCaps *
gst_fdkaacenc_get_caps (GstAudioEncoder * enc, GstCaps * filter)
{
  GstCaps *res, *caps;
  gint i;

  caps = gst_caps_new_empty ();

  for (i = 0; i < G_N_ELEMENTS (channel_layouts); i++) {
    guint64 channel_mask;
    GstCaps *tmp =
        gst_caps_make_writable (gst_pad_get_pad_template_caps
        (GST_AUDIO_ENCODER_SINK_PAD (enc)));

    if (channel_layouts[i].channels == 1) {
      gst_caps_set_simple (tmp, "channels", G_TYPE_INT,
          channel_layouts[i].channels, NULL);
    } else {
      gst_audio_channel_positions_to_mask (channel_layouts[i].positions,
          channel_layouts[i].channels, FALSE, &channel_mask);
      gst_caps_set_simple (tmp, "channels", G_TYPE_INT,
          channel_layouts[i].channels, "channel-mask", GST_TYPE_BITMASK,
          channel_mask, NULL);
    }

    gst_caps_append (caps, tmp);
  }

  res = gst_audio_encoder_proxy_getcaps (enc, caps, filter);
  gst_caps_unref (caps);

  return res;
}
Code Example #8
File: gstyuvtorgb.cpp  Project: ksb2go/gst-plugins
/* copies the given caps */
static GstCaps *
gst_yuv_to_rgb_caps_remove_format_info (GstCaps * caps)
{
  GstStructure *st;
  gint i, n;
  GstCaps *res;

  res = gst_caps_new_empty ();

  n = gst_caps_get_size (caps);
  for (i = 0; i < n; i++) {
    st = gst_caps_get_structure (caps, i);

    /* If this is already expressed by the existing caps
     * skip this structure */
    if (i > 0 && gst_caps_is_subset_structure (res, st))
      continue;

    st = gst_structure_copy (st);
    gst_structure_remove_fields (st, "format",
        "colorimetry", "chroma-site", NULL);

    gst_caps_append_structure (res, st);
  }

  return res;
}
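
A hedged sketch of how such a helper is typically called from a getcaps/transform-caps path; example_widen and the ownership of peer_caps are assumptions:

static GstCaps *
example_widen (GstCaps *peer_caps)
{
  /* the helper copies structures, so peer_caps is still owned by the caller */
  GstCaps *wide = gst_yuv_to_rgb_caps_remove_format_info (peer_caps);

  gst_caps_unref (peer_caps);
  return wide;
}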
Code Example #9
File: gstglmixer.c  Project: ego5710/gst-plugins-bad
/* copies the given caps */
static GstCaps *
gst_gl_mixer_caps_remove_format_info (GstCaps * caps)
{
    GstStructure *st;
    GstCapsFeatures *f;
    gint i, n;
    GstCaps *res;

    res = gst_caps_new_empty ();

    n = gst_caps_get_size (caps);
    for (i = 0; i < n; i++) {
        st = gst_caps_get_structure (caps, i);
        f = gst_caps_get_features (caps, i);

        /* If this is already expressed by the existing caps
         * skip this structure */
        if (i > 0 && gst_caps_is_subset_structure_full (res, st, f))
            continue;

        st = gst_structure_copy (st);
        /* Only remove format info for the cases when we can actually convert */
        if (!gst_caps_features_is_any (f)
                && gst_caps_features_is_equal (f,
                                               GST_CAPS_FEATURES_MEMORY_SYSTEM_MEMORY))
            gst_structure_remove_fields (st, "format", "colorimetry", "chroma-site",
                                         NULL);
        gst_structure_remove_fields (st, "width", "height", NULL);

        gst_caps_append_structure_full (res, st, gst_caps_features_copy (f));
    }

    return res;
}
Code Example #10
static GstCaps *
gst_vaapidecode_sink_getcaps (GstVideoDecoder * vdec, GstCaps * filter)
{
  GstVaapiDecode *const decode = GST_VAAPIDECODE (vdec);
  GstCaps *result;

  if (decode->allowed_sinkpad_caps)
    goto bail;

  /* if we don't have a display yet, return our pad's template caps */
  if (!GST_VAAPI_PLUGIN_BASE_DISPLAY (decode))
    goto bail;

  /* if the allowed caps calculation fails, return empty caps so that
   * autoplugging can try another decoder */
  if (!gst_vaapidecode_ensure_allowed_sinkpad_caps (decode))
    return gst_caps_new_empty ();

bail:
  result = gst_video_decoder_proxy_getcaps (vdec, decode->allowed_sinkpad_caps,
      filter);

  GST_DEBUG_OBJECT (decode, "Returning sink caps %" GST_PTR_FORMAT, result);

  return result;
}
Code Example #11
GstCaps *
gst_opencv_caps_from_cv_image_type (int cv_type)
{
  GstCaps *c = gst_caps_new_empty ();
  switch (cv_type) {
    case CV_8UC1:
      gst_caps_append (c, gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("GRAY8")));
      break;
    case CV_8UC3:
      gst_caps_append (c, gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("RGB")));
      gst_caps_append (c, gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("BGR")));
      break;
    case CV_8UC4:
      gst_caps_append (c, gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("RGBx")));
      gst_caps_append (c, gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("xRGB")));
      gst_caps_append (c, gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("BGRx")));
      gst_caps_append (c, gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("xBGR")));
      gst_caps_append (c, gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("RGBA")));
      gst_caps_append (c, gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("ARGB")));
      gst_caps_append (c, gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("BGRA")));
      gst_caps_append (c, gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("ABGR")));
      break;
    case CV_16UC1:
      gst_caps_append (c,
          gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("GRAY16_LE")));
      gst_caps_append (c,
          gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("GRAY16_BE")));
      break;
  }
  return c;
}
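
A small illustrative caller, assuming OpenCV's CV_8UC3 constant from its core headers and an initialized GStreamer:

static void
example_opencv_caps (void)
{
  GstCaps *caps = gst_opencv_caps_from_cv_image_type (CV_8UC3);
  gchar *str = gst_caps_to_string (caps);

  g_print ("CV_8UC3 maps to: %s\n", str);
  g_free (str);
  gst_caps_unref (caps);
}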
Code Example #12
static void
gst_sdlvideosink_base_init (gpointer g_class)
{
    GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
    GstCaps *capslist;
    gint i;
    guint32 formats[] = {
        GST_MAKE_FOURCC ('I', '4', '2', '0'),
        GST_MAKE_FOURCC ('Y', 'V', '1', '2'),
        GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'),
        GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U'),
        GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y')
    };

    /* make a list of all available caps */
    capslist = gst_caps_new_empty ();
    for (i = 0; i < G_N_ELEMENTS (formats); i++) {
        gst_caps_append_structure (capslist,
                                   gst_structure_new ("video/x-raw-yuv",
                                           "format", GST_TYPE_FOURCC, formats[i],
                                           "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
                                           "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
                                           "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1, NULL));
    }

    sink_template = gst_pad_template_new ("sink",
                                          GST_PAD_SINK, GST_PAD_ALWAYS, capslist);

    gst_element_class_add_pad_template (element_class, sink_template);
    gst_element_class_set_static_metadata (element_class, "SDL video sink",
                                           "Sink/Video", "An SDL-based videosink",
                                           "Ronald Bultje <*****@*****.**>, "
                                           "Edgard Lima <*****@*****.**>, "
                                           "Jan Schmidt <*****@*****.**>");
}
Code Example #13
static GstCaps *
gst_v4l2src_get_all_caps (void)
{
  static GstCaps *caps = NULL;

  if (caps == NULL) {
    GstStructure *structure;

    guint i;

    caps = gst_caps_new_empty ();
    for (i = 0; i < GST_V4L2_FORMAT_COUNT; i++) {
      structure = gst_v4l2src_v4l2fourcc_to_structure (gst_v4l2_formats[i]);
      if (structure) {
        gst_structure_set (structure,
            "width", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
            "height", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
            "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1, NULL);
        gst_caps_append_structure (caps, structure);
      }
    }
  }

  return caps;
}
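
Because the caps above are cached in a static variable and returned without an extra reference, callers that intend to modify them should work on a private copy; a hypothetical sketch:

static GstCaps *
example_private_copy (void)
{
  /* take a writable, privately owned copy of the shared caps */
  return gst_caps_copy (gst_v4l2src_get_all_caps ());
}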
Code Example #14
static GstCaps *
gst_v4l2sink_get_caps (GstBaseSink * bsink)
{
  GstV4l2Sink *v4l2sink = GST_V4L2SINK (bsink);
  GstCaps *ret;
  GSList *walk;
  GSList *formats;

  if (!GST_V4L2_IS_OPEN (v4l2sink->v4l2object)) {
    /* FIXME: copy? */
    GST_DEBUG_OBJECT (v4l2sink, "device is not open");
    return gst_caps_copy (gst_pad_get_pad_template_caps (
        GST_BASE_SINK_PAD (v4l2sink)));
  }

  if (v4l2sink->probed_caps) {
    LOG_CAPS (v4l2sink, v4l2sink->probed_caps);
    return gst_caps_ref (v4l2sink->probed_caps);
  }

  formats = gst_v4l2_object_get_format_list (v4l2sink->v4l2object);

  ret = gst_caps_new_empty ();

  for (walk = v4l2sink->v4l2object->formats; walk; walk = walk->next) {
    struct v4l2_fmtdesc *format;

    GstStructure *template;
Code Example #15
File: gstv4l2src.c  Project: pli3/gst-plugins-good
static GstCaps *
gst_v4l2src_get_caps (GstBaseSrc * src)
{
  GstV4l2Src *v4l2src = GST_V4L2SRC (src);
  GstCaps *ret;
  GSList *walk;
  GSList *formats;

  if (!GST_V4L2_IS_OPEN (v4l2src->v4l2object)) {
    /* FIXME: copy? */
    return
        gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD
            (v4l2src)));
  }

  if (v4l2src->probed_caps)
    return gst_caps_ref (v4l2src->probed_caps);

  formats = gst_v4l2_object_get_format_list (v4l2src->v4l2object);

  ret = gst_caps_new_empty ();

  for (walk = formats; walk; walk = walk->next) {
    struct v4l2_fmtdesc *format;

    GstStructure *template;
Code Example #16
static GstCaps *
gst_gdk_pixbuf_get_capslist (void)
{
    GSList *slist;
    GSList *slist0;
    GstCaps *capslist = NULL;
    GstCaps *return_caps = NULL;

    capslist = gst_caps_new_empty ();
    slist0 = gdk_pixbuf_get_formats ();

    for (slist = slist0; slist; slist = g_slist_next (slist)) {
        GdkPixbufFormat *pixbuf_format;
        char **mimetypes;
        char **mimetype;

        pixbuf_format = slist->data;
        mimetypes = gdk_pixbuf_format_get_mime_types (pixbuf_format);

        for (mimetype = mimetypes; *mimetype; mimetype++) {
            gst_caps_append_structure (capslist, gst_structure_new (*mimetype, NULL));
        }
        g_strfreev (mimetypes);
    }
    g_slist_free (slist0);

    return_caps = gst_caps_intersect (capslist,
                                      gst_static_caps_get (&gst_gdk_pixbuf_sink_template.static_caps));

    gst_caps_unref (capslist);
    return return_caps;
}
Code Example #17
File: gstglmixer.c  Project: ego5710/gst-plugins-bad
GstCaps *
gst_gl_mixer_update_caps (GstGLMixer * mix, GstCaps * caps)
{
    GstCaps *result = NULL;
    GstCaps *glcaps = gst_gl_mixer_set_caps_features (caps,
                      GST_CAPS_FEATURE_MEMORY_GL_MEMORY);
#if GST_GL_HAVE_PLATFORM_EGL
    GstCaps *eglcaps = gst_gl_mixer_set_caps_features (caps,
                       GST_CAPS_FEATURE_MEMORY_EGL_IMAGE);
#endif
    GstCaps *uploadcaps = gst_gl_mixer_set_caps_features (caps,
                          GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META);
    GstCaps *raw_caps =
        gst_caps_from_string (GST_VIDEO_CAPS_MAKE (GST_GL_COLOR_CONVERT_FORMATS));

    result = gst_caps_new_empty ();

    result = gst_caps_merge (result, glcaps);
#if GST_GL_HAVE_PLATFORM_EGL
    result = gst_caps_merge (result, eglcaps);
#endif
    result = gst_caps_merge (result, uploadcaps);
    result = gst_caps_merge (result, raw_caps);

    result = gst_caps_merge (result, gst_gl_mixer_caps_remove_format_info (caps));

    GST_DEBUG_OBJECT (mix, "returning %" GST_PTR_FORMAT, result);

    return result;
}
Code Example #18
GstCaps *
gst_opencv_caps_from_cv_image_type (int cv_type)
{
    GstCaps *caps = gst_caps_new_empty ();
    switch (cv_type) {
    case CV_8UC1:
        gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_GRAY8));
        break;
    case CV_8UC3:
        gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_RGB));
        gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_BGR));
        break;
    case CV_8UC4:
        gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_RGBx));
        gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_xRGB));
        gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_BGRx));
        gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_xBGR));
        gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_RGBA));
        gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_ARGB));
        gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_BGRA));
        gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_ABGR));
        break;
    case CV_16UC1:
        gst_caps_append (caps,
                         gst_caps_from_string (GST_VIDEO_CAPS_GRAY16 ("1234")));
        gst_caps_append (caps,
                         gst_caps_from_string (GST_VIDEO_CAPS_GRAY16 ("4321")));
        break;
    }
    return caps;
}
Code Example #19
/**
 * gst_dvbsub_overlay_intersect_by_feature:
 *
 * Creates a new #GstCaps based on the following filtering rule.
 *
 * For each individual structure contained in the given caps: if it uses
 * the given caps feature, keep one version of the caps with the feature
 * and another one without it; otherwise, intersect the caps with the
 * given filter.
 *
 * Returns: the new #GstCaps
 */
static GstCaps *
gst_dvbsub_overlay_intersect_by_feature (GstCaps * caps,
    const gchar * feature, GstCaps * filter)
{
  int i, caps_size;
  GstCaps *new_caps;

  new_caps = gst_caps_new_empty ();

  caps_size = gst_caps_get_size (caps);
  for (i = 0; i < caps_size; i++) {
    GstStructure *caps_structure = gst_caps_get_structure (caps, i);
    GstCapsFeatures *caps_features =
        gst_caps_features_copy (gst_caps_get_features (caps, i));
    GstCaps *filtered_caps;
    GstCaps *simple_caps =
        gst_caps_new_full (gst_structure_copy (caps_structure), NULL);
    gst_caps_set_features (simple_caps, 0, caps_features);

    if (gst_caps_features_contains (caps_features, feature)) {
      gst_caps_append (new_caps, gst_caps_copy (simple_caps));

      gst_caps_features_remove (caps_features, feature);
      filtered_caps = gst_caps_ref (simple_caps);
    } else {
      filtered_caps = gst_caps_intersect_full (simple_caps, filter,
          GST_CAPS_INTERSECT_FIRST);
    }

    gst_caps_unref (simple_caps);
    gst_caps_append (new_caps, filtered_caps);
  }

  return new_caps;
}
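
A hypothetical call site; the feature name macro is assumed to come from gst-plugins-base's video library, and the caller keeps ownership of both input caps:

static GstCaps *
example_intersect (GstCaps *peer_caps, GstCaps *raw_filter)
{
  return gst_dvbsub_overlay_intersect_by_feature (peer_caps,
      GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, raw_filter);
}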
Code Example #20
File: gstalsa.c  Project: pli3/gst-plugins-base
static GstCaps *
gst_alsa_detect_formats (GstObject * obj, snd_pcm_hw_params_t * hw_params,
    GstCaps * in_caps)
{
  snd_pcm_format_mask_t *mask;
  GstStructure *s;
  GstCaps *caps;
  gint i;

  snd_pcm_format_mask_malloc (&mask);
  snd_pcm_hw_params_get_format_mask (hw_params, mask);

  caps = gst_caps_new_empty ();

  for (i = 0; i < gst_caps_get_size (in_caps); ++i) {
    GstStructure *scopy;
    gint w, width = 0, depth = 0;

    s = gst_caps_get_structure (in_caps, i);
    if (!gst_structure_has_name (s, "audio/x-raw-int")) {
      GST_DEBUG_OBJECT (obj, "skipping non-int format");
      continue;
    }
    if (!gst_structure_get_int (s, "width", &width) ||
        !gst_structure_get_int (s, "depth", &depth))
      continue;
    if (width == 0 || (width % 8) != 0)
      continue;                 /* Only full byte widths are valid */
    for (w = 0; w < G_N_ELEMENTS (pcmformats); w++)
      if (pcmformats[w].width == width && pcmformats[w].depth == depth)
        break;
    if (w == G_N_ELEMENTS (pcmformats))
      continue;                 /* Unknown format */

    if (snd_pcm_format_mask_test (mask, pcmformats[w].sformat) &&
        snd_pcm_format_mask_test (mask, pcmformats[w].uformat)) {
      /* template contains { true, false } or just one, leave it as it is */
      scopy = gst_structure_copy (s);
    } else if (snd_pcm_format_mask_test (mask, pcmformats[w].sformat)) {
      scopy = gst_structure_copy (s);
      gst_structure_set (scopy, "signed", G_TYPE_BOOLEAN, TRUE, NULL);
    } else if (snd_pcm_format_mask_test (mask, pcmformats[w].uformat)) {
      scopy = gst_structure_copy (s);
      gst_structure_set (scopy, "signed", G_TYPE_BOOLEAN, FALSE, NULL);
    } else {
      scopy = NULL;
    }
    if (scopy) {
      if (width > 8) {
        /* TODO: proper endianness detection, for now it's CPU endianness only */
        gst_structure_set (scopy, "endianness", G_TYPE_INT, G_BYTE_ORDER, NULL);
      }
      gst_caps_append_structure (caps, scopy);
    }
  }

  snd_pcm_format_mask_free (mask);
  gst_caps_unref (in_caps);
  return caps;
}
Code Example #21
File: sdlvideosink.c  Project: JJCG/gst-plugins-bad
static void
gst_sdlvideosink_base_init (gpointer g_class)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
  GstCaps *capslist;
  gint i;
  guint32 formats[] = {
    GST_MAKE_FOURCC ('I', '4', '2', '0'),
    GST_MAKE_FOURCC ('Y', 'V', '1', '2'),
    GST_MAKE_FOURCC ('Y', 'U', 'Y', '2')
        /*
           GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U'),
           GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y')
         */
  };

  /* make a list of all available caps */
  capslist = gst_caps_new_empty ();
  for (i = 0; i < G_N_ELEMENTS (formats); i++) {
    gst_caps_append_structure (capslist,
        gst_structure_new ("video/x-raw-yuv",
            "format", GST_TYPE_FOURCC, formats[i],
            "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
            "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
            "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1, NULL));
  }

  sink_template = gst_pad_template_new ("sink",
      GST_PAD_SINK, GST_PAD_ALWAYS, capslist);

  gst_element_class_add_pad_template (element_class, sink_template);
  gst_element_class_set_details (element_class, &gst_sdlvideosink_details);

}
Code Example #22
static GstCaps *
generate_sink_template (void)
{
  GstCaps *caps;
  GstStructure *struc;

  caps = gst_caps_new_empty ();

  struc = gst_structure_new ("audio/x-iLBC", NULL);

  {
    GValue list;
    GValue val;

    list.g_type = val.g_type = 0;

    g_value_init (&list, GST_TYPE_LIST);
    g_value_init (&val, G_TYPE_INT);

    g_value_set_int (&val, 20);
    gst_value_list_append_value (&list, &val);

    g_value_set_int (&val, 30);
    gst_value_list_append_value (&list, &val);

    gst_structure_set_value (struc, "mode", &list);

    g_value_unset (&val);
    g_value_unset (&list);
  }

  gst_caps_append_structure (caps, struc);

  return caps;
}
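
A sketch of how such generated caps can be wired into an always sink pad template, mirroring the sdlvideosink examples on this page (the template name and the 0.10-era ownership behaviour are assumptions):

static GstPadTemplate *
example_sink_template (void)
{
  /* the template takes the freshly built caps, as in the other examples here */
  return gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
      generate_sink_template ());
}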
Code Example #23
File: gstvaapidecode.c  Project: cbetz421/gst-vaapi
static gboolean
gst_vaapidecode_ensure_allowed_caps(GstVaapiDecode *decode)
{
    GstCaps *decode_caps;
    guint i, n_decode_caps;

    if (decode->allowed_caps)
        return TRUE;

    if (!gst_vaapidecode_ensure_display(decode))
        goto error_no_display;

    decode_caps = gst_vaapi_display_get_decode_caps(decode->display);
    if (!decode_caps)
        goto error_no_decode_caps;
    n_decode_caps = gst_caps_get_size(decode_caps);

    decode->allowed_caps = gst_caps_new_empty();
    if (!decode->allowed_caps)
        goto error_no_memory;

    for (i = 0; i < n_decode_caps; i++) {
        GstStructure *structure;
        structure = gst_caps_get_structure(decode_caps, i);
        if (!structure)
            continue;
        structure = gst_structure_copy(structure);
        if (!structure)
            continue;
        gst_structure_remove_field(structure, "profile");
        gst_structure_set(
            structure,
            "width",  GST_TYPE_INT_RANGE, 1, G_MAXINT,
            "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
            NULL
        );
        gst_caps_merge_structure(decode->allowed_caps, structure);
    }

    gst_caps_unref(decode_caps);
    return TRUE;

    /* ERRORS */
error_no_display:
    {
        GST_DEBUG("failed to retrieve VA display");
        return FALSE;
    }
error_no_decode_caps:
    {
        GST_DEBUG("failed to retrieve VA decode caps");
        return FALSE;
    }
error_no_memory:
    {
        GST_DEBUG("failed to allocate allowed-caps set");
        gst_caps_unref(decode_caps);
        return FALSE;
    }
}
Code Example #24
static GstCaps *
theora_enc_getcaps (GstVideoEncoder * encoder, GstCaps * filter)
{
    GstCaps *caps, *ret;
    char *supported_formats, *caps_string;

    supported_formats = theora_enc_get_supported_formats ();
    if (!supported_formats) {
        GST_WARNING ("no supported formats found. Encoder disabled?");
        return gst_caps_new_empty ();
    }

    caps_string = g_strdup_printf ("video/x-raw, "
                                   "format = (string) { %s }, "
                                   "framerate = (fraction) [1/MAX, MAX], "
                                   "width = (int) [ 1, MAX ], " "height = (int) [ 1, MAX ]",
                                   supported_formats);
    caps = gst_caps_from_string (caps_string);
    g_free (caps_string);
    g_free (supported_formats);
    GST_DEBUG ("Supported caps: %" GST_PTR_FORMAT, caps);

    ret = gst_video_encoder_proxy_getcaps (encoder, caps, filter);
    gst_caps_unref (caps);

    return ret;
}
Code Example #25
File: gstvdpvideobuffer.c  Project: zsx/ossbuild
GstCaps *
gst_vdp_video_buffer_get_allowed_yuv_caps (GstVdpDevice * device)
{
  GstCaps *caps;
  gint i;

  caps = gst_caps_new_empty ();
  for (i = 0; i < G_N_ELEMENTS (chroma_types); i++) {
    VdpStatus status;
    VdpBool is_supported;
    guint32 max_w, max_h;

    status =
        device->vdp_video_surface_query_capabilities (device->device,
        chroma_types[i], &is_supported, &max_w, &max_h);

    if (status != VDP_STATUS_OK && status != VDP_STATUS_INVALID_CHROMA_TYPE) {
      GST_ERROR_OBJECT (device,
          "Could not get query VDPAU video surface capabilites, "
          "Error returned from vdpau was: %s",
          device->vdp_get_error_string (status));

      goto error;
    }
    if (is_supported) {
      gint j;

      for (j = 0; j < G_N_ELEMENTS (formats); j++) {
        if (formats[j].chroma_type != chroma_types[i])
          continue;

        status =
            device->vdp_video_surface_query_ycbcr_capabilities (device->device,
            formats[j].chroma_type, formats[j].format, &is_supported);
        if (status != VDP_STATUS_OK
            && status != VDP_STATUS_INVALID_Y_CB_CR_FORMAT) {
          GST_ERROR_OBJECT (device, "Could not query VDPAU YCbCr capabilites, "
              "Error returned from vdpau was: %s",
              device->vdp_get_error_string (status));

          goto error;
        }

        if (is_supported) {
          GstCaps *format_caps;

          format_caps = gst_caps_new_simple ("video/x-raw-yuv",
              "format", GST_TYPE_FOURCC, formats[j].fourcc,
              "width", GST_TYPE_INT_RANGE, 1, max_w,
              "height", GST_TYPE_INT_RANGE, 1, max_h, NULL);
          gst_caps_append (caps, format_caps);
        }
      }
    }
  }

error:
  return caps;
}
Code Example #26
/**
 * gst_encoding_profile_get_input_caps:
 * @profile: a #GstEncodingProfile
 *
 * Computes the full input caps that this @profile will be able to consume.
 *
 * Returns: (transfer full): The full caps the given @profile can consume. Call
 * gst_caps_unref() when you are done with the caps.
 */
GstCaps *
gst_encoding_profile_get_input_caps (GstEncodingProfile * profile)
{
  GstCaps *out, *tmp;
  GList *ltmp;
  GstStructure *st, *outst;
  GQuark out_name;
  guint i, len;
  GstCaps *fcaps;

  g_return_val_if_fail (GST_IS_ENCODING_PROFILE (profile), NULL);

  if (GST_IS_ENCODING_CONTAINER_PROFILE (profile)) {
    GstCaps *res = gst_caps_new_empty ();

    for (ltmp = GST_ENCODING_CONTAINER_PROFILE (profile)->encodingprofiles;
        ltmp; ltmp = ltmp->next) {
      GstEncodingProfile *sprof = (GstEncodingProfile *) ltmp->data;
      res = gst_caps_merge (res, gst_encoding_profile_get_input_caps (sprof));
    }
    return res;
  }

  fcaps = profile->format;

  /* fast-path */
  if ((profile->restriction == NULL) || gst_caps_is_any (profile->restriction))
    return gst_caps_ref (fcaps);

  /* Combine the format with the restriction caps */
  outst = gst_caps_get_structure (fcaps, 0);
  out_name = gst_structure_get_name_id (outst);
  tmp = gst_caps_new_empty ();
  len = gst_caps_get_size (profile->restriction);

  for (i = 0; i < len; i++) {
    st = gst_structure_copy (gst_caps_get_structure (profile->restriction, i));
    st->name = out_name;
    gst_caps_append_structure (tmp, st);
  }

  out = gst_caps_intersect (tmp, fcaps);
  gst_caps_unref (tmp);

  return out;
}
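
A minimal consumer sketch; since the result is returned with a full reference, the caller must unref it when done:

static void
example_dump_input_caps (GstEncodingProfile *profile)
{
  GstCaps *caps = gst_encoding_profile_get_input_caps (profile);
  gchar *str = gst_caps_to_string (caps);

  g_print ("profile consumes: %s\n", str);
  g_free (str);
  gst_caps_unref (caps);
}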
Code Example #27
File: gstrealvideodec.c  Project: spunktsch/svtplayer
static GstCaps *
gst_real_video_dec_getcaps (GstPad * pad)
{
  GstRealVideoDec *dec = GST_REAL_VIDEO_DEC (GST_PAD_PARENT (pad));
  GstCaps *res;

  if (dec->checked_modules) {
    GValue versions = { 0 };
    GValue version = { 0 };

    GST_LOG_OBJECT (dec, "constructing caps");
    res = gst_caps_new_empty ();

    g_value_init (&versions, GST_TYPE_LIST);
    g_value_init (&version, G_TYPE_INT);

    if (dec->valid_rv20) {
      g_value_set_int (&version, GST_REAL_VIDEO_DEC_VERSION_2);
      gst_value_list_append_value (&versions, &version);
    }
    if (dec->valid_rv30) {
      g_value_set_int (&version, GST_REAL_VIDEO_DEC_VERSION_3);
      gst_value_list_append_value (&versions, &version);
    }
    if (dec->valid_rv40) {
      g_value_set_int (&version, GST_REAL_VIDEO_DEC_VERSION_4);
      gst_value_list_append_value (&versions, &version);
    }

    if (gst_value_list_get_size (&versions) > 0) {
      /* drop the empty caps allocated above before replacing them */
      gst_caps_unref (res);
      res = gst_caps_new_simple ("video/x-pn-realvideo", NULL);
      gst_structure_set_value (gst_caps_get_structure (res, 0),
          "rmversion", &versions);
    }
    g_value_unset (&versions);
    g_value_unset (&version);
  } else {
    GST_LOG_OBJECT (dec, "returning padtemplate caps");
    res = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
  }
  GST_LOG_OBJECT (dec, "returning caps %" GST_PTR_FORMAT, res);

  return res;
}
Code Example #28
static void
cheese_camera_device_init (CheeseCameraDevice *device)
{
  CheeseCameraDevicePrivate *priv = cheese_camera_device_get_instance_private (device);

  priv->name = g_strdup (_("Unknown device"));
  priv->caps = gst_caps_new_empty ();
}
Code Example #29
static gboolean
gst_vaapidecode_ensure_allowed_sinkpad_caps (GstVaapiDecode * decode)
{
  GstCaps *caps, *allowed_sinkpad_caps;
  GArray *profiles;
  guint i;

  profiles =
      gst_vaapi_display_get_decode_profiles (GST_VAAPI_PLUGIN_BASE_DISPLAY
      (decode));
  if (!profiles)
    goto error_no_profiles;

  allowed_sinkpad_caps = gst_caps_new_empty ();
  if (!allowed_sinkpad_caps)
    goto error_no_memory;

  for (i = 0; i < profiles->len; i++) {
    const GstVaapiProfile profile =
        g_array_index (profiles, GstVaapiProfile, i);
    const gchar *media_type_name;
    const gchar *profile_name;
    GstStructure *structure;

    media_type_name = gst_vaapi_profile_get_media_type_name (profile);
    if (!media_type_name)
      continue;

    caps = gst_caps_from_string (media_type_name);
    if (!caps)
      continue;
    structure = gst_caps_get_structure (caps, 0);

    profile_name = gst_vaapi_profile_get_name (profile);
    if (profile_name)
      gst_structure_set (structure, "profile", G_TYPE_STRING,
          profile_name, NULL);

    allowed_sinkpad_caps = gst_caps_merge (allowed_sinkpad_caps, caps);
  }
  decode->allowed_sinkpad_caps = gst_caps_simplify (allowed_sinkpad_caps);

  g_array_unref (profiles);
  return TRUE;

  /* ERRORS */
error_no_profiles:
  {
    GST_ERROR ("failed to retrieve VA decode profiles");
    return FALSE;
  }
error_no_memory:
  {
    GST_ERROR ("failed to allocate allowed-caps set");
    g_array_unref (profiles);
    return FALSE;
  }
}
Code Example #30
static GstCaps *
gst_dshowvideosrc_getcaps_from_enum_mediatypes (GstDshowVideoSrc * src, IPin * pin)
{
    GstCaps *caps = NULL;
    IEnumMediaTypes *enum_mediatypes = NULL;
    HRESULT hres = S_OK;
    GstCapturePinMediaType *pin_mediatype = NULL;

    hres = pin->EnumMediaTypes (&enum_mediatypes);
    if (FAILED (hres)) {
        GST_ERROR ("Failed to retrieve IEnumMediaTypes (error=0x%x)", hres);
        return NULL;
    }

    caps = gst_caps_new_empty ();

    while ((pin_mediatype = gst_dshow_new_pin_mediatype_from_enum_mediatypes (pin, enum_mediatypes)) != NULL) {

        GstCaps *mediacaps = NULL;
        GstVideoFormat video_format = gst_dshow_guid_to_gst_video_format (pin_mediatype->mediatype);
        GstVideoInfo info;

        gst_video_info_init(&info);
        gst_video_info_set_format(&info, video_format, pin_mediatype->defaultWidth, pin_mediatype->defaultHeight);
        info.fps_n = pin_mediatype->defaultFPS;
        info.fps_d = 1;
        info.par_n = 1;
        info.par_d = 1;

        if(video_format == GST_VIDEO_FORMAT_ENCODED) {
            if(gst_dshow_check_mediatype(pin_mediatype->mediatype, MEDIASUBTYPE_MJPG, FORMAT_VideoInfo)) {
                mediacaps = gst_caps_new_simple("image/jpeg", "width", G_TYPE_INT, info.width,
                                                "height", G_TYPE_INT, info.height,
                                                "framerate", GST_TYPE_FRACTION, info.fps_n, info.fps_d,
                                                NULL);
            }
        }
        else if (video_format != GST_VIDEO_FORMAT_UNKNOWN)
            mediacaps = gst_video_info_to_caps (&info);

        if (mediacaps) {
            src->pins_mediatypes =
                g_list_append (src->pins_mediatypes, pin_mediatype);
            gst_caps_append (caps, mediacaps);
        } else {
            /* failed to convert dshow caps */
            gst_dshow_free_pin_mediatype (pin_mediatype);
        }
    }

    enum_mediatypes->Release ();

    if (caps && gst_caps_is_empty (caps)) {
        gst_caps_unref (caps);
        caps = NULL;
    }

    return caps;
}