Example #1
/* copies the given caps */
static GstCaps *
gst_csp_caps_remove_format_info (GstCaps * caps)
{
  GstStructure *yuvst, *rgbst, *grayst;

  /* We know there's only one structure since we're given simple caps */
  caps = gst_caps_copy (caps);

  yuvst = gst_caps_get_structure (caps, 0);

  gst_structure_set_name (yuvst, "video/x-raw-yuv");
  gst_structure_remove_fields (yuvst, "format", "endianness", "depth",
      "bpp", "red_mask", "green_mask", "blue_mask", "alpha_mask",
      "palette_data", "color-matrix", NULL);

  rgbst = gst_structure_copy (yuvst);
  gst_structure_set_name (rgbst, "video/x-raw-rgb");
  gst_structure_remove_fields (rgbst, "color-matrix", "chroma-site", NULL);

  grayst = gst_structure_copy (rgbst);
  gst_structure_set_name (grayst, "video/x-raw-gray");

  gst_caps_append_structure (caps, rgbst);
  gst_caps_append_structure (caps, grayst);

  return caps;
}
Example #2
static GstCaps *
generate_sink_template(void)
{
	GstCaps *caps;
	GstStructure *struc;

	caps = gst_caps_new_empty();

	struc = gst_structure_new("video/x-h263",
			NULL);

	gst_caps_append_structure(caps, struc);

	struc = gst_structure_new("video/x-h264",
			"alignment", G_TYPE_STRING, "au",
			NULL);

	gst_caps_append_structure(caps, struc);

	struc = gst_structure_new("video/mpeg",
			NULL);

	gst_caps_append_structure(caps, struc);

	struc = gst_structure_new("video/x-divx",
			NULL);

	gst_caps_append_structure(caps, struc);

	struc = gst_structure_new("video/x-xvid",
			NULL);

	gst_caps_append_structure(caps, struc);

	struc = gst_structure_new("video/x-3ivx",
			NULL);

	gst_caps_append_structure(caps, struc);

	struc = gst_structure_new("video/x-wmv",
			NULL);

	gst_caps_append_structure(caps, struc);

	struc = gst_structure_new("video/x-vp8",
			NULL);

	gst_caps_append_structure(caps, struc);

	struc = gst_structure_new("video/x-theora",
			NULL);

	gst_caps_append_structure(caps, struc);

	return caps;
}
Example #3
static GstCaps *
gst_vorbis_enc_generate_sink_caps (void)
{
  GstCaps *caps = gst_caps_new_empty ();
  int i, c;

  gst_caps_append_structure (caps, gst_structure_new ("audio/x-raw-float",
          "rate", GST_TYPE_INT_RANGE, 1, 200000,
          "channels", G_TYPE_INT, 1,
          "endianness", G_TYPE_INT, G_BYTE_ORDER, "width", G_TYPE_INT, 32,
          NULL));

  gst_caps_append_structure (caps, gst_structure_new ("audio/x-raw-float",
          "rate", GST_TYPE_INT_RANGE, 1, 200000,
          "channels", G_TYPE_INT, 2,
          "endianness", G_TYPE_INT, G_BYTE_ORDER, "width", G_TYPE_INT, 32,
          NULL));

  for (i = 3; i <= 8; i++) {
    GValue chanpos = { 0 };
    GValue pos = { 0 };
    GstStructure *structure;

    g_value_init (&chanpos, GST_TYPE_ARRAY);
    g_value_init (&pos, GST_TYPE_AUDIO_CHANNEL_POSITION);

    for (c = 0; c < i; c++) {
      g_value_set_enum (&pos, gst_vorbis_channel_positions[i - 1][c]);
      gst_value_array_append_value (&chanpos, &pos);
    }
    g_value_unset (&pos);

    structure = gst_structure_new ("audio/x-raw-float",
        "rate", GST_TYPE_INT_RANGE, 1, 200000,
        "channels", G_TYPE_INT, i,
        "endianness", G_TYPE_INT, G_BYTE_ORDER, "width", G_TYPE_INT, 32, NULL);
    gst_structure_set_value (structure, "channel-positions", &chanpos);
    g_value_unset (&chanpos);

    gst_caps_append_structure (caps, structure);
  }

  gst_caps_append_structure (caps, gst_structure_new ("audio/x-raw-float",
          "rate", GST_TYPE_INT_RANGE, 1, 200000,
          "channels", GST_TYPE_INT_RANGE, 9, 256,
          "endianness", G_TYPE_INT, G_BYTE_ORDER, "width", G_TYPE_INT, 32,
          NULL));

  return caps;
}
Example #4
static void
gst_egl_sink_base_init (gpointer g_class)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
  GstCaps *capslist;
  GstPadTemplate *sink_template = NULL;
  gint i;
  guint32 formats[][2] = {
    {GST_MAKE_FOURCC ('N', 'V', '1', '2'), GST_VIDEO_FORMAT_NV12},
    {GST_MAKE_FOURCC ('Y', 'V', '1', '2'), GST_VIDEO_FORMAT_YV12},
    {GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'), GST_VIDEO_FORMAT_UYVY},
    {GST_MAKE_FOURCC ('I', '4', '2', '0'), GST_VIDEO_FORMAT_I420}
  };

  gst_element_class_set_details_simple (element_class, "OpenGL video sink",
      "Sink/Video", "A videosink based on OpenGL",
      "Julien Isorce <*****@*****.**>");
  /* make a list of all available caps */
  capslist = gst_caps_new_empty ();
  for (i = 0; i < G_N_ELEMENTS (formats); i++) {
    gst_caps_append_structure (capslist,
        gst_structure_new ("video/x-raw-yuv",
            "format", GST_TYPE_FOURCC, formats[i][0],
            "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
            "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
            "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1,
            "width_align", G_TYPE_INT,
            gst_egl_platform_get_alignment_h (formats[i][1]),
            "height_align", G_TYPE_INT,
            gst_egl_platform_get_alignment_v (formats[i][1]), NULL));
  }
  gst_caps_append_structure (capslist,
      gst_structure_new ("video/x-raw-rgb",
          "bpp", G_TYPE_INT, 32,
          "depth", GST_TYPE_INT_RANGE, 24, 32,
          "width_align", G_TYPE_INT,
          gst_egl_platform_get_alignment_h (GST_VIDEO_FORMAT_RGBA),
          "height_align", G_TYPE_INT,
          gst_egl_platform_get_alignment_v (GST_VIDEO_FORMAT_RGBA), NULL));

  sink_template = gst_pad_template_new ("sink",
      GST_PAD_SINK, GST_PAD_ALWAYS, capslist);

  gst_element_class_add_pad_template (element_class, sink_template);
}
Example #5
void test_double_append()
{
  GstStructure *s1;
  GstCaps *c1;
  //xmlfile = "test_double_append";
  std_log(LOG_FILENAME_LINE, "Test Started test_double_append");
  c1 = gst_caps_new_any ();
  s1 = gst_structure_from_string ("audio/x-raw-int,rate=44100", NULL);
  gst_caps_append_structure (c1, s1);
  ASSERT_CRITICAL (gst_caps_append_structure (c1, s1));

  gst_caps_unref (c1);
  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
}
Example #6
void test_mutability()
{
  GstStructure *s1;
  GstCaps *c1;
  gint ret;
  //xmlfile = "test_mutability";
  std_log(LOG_FILENAME_LINE, "Test Started test_mutability");
  c1 = gst_caps_new_any ();
  s1 = gst_structure_from_string ("audio/x-raw-int,rate=44100", NULL);
  gst_structure_set (s1, "rate", G_TYPE_INT, 48000, NULL);
  gst_caps_append_structure (c1, s1);
  gst_structure_set (s1, "rate", G_TYPE_INT, 22500, NULL);
  gst_caps_ref (c1);
  ASSERT_CRITICAL (gst_structure_set (s1, "rate", G_TYPE_INT, 11250, NULL));
  fail_unless (gst_structure_get_int (s1, "rate", &ret));
  fail_unless (ret == 22500);
  ASSERT_CRITICAL (gst_caps_set_simple (c1, "rate", G_TYPE_INT, 11250, NULL));
  fail_unless (gst_structure_get_int (s1, "rate", &ret));
  fail_unless (ret == 22500);
  gst_caps_unref (c1);
  gst_structure_set (s1, "rate", G_TYPE_INT, 11250, NULL);
  fail_unless (gst_structure_get_int (s1, "rate", &ret));
  fail_unless (ret == 11250);
  gst_caps_set_simple (c1, "rate", G_TYPE_INT, 1, NULL);
  fail_unless (gst_structure_get_int (s1, "rate", &ret));
  fail_unless (ret == 1);
  gst_caps_unref (c1);
  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
}
Example #7
static void
gst_sdlvideosink_base_init (gpointer g_class)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
  GstCaps *capslist;
  gint i;
  guint32 formats[] = {
    GST_MAKE_FOURCC ('I', '4', '2', '0'),
    GST_MAKE_FOURCC ('Y', 'V', '1', '2'),
    GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'),
    GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U'),
    GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y')
  };

  /* make a list of all available caps */
  capslist = gst_caps_new_empty ();
  for (i = 0; i < G_N_ELEMENTS (formats); i++) {
    gst_caps_append_structure (capslist,
        gst_structure_new ("video/x-raw-yuv",
            "format", GST_TYPE_FOURCC, formats[i],
            "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
            "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
            "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1, NULL));
  }

  sink_template = gst_pad_template_new ("sink",
      GST_PAD_SINK, GST_PAD_ALWAYS, capslist);

  gst_element_class_add_pad_template (element_class, sink_template);
  gst_element_class_set_metadata (element_class, "SDL video sink",
      "Sink/Video", "An SDL-based videosink",
      "Ronald Bultje <*****@*****.**>, "
      "Edgard Lima <*****@*****.**>, "
      "Jan Schmidt <*****@*****.**>");
}
Example #8
static GstCaps *
legacyresample_transform_caps (GstBaseTransform * base,
    GstPadDirection direction, GstCaps * caps)
{
  const GValue *val;
  GstStructure *s;
  GstCaps *res;

  /* transform single caps into input_caps + input_caps with the rate
   * field set to our supported range. This ensures that upstream knows
   * about downstream's preferred rate(s) and can negotiate accordingly. */
  res = gst_caps_copy (caps);

  /* first, however, check if the caps contain a range for the rate field, in
   * which case that side isn't going to care much about the exact sample rate
   * chosen and we should just assume things will get fixated to something sane
   * and we may just as well offer our full range instead of the range in the
   * caps. If the rate is not an int range value, it's likely to express a
   * real preference or limitation and we should maintain that structure as
   * preference by putting it first into the transformed caps, and only add
   * our full rate range as second option  */
  s = gst_caps_get_structure (res, 0);
  val = gst_structure_get_value (s, "rate");
  if (val == NULL || GST_VALUE_HOLDS_INT_RANGE (val)) {
    /* overwrite existing range, or add field if it doesn't exist yet */
    gst_structure_set (s, "rate", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
  } else {
    /* append caps with full range to existing caps with non-range rate field */
    s = gst_structure_copy (s);
    gst_structure_set (s, "rate", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
    gst_caps_append_structure (res, s);
  }

  return res;
}
Example #9
static GstCaps *
gst_v4l2src_get_all_caps (void)
{
  static GstCaps *caps = NULL;

  if (caps == NULL) {
    GstStructure *structure;

    guint i;

    caps = gst_caps_new_empty ();
    for (i = 0; i < GST_V4L2_FORMAT_COUNT; i++) {
      structure = gst_v4l2src_v4l2fourcc_to_structure (gst_v4l2_formats[i]);
      if (structure) {
        gst_structure_set (structure,
            "width", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
            "height", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
            "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1, NULL);
        gst_caps_append_structure (caps, structure);
      }
    }
  }

  return caps;
}
Example #10
static GstCaps *
gst_gdk_pixbuf_get_capslist (void)
{
    GSList *slist;
    GSList *slist0;
    GstCaps *capslist = NULL;
    GstCaps *return_caps = NULL;
    GstCaps *tmpl_caps;

    capslist = gst_caps_new_empty ();
    slist0 = gdk_pixbuf_get_formats ();

    for (slist = slist0; slist; slist = g_slist_next (slist)) {
        GdkPixbufFormat *pixbuf_format;
        char **mimetypes;
        char **mimetype;

        pixbuf_format = slist->data;
        mimetypes = gdk_pixbuf_format_get_mime_types (pixbuf_format);

        for (mimetype = mimetypes; *mimetype; mimetype++) {
            gst_caps_append_structure (capslist, gst_structure_new (*mimetype, NULL));
        }
        g_strfreev (mimetypes);
    }
    g_slist_free (slist0);

    tmpl_caps = gst_static_caps_get (&gst_gdk_pixbuf_sink_template.static_caps);
    return_caps = gst_caps_intersect (capslist, tmpl_caps);

    gst_caps_unref (tmpl_caps);
    gst_caps_unref (capslist);
    return return_caps;
}
Example #11
/*
 * \brief Build the sink of the SU's pipeline; if this is a redirection, modify the caps to pass them to the SP pipeline
 * \param pipeline the pipeline of the stream
 * \param input the last element of the pipeline (avenc_mp4 or capsfilter), as we built our own source
 * \param caps the caps built from the SDP, to be replaced if the video is MPEG-4 or J2K
 * \return GstElement* the last element added to the pipeline (RAW: returns input, MPEG: mpeg4videoparse, J2K: capsfilter)
 */
static GstElement *handle_redirection_SU_pipeline ( GstElement *pipeline, GstCaps *caps, GstElement *input){

	GstStructure *video_caps = gst_caps_get_structure( caps , 0 );

	g_debug("handle_redirection_SU_pipeline: checks and handles Service User's pipeline for redirection");

	/* in case MPEG4 video type has been detected */
	if  (gst_structure_has_name( video_caps, "video/mpeg")){

		/* Add the MPEG-4 parser in SU pipeline */
		GstElement *parser 	= gst_element_factory_make_log ("mpeg4videoparse", MPEG4PARSER_NAME);
		if ( !parser )
			return NULL;

		g_debug("add %s to pipeline", MPEG4PARSER_NAME);

		gst_bin_add(GST_BIN(pipeline),parser);

		if ( !gst_element_link_log(input, parser))
			return NULL;

		input = parser;

	}
	/* in case J2K video type has been detected */
	else if  ( g_strv_contains ( J2K_STR_NAMES, gst_structure_get_name(video_caps))){

		GstElement *capsfilter = gst_element_factory_make_log("capsfilter", CAPSFITER_J2K_NAME );

		GstCaps *caps_jpeg2000 = get_rtpj2kpay_allowed_caps();

		/* Restrict the capsfilter to the caps accepted by rtpj2kpay */
		g_object_set (capsfilter, "caps", caps_jpeg2000 , NULL);

		g_debug("add %s to pipeline", CAPSFITER_J2K_NAME );

		gst_bin_add(GST_BIN(pipeline),capsfilter);

		if ( !gst_element_link_log(input,capsfilter )){
			g_critical("JPEG2000 format can only be %s", gst_caps_to_string( caps_jpeg2000 ) );
			return NULL;
		}

		input = capsfilter;
	}

	/*
	 * Run a new typefind to update the caps, so that the final typefind in the SP pipeline of the
	 * redirection succeeds. Update the caps accordingly.
	 */
	video_caps = type_detection(GST_BIN(pipeline), input, NULL);
	if( !video_caps )
		return NULL;

	gst_caps_append_structure( caps , gst_structure_copy ( video_caps ) );
	gst_caps_remove_structure ( caps, 0 ) ;

	return input;

}
Example #12
static GstCaps *
gst_video_scale_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps)
{
  GstCaps *ret;
  GstStructure *structure;

  /* this function is always called with a simple caps */
  g_return_val_if_fail (GST_CAPS_IS_SIMPLE (caps), NULL);

  GST_DEBUG_OBJECT (trans,
      "Transforming caps %" GST_PTR_FORMAT " in direction %s", caps,
      (direction == GST_PAD_SINK) ? "sink" : "src");

  ret = gst_caps_copy (caps);
  structure = gst_structure_copy (gst_caps_get_structure (ret, 0));

  gst_structure_set (structure,
      "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
      "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);

  /* if pixel aspect ratio, make a range of it */
  if (gst_structure_has_field (structure, "pixel-aspect-ratio")) {
    gst_structure_set (structure, "pixel-aspect-ratio", GST_TYPE_FRACTION_RANGE,
        1, G_MAXINT, G_MAXINT, 1, NULL);
  }
  gst_caps_append_structure (ret, structure);

  GST_DEBUG_OBJECT (trans, "returning caps: %" GST_PTR_FORMAT, ret);

  return ret;
}
Example #13
static void
gst_sdlvideosink_base_init (gpointer g_class)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
  GstCaps *capslist;
  gint i;
  guint32 formats[] = {
    GST_MAKE_FOURCC ('I', '4', '2', '0'),
    GST_MAKE_FOURCC ('Y', 'V', '1', '2'),
    GST_MAKE_FOURCC ('Y', 'U', 'Y', '2')
        /*
           GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U'),
           GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y')
         */
  };

  /* make a list of all available caps */
  capslist = gst_caps_new_empty ();
  for (i = 0; i < G_N_ELEMENTS (formats); i++) {
    gst_caps_append_structure (capslist,
        gst_structure_new ("video/x-raw-yuv",
            "format", GST_TYPE_FOURCC, formats[i],
            "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
            "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
            "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1, NULL));
  }

  sink_template = gst_pad_template_new ("sink",
      GST_PAD_SINK, GST_PAD_ALWAYS, capslist);

  gst_element_class_add_pad_template (element_class, sink_template);
  gst_element_class_set_details (element_class, &gst_sdlvideosink_details);
}
Example #14
static GstCaps *
generate_sink_template (void)
{
  GstCaps *caps;
  GstStructure *struc;

  caps = gst_caps_new_empty ();

  struc = gst_structure_new ("audio/x-iLBC", NULL);

  {
    GValue list = { 0 };
    GValue val = { 0 };

    g_value_init (&list, GST_TYPE_LIST);
    g_value_init (&val, G_TYPE_INT);

    g_value_set_int (&val, 20);
    gst_value_list_append_value (&list, &val);

    g_value_set_int (&val, 30);
    gst_value_list_append_value (&list, &val);

    gst_structure_set_value (struc, "mode", &list);

    g_value_unset (&val);
    g_value_unset (&list);
  }

  gst_caps_append_structure (caps, struc);

  return caps;
}
Example #15
/* threadsafe because this gets called as the plugin is loaded */
static GstCaps *
gst_video_test_src_getcaps (GstBaseSrc * unused)
{
  static GstCaps *capslist = NULL;

  if (!capslist) {
    GstCaps *caps;
    GstStructure *structure;
    int i;

    caps = gst_caps_new_empty ();
    for (i = 0; i < n_fourccs; i++) {
      structure = paint_get_structure (fourcc_list + i);
      gst_structure_set (structure,
          "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
          "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
          "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
      gst_caps_append_structure (caps, structure);
    }

    capslist = caps;
  }

  return gst_caps_copy (capslist);
}
Example #16
static gboolean gst_avdtp_sink_update_caps(GstAvdtpSink *self)
{
	sbc_capabilities_t *sbc;
	mpeg_capabilities_t *mpeg;
	GstStructure *sbc_structure;
	GstStructure *mpeg_structure;
	gchar *tmp;

	GST_LOG_OBJECT(self, "updating device caps");

	sbc = (void *) gst_avdtp_find_caps(self, BT_A2DP_SBC_SINK);
	mpeg = (void *) gst_avdtp_find_caps(self, BT_A2DP_MPEG12_SINK);

	sbc_structure = gst_avdtp_sink_parse_sbc_caps(self, sbc);
	mpeg_structure = gst_avdtp_sink_parse_mpeg_caps(self, mpeg);

	if (self->dev_caps != NULL)
		gst_caps_unref(self->dev_caps);
	self->dev_caps = gst_caps_new_full(sbc_structure, NULL);
	if (mpeg_structure != NULL)
		gst_caps_append_structure(self->dev_caps, mpeg_structure);

	tmp = gst_caps_to_string(self->dev_caps);
	GST_DEBUG_OBJECT(self, "Device capabilities: %s", tmp);
	g_free(tmp);

	return TRUE;
}
Example #17
static GstCaps *
gst_alsa_detect_formats (GstObject * obj, snd_pcm_hw_params_t * hw_params,
    GstCaps * in_caps)
{
  snd_pcm_format_mask_t *mask;
  GstStructure *s;
  GstCaps *caps;
  gint i;

  snd_pcm_format_mask_malloc (&mask);
  snd_pcm_hw_params_get_format_mask (hw_params, mask);

  caps = gst_caps_new_empty ();

  for (i = 0; i < gst_caps_get_size (in_caps); ++i) {
    GstStructure *scopy;
    gint w, width = 0, depth = 0;

    s = gst_caps_get_structure (in_caps, i);
    if (!gst_structure_has_name (s, "audio/x-raw-int")) {
      GST_DEBUG_OBJECT (obj, "skipping non-int format");
      continue;
    }
    if (!gst_structure_get_int (s, "width", &width) ||
        !gst_structure_get_int (s, "depth", &depth))
      continue;
    if (width == 0 || (width % 8) != 0)
      continue;                 /* Only full byte widths are valid */
    for (w = 0; w < G_N_ELEMENTS (pcmformats); w++)
      if (pcmformats[w].width == width && pcmformats[w].depth == depth)
        break;
    if (w == G_N_ELEMENTS (pcmformats))
      continue;                 /* Unknown format */

    if (snd_pcm_format_mask_test (mask, pcmformats[w].sformat) &&
        snd_pcm_format_mask_test (mask, pcmformats[w].uformat)) {
      /* template contains { true, false } or just one, leave it as it is */
      scopy = gst_structure_copy (s);
    } else if (snd_pcm_format_mask_test (mask, pcmformats[w].sformat)) {
      scopy = gst_structure_copy (s);
      gst_structure_set (scopy, "signed", G_TYPE_BOOLEAN, TRUE, NULL);
    } else if (snd_pcm_format_mask_test (mask, pcmformats[w].uformat)) {
      scopy = gst_structure_copy (s);
      gst_structure_set (scopy, "signed", G_TYPE_BOOLEAN, FALSE, NULL);
    } else {
      scopy = NULL;
    }
    if (scopy) {
      if (width > 8) {
        /* TODO: proper endianness detection, for now it's CPU endianness only */
        gst_structure_set (scopy, "endianness", G_TYPE_INT, G_BYTE_ORDER, NULL);
      }
      gst_caps_append_structure (caps, scopy);
    }
  }

  snd_pcm_format_mask_free (mask);
  gst_caps_unref (in_caps);
  return caps;
}
Example #18
/* copies the given caps */
static GstCaps *
gst_yuv_to_rgb_caps_remove_format_info (GstCaps * caps)
{
  GstStructure *st;
  gint i, n;
  GstCaps *res;

  res = gst_caps_new_empty ();

  n = gst_caps_get_size (caps);
  for (i = 0; i < n; i++) {
    st = gst_caps_get_structure (caps, i);

    /* If this is already expressed by the existing caps
     * skip this structure */
    if (i > 0 && gst_caps_is_subset_structure (res, st))
      continue;

    st = gst_structure_copy (st);
    gst_structure_remove_fields (st, "format",
        "colorimetry", "chroma-site", NULL);

    gst_caps_append_structure (res, st);
  }

  return res;
}
Example #19
static GstCaps *
gst_dc1394_get_all_dc1394_caps (void)
{
    /* generate all possible caps */

    GstCaps *gcaps;
    gint i = 0;

    gcaps = gst_caps_new_empty ();
    // first, the fixed mode caps
    for (i = DC1394_VIDEO_MODE_MIN; i < DC1394_VIDEO_MODE_EXIF; i++) {
        GstStructure *gs = gst_structure_empty_new ("video");
        gint ret = gst_dc1394_caps_set_format_vmode_caps (gs, i);

        gst_structure_set (gs,
                           "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
        gst_structure_set (gs, "vmode", G_TYPE_INT, i, NULL);
        if (ret >= 0) {
            gst_caps_append_structure (gcaps, gs);
        }
    }

    // then Format 7 options

    for (i = DC1394_COLOR_CODING_MIN; i <= DC1394_COLOR_CODING_MAX; i++) {
        GstStructure *gs = gst_structure_empty_new ("video");

        //int ret =  gst_dc1394_caps_set_format_vmode_caps(gs, i);

        gst_structure_set (gs, "vmode", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);

        gst_structure_set (gs,
                           "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
        gst_structure_set (gs,
                           "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
                           "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);

        if (gst_dc1394_set_caps_color (gs, i)) {
            gst_caps_append_structure (gcaps, gs);
        }
    }
    return gcaps;

}
Example #20
GstCaps *
gst_decklink_mode_get_template_caps (void)
{
  int i;
  GstCaps *caps;
  GstStructure *s;

  caps = gst_caps_new_empty ();
  for (i = 1; i < (int) G_N_ELEMENTS (modes); i++) {
    s = gst_decklink_mode_get_structure ((GstDecklinkModeEnum) i, bmdFormat8BitYUV);
    gst_caps_append_structure (caps, s);
    s = gst_decklink_mode_get_structure ((GstDecklinkModeEnum) i, bmdFormat8BitARGB);
    gst_caps_append_structure (caps, s);
  }

  return caps;
}
Example #21
GstCaps *
ks_video_get_all_caps (void)
{
  static GstCaps *caps = NULL;

  if (caps == NULL) {
    GstStructure *structure;
    caps = gst_caps_new_empty ();

    /* from Windows SDK 6.0 uuids.h */
    /* RGB formats */
    structure =
        ks_video_append_var_video_fields (ks_video_format_to_structure
        (MEDIASUBTYPE_RGB555, FORMAT_VideoInfo));
    gst_caps_append_structure (caps, structure);

    structure =
        ks_video_append_var_video_fields (ks_video_format_to_structure
        (MEDIASUBTYPE_RGB565, FORMAT_VideoInfo));
    gst_caps_append_structure (caps, structure);

    structure =
        ks_video_append_var_video_fields (ks_video_format_to_structure
        (MEDIASUBTYPE_RGB24, FORMAT_VideoInfo));
    gst_caps_append_structure (caps, structure);

    structure =
        ks_video_append_var_video_fields (ks_video_format_to_structure
        (MEDIASUBTYPE_RGB32, FORMAT_VideoInfo));
    gst_caps_append_structure (caps, structure);

    /* YUV formats */
    structure =
        ks_video_append_var_video_fields (gst_structure_new ("video/x-raw-yuv",
            NULL));
    gst_caps_append_structure (caps, structure);

    /* Other formats */
    structure =
        ks_video_append_var_video_fields (ks_video_format_to_structure
        (MEDIASUBTYPE_MJPG, FORMAT_VideoInfo));
    gst_caps_append_structure (caps, structure);

    structure =
        ks_video_append_var_video_fields (ks_video_format_to_structure
        (MEDIASUBTYPE_dvsd, FORMAT_VideoInfo));
    gst_caps_append_structure (caps, structure);

    structure =                 /* no variable video fields (width, height, framerate) */
        ks_video_format_to_structure (MEDIASUBTYPE_dvsd, FORMAT_DvInfo);
    gst_caps_append_structure (caps, structure);
  }

  return caps;
}
Example #22
static GstCaps *
generate_sink_template (void)
{
    GstCaps *caps;
    GstStructure *struc;

    caps = gst_caps_new_empty ();

    struc = gst_structure_new ("video/mpeg",
                               "mpegversion", G_TYPE_INT, 4,
                               "systemstream", G_TYPE_BOOLEAN, FALSE,
                               "width", GST_TYPE_INT_RANGE, 16, 4096,
                               "height", GST_TYPE_INT_RANGE, 16, 4096,
                               "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1,
                               NULL);

    gst_caps_append_structure (caps, struc);

    struc = gst_structure_new ("video/x-divx",
                               "divxversion", GST_TYPE_INT_RANGE, 4, 5,
                               "width", GST_TYPE_INT_RANGE, 16, 4096,
                               "height", GST_TYPE_INT_RANGE, 16, 4096,
                               "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1,
                               NULL);

    gst_caps_append_structure (caps, struc);

    struc = gst_structure_new ("video/x-xvid",
                               "width", GST_TYPE_INT_RANGE, 16, 4096,
                               "height", GST_TYPE_INT_RANGE, 16, 4096,
                               "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1,
                               NULL);

    gst_caps_append_structure (caps, struc);

    struc = gst_structure_new ("video/x-3ivx",
                               "width", GST_TYPE_INT_RANGE, 16, 4096,
                               "height", GST_TYPE_INT_RANGE, 16, 4096,
                               "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1,
                               NULL);

    gst_caps_append_structure (caps, struc);

    return caps;
}
Example #23
GstCaps *
gst_decklink_mode_get_caps (GstDecklinkModeEnum e)
{
  GstCaps *caps;

  caps = gst_caps_new_empty ();
  gst_caps_append_structure (caps, gst_decklink_mode_get_structure (e));

  return caps;
}
Example #24
GstCaps *
gst_decklink_mode_get_caps (GstDecklinkModeEnum e, BMDPixelFormat f)
{
  GstCaps *caps;

  caps = gst_caps_new_empty ();
  gst_caps_append_structure (caps, gst_decklink_mode_get_structure (e, f));

  return caps;
}
Example #25
static GstCaps* gst_imx_blitter_video_transform_transform_caps(GstBaseTransform *transform, G_GNUC_UNUSED GstPadDirection direction, GstCaps *caps, GstCaps *filter)
{
	GstCaps *tmpcaps1, *tmpcaps2, *result;
	GstStructure *structure;
	gint i, n;

	tmpcaps1 = gst_caps_new_empty();
	n = gst_caps_get_size(caps);
	for (i = 0; i < n; i++)
	{
		structure = gst_caps_get_structure(caps, i);

		/* If this is already expressed by the existing caps
		 * skip this structure */
		if ((i > 0) && gst_caps_is_subset_structure(tmpcaps1, structure))
			continue;

		/* make copy */
		structure = gst_structure_copy(structure);
		gst_structure_set(
			structure,
			"width", GST_TYPE_INT_RANGE, 64, G_MAXINT,
			"height", GST_TYPE_INT_RANGE, 64, G_MAXINT,
			NULL
		);

		/* colorimetry is not supported by the videotransform element */
		gst_structure_remove_fields(structure, "format", "colorimetry", "chroma-site", NULL);

		/* if pixel aspect ratio, make a range of it */
		if (gst_structure_has_field(structure, "pixel-aspect-ratio"))
		{
			gst_structure_set(
				structure,
				"pixel-aspect-ratio", GST_TYPE_FRACTION_RANGE, 1, G_MAXINT, G_MAXINT, 1,
				NULL
			);
		}
		gst_caps_append_structure(tmpcaps1, structure);
	}

	/* filter the resulting caps if necessary */
	if (filter != NULL)
	{
		tmpcaps2 = gst_caps_intersect_full(filter, tmpcaps1, GST_CAPS_INTERSECT_FIRST);
		gst_caps_unref(tmpcaps1);
		tmpcaps1 = tmpcaps2;
	}

	result = tmpcaps1;

	GST_DEBUG_OBJECT(transform, "transformed %" GST_PTR_FORMAT " into %" GST_PTR_FORMAT, (gpointer)caps, (gpointer)result);

	return result;
}
Example #26
static GstCaps *
gst_aravis_get_all_camera_caps (GstAravis *gst_aravis)
{
	GstCaps *caps;
	gint64 *pixel_formats;
	double min_frame_rate, max_frame_rate;
	int min_height, min_width;
	int max_height, max_width;
	unsigned int n_pixel_formats, i;

	g_return_val_if_fail (GST_IS_ARAVIS (gst_aravis), NULL);

	if (!ARV_IS_CAMERA (gst_aravis->camera))
		return NULL;

	GST_LOG_OBJECT (gst_aravis, "Get all camera caps");

	arv_camera_get_width_bounds (gst_aravis->camera, &min_width, &max_width);
	arv_camera_get_height_bounds (gst_aravis->camera, &min_height, &max_height);
	pixel_formats = arv_camera_get_available_pixel_formats (gst_aravis->camera, &n_pixel_formats);
	arv_camera_get_frame_rate_bounds (gst_aravis->camera, &min_frame_rate, &max_frame_rate);

	int min_frame_rate_numerator;
	int min_frame_rate_denominator;
	gst_util_double_to_fraction (min_frame_rate, &min_frame_rate_numerator, &min_frame_rate_denominator);

	int max_frame_rate_numerator;
	int max_frame_rate_denominator;
	gst_util_double_to_fraction (max_frame_rate, &max_frame_rate_numerator, &max_frame_rate_denominator);

	caps = gst_caps_new_empty ();
	for (i = 0; i < n_pixel_formats; i++) {
		const char *caps_string;

		caps_string = arv_pixel_format_to_gst_0_10_caps_string (pixel_formats[i]);

		if (caps_string != NULL) {
			GstStructure *structure;

			structure = gst_structure_from_string (caps_string, NULL);
			gst_structure_set (structure,
					   "width", GST_TYPE_INT_RANGE, min_width, max_width,
					   "height", GST_TYPE_INT_RANGE, min_height, max_height,
					   "framerate", GST_TYPE_FRACTION_RANGE,
							   min_frame_rate_numerator, min_frame_rate_denominator,
							   max_frame_rate_numerator, max_frame_rate_denominator,
					   NULL);
			gst_caps_append_structure (caps, structure);
		}
	}

	g_free (pixel_formats);

	return caps;
}
Example #27
static GstCaps *
generate_sink_template (void)
{
  GstCaps *caps;
  GstStructure *struc;

  caps = gst_caps_new_empty ();

  struc = gst_structure_new ("audio/x-alaw",
      "rate", G_TYPE_INT, 8000, "channels", G_TYPE_INT, 1, NULL);

  gst_caps_append_structure (caps, struc);

  struc = gst_structure_new ("audio/x-mulaw",
      "rate", G_TYPE_INT, 8000, "channels", G_TYPE_INT, 1, NULL);

  gst_caps_append_structure (caps, struc);

  return caps;
}
static GstCaps *
gst_alpha_color_transform_caps (GstBaseTransform * btrans,
    GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
  GstCaps *tmpl_caps = NULL;
  GstCaps *result = NULL, *local_caps = NULL;
  guint i;

  local_caps = gst_caps_new_empty ();

  for (i = 0; i < gst_caps_get_size (caps); i++) {
    GstStructure *structure =
        gst_structure_copy (gst_caps_get_structure (caps, i));

    /* Remove any specific parameter from the structure */
    gst_structure_remove_field (structure, "format");
    gst_structure_remove_field (structure, "colorimetry");
    gst_structure_remove_field (structure, "chroma-site");

    gst_structure_set_name (structure, "video/x-raw");
    gst_caps_append_structure (local_caps, structure);
  }

  /* Get the appropriate template */
  if (direction == GST_PAD_SINK) {
    tmpl_caps = gst_static_pad_template_get_caps (&src_template);
  } else if (direction == GST_PAD_SRC) {
    tmpl_caps = gst_static_pad_template_get_caps (&sink_template);
  }

  /* Intersect with our template caps */
  result = gst_caps_intersect (local_caps, tmpl_caps);
  gst_caps_unref (tmpl_caps);
  gst_caps_unref (local_caps);

  result = gst_caps_simplify (result);

  GST_LOG_OBJECT (btrans, "transformed %" GST_PTR_FORMAT " to %" GST_PTR_FORMAT,
      caps, result);

  if (filter) {
    GstCaps *intersection;

    GST_DEBUG_OBJECT (btrans, "Using filter caps %" GST_PTR_FORMAT, filter);
    intersection =
        gst_caps_intersect_full (filter, result, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (result);
    result = intersection;
    GST_DEBUG_OBJECT (btrans, "Intersection %" GST_PTR_FORMAT, result);
  }


  return result;
}
static void
check_filter_varargs (const gchar * name, GstEvent * event, gint num_buffers,
    const gchar * prop, va_list varargs)
{
  static const struct
  {
    const int width, height;
  } resolutions[] = {
    {384, 288}, {385, 289}, {385, 385}
  };
  gint i, n, r;
  gint size;
  GstCaps *allcaps, *templ = gst_caps_from_string (VIDEO_CAPS_TEMPLATE_STRING);

  allcaps = gst_caps_normalize (templ);

  n = gst_caps_get_size (allcaps);

  for (i = 0; i < n; i++) {
    GstStructure *s = gst_caps_get_structure (allcaps, i);
    GstCaps *caps = gst_caps_new_empty ();

    gst_caps_append_structure (caps, gst_structure_copy (s));

    /* try various resolutions */
    for (r = 0; r < G_N_ELEMENTS (resolutions); ++r) {
      GstVideoInfo info;
      va_list args_cp;

      caps = gst_caps_make_writable (caps);
      gst_caps_set_simple (caps, "width", G_TYPE_INT, resolutions[r].width,
          "height", G_TYPE_INT, resolutions[r].height,
          "framerate", GST_TYPE_FRACTION, 25, 1, NULL);

      GST_DEBUG ("Testing with caps: %" GST_PTR_FORMAT, caps);
      gst_video_info_from_caps (&info, caps);
      size = GST_VIDEO_INFO_SIZE (&info);

      if (event)
        gst_event_ref (event);

      va_copy (args_cp, varargs);
      check_filter_caps (name, event, caps, size, num_buffers, prop, args_cp);
      va_end (args_cp);
    }

    gst_caps_unref (caps);
  }

  gst_caps_unref (allcaps);
  if (event)
    gst_event_unref (event);
}
GstElement *QGstreamerCaptureSession::buildVideoPreview()
{
    GstElement *previewElement = 0;

    if (m_viewfinderInterface) {
        GstElement *bin = gst_bin_new("video-preview-bin");
        GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-preview");
        GstElement *capsFilter = gst_element_factory_make("capsfilter", "capsfilter-video-preview");
        GstElement *preview = m_viewfinderInterface->videoSink();

        gst_bin_add_many(GST_BIN(bin), colorspace, capsFilter, preview,  NULL);
        gst_element_link(colorspace,capsFilter);
        gst_element_link(capsFilter,preview);

        QSize resolution;
        qreal frameRate = 0;

        if (m_captureMode & Video) {
            QVideoEncoderSettings videoSettings = m_videoEncodeControl->videoSettings();
            resolution = videoSettings.resolution();
            frameRate = videoSettings.frameRate();
        } else if (m_captureMode & Image) {
            resolution = m_imageEncodeControl->imageSettings().resolution();
        }

        if (!resolution.isEmpty() || frameRate > 0.001) {
            GstCaps *caps = gst_caps_new_empty();
            QStringList structureTypes;
            structureTypes << "video/x-raw-yuv" << "video/x-raw-rgb";

            foreach(const QString &structureType, structureTypes) {
                GstStructure *structure = gst_structure_new(structureType.toAscii().constData(), NULL);

                if (!resolution.isEmpty()) {
                    gst_structure_set(structure, "width", G_TYPE_INT, resolution.width(), NULL);
                    gst_structure_set(structure, "height", G_TYPE_INT, resolution.height(), NULL);
                }

                if (frameRate > 0.001) {
                    QPair<int,int> rate = m_videoEncodeControl->rateAsRational();

                    //qDebug() << "frame rate:" << num << denum;

                    gst_structure_set(structure, "framerate", GST_TYPE_FRACTION, rate.first, rate.second, NULL);
                }

                gst_caps_append_structure(caps,structure);
            }

            //qDebug() << "set video preview caps filter:" << gst_caps_to_string(caps);

            g_object_set(G_OBJECT(capsFilter), "caps", caps, NULL);

        }