// TODO: gets formats for cameras, when a format returns a range it gets // in steps /2 and *2 from min to max and max to min, for format7 it should be free to get any size static void get_supported_video_formats (ofGstDevice &webcam_device, GstCaps &caps, int desired_framerate) { int num_structures; num_structures = gst_caps_get_size (&caps); for (int i = 0; i < num_structures; i++){ GstStructure *structure; const GValue *width, *height; structure = gst_caps_get_structure (&caps, i); width = gst_structure_get_value (structure, "width"); height = gst_structure_get_value (structure, "height"); if (G_VALUE_HOLDS_INT (width)){ ofGstVideoFormat video_format; video_format.mimetype = gst_structure_get_name (structure); gst_structure_get_int (structure, "width", &(video_format.width)); gst_structure_get_int (structure, "height", &(video_format.height)); add_video_format(webcam_device, video_format, *structure, desired_framerate); }else if (GST_VALUE_HOLDS_INT_RANGE (width)){ int min_width, max_width, min_height, max_height; int cur_width, cur_height; min_width = gst_value_get_int_range_min (width); max_width = gst_value_get_int_range_max (width); min_height = gst_value_get_int_range_min (height); max_height = gst_value_get_int_range_max (height); cur_width = min_width; cur_height = min_height; while (cur_width <= max_width && cur_height <= max_height){ ofGstVideoFormat video_format; video_format.mimetype = gst_structure_get_name (structure); video_format.width = cur_width; video_format.height = cur_height; add_video_format(webcam_device, video_format, *structure, desired_framerate); cur_width *= 2; cur_height *= 2; } cur_width = max_width; cur_height = max_height; while (cur_width > min_width && cur_height > min_height){ ofGstVideoFormat video_format; video_format.mimetype = gst_structure_get_name (structure); video_format.width = cur_width; video_format.height = cur_height; add_video_format(webcam_device, video_format, *structure, desired_framerate); cur_width /= 2; 
cur_height /= 2; } }else{ ofLog(OF_LOG_ERROR, "unknown GValue type %s, for resolution width", G_VALUE_TYPE_NAME (width)); } } }
/* Gst::IntRange#to_a: return the wrapped int-range as a two-element Ruby
 * array [min, max]. */
static VALUE
int_range_to_a(VALUE self)
{
    GValue *range = RVAL2GOBJ(self);

    return rb_ary_new3(2,
                       INT2NUM(gst_value_get_int_range_min(range)),
                       INT2NUM(gst_value_get_int_range_max(range)));
}
/* Ruby-to-GLib conversion hook: copy the int-range held by the Ruby
 * object `value` into the GValue `result`. */
static void
int_range_rvalue2gvalue(VALUE value, GValue *result)
{
    GValue *src = RVAL2GOBJ(value);
    gint lower = gst_value_get_int_range_min(src);
    gint upper = gst_value_get_int_range_max(src);

    gst_value_set_int_range(result, lower, upper);
}
/* Gst::IntRange#max=: replace the upper bound of the wrapped int-range
 * while keeping the current lower bound.  Returns nil. */
static VALUE
int_range_set_max(VALUE self, VALUE max)
{
    GValue *range = RVAL2GOBJ(self);
    gint lower = gst_value_get_int_range_min(range);

    gst_value_set_int_range(range, lower, NUM2INT(max));
    return Qnil;
}
/* Test helper: read the int-range stored under `field` in structure `s`
 * into *min_v / *max_v, failing the test if the field is missing or does
 * not hold an int range. */
static void
_get_int_range (GstStructure * s, const gchar * field, gint * min_v,
    gint * max_v)
{
  const GValue *v = gst_structure_get_value (s, field);

  fail_unless (v != NULL);
  fail_unless (GST_VALUE_HOLDS_INT_RANGE (v));

  *min_v = gst_value_get_int_range_min (v);
  *max_v = gst_value_get_int_range_max (v);
}
/* Halve (sink direction) or double (src direction) a caps value that is
 * either a plain int or an int range, saturating ranges at G_MAXINT so
 * the doubling cannot overflow.  Unhandled value types fall back to the
 * int 100. */
static void
transform_value (GValue * dest, const GValue * src, GstPadDirection dir)
{
  g_value_init (dest, G_VALUE_TYPE (src));

  if (G_VALUE_HOLDS_INT (src)) {
    int v = g_value_get_int (src);

    g_value_set_int (dest, (dir == GST_PAD_SINK) ? v / 2 : v * 2);
  } else if (GST_VALUE_HOLDS_INT_RANGE (src)) {
    int lo = gst_value_get_int_range_min (src);
    int hi = gst_value_get_int_range_max (src);

    if (dir == GST_PAD_SINK) {
      /* round the bounds up so the halved range still covers the input */
      lo = (lo + 1) / 2;
      hi = (hi == G_MAXINT) ? G_MAXINT / 2 : (hi + 1) / 2;
    } else {
      /* saturate instead of overflowing when doubling */
      hi = (hi > G_MAXINT / 2) ? G_MAXINT : hi * 2;
      lo = (lo > G_MAXINT / 2) ? G_MAXINT : lo * 2;
    }
    gst_value_set_int_range (dest, lo, hi);
  } else {
    /* FIXME */
    g_warning ("case not handled");
    g_value_set_int (dest, 100);
  }
}
static PyObject * gi_gst_int_range_from_value (const GValue * value) { gint min, max, step; PyObject *int_range_type, *int_range, *range; min = gst_value_get_int_range_min (value); max = gst_value_get_int_range_max (value); step = gst_value_get_int_range_step (value); int_range_type = gi_gst_get_type ("IntRange"); range = PyObject_CallFunction ((PyObject *) & PyRange_Type, "iii", min, max, step); int_range = PyObject_CallFunction (int_range_type, "O", range); Py_DECREF (int_range_type); Py_DECREF (range); return int_range; }
static int lgm_device_fixate_int_value (const GValue * val) { int ret; if (G_VALUE_TYPE (val) == GST_TYPE_INT_RANGE) { ret = gst_value_get_int_range_min (val); } else if (G_VALUE_TYPE (val) == GST_TYPE_ARRAY) { const GValue *kid = gst_value_array_get_value (val, 0); ret = g_value_get_int (kid); } else if (G_VALUE_TYPE (val) == GST_TYPE_LIST) { const GValue *kid = gst_value_list_get_value (val, 0); ret = g_value_get_int (kid); } else { ret = g_value_get_int (val); } /* For sources returning template caps set width and height to 0 */ if (ret == 1) { ret = 0; } return ret; }
// TODO: gets formats for cameras, when a format return a range it gets
// in steps /2 and *2 from min to max, for format7 it should be free to get any size
//
// Enumerate the resolutions a camera supports from its caps, register each
// one via add_video_format(), then sort the accumulated format list and
// rebuild the "WxH" -> index hashtable (the sort invalidates the old one).
static void get_supported_video_formats (ofGstDevice &webcam_device, GstCaps &caps)
{
  int i;
  int num_structures;

  num_structures = gst_caps_get_size (&caps);
  for (i = 0; i < num_structures; i++) {
    GstStructure *structure;
    const GValue *width, *height;

    structure = gst_caps_get_structure (&caps, i);
    width = gst_structure_get_value (structure, "width");
    height = gst_structure_get_value (structure, "height");
    if (G_VALUE_HOLDS_INT (width)) {
      // Fixed resolution: take it as-is.  NOTE(review): the format is
      // heap-allocated; presumably ownership passes to add_video_format()/
      // webcam_device -- confirm it is freed there.
      ofGstVideoFormat * video_format = new ofGstVideoFormat;

      video_format->mimetype = g_strdup (gst_structure_get_name (structure));
      gst_structure_get_int (structure, "width", &(video_format->width));
      gst_structure_get_int (structure, "height", &(video_format->height));
      add_video_format(webcam_device, video_format, *structure);
    } else if (GST_VALUE_HOLDS_INT_RANGE (width)) {
      int min_width, max_width, min_height, max_height;
      int cur_width, cur_height;

      min_width = gst_value_get_int_range_min (width);
      max_width = gst_value_get_int_range_max (width);
      min_height = gst_value_get_int_range_min (height);
      max_height = gst_value_get_int_range_max (height);

      cur_width = min_width;
      cur_height = min_height;
      /* Gstreamer will sometimes give us a range with min_xxx == max_xxx,
         we use <= here (and not below) to make this work */
      // NOTE(review): if a driver reports min_width == 0 this loop never
      // terminates (0 * 2 stays 0); other variants of this code clamp the
      // minimum before looping -- confirm whether that is needed here.
      while (cur_width <= max_width && cur_height <= max_height) {
        ofGstVideoFormat * video_format = new ofGstVideoFormat;

        video_format->mimetype = g_strdup (gst_structure_get_name (structure));
        video_format->width = cur_width;
        video_format->height = cur_height;
        add_video_format(webcam_device, video_format, *structure);
        cur_width *= 2;
        cur_height *= 2;
      }

      // Walk back down from the maximum in /2 steps; the strict > avoids
      // re-adding the minimum resolution inserted by the loop above.
      cur_width = max_width;
      cur_height = max_height;
      while (cur_width > min_width && cur_height > min_height) {
        ofGstVideoFormat * video_format = new ofGstVideoFormat;

        video_format->mimetype = g_strdup (gst_structure_get_name (structure));
        video_format->width = cur_width;
        video_format->height = cur_height;
        add_video_format(webcam_device, video_format, *structure);
        cur_width /= 2;
        cur_height /= 2;
      }
    } else {
      g_critical ("GValue type %s, cannot be handled for resolution width", G_VALUE_TYPE_NAME (width));
    }
  }

  /* Sort the format array (so that it will show sorted in the resolution
     selection GUI), and rebuild the hashtable (as that will be invalid
     after the sorting) */
  sort (webcam_device.video_formats.begin(), webcam_device.video_formats.end(), resolution_compare);
  g_hash_table_remove_all (webcam_device.supported_resolutions);
  for (i = 0; i < webcam_device.num_video_formats; i++) {
    ofGstVideoFormat * format = webcam_device.video_formats[i];

    // Key is a freshly-allocated "WxH" string; value is index+1 so that a
    // NULL lookup result can be distinguished from index 0.
    g_hash_table_insert (webcam_device.supported_resolutions,
        g_strdup_printf ("%ix%i", format->width, format->height),
        GINT_TO_POINTER(i + 1));
  }
}
/* Unit test: simplify a known non-simple caps string and verify the
 * result collapses to exactly two structures (one RGB, one YUV) with the
 * expected bpp/depth/endianness, fourcc list, framerate fraction range
 * and 16..4096 width/height int ranges. */
void
test_simplify ()
{
  GstStructure *s1, *s2;
  gboolean did_simplify;
  GstCaps *caps;

  caps = gst_caps_from_string (non_simple_caps_string);
  fail_unless (caps != NULL,
      "gst_caps_from_string (non_simple_caps_string) failed");

  did_simplify = gst_caps_do_simplify (caps);
  fail_unless (did_simplify == TRUE,
      "gst_caps_do_simplify() should have worked");

  /* check simplified caps, should be:
   *
   * video/x-raw-rgb, bpp=(int)8, depth=(int)8, endianness=(int)1234,
   *     framerate=(fraction)[ 1/100, 100 ], width=(int)[ 16, 4096 ],
   *     height=(int)[ 16, 4096 ];
   * video/x-raw-yuv, format=(fourcc){ YV12, YUY2, I420 },
   *     width=(int)[ 16, 4096 ], height=(int)[ 16, 4096 ],
   *     framerate=(fraction)[ 1/100, 100 ]
   */
  fail_unless (gst_caps_get_size (caps) == 2);
  s1 = gst_caps_get_structure (caps, 0);
  s2 = gst_caps_get_structure (caps, 1);
  fail_unless (s1 != NULL);
  fail_unless (s2 != NULL);

  /* simplification does not guarantee structure order; normalise so s1 is
   * always the RGB structure */
  if (!gst_structure_has_name (s1, "video/x-raw-rgb")) {
    GstStructure *tmp;

    tmp = s1;
    s1 = s2;
    s2 = tmp;
  }

  /* check the RGB structure */
  fail_unless (gst_structure_has_name (s1, "video/x-raw-rgb"));
  {
    const GValue *framerate_value;
    const GValue *width_value;
    const GValue *height_value;
    const GValue *val_fps;
    GValue test_fps = { 0, };
    gint bpp, depth, endianness;
    gint min_width, max_width;
    gint min_height, max_height;

    fail_unless (gst_structure_get_int (s1, "bpp", &bpp));
    fail_unless (bpp == 8);
    fail_unless (gst_structure_get_int (s1, "depth", &depth));
    fail_unless (depth == 8);
    fail_unless (gst_structure_get_int (s1, "endianness", &endianness));
    fail_unless (endianness == G_LITTLE_ENDIAN);

    /* framerate must be the fraction range [1/100, 100/1]; fractions are
     * compared via a scratch GValue because there is no direct getter */
    g_value_init (&test_fps, GST_TYPE_FRACTION);
    framerate_value = gst_structure_get_value (s1, "framerate");
    fail_unless (framerate_value != NULL);
    fail_unless (GST_VALUE_HOLDS_FRACTION_RANGE (framerate_value));
    val_fps = gst_value_get_fraction_range_min (framerate_value);
    gst_value_set_fraction (&test_fps, 1, 100);
    fail_unless (gst_value_compare (&test_fps, val_fps) == GST_VALUE_EQUAL);
    val_fps = gst_value_get_fraction_range_max (framerate_value);
    gst_value_set_fraction (&test_fps, 100, 1);
    fail_unless (gst_value_compare (&test_fps, val_fps) == GST_VALUE_EQUAL);
    g_value_unset (&test_fps);

    width_value = gst_structure_get_value (s1, "width");
    fail_unless (width_value != NULL);
    fail_unless (GST_VALUE_HOLDS_INT_RANGE (width_value));
    min_width = gst_value_get_int_range_min (width_value);
    max_width = gst_value_get_int_range_max (width_value);
    fail_unless (min_width == 16 && max_width == 4096);

    height_value = gst_structure_get_value (s1, "height");
    fail_unless (height_value != NULL);
    fail_unless (GST_VALUE_HOLDS_INT_RANGE (height_value));
    min_height = gst_value_get_int_range_min (height_value);
    max_height = gst_value_get_int_range_max (height_value);
    fail_unless (min_height == 16 && max_height == 4096);
  }

  /* check the YUV structure */
  fail_unless (gst_structure_has_name (s2, "video/x-raw-yuv"));
  {
    const GValue *framerate_value;
    const GValue *format_value;
    const GValue *width_value;
    const GValue *height_value;
    const GValue *val_fps;
    GValue test_fps = { 0, };
    gint min_width, max_width;
    gint min_height, max_height;

    /* the three fourcc formats must have been merged into one list */
    format_value = gst_structure_get_value (s2, "format");
    fail_unless (format_value != NULL);
    fail_unless (GST_VALUE_HOLDS_LIST (format_value));
    fail_unless (gst_value_list_get_size (format_value) == 3);
    fail_unless (check_fourcc_list (format_value) == TRUE);

    g_value_init (&test_fps, GST_TYPE_FRACTION);
    framerate_value = gst_structure_get_value (s2, "framerate");
    fail_unless (framerate_value != NULL);
    fail_unless (GST_VALUE_HOLDS_FRACTION_RANGE (framerate_value));
    val_fps = gst_value_get_fraction_range_min (framerate_value);
    gst_value_set_fraction (&test_fps, 1, 100);
    fail_unless (gst_value_compare (&test_fps, val_fps) == GST_VALUE_EQUAL);
    val_fps = gst_value_get_fraction_range_max (framerate_value);
    gst_value_set_fraction (&test_fps, 100, 1);
    fail_unless (gst_value_compare (&test_fps, val_fps) == GST_VALUE_EQUAL);
    g_value_unset (&test_fps);

    width_value = gst_structure_get_value (s2, "width");
    fail_unless (width_value != NULL);
    fail_unless (GST_VALUE_HOLDS_INT_RANGE (width_value));
    min_width = gst_value_get_int_range_min (width_value);
    max_width = gst_value_get_int_range_max (width_value);
    fail_unless (min_width == 16 && max_width == 4096);

    height_value = gst_structure_get_value (s2, "height");
    fail_unless (height_value != NULL);
    fail_unless (GST_VALUE_HOLDS_INT_RANGE (height_value));
    min_height = gst_value_get_int_range_min (height_value);
    max_height = gst_value_get_int_range_max (height_value);
    fail_unless (min_height == 16 && max_height == 4096);
  }

  gst_caps_unref (caps);
}
/* Transform one caps dimension value (width or height) by `delta` crop
 * pixels via gst_video_crop_transform_dimension(), writing the result
 * into the uninitialised `dest_val`.
 *
 * When `dynamic` is TRUE the transformed value is widened into a range
 * ([ival, G_MAXINT] towards the src pad, [1, ival] towards the sink pad)
 * so renegotiation stays possible; otherwise a fixed int is produced.
 * Plain ints, int ranges and lists of either are handled; lists recurse
 * per element.  Returns FALSE (with dest_val left unset) for unsupported
 * types or when a transformed list ends up empty. */
static gboolean
gst_video_crop_transform_dimension_value (const GValue * src_val,
    gint delta, GValue * dest_val, GstPadDirection direction, gboolean dynamic)
{
  gboolean ret = TRUE;

  if (G_VALUE_HOLDS_INT (src_val)) {
    gint ival = g_value_get_int (src_val);

    ival = gst_video_crop_transform_dimension (ival, delta);
    if (dynamic) {
      if (direction == GST_PAD_SRC) {
        /* already saturated: a range [G_MAXINT, G_MAXINT] is invalid, so
         * keep a fixed int instead */
        if (ival == G_MAXINT) {
          g_value_init (dest_val, G_TYPE_INT);
          g_value_set_int (dest_val, ival);
        } else {
          g_value_init (dest_val, GST_TYPE_INT_RANGE);
          gst_value_set_int_range (dest_val, ival, G_MAXINT);
        }
      } else {
        /* same degenerate-range guard at the lower bound */
        if (ival == 1) {
          g_value_init (dest_val, G_TYPE_INT);
          g_value_set_int (dest_val, ival);
        } else {
          g_value_init (dest_val, GST_TYPE_INT_RANGE);
          gst_value_set_int_range (dest_val, 1, ival);
        }
      }
    } else {
      g_value_init (dest_val, G_TYPE_INT);
      g_value_set_int (dest_val, ival);
    }
  } else if (GST_VALUE_HOLDS_INT_RANGE (src_val)) {
    gint min = gst_value_get_int_range_min (src_val);
    gint max = gst_value_get_int_range_max (src_val);

    min = gst_video_crop_transform_dimension (min, delta);
    max = gst_video_crop_transform_dimension (max, delta);
    if (dynamic) {
      if (direction == GST_PAD_SRC)
        max = G_MAXINT;
      else
        min = 1;
    }
    /* collapse a degenerate range to a fixed int */
    if (min == max) {
      g_value_init (dest_val, G_TYPE_INT);
      g_value_set_int (dest_val, min);
    } else {
      g_value_init (dest_val, GST_TYPE_INT_RANGE);
      gst_value_set_int_range (dest_val, min, max);
    }
  } else if (GST_VALUE_HOLDS_LIST (src_val)) {
    gint i;

    /* transform each list entry; entries that fail to transform are
     * silently dropped */
    g_value_init (dest_val, GST_TYPE_LIST);
    for (i = 0; i < gst_value_list_get_size (src_val); ++i) {
      const GValue *list_val;
      GValue newval = { 0, };

      list_val = gst_value_list_get_value (src_val, i);
      if (gst_video_crop_transform_dimension_value (list_val, delta,
              &newval, direction, dynamic))
        gst_value_list_append_value (dest_val, &newval);
      g_value_unset (&newval);
    }
    if (gst_value_list_get_size (dest_val) == 0) {
      g_value_unset (dest_val);
      ret = FALSE;
    }
  } else {
    ret = FALSE;
  }

  return ret;
}
/*
 * cheese_camera_device_update_format_table:
 * @device: a #CheeseCameraDevice
 *
 * Clear the current list of video formats supported by the @device and create
 * it anew.
 *
 * Fixed-size caps contribute one format each; int-range caps are sampled
 * in power-of-two steps from the (clamped) minimum up and from the
 * maximum down.
 */
static void
cheese_camera_device_update_format_table (CheeseCameraDevice *device)
{
  CheeseCameraDevicePrivate *priv = device->priv;
  guint i;
  guint num_structures;

  free_format_list (device);

  num_structures = gst_caps_get_size (priv->caps);
  for (i = 0; i < num_structures; i++) {
    GstStructure *structure;
    const GValue *width, *height, *framerate;

    structure = gst_caps_get_structure (priv->caps, i);
    width = gst_structure_get_value (structure, "width");
    height = gst_structure_get_value (structure, "height");
    framerate = gst_structure_get_value (structure, "framerate");
    if (G_VALUE_HOLDS_INT (width)) {
      /* fixed resolution: add it as-is */
      CheeseVideoFormatFull *format = g_slice_new0 (CheeseVideoFormatFull);

      gst_structure_get_int (structure, "width", &(format->width));
      gst_structure_get_int (structure, "height", &(format->height));
      cheese_camera_device_add_format (device, format, framerate);
    } else if (GST_VALUE_HOLDS_INT_RANGE (width)) {
      gint min_width, max_width, min_height, max_height;
      gint cur_width, cur_height;

      min_width = gst_value_get_int_range_min (width);
      max_width = gst_value_get_int_range_max (width);
      min_height = gst_value_get_int_range_min (height);
      max_height = gst_value_get_int_range_max (height);

      /* Some devices report a very small min_width / height down to reporting
       * 0x0 as minimum resolution, which causes an infinte loop below, limit
       * these to something reasonable. */
      if (min_width < 160)
        min_width = 160;
      if (min_height < 120)
        min_height = 120;

      cur_width = min_width;
      cur_height = min_height;

      /* Gstreamer will sometimes give us a range with min_xxx == max_xxx,
       * we use <= here (and not below) to make this work */
      while (cur_width <= max_width && cur_height <= max_height) {
        CheeseVideoFormatFull *format = g_slice_new0 (CheeseVideoFormatFull);

        /* Gstreamer wants resolutions for YUV formats where the width is
         * a multiple of 8, and the height is a multiple of 2 */
        format->width = cur_width & ~7;
        format->height = cur_height & ~1;

        cheese_camera_device_add_format (device, format, framerate);

        cur_width *= 2;
        cur_height *= 2;
      }

      /* walk back down from the maximum; strict > avoids duplicating the
       * minimum resolution added above */
      cur_width = max_width;
      cur_height = max_height;
      while (cur_width > min_width && cur_height > min_height) {
        CheeseVideoFormatFull *format = g_slice_new0 (CheeseVideoFormatFull);

        /* Gstreamer wants resolutions for YUV formats where the width is
         * a multiple of 8, and the height is a multiple of 2 */
        format->width = cur_width & ~7;
        format->height = cur_height & ~1;

        cheese_camera_device_add_format (device, format, framerate);

        cur_width /= 2;
        cur_height /= 2;
      }
    } else {
      g_critical ("GValue type %s, cannot be handled for resolution width",
          G_VALUE_TYPE_NAME (width));
    }
  }
}
/**
 * fill FarsightCodec fields based on payloader capabilities
 * TODO: optimise using quarks
 *
 * gst_structure_foreach()-style callback: user_data is the FsCodec being
 * filled in.  Returns FALSE to abort iteration when a field has an
 * unexpected type or an out-of-range payload value; returns TRUE for
 * fields that were consumed or deliberately ignored.
 */
static gboolean
extract_field_data (GQuark field_id, const GValue *value, gpointer user_data)
{
  /* TODO : This can be called several times from different rtp caps for the
   * same codec, it would be good to make sure any duplicate values are the
   * same, if not then we have several rtp elements that are giving different
   * caps information, therefore they need to be fixed */
  FsCodec *codec = (FsCodec *) user_data;
  GType type = G_VALUE_TYPE (value);
  const gchar *field_name = g_quark_to_string (field_id);
  const gchar *tmp;

  if (0 == strcmp (field_name, "media")) {
    if (type != G_TYPE_STRING) {
      return FALSE;
    }
    tmp = g_value_get_string (value);
    if (strcmp (tmp, "audio") == 0) {
      codec->media_type = FS_MEDIA_TYPE_AUDIO;
    } else if (strcmp (tmp, "video") == 0) {
      codec->media_type = FS_MEDIA_TYPE_VIDEO;
    }
  } else if (0 == strcmp (field_name, "payload")) {
    if (type == GST_TYPE_INT_RANGE) {
      /* a range is only acceptable if it lies entirely in the dynamic
       * payload-type space 96..255 */
      if (gst_value_get_int_range_min (value) < 96 ||
          gst_value_get_int_range_max (value) > 255) {
        return FALSE;
      }
    } else if (type == G_TYPE_INT) {
      int id;

      id = g_value_get_int (value);
      /* NOTE(review): the range branch above treats 96 as the start of the
       * dynamic space, but this check lets id == 96 through -- confirm
       * whether `id >= 96` was intended. */
      if (id > 96) {
        /* Dynamic id that was explicitelly set ??
           shouldn't happen */
        return FALSE;
      }
      codec->id = id;
    } else {
      return FALSE;
    }
  } else if (0 == strcmp (field_name, "clock-rate")) {
    if (type == GST_TYPE_INT_RANGE) {
      /* set to 0, this should be checked by the optional parameters code later
       * in Farsight */
      codec->clock_rate = 0;
      return TRUE;
    } else if (type != G_TYPE_INT) {
      return FALSE;
    }
    codec->clock_rate = g_value_get_int (value);
  } else if (0 == strcmp (field_name, "ssrc") ||
      0 == strcmp (field_name, "clock-base") ||
      0 == strcmp (field_name, "seqnum-base")) {
    // ignore these fields for now
    ;
  } else if (0 == strcmp (field_name, "encoding-name")) {
    if (type != G_TYPE_STRING) {
      return FALSE;
    }
    /* keep the first encoding name seen; later caps must not overwrite it */
    if (!codec->encoding_name) {
      codec->encoding_name = g_value_dup_string (value);
    }
  } else if (0 == strcmp (field_name, "encoding-params")) {
    if (type != G_TYPE_STRING) {
      return FALSE;
    }
    codec->channels = (guint) g_ascii_strtoull (
        g_value_get_string (value), NULL, 10);
  } else {
    /* anything unrecognised that is a string becomes an optional codec
     * parameter; non-string extras are dropped */
    if (type == G_TYPE_STRING)
      fs_codec_add_optional_parameter (codec, field_name,
          g_value_get_string (value));
  }

  return TRUE;
}
/* Probe the channel counts the ALSA device supports and intersect them
 * with the template caps.
 *
 * Takes ownership of in_caps (it is unreffed on success) and returns a
 * newly-allocated caps with one structure per template structure, each
 * constrained to the detected [min, max] channel range.  Returns NULL if
 * the hw_params queries fail.  NOTE(review): on the error paths in_caps
 * is not unreffed -- presumably a leak; confirm against callers. */
static GstCaps *
gst_alsa_detect_channels (GstObject * obj, snd_pcm_hw_params_t * hw_params,
    GstCaps * in_caps)
{
  GstCaps *caps;
  guint min, max;
  gint min_chans, max_chans;
  gint err, i;

  GST_LOG_OBJECT (obj, "probing channels ...");

  if ((err = snd_pcm_hw_params_get_channels_min (hw_params, &min)) < 0)
    goto min_chan_error;
  if ((err = snd_pcm_hw_params_get_channels_max (hw_params, &max)) < 0)
    goto max_chan_error;

  /* note: the above functions may return (guint) -1 */
  min_chans = min;
  max_chans = max;

  /* (guint) -1 becomes negative after the signed assignment above, which
   * is how "unknown" is detected here */
  if (min_chans < 0) {
    min_chans = 1;
    max_chans = GST_ALSA_MAX_CHANNELS;
  } else if (max_chans < 0) {
    max_chans = GST_ALSA_MAX_CHANNELS;
  }

  if (min_chans > max_chans) {
    gint temp;

    GST_WARNING_OBJECT (obj, "minimum channels > maximum channels (%d > %d), "
        "please fix your soundcard drivers", min, max);
    temp = min_chans;
    min_chans = max_chans;
    max_chans = temp;
  }

  /* pro cards seem to return large numbers for min_channels */
  if (min_chans > GST_ALSA_MAX_CHANNELS) {
    GST_DEBUG_OBJECT (obj, "min_chans = %u, looks like a pro card", min_chans);
    if (max_chans < min_chans) {
      max_chans = min_chans;
    } else {
      /* only support [max_chans; max_chans] for these cards for now
       * to avoid inflating the source caps with loads of structures ... */
      min_chans = max_chans;
    }
  } else {
    min_chans = MAX (min_chans, 1);
    max_chans = MIN (GST_ALSA_MAX_CHANNELS, max_chans);
  }

  GST_DEBUG_OBJECT (obj, "Min. channels = %d (%d)", min_chans, min);
  GST_DEBUG_OBJECT (obj, "Max. channels = %d (%d)", max_chans, max);

  caps = gst_caps_new_empty ();

  for (i = 0; i < gst_caps_get_size (in_caps); ++i) {
    GstStructure *s;
    GType field_type;
    gint c_min = min_chans;
    gint c_max = max_chans;

    s = gst_caps_get_structure (in_caps, i);
    /* the template caps might limit the number of channels (like alsasrc),
     * in which case we don't want to return a superset, so hack around this
     * for the two common cases where the channels are either a fixed number
     * or a min/max range). Example: alsasrc template has channels = [1,2] and
     * the detection will claim to support 8 channels for device 'plughw:0' */
    field_type = gst_structure_get_field_type (s, "channels");
    if (field_type == G_TYPE_INT) {
      gst_structure_get_int (s, "channels", &c_min);
      gst_structure_get_int (s, "channels", &c_max);
    } else if (field_type == GST_TYPE_INT_RANGE) {
      const GValue *val;

      val = gst_structure_get_value (s, "channels");
      c_min = CLAMP (gst_value_get_int_range_min (val), min_chans, max_chans);
      c_max = CLAMP (gst_value_get_int_range_max (val), min_chans, max_chans);
    } else {
      c_min = min_chans;
      c_max = max_chans;
    }

    caps_add_channel_configuration (caps, s, c_min, c_max);
  }

  gst_caps_unref (in_caps);
  return caps;

/* ERRORS */
min_chan_error:
  {
    GST_ERROR_OBJECT (obj, "failed to query minimum channel count: %s",
        snd_strerror (err));
    return NULL;
  }
max_chan_error:
  {
    GST_ERROR_OBJECT (obj, "failed to query maximum channel count: %s",
        snd_strerror (err));
    return NULL;
  }
}
/* Diagnostic tool: list every registered RTP depayloader and dump the
 * payload / encoding-name / media / clock-rate fields from each
 * application/x-rtp structure in its static pad templates. */
int
main (int argc, char *argv[])
{
  /* Initialisation */
  gst_init (&argc, &argv);

  GList *element_list = gst_element_factory_list_get_elements (GST_ELEMENT_FACTORY_TYPE_DEPAYLOADER, GST_RANK_NONE);
  GList *iter = element_list;

  while (iter != NULL) {
    g_print ("+++++\n");
    g_print ("%s -- ", gst_element_factory_get_longname ((GstElementFactory *)iter->data));
    g_print ("%s\n", gst_plugin_feature_get_name ((GstPluginFeature *)iter->data));

    const GList *static_pads = gst_element_factory_get_static_pad_templates ((GstElementFactory *)iter->data);

    while (NULL != static_pads) {
      GstStaticPadTemplate *pad = (GstStaticPadTemplate *)static_pads->data;

      //the following is EMPTY
      // NOTE(review): caps_str is never freed (the g_free below is
      // commented out) -- this leaks per template.
      gchar *caps_str = gst_caps_to_string (&pad->static_caps.caps);
      //g_free (caps_str);
      /* g_print ("string: %s\n", */
      /*          pad->static_caps.string); */

      /* parse the template caps from their string form instead */
      GstCaps *caps = gst_caps_from_string (pad->static_caps.string);
      guint caps_size = gst_caps_get_size (caps);

      if (! gst_caps_is_any (caps))
        /* iterate structures in reverse order */
        for (guint i = caps_size; i > 0; i--) {
          GstStructure *caps_struct = gst_caps_get_structure (caps, i-1);

          if (gst_structure_has_name (caps_struct,"application/x-rtp")) {
            // NOTE(review): gst_structure_to_string() returns an
            // allocated string that is never freed here.
            g_print ("string: %s\n", gst_structure_to_string (caps_struct));

            {//payload
              const GValue *val = gst_structure_get_value (caps_struct, "payload");
              if (NULL != val) {
                //g_print ("payload struct type %s\n", G_VALUE_TYPE_NAME (val));
                if(GST_VALUE_HOLDS_INT_RANGE(val)) {
                  g_print ("payload min %d\n", gst_value_get_int_range_min (val));
                }
                if (GST_VALUE_HOLDS_LIST(val)) {
                  // NOTE(review): this `i` shadows the structure loop index.
                  for (guint i = 0; i < gst_value_list_get_size (val); i++) {
                    const GValue *item_val = gst_value_list_get_value (val, i);
                    g_print ("payload list %d\n", g_value_get_int (item_val));
                  }
                }
                if (G_VALUE_HOLDS_INT (val)) {
                  g_print ("payload int %d\n", g_value_get_int (val));
                }
              }
            }
            { //encodeing-name
              const GValue *val = gst_structure_get_value (caps_struct, "encoding-name");
              if (NULL != val) {
                //g_print ("encoding-name struct type %s\n", G_VALUE_TYPE_NAME (val));
                if (GST_VALUE_HOLDS_LIST(val)) {
                  for (guint i = 0; i < gst_value_list_get_size (val); i++) {
                    const GValue *item_val = gst_value_list_get_value (val, i);
                    g_print ("encoding-name list %s\n", g_value_get_string (item_val));
                  }
                }
                if (G_VALUE_HOLDS_STRING (val)) {
                  g_print ("encoding-name string %s\n", g_value_get_string (val));
                }
              }
            }
            {//media
              const GValue *val = gst_structure_get_value (caps_struct, "media");
              if (NULL != val) {
                if (GST_VALUE_HOLDS_LIST(val)) {
                  for (guint i = 0; i < gst_value_list_get_size (val); i++) {
                    const GValue *item_val = gst_value_list_get_value (val, i);
                    g_print ("media list %s\n", g_value_get_string (item_val));
                  }
                }
                if (G_VALUE_HOLDS_STRING (val)) {
                  g_print ("media string %s\n", g_value_get_string (val));
                }
              }
            }
            {//clock rate
              const GValue *val = gst_structure_get_value (caps_struct, "clock-rate");
              if (NULL != val) {
                //g_print ("payload struct type %s\n", G_VALUE_TYPE_NAME (val));
                if(GST_VALUE_HOLDS_INT_RANGE(val)) {
                  g_print ("clock-rate min %d\n", gst_value_get_int_range_min (val));
                }
                if (GST_VALUE_HOLDS_LIST(val)) {
                  for (guint i = 0; i < gst_value_list_get_size (val); i++) {
                    const GValue *item_val = gst_value_list_get_value (val, i);
                    g_print ("clock-rate list %d\n", g_value_get_int (item_val));
                  }
                }
                if (G_VALUE_HOLDS_INT (val)) {
                  g_print ("clock-rate int %d\n", g_value_get_int (val));
                }
              }
            }
            /* g_print ("\nencoding-name %s\n", */
            /*          gst_structure_get_string (caps_struct, */
            /*                                    "encoding-name")); */
          }
        }
      static_pads = g_list_next (static_pads);
      gst_caps_unref (caps);
    }
    iter = g_list_next (iter);
  }

  gst_plugin_feature_list_free (element_list);
  return 0;
}
/* Gst::IntRange#min: return the lower bound of the wrapped int-range as
 * a Ruby integer. */
static VALUE
int_range_get_min(VALUE self)
{
    GValue *range = RVAL2GOBJ(self);

    return INT2NUM(gst_value_get_int_range_min(range));
}
static gboolean populate_field_settings (GQuark field, const GValue * value, gpointer pfx) { gchar *field_name ; gpointer *pfxd = (gpointer*)pfx; XAMediaRecorderAdaptationCtx *ctxx = (XAMediaRecorderAdaptationCtx *) *pfxd; field_name = (gchar*)g_quark_to_string (field); if((strcasecmp((const char*)field_name,"channels") == 0)) { if(GST_VALUE_HOLDS_INT_RANGE(value) == TRUE) { (ctxx)->audioEncSettings.channelsIn = gst_value_get_int_range_max (value); (ctxx)->audioEncSettings.channelsOut = gst_value_get_int_range_max (value); }else { (ctxx)->audioEncSettings.channelsIn = g_value_get_int(value); (ctxx)->audioEncSettings.channelsOut = g_value_get_int(value); } } if((strcasecmp((const char*)field_name,"depth") == 0)) { if(GST_VALUE_HOLDS_INT_RANGE(value) == TRUE) { (ctxx)->audioEncSettings.bitsPerSample = gst_value_get_int_range_min (value); }else { (ctxx)->audioEncSettings.bitsPerSample = g_value_get_int(value); } } if((strcasecmp((const char*)field_name,"endianness") == 0)) { if(GST_VALUE_HOLDS_INT_RANGE(value) == TRUE) { (ctxx)->audioEncSettings.blockAlignment = gst_value_get_int_range_min (value); }else { (ctxx)->audioEncSettings.blockAlignment = g_value_get_int(value); } } if((strcasecmp((const char*)field_name,"bitrate") == 0)) { if(GST_VALUE_HOLDS_INT_RANGE(value) == TRUE) { (ctxx)->audioEncSettings.bitRate = gst_value_get_int_range_min (value); }else { (ctxx)->audioEncSettings.bitRate = g_value_get_int(value); } } if((strcasecmp((const char*)field_name,"rate") == 0)) { if(GST_VALUE_HOLDS_INT_RANGE(value) == TRUE) { (ctxx)->audioEncSettings.sampleRate = gst_value_get_int_range_min (value)*1000; }else { (ctxx)->audioEncSettings.sampleRate = g_value_get_int(value) * 1000; } } return TRUE; }