コード例 #1
0
ファイル: gstaravis.c プロジェクト: daiemna/SDP_WS14
/* Build the full caps the camera can produce: one structure per supported
 * pixel format, each carrying the camera's width/height ranges and the
 * frame-rate range expressed as a fraction range.
 *
 * Returns a new caller-owned GstCaps, or NULL if the camera is not valid. */
static GstCaps *
gst_aravis_get_all_camera_caps (GstAravis *gst_aravis)
{
	GstCaps *caps;
	gint64 *pixel_formats;
	double min_frame_rate, max_frame_rate;
	int min_height, min_width;
	int max_height, max_width;
	unsigned int n_pixel_formats, i;

	g_return_val_if_fail (GST_IS_ARAVIS (gst_aravis), NULL);

	if (!ARV_IS_CAMERA (gst_aravis->camera))
		return NULL;

	GST_LOG_OBJECT (gst_aravis, "Get all camera caps");

	/* Query the camera limits: geometry, pixel formats and frame rate. */
	arv_camera_get_width_bounds (gst_aravis->camera, &min_width, &max_width);
	arv_camera_get_height_bounds (gst_aravis->camera, &min_height, &max_height);
	pixel_formats = arv_camera_get_available_pixel_formats (gst_aravis->camera, &n_pixel_formats);
	arv_camera_get_frame_rate_bounds (gst_aravis->camera, &min_frame_rate, &max_frame_rate);

	/* Caps express frame rates as fractions; convert both bounds. */
	int min_frame_rate_numerator;
	int min_frame_rate_denominator;
	gst_util_double_to_fraction (min_frame_rate, &min_frame_rate_numerator, &min_frame_rate_denominator);

	int max_frame_rate_numerator;
	int max_frame_rate_denominator;
	gst_util_double_to_fraction (max_frame_rate, &max_frame_rate_numerator, &max_frame_rate_denominator);

	caps = gst_caps_new_empty ();
	for (i = 0; i < n_pixel_formats; i++) {
		const char *caps_string;

		caps_string = arv_pixel_format_to_gst_0_10_caps_string (pixel_formats[i]);

		if (caps_string != NULL) {
			GstStructure *structure;

			structure = gst_structure_from_string (caps_string, NULL);
			/* FIX: gst_structure_from_string() returns NULL on a
			 * malformed caps string and gst_structure_set() would
			 * crash on it — skip such formats instead. */
			if (structure == NULL)
				continue;
			gst_structure_set (structure,
					   "width", GST_TYPE_INT_RANGE, min_width, max_width,
					   "height", GST_TYPE_INT_RANGE, min_height, max_height,
					   "framerate", GST_TYPE_FRACTION_RANGE,
							   min_frame_rate_numerator, min_frame_rate_denominator,
							   max_frame_rate_numerator, max_frame_rate_denominator,
					   NULL);
			gst_caps_append_structure (caps, structure);
		}
	}

	g_free (pixel_formats);

	return caps;
}
コード例 #2
0
/* Build the raw-video caps for the image renderer from its configured
 * width/height/max-framerate properties, substituting the LIMITED_* defaults
 * for anything left unset. Returns a new caller-owned GstCaps. */
static GstCaps *owr_image_renderer_get_caps(OwrMediaRenderer *renderer)
{
    GstCaps *caps;
    guint render_width = 0, render_height = 0;
    gdouble framerate_limit = 0.0;
    gint fps_n = 0, fps_d = 1;

    g_object_get(OWR_IMAGE_RENDERER(renderer),
        "width", &render_width,
        "height", &render_height,
        "max-framerate", &framerate_limit,
        NULL);

    framerate_limit = framerate_limit > 0.0 ? framerate_limit : LIMITED_FRAMERATE;
    gst_util_double_to_fraction(framerate_limit, &fps_n, &fps_d);
    GST_DEBUG_OBJECT(renderer, "Setting the framerate to %d/%d", fps_n, fps_d);

    /* FIXME - add raw format property to image renderer */
    caps = gst_caps_new_simple("video/x-raw",
        "format", G_TYPE_STRING, "BGRA",
        "width", G_TYPE_INT, render_width > 0 ? render_width : LIMITED_WIDTH,
        "height", G_TYPE_INT, render_height > 0 ? render_height : LIMITED_HEIGHT,
        "framerate", GST_TYPE_FRACTION, fps_n, fps_d,
        NULL);

    return caps;
}
コード例 #3
0
// Fill `structure` with the fixed width/height of `res` and a GST_TYPE_LIST
// of all frame rates the format offers at that resolution. Always returns true.
bool fill_structure_fixed_resolution (GstStructure* structure,
                                      const tcam::VideoFormatDescription& format,
                                      const tcam_resolution_description& res)
{
    const std::vector<double> rates = format.get_frame_rates(res);

    GValue rate_list = G_VALUE_INIT;
    g_value_init(&rate_list, GST_TYPE_LIST);

    for (const double rate : rates)
    {
        // Caps express frame rates as fractions.
        int numerator = 0;
        int denominator = 0;
        gst_util_double_to_fraction(rate, &numerator, &denominator);

        GValue entry = G_VALUE_INIT;
        g_value_init(&entry, GST_TYPE_FRACTION);
        gst_value_set_fraction(&entry, numerator, denominator);
        gst_value_list_append_value(&rate_list, &entry);
        g_value_unset(&entry);
    }

    gst_structure_set (structure,
                       "width", G_TYPE_INT, res.max_size.width,
                       "height", G_TYPE_INT, res.max_size.height,
                       NULL);

    // take_value transfers ownership of rate_list to the structure.
    gst_structure_take_value(structure, "framerate", &rate_list);

    return true;
}
コード例 #4
0
/* Build raw-video caps for the video renderer, constraining only the
 * dimensions/framerate that were explicitly configured on the object.
 * Returns a new caller-owned GstCaps. */
static GstCaps *owr_video_renderer_get_caps(OwrMediaRenderer *renderer)
{
    guint render_width = 0, render_height = 0;
    gdouble framerate_limit = 0.0;
    GstCaps *caps;

    g_object_get(OWR_VIDEO_RENDERER(renderer),
        "width", &render_width,
        "height", &render_height,
        "max-framerate", &framerate_limit,
        NULL);

    caps = gst_caps_new_empty_simple("video/x-raw");

    if (render_width > 0)
        gst_caps_set_simple(caps, "width", G_TYPE_INT, render_width, NULL);
    if (render_height > 0)
        gst_caps_set_simple(caps, "height", G_TYPE_INT, render_height, NULL);
    if (framerate_limit > 0.0) {
        gint fps_n = 0, fps_d = 1;

        gst_util_double_to_fraction(framerate_limit, &fps_n, &fps_d);
        GST_DEBUG_OBJECT(renderer, "Setting the framerate to %d/%d", fps_n, fps_d);
        gst_caps_set_simple(caps, "framerate", GST_TYPE_FRACTION, fps_n, fps_d, NULL);
    }

    return caps;
}
コード例 #5
0
ファイル: gstxmptag.c プロジェクト: genesi/gst-base-plugins
/* Render a double as a "num/denom" fraction string.
 * Returns a newly allocated string; the caller owns it (g_free). */
static gchar *
double_to_fraction_string (gdouble num)
{
  gint numerator = 0;
  gint denominator = 1;

  gst_util_double_to_fraction (num, &numerator, &denominator);

  return g_strdup_printf ("%d/%d", numerator, denominator);
}
コード例 #6
0
ファイル: owr_payload.c プロジェクト: zdydek/openwebrtc
/* Create raw caps for a payload: audio caps carry the clock rate (and channel
 * count when configured); video caps carry width/height/framerate with the
 * LIMITED_* defaults substituted for unset values.
 * Returns a new caller-owned GstCaps, or NULL for an invalid payload. */
GstCaps * _owr_payload_create_raw_caps(OwrPayload *payload)
{
    OwrPayloadPrivate *priv;
    OwrMediaType media_type;
    GstCaps *caps = NULL;
    guint channels = 0;
    guint vid_width = 0, vid_height = 0;
    gdouble vid_framerate = 0.0;
    gint fps_n = 0, fps_d = 1;

    g_return_val_if_fail(payload, NULL);
    priv = payload->priv;

    g_object_get(payload, "media-type", &media_type, NULL);

    switch (media_type) {
    case OWR_MEDIA_TYPE_AUDIO:
        if (OWR_IS_AUDIO_PAYLOAD(payload))
            g_object_get(OWR_AUDIO_PAYLOAD(payload), "channels", &channels, NULL);

        caps = gst_caps_new_simple("audio/x-raw",
            "rate", G_TYPE_INT, priv->clock_rate,
            NULL);
        if (channels > 0)
            gst_caps_set_simple(caps, "channels", G_TYPE_INT, channels, NULL);
        break;

    case OWR_MEDIA_TYPE_VIDEO:
        if (OWR_IS_VIDEO_PAYLOAD(payload)) {
            g_object_get(OWR_VIDEO_PAYLOAD(payload),
                "width", &vid_width,
                "height", &vid_height,
                "framerate", &vid_framerate,
                NULL);
        }

        caps = gst_caps_new_empty_simple(_owr_codec_type_to_caps_mime(media_type, priv->codec_type));
#ifdef __APPLE__
        /* For H.264 on Apple, advertise ANY caps features. */
        if (priv->codec_type == OWR_CODEC_TYPE_H264)
          gst_caps_set_features(caps, 0, gst_caps_features_new_any());
#endif
        gst_caps_set_simple(caps,
            "width", G_TYPE_INT, vid_width > 0 ? vid_width : LIMITED_WIDTH,
            "height", G_TYPE_INT, vid_height > 0 ? vid_height : LIMITED_HEIGHT,
            NULL);

        vid_framerate = vid_framerate > 0.0 ? vid_framerate : LIMITED_FRAMERATE;
        gst_util_double_to_fraction(vid_framerate, &fps_n, &fps_d);
        gst_caps_set_simple(caps, "framerate", GST_TYPE_FRACTION, fps_n, fps_d, NULL);
        break;

    default:
        g_return_val_if_reached(NULL);
    }

    return caps;
}
コード例 #7
0
ファイル: gstexiftag.c プロジェクト: genesi/gst-base-plugins
/* Write an Exif rational tag, approximating the double `value` as a
 * numerator/denominator pair first. */
static void
gst_exif_writer_write_rational_tag_from_double (GstExifWriter * writer,
    guint16 tag, gdouble value)
{
  gint numerator;
  gint denominator;

  gst_util_double_to_fraction (value, &numerator, &denominator);
  gst_exif_writer_write_rational_tag (writer, tag, numerator, denominator);
}
コード例 #8
0
/* Store `rate` into the fraction GValue `fract`, quantizing to whole
 * hundredths first so near-integral rates convert to clean fractions. */
static void
gst_mio_video_device_framerate_to_fraction_value (TundraFramerate * rate,
    GValue * fract)
{
  gdouble quantized;
  gint numerator, denominator;

  quantized = gst_mio_video_device_round_to_whole_hundreths (rate->value);
  gst_util_double_to_fraction (quantized, &numerator, &denominator);
  gst_value_set_fraction (fract, numerator, denominator);
}
コード例 #9
0
ファイル: owr_payload.c プロジェクト: Bayerner/openwebrtc
/* Create raw caps for a payload. Audio: fixed S16LE interleaved at the
 * payload clock rate, plus channel count when configured. Video: raw video
 * with width/height/framerate, the LIMITED_* defaults standing in for unset
 * values. Returns a new caller-owned GstCaps, or NULL for a bad payload. */
GstCaps * _owr_payload_create_raw_caps(OwrPayload *payload)
{
    OwrPayloadPrivate *priv;
    GstCaps *caps = NULL;
    guint channels = 0;
    guint vid_width = 0, vid_height = 0;
    gdouble vid_framerate = 0.0;
    gint fps_n = 0, fps_d = 1;

    g_return_val_if_fail(payload, NULL);
    priv = payload->priv;

    switch (priv->media_type) {
    case OWR_MEDIA_TYPE_AUDIO:
        if (OWR_IS_AUDIO_PAYLOAD(payload))
            g_object_get(OWR_AUDIO_PAYLOAD(payload), "channels", &channels, NULL);

        caps = gst_caps_new_simple("audio/x-raw",
            "format", G_TYPE_STRING, "S16LE",
            "layout", G_TYPE_STRING, "interleaved",
            "rate", G_TYPE_INT, priv->clock_rate,
            NULL);
        if (channels > 0)
            gst_caps_set_simple(caps, "channels", G_TYPE_INT, channels, NULL);
        break;

    case OWR_MEDIA_TYPE_VIDEO:
        if (OWR_IS_VIDEO_PAYLOAD(payload)) {
            g_object_get(OWR_VIDEO_PAYLOAD(payload),
                "width", &vid_width,
                "height", &vid_height,
                "framerate", &vid_framerate,
                NULL);
        }

        vid_framerate = vid_framerate > 0.0 ? vid_framerate : LIMITED_FRAMERATE;
        gst_util_double_to_fraction(vid_framerate, &fps_n, &fps_d);

        caps = gst_caps_new_simple("video/x-raw",
            "width", G_TYPE_INT, vid_width > 0 ? vid_width : LIMITED_WIDTH,
            "height", G_TYPE_INT, vid_height > 0 ? vid_height : LIMITED_HEIGHT,
            "framerate", GST_TYPE_FRACTION, fps_n, fps_d,
            NULL);
        break;

    default:
        g_return_val_if_reached(NULL);
    }

    return caps;
}
コード例 #10
0
/* Convert a seconds value to GstClockTime (nanoseconds). */
static GstClockTime
get_seconds (const GstRTSPTime * t)
{
  gint num, denom;

  if (t->seconds >= G_MAXINT) {
    /* Too large for the 32-bit fraction path; accept the rounding of a
     * plain double multiply. */
    return gst_util_gdouble_to_guint64 (t->seconds * GST_SECOND);
  }

  /* Don't do direct multiply with GST_SECOND to avoid rounding
   * errors.
   * This only works for "small" numbers, because num is limited to 32-bit
   */
  gst_util_double_to_fraction (t->seconds, &num, &denom);
  return gst_util_uint64_scale_int (GST_SECOND, num, denom);
}
コード例 #11
0
/* Convert an SMPTE frame count to GstClockTime (nanoseconds), using the
 * frame rate implied by the range unit. */
static GstClockTime
get_frames (const GstRTSPTime2 * t, GstRTSPRangeUnit unit)
{
  gint num, denom;

  /* Express the (possibly fractional) frame count as num/denom frames. */
  gst_util_double_to_fraction (t->frames, &num, &denom);

  /* Scale the fraction by seconds-per-frame so num/denom becomes seconds. */
  switch (unit) {
    case GST_RTSP_RANGE_SMPTE_25:
      denom *= 25;
      break;
    case GST_RTSP_RANGE_SMPTE:
    case GST_RTSP_RANGE_SMPTE_30_DROP:
    default:
      /* Drop-frame timecode: one frame lasts num*1001/denom*30003 seconds
       * here. NOTE(review): 30000/1001 fps would give denom *= 30030;
       * confirm the 30003 constant against the RTSP/SMPTE range spec. */
      num *= 1001;
      denom *= 30003;
      break;
  }
  return gst_util_uint64_scale_int (GST_SECOND, num, denom);
}
コード例 #12
0
ファイル: gstexample.c プロジェクト: MantisCinema/tiscamera
/*
  callback to adjust the framerate

  Reads the framerate the user typed into the framerate entry field,
  converts it to a fraction and re-applies the capsfilter caps, cycling
  the pipeline through READY to force re-negotiation.

  NOTE(review): the parameter 'we' is never used; the body reads 'w',
  which must be a file-scope Win* for this to compile — confirm whether
  the parameter was meant to be used instead.
  NOTE(review): 'pad' is declared but never used, and 'caps' is not
  unref'd after g_object_set(), which looks like a reference leak.
 */
void framerate_value_changed (Win* we)
{
    GstPad *pad = NULL;
    GstCaps *caps = NULL;

    if (w == NULL)
    {
        return;
    }

    /* retrieve user input */
    const char* s = gtk_entry_get_text(GTK_ENTRY(w->framerate_field));
    gdouble frame_rate = strtod(s, NULL);

    int num;
    int denom;

    /* gst caps want framerates as fraction, so we convert it */
    gst_util_double_to_fraction (frame_rate, &num, &denom);
    
    /*
      When setting caps, width and height currently always have to be defined or
      there will be an undefined behaviour that can crash your application
      Currently width and height have to be mentioned, else undefined behaviour will be experienced.
    */
    caps = gst_caps_new_simple (FORMAT_COLOR,
                                "format", G_TYPE_STRING, PATTERN,
                                "framerate", GST_TYPE_FRACTION, num, denom,
                                "width", G_TYPE_INT, WIDTH,
                                "height",G_TYPE_INT, HEIGHT,
                                NULL);

    /* pause pipeline to allow re-negotiation */
    gst_element_set_state (p.pipeline, GST_STATE_READY);
   
    /* set changed values */
    g_object_set (p.capsfilter, "caps", caps, NULL);

    /* resume playing */
    gst_element_set_state (p.pipeline, GST_STATE_PLAYING);
}
コード例 #13
0
ファイル: owr_payload.c プロジェクト: ramaxlo/openwebrtc
/* Apply the payload's configured width/height/framerate onto `caps`,
 * substituting the LIMITED_* defaults for anything left unset. */
static void set_video_params(GstCaps *caps, OwrPayload *payload)
{
    guint cfg_width = 0, cfg_height = 0;
    gdouble cfg_framerate = 0.0;
    gint fps_n = 0, fps_d = 1;

    if (OWR_IS_VIDEO_PAYLOAD(payload)) {
        g_object_get(OWR_VIDEO_PAYLOAD(payload),
            "width", &cfg_width,
            "height", &cfg_height,
            "framerate", &cfg_framerate,
            NULL);
    }

    gst_caps_set_simple(caps,
        "width", G_TYPE_INT, cfg_width > 0 ? cfg_width : LIMITED_WIDTH,
        "height", G_TYPE_INT, cfg_height > 0 ? cfg_height : LIMITED_HEIGHT,
        NULL);

    cfg_framerate = cfg_framerate > 0.0 ? cfg_framerate : LIMITED_FRAMERATE;
    gst_util_double_to_fraction(cfg_framerate, &fps_n, &fps_d);
    gst_caps_set_simple(caps, "framerate", GST_TYPE_FRACTION, fps_n, fps_d, NULL);
}
コード例 #14
0
/* Lazily build (once) and return the renderer's GstBin:
 * videorate -> videoscale -> videoconvert -> capsfilter -> videobalance ->
 * queue -> sink, with a ghost "sink" pad exposing the chain's input.
 * Thread-safe via priv->video_renderer_lock. Returns the bin (owned by the
 * pipeline, not transferred to the caller). */
static GstElement *owr_video_renderer_get_element(OwrMediaRenderer *renderer)
{
    OwrVideoRenderer *video_renderer;
    OwrVideoRendererPrivate *priv;
    GstElement *videorate, *videoscale, *videoconvert, *capsfilter, *balance, *queue, *sink;
    GstCaps *filter_caps;
    GstPad *ghostpad, *sinkpad;
    gint fps_n = 0, fps_d = 1;
    gchar *bin_name;

    g_assert(renderer);
    video_renderer = OWR_VIDEO_RENDERER(renderer);
    priv = video_renderer->priv;

    /* Serialize concurrent callers; the bin is built at most once. */
    g_mutex_lock(&priv->video_renderer_lock);

    if (priv->renderer_bin)
        goto done;

    /* Unique bin name so multiple renderers can coexist in one pipeline. */
    bin_name = g_strdup_printf("video-renderer-bin-%u", g_atomic_int_add(&unique_bin_id, 1));
    priv->renderer_bin = gst_bin_new(bin_name);
    g_free(bin_name);

    gst_bin_add(GST_BIN(_owr_get_pipeline()), priv->renderer_bin);
    gst_element_sync_state_with_parent(GST_ELEMENT(priv->renderer_bin));

    /* drop-only: cap the frame rate by dropping, never duplicating frames. */
    videorate = gst_element_factory_make("videorate", "video-renderer-rate");
    g_object_set(videorate, "drop-only", TRUE, NULL);

    videoscale = gst_element_factory_make("videoscale", "video-renderer-scale");
    videoconvert = gst_element_factory_make(VIDEO_CONVERT, "video-renderer-convert");

    /* Caps want the max frame rate as a fraction. */
    gst_util_double_to_fraction(priv->max_framerate, &fps_n, &fps_d);

    /* Constrain only the dimensions/rate that were actually configured. */
    capsfilter = gst_element_factory_make("capsfilter", "video-renderer-capsfilter");
    filter_caps = gst_caps_new_empty_simple("video/x-raw");
    if (priv->width > 0)
        gst_caps_set_simple(filter_caps, "width", G_TYPE_INT, priv->width, NULL);
    if (priv->height > 0)
        gst_caps_set_simple(filter_caps, "height", G_TYPE_INT, priv->height, NULL);
    if (fps_n > 0 && fps_d > 0)
        gst_caps_set_simple(filter_caps, "framerate", GST_TYPE_FRACTION, fps_n, fps_d, NULL);
    g_object_set(capsfilter, "caps", filter_caps, NULL);

    /* Track the renderer's "disabled" property on the balance element;
     * renderer_disabled() applies the current state immediately. */
    balance = gst_element_factory_make("videobalance", "video-renderer-balance");
    g_signal_connect_object(renderer, "notify::disabled", G_CALLBACK(renderer_disabled),
        balance, 0);
    renderer_disabled(renderer, NULL, balance);

    /* Small queue bounded by buffer count only (bytes/time limits off). */
    queue = gst_element_factory_make("queue", "video-renderer-queue");
    g_assert(queue);
    g_object_set(queue, "max-size-buffers", 3, "max-size-bytes", 0, "max-size-time", 0, NULL);

    sink = gst_element_factory_make(VIDEO_SINK, "video-renderer-sink");
    g_assert(sink);
    if (GST_IS_VIDEO_OVERLAY(sink))
        gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(sink), priv->window_handle);

    /* async false is needed when using live sources to not require prerolling
     * as prerolling is not possible from live sources in GStreamer */
    g_object_set(sink, "async", FALSE, NULL);

    gst_bin_add_many(GST_BIN(priv->renderer_bin), videorate, videoscale,
        videoconvert, capsfilter, balance, queue, sink, NULL);

    /* Link the chain downstream-first. */
    LINK_ELEMENTS(queue, sink);
    LINK_ELEMENTS(balance, queue);
    LINK_ELEMENTS(capsfilter, balance);
    LINK_ELEMENTS(videoconvert, capsfilter);
    LINK_ELEMENTS(videoscale, videoconvert);
    LINK_ELEMENTS(videorate, videoscale);

    /* Expose the chain's input as a ghost "sink" pad on the bin. */
    sinkpad = gst_element_get_static_pad(videorate, "sink");
    g_assert(sinkpad);
    ghostpad = gst_ghost_pad_new("sink", sinkpad);
    gst_pad_set_active(ghostpad, TRUE);
    gst_element_add_pad(priv->renderer_bin, ghostpad);
    gst_object_unref(sinkpad);

    /* Bring the elements up sink-first. */
    gst_element_sync_state_with_parent(sink);
    gst_element_sync_state_with_parent(queue);
    gst_element_sync_state_with_parent(balance);
    gst_element_sync_state_with_parent(capsfilter);
    gst_element_sync_state_with_parent(videoconvert);
    gst_element_sync_state_with_parent(videoscale);
    gst_element_sync_state_with_parent(videorate);
done:
    g_mutex_unlock(&priv->video_renderer_lock);
    return priv->renderer_bin;
}
コード例 #15
0
/* Translate an Android MediaCodec output format into GstCaps.
 * Returns a new caller-owned GstCaps, or NULL on missing/unsupported fields. */
static GstCaps *
caps_from_amc_format (GstAmcFormat * amc_format)
{
  GstCaps *caps = NULL;
  gchar *mime = NULL;
  gint width, height;
  gint amc_profile, amc_level;
  gfloat frame_rate = 0.0;
  gint fraction_n, fraction_d;
  GError *err = NULL;

  if (!gst_amc_format_get_string (amc_format, "mime", &mime, &err)) {
    GST_ERROR ("Failed to get 'mime': %s", err->message);
    g_clear_error (&err);
    return NULL;
  }

  if (!gst_amc_format_get_int (amc_format, "width", &width, &err) ||
      !gst_amc_format_get_int (amc_format, "height", &height, &err)) {
    GST_ERROR ("Failed to get size: %s", err->message);
    g_clear_error (&err);

    g_free (mime);
    return NULL;
  }

  /* frame-rate is optional; a missing value leaves 0.0 -> fraction 0/1 */
  gst_amc_format_get_float (amc_format, "frame-rate", &frame_rate, NULL);
  gst_util_double_to_fraction (frame_rate, &fraction_n, &fraction_d);

  if (strcmp (mime, "video/mp4v-es") == 0) {
    const gchar *profile_string, *level_string;

    caps =
        gst_caps_new_simple ("video/mpeg", "mpegversion", G_TYPE_INT, 4,
        "systemstream", G_TYPE_BOOLEAN, FALSE,
        "parsed", G_TYPE_BOOLEAN, TRUE, NULL);

    if (gst_amc_format_get_int (amc_format, "profile", &amc_profile, NULL)) {
      profile_string = gst_amc_mpeg4_profile_to_string (amc_profile);
      if (!profile_string)
        goto unsupported_profile;

      gst_caps_set_simple (caps, "profile", G_TYPE_STRING, profile_string,
          NULL);
    }

    if (gst_amc_format_get_int (amc_format, "level", &amc_level, NULL)) {
      /* FIX: the level lookup was passed amc_profile instead of amc_level */
      level_string = gst_amc_mpeg4_level_to_string (amc_level);
      if (!level_string)
        goto unsupported_level;

      gst_caps_set_simple (caps, "level", G_TYPE_STRING, level_string, NULL);
    }

  } else if (strcmp (mime, "video/mpeg2") == 0) {
    /* FIX: the G_TYPE_INT tag was missing before the value 2, corrupting
     * the gst_caps_new_simple() varargs list */
    caps = gst_caps_new_simple ("video/mpeg", "mpegversion", G_TYPE_INT, 2,
        NULL);
  } else if (strcmp (mime, "video/3gpp") == 0) {
    caps = gst_caps_new_empty_simple ("video/x-h263");
  } else if (strcmp (mime, "video/avc") == 0) {
    const gchar *profile_string, *level_string;

    caps =
        gst_caps_new_simple ("video/x-h264", "parsed", G_TYPE_BOOLEAN, TRUE,
        "stream-format", G_TYPE_STRING, "byte-stream",
        "alignment", G_TYPE_STRING, "au", NULL);

    if (gst_amc_format_get_int (amc_format, "profile", &amc_profile, NULL)) {
      profile_string = gst_amc_avc_profile_to_string (amc_profile, NULL);
      if (!profile_string)
        goto unsupported_profile;

      gst_caps_set_simple (caps, "profile", G_TYPE_STRING, profile_string,
          NULL);
    }

    if (gst_amc_format_get_int (amc_format, "level", &amc_level, NULL)) {
      /* FIX: the level lookup was passed amc_profile instead of amc_level */
      level_string = gst_amc_avc_level_to_string (amc_level);
      if (!level_string)
        goto unsupported_level;

      gst_caps_set_simple (caps, "level", G_TYPE_STRING, level_string, NULL);
    }
  } else if (strcmp (mime, "video/x-vnd.on2.vp8") == 0) {
    caps = gst_caps_new_empty_simple ("video/x-vp8");
  }

  /* FIX: an unrecognized mime type previously fell through with caps == NULL
   * and crashed in gst_caps_set_simple() below */
  if (caps == NULL) {
    GST_ERROR ("Unsupported mime type '%s'", mime);
    g_free (mime);
    return NULL;
  }

  gst_caps_set_simple (caps, "width", G_TYPE_INT, width,
      "height", G_TYPE_INT, height,
      "framerate", GST_TYPE_FRACTION, fraction_n, fraction_d, NULL);

  g_free (mime);
  return caps;

unsupported_profile:
  GST_ERROR ("Unsupport amc profile id %d", amc_profile);
  g_free (mime);
  /* FIX: GstCaps is a GstMiniObject, not a GstObject — use gst_caps_unref */
  gst_caps_unref (caps);

  return NULL;

unsupported_level:
  GST_ERROR ("Unsupport amc level id %d", amc_level);
  g_free (mime);
  gst_caps_unref (caps);

  return NULL;
}
コード例 #16
0
/**
 * Build GstCaps advertising every usable video format description.
 *
 * Fixed resolutions become one structure each; resolution ranges become one
 * structure per standard resolution plus a final width/height range
 * structure with a framerate range.
 *
 * Returns a new, caller-owned GstCaps.
 */
GstCaps* convert_videoformatsdescription_to_caps (const std::vector<tcam::VideoFormatDescription>& descriptions)
{
    GstCaps* caps = gst_caps_new_empty();

    for (const auto& desc : descriptions)
    {
        if (desc.get_fourcc() == 0)
        {
            tcam_info("Format has empty fourcc. Ignoring");
            continue;
        }

        const char* caps_string = tcam_fourcc_to_gst_1_0_caps_string(desc.get_fourcc());

        if (caps_string == nullptr)
        {
            tcam_warning("Format has empty caps string. Ignoring %s",
                         tcam::fourcc_to_description(desc.get_fourcc()));
            continue;
        }

        std::vector<struct tcam_resolution_description> res = desc.get_resolutions();

        for (const auto& r : res)
        {
            int min_width = r.min_size.width;
            int min_height = r.min_size.height;

            int max_width = r.max_size.width;
            int max_height = r.max_size.height;

            if (r.type == TCAM_RESOLUTION_TYPE_RANGE)
            {
                std::vector<struct tcam_image_size> framesizes = tcam::get_standard_resolutions(r.min_size,
                                                                                                r.max_size);

                // check if min/max are already in the vector.
                // some devices return std resolutions as max
                if (r.min_size != framesizes.front())
                {
                    framesizes.insert(framesizes.begin(), r.min_size);
                }

                if (r.max_size != framesizes.back())
                {
                    framesizes.push_back(r.max_size);
                }

                for (const auto& reso : framesizes)
                {
                    std::vector<double> framerates = desc.get_framerates(reso);

                    if (framerates.empty())
                    {
                        // FIX: check before allocating the structure; the old
                        // code leaked the GstStructure on this path.
                        continue;
                    }

                    GstStructure* structure = gst_structure_from_string (caps_string, NULL);

                    GValue fps_list = G_VALUE_INIT;
                    g_value_init(&fps_list, GST_TYPE_LIST);

                    for (const auto& f : framerates)
                    {
                        int frame_rate_numerator;
                        int frame_rate_denominator;
                        gst_util_double_to_fraction(f,
                                                    &frame_rate_numerator,
                                                    &frame_rate_denominator);

                        // skip degenerate fractions (0 fps or invalid)
                        if ((frame_rate_denominator == 0) || (frame_rate_numerator == 0))
                        {
                            continue;
                        }

                        GValue fraction = G_VALUE_INIT;
                        g_value_init(&fraction, GST_TYPE_FRACTION);
                        gst_value_set_fraction(&fraction, frame_rate_numerator, frame_rate_denominator);
                        gst_value_list_append_value(&fps_list, &fraction);
                        g_value_unset(&fraction);
                    }

                    // FIX: every rate may have been rejected above; do not
                    // append a structure with an empty framerate list.
                    if (gst_value_list_get_size(&fps_list) == 0)
                    {
                        g_value_unset(&fps_list);
                        gst_structure_free(structure);
                        continue;
                    }

                    gst_structure_set (structure,
                                       "width", G_TYPE_INT, reso.width,
                                       "height", G_TYPE_INT, reso.height,
                                       NULL);

                    // take_value transfers ownership of fps_list
                    gst_structure_take_value(structure, "framerate", &fps_list);
                    gst_caps_append_structure (caps, structure);
                }

                // finally also add the range to allow unusual settings like 1920x96@90fps
                std::vector<double> fps = desc.get_frame_rates(r);

                if (fps.empty())
                {
                    // FIX: check before allocating the structure/GValues; the
                    // old code leaked the GstStructure on this path.
                    continue;
                }

                GstStructure* structure = gst_structure_from_string (caps_string, NULL);

                GValue w = G_VALUE_INIT;
                g_value_init(&w, GST_TYPE_INT_RANGE);
                gst_value_set_int_range(&w, min_width, max_width);

                GValue h = G_VALUE_INIT;
                g_value_init(&h, GST_TYPE_INT_RANGE);
                gst_value_set_int_range(&h, min_height, max_height);

                int fps_min_num;
                int fps_min_den;
                int fps_max_num;
                int fps_max_den;
                gst_util_double_to_fraction(*std::min_element(fps.begin(), fps.end()),
                                            &fps_min_num,
                                            &fps_min_den);
                gst_util_double_to_fraction(*std::max_element(fps.begin(), fps.end()),
                                            &fps_max_num,
                                            &fps_max_den);

                GValue f = G_VALUE_INIT;
                g_value_init(&f, GST_TYPE_FRACTION_RANGE);

                gst_value_set_fraction_range_full(&f,
                                                  fps_min_num, fps_min_den,
                                                  fps_max_num, fps_max_den);

                gst_structure_set_value(structure, "width", &w);
                gst_structure_set_value(structure, "height", &h);
                gst_structure_set_value(structure, "framerate", &f);
                gst_caps_append_structure(caps, structure);

                // FIX: gst_structure_set_value() copies; release our locals.
                g_value_unset(&w);
                g_value_unset(&h);
                g_value_unset(&f);
            }
            else
            {
                GstStructure* structure = gst_structure_from_string (caps_string, NULL);

                fill_structure_fixed_resolution(structure, desc, r);
                gst_caps_append_structure (caps, structure);
            }
        }

    }

    return caps;
}