void
ges_base_xml_formatter_set_timeline_properties (GESBaseXmlFormatter * self,
    GESTimeline * timeline, const gchar * properties, const gchar * metadatas)
{
  GESBaseXmlFormatterPrivate *priv = _GET_PRIV (self);
  gboolean auto_transition = FALSE;

  if (properties) {
    GstStructure *props = gst_structure_from_string (properties, NULL);

    if (props) {
      if (gst_structure_get_boolean (props, "auto-transition",
              &auto_transition))
        gst_structure_remove_field (props, "auto-transition");

      gst_structure_foreach (props,
          (GstStructureForeachFunc) set_property_foreach, timeline);
      gst_structure_free (props);
    }
  }

  if (metadatas) {
    ges_meta_container_add_metas_from_string (GES_META_CONTAINER (timeline),
        metadatas);
  }

  priv->timeline_auto_transition = auto_transition;
}
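The properties are applied through a set_property_foreach callback that is not shown in this snippet. A minimal sketch of what such a GstStructureForeachFunc could look like, assuming each remaining field name maps directly to a writable GObject property on the timeline (illustrative only, not the actual GES implementation):

static gboolean
set_property_foreach (GQuark field_id, const GValue * value, gpointer user_data)
{
  GObject *object = G_OBJECT (user_data);

  /* Assumption: every structure field corresponds 1:1 to a GObject property. */
  g_object_set_property (object, g_quark_to_string (field_id), value);

  return TRUE;                  /* keep iterating over the remaining fields */
}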
Example #2
void test_mutability()
{
  GstStructure *s1;
  GstCaps *c1;
  gint ret;
  //xmlfile = "test_mutability";
      std_log(LOG_FILENAME_LINE, "Test Started test_mutability");
  c1 = gst_caps_new_any ();
  s1 = gst_structure_from_string ("audio/x-raw-int,rate=44100", NULL);
  gst_structure_set (s1, "rate", G_TYPE_INT, 48000, NULL);
  gst_caps_append_structure (c1, s1);
  gst_structure_set (s1, "rate", G_TYPE_INT, 22500, NULL);
  gst_caps_ref (c1);
  ASSERT_CRITICAL (gst_structure_set (s1, "rate", G_TYPE_INT, 11250, NULL));
  fail_unless (gst_structure_get_int (s1, "rate", &ret));
  fail_unless (ret == 22500);
  ASSERT_CRITICAL (gst_caps_set_simple (c1, "rate", G_TYPE_INT, 11250, NULL));
  fail_unless (gst_structure_get_int (s1, "rate", &ret));
  fail_unless (ret == 22500);
  gst_caps_unref (c1);
  gst_structure_set (s1, "rate", G_TYPE_INT, 11250, NULL);
  fail_unless (gst_structure_get_int (s1, "rate", &ret));
  fail_unless (ret == 11250);
  gst_caps_set_simple (c1, "rate", G_TYPE_INT, 1, NULL);
  fail_unless (gst_structure_get_int (s1, "rate", &ret));
  fail_unless (ret == 1);
  gst_caps_unref (c1);
  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
}
Example #3
static GstCaps *
gst_aravis_get_all_camera_caps (GstAravis *gst_aravis)
{
	GstCaps *caps;
	gint64 *pixel_formats;
	double min_frame_rate, max_frame_rate;
	int min_height, min_width;
	int max_height, max_width;
	unsigned int n_pixel_formats, i;

	g_return_val_if_fail (GST_IS_ARAVIS (gst_aravis), NULL);

	if (!ARV_IS_CAMERA (gst_aravis->camera))
		return NULL;

	GST_LOG_OBJECT (gst_aravis, "Get all camera caps");

	arv_camera_get_width_bounds (gst_aravis->camera, &min_width, &max_width);
	arv_camera_get_height_bounds (gst_aravis->camera, &min_height, &max_height);
	pixel_formats = arv_camera_get_available_pixel_formats (gst_aravis->camera, &n_pixel_formats);
	arv_camera_get_frame_rate_bounds (gst_aravis->camera, &min_frame_rate, &max_frame_rate);

	int min_frame_rate_numerator;
	int min_frame_rate_denominator;
	gst_util_double_to_fraction (min_frame_rate, &min_frame_rate_numerator, &min_frame_rate_denominator);

	int max_frame_rate_numerator;
	int max_frame_rate_denominator;
	gst_util_double_to_fraction (max_frame_rate, &max_frame_rate_numerator, &max_frame_rate_denominator);

	caps = gst_caps_new_empty ();
	for (i = 0; i < n_pixel_formats; i++) {
		const char *caps_string;

		caps_string = arv_pixel_format_to_gst_0_10_caps_string (pixel_formats[i]);

		if (caps_string != NULL) {
			GstStructure *structure;

			structure = gst_structure_from_string (caps_string, NULL);
			gst_structure_set (structure,
					   "width", GST_TYPE_INT_RANGE, min_width, max_width,
					   "height", GST_TYPE_INT_RANGE, min_height, max_height,
					   "framerate", GST_TYPE_FRACTION_RANGE,
							   min_frame_rate_numerator, min_frame_rate_denominator,
							   max_frame_rate_numerator, max_frame_rate_denominator,
					   NULL);
			gst_caps_append_structure (caps, structure);
		}
	}

	g_free (pixel_formats);

	return caps;
}
Example #4
/*
 * Creates a pipeline in the form:
 * fakesrc num-buffers=1 ! caps ! muxer ! filesink location=file
 *
 * And sets the tags in tag_str into the muxer via tagsetter.
 */
static void
test_mux_tags (const gchar * tag_str, const gchar * caps,
    const gchar * muxer, const gchar * file)
{
  GstElement *pipeline;
  GstBus *bus;
  GMainLoop *loop;
  GstTagList *sent_tags;
  GstElement *mux;
  GstTagSetter *setter;
  gchar *launch_str;
  guint bus_watch = 0;

  GST_DEBUG ("testing xmp muxing on : %s", muxer);

  launch_str = g_strdup_printf ("fakesrc num-buffers=1 ! %s ! %s name=mux ! "
      "filesink location=%s name=sink", caps, muxer, file);
  pipeline = gst_parse_launch (launch_str, NULL);
  g_free (launch_str);
  fail_unless (pipeline != NULL);

  mux = gst_bin_get_by_name (GST_BIN (pipeline), "mux");
  fail_unless (mux != NULL);

  loop = g_main_loop_new (NULL, TRUE);
  fail_unless (loop != NULL);

  bus = gst_element_get_bus (pipeline);
  fail_unless (bus != NULL);
  bus_watch = gst_bus_add_watch (bus, bus_handler, loop);
  gst_object_unref (bus);

  gst_element_set_state (pipeline, GST_STATE_READY);

  setter = GST_TAG_SETTER (mux);
  fail_unless (setter != NULL);
  sent_tags = gst_structure_from_string (tag_str, NULL);
  fail_unless (sent_tags != NULL);
  gst_tag_setter_merge_tags (setter, sent_tags, GST_TAG_MERGE_REPLACE);
  gst_tag_list_free (sent_tags);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (loop);

  gst_element_set_state (pipeline, GST_STATE_NULL);

  g_main_loop_unref (loop);
  g_object_unref (mux);
  g_object_unref (pipeline);
  g_source_remove (bus_watch);
}
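For context, a hypothetical invocation of this helper: the taglist string is in GstStructure syntax (matching the gst_structure_from_string() call above), while the caps, muxer name and output path are placeholder assumptions, not values taken from the original test suite.

/* Hypothetical call; "qtmux" and the file path are placeholders. */
test_mux_tags ("taglist,title=test-title,artist=test-artist",
    "video/x-raw-yuv,format=(fourcc)I420,width=(int)16,height=(int)16,framerate=(fraction)30/1",
    "qtmux", "/tmp/xmp-mux-test.mov");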
Example #5
void test_double_append()
{
  GstStructure *s1;
  GstCaps *c1;
  //xmlfile = "test_double_append";
      std_log(LOG_FILENAME_LINE, "Test Started test_double_append");
  c1 = gst_caps_new_any ();
  s1 = gst_structure_from_string ("audio/x-raw-int,rate=44100", NULL);
  gst_caps_append_structure (c1, s1);
  ASSERT_CRITICAL (gst_caps_append_structure (c1, s1));

  gst_caps_unref (c1);
  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
}
Example #6
static void
fetch_element_table (GstPlugin * plugin)
{
  gchar *path;
  gchar *config, *s;
  GstStructure *tmp, *element;

  element_table = gst_plugin_get_cache_data (plugin);

  if (element_table)
    return;

  path = get_config_path ();

  if (!g_file_get_contents (path, &config, NULL, NULL)) {
    g_warning ("could not find config file '%s'.. using defaults!", path);
    config = (gchar *) default_config;
  }

  gst_plugin_add_dependency_simple (plugin, "ONX_CONFIG", path, NULL,
      GST_PLUGIN_DEPENDENCY_FLAG_NONE);

  g_free (path);

  GST_DEBUG ("parsing config:\n%s", config);

  tmp = gst_structure_empty_new ("element_table");

  s = config;

  while ((element = gst_structure_from_string (s, &s))) {
    const gchar *element_name = gst_structure_get_name (element);
    gst_structure_set (tmp, element_name, GST_TYPE_STRUCTURE, element, NULL);
  }

  if (config != default_config)
    g_free (config);

  GST_DEBUG ("element_table=%" GST_PTR_FORMAT, tmp);

  gst_plugin_set_cache_data (plugin, tmp);

  element_table = tmp;
}
Example #7
static void
gst_tag_inject_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstTagInject *self = GST_TAG_INJECT (object);

  switch (prop_id) {
    case PROP_TAGS:{
      gchar *structure =
          g_strdup_printf ("taglist,%s", g_value_get_string (value));
      if (!(self->tags = gst_structure_from_string (structure, NULL))) {
        GST_WARNING ("unparsable taglist = '%s'", structure);
      }
      g_free (structure);
      break;
    }
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
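As a usage note, the value passed to the property is only the field list of a GstStructure string; the setter above prepends "taglist," before parsing it. A hypothetical way to configure the element from application code (the element lookup and tag values are assumptions):

GstElement *inject = gst_element_factory_make ("taginject", NULL);

if (inject != NULL) {
  /* Quoted values keep spaces intact when the string is parsed. */
  g_object_set (inject, "tags", "title=\"Some Title\",artist=\"Some Artist\"",
      NULL);
}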
Example #8
static void
test_tags (const gchar * tag_str)
{
  GstElement *pipeline;
  GstBus *bus;
  GMainLoop *loop;
  GstTagList *sent_tags;
  gint i, j, n_recv, n_sent;
  const gchar *name_sent, *name_recv;
  const GValue *value_sent, *value_recv;
  gboolean found, ok;
  gint comparison;
  GstElement *videotestsrc, *jpegenc, *metadatamux, *metadatademux, *fakesink;
  GstTagSetter *setter;

  GST_DEBUG ("testing tags : %s", tag_str);

  if (received_tags) {
    gst_tag_list_free (received_tags);
    received_tags = NULL;
  }

  pipeline = gst_pipeline_new ("pipeline");
  fail_unless (pipeline != NULL);

  videotestsrc = gst_element_factory_make ("videotestsrc", "src");
  fail_unless (videotestsrc != NULL);
  g_object_set (G_OBJECT (videotestsrc), "num-buffers", 1, NULL);

  jpegenc = gst_element_factory_make ("jpegenc", "enc");
  if (jpegenc == NULL) {
    g_print ("Cannot test - jpegenc not available\n");
    return;
  }

  metadatamux = gst_element_factory_make ("metadatamux", "mux");
  g_object_set (G_OBJECT (metadatamux), "exif", TRUE, NULL);
  fail_unless (metadatamux != NULL);

  metadatademux = gst_element_factory_make ("metadatademux", "demux");
  fail_unless (metadatademux != NULL);

  fakesink = gst_element_factory_make ("fakesink", "sink");
  fail_unless (fakesink != NULL);

  gst_bin_add_many (GST_BIN (pipeline), videotestsrc, jpegenc, metadatamux,
      metadatademux, fakesink, NULL);

  ok = gst_element_link_many (videotestsrc, jpegenc, metadatamux, metadatademux,
      fakesink, NULL);
  fail_unless (ok == TRUE);

  loop = g_main_loop_new (NULL, TRUE);
  fail_unless (loop != NULL);

  bus = gst_element_get_bus (pipeline);
  fail_unless (bus != NULL);
  gst_bus_add_watch (bus, bus_handler, loop);
  gst_object_unref (bus);

  gst_element_set_state (pipeline, GST_STATE_READY);

  setter = GST_TAG_SETTER (metadatamux);
  fail_unless (setter != NULL);
  sent_tags = gst_structure_from_string (tag_str, NULL);
  fail_unless (sent_tags != NULL);
  gst_tag_setter_merge_tags (setter, sent_tags, GST_TAG_MERGE_REPLACE);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (loop);

  GST_DEBUG ("mainloop done : %p", received_tags);

  /* verify tags */
  fail_unless (received_tags != NULL);
  n_recv = gst_structure_n_fields (received_tags);
  n_sent = gst_structure_n_fields (sent_tags);
  /* we also get e.g. an exif binary block */
  fail_unless (n_recv >= n_sent);
  /* FIXME: compare taglist values */
  for (i = 0; i < n_sent; i++) {
    name_sent = gst_structure_nth_field_name (sent_tags, i);
    value_sent = gst_structure_get_value (sent_tags, name_sent);
    found = FALSE;
    for (j = 0; j < n_recv; j++) {
      name_recv = gst_structure_nth_field_name (received_tags, j);
      if (!strcmp (name_sent, name_recv)) {
        value_recv = gst_structure_get_value (received_tags, name_recv);
        comparison = gst_value_compare (value_sent, value_recv);
        if (comparison != GST_VALUE_EQUAL) {
          gchar *vs = g_strdup_value_contents (value_sent);
          gchar *vr = g_strdup_value_contents (value_recv);
          GST_DEBUG ("sent = %s:'%s', recv = %s:'%s'",
              G_VALUE_TYPE_NAME (value_sent), vs,
              G_VALUE_TYPE_NAME (value_recv), vr);
          g_free (vs);
          g_free (vr);
        }
        fail_unless (comparison == GST_VALUE_EQUAL,
            "tag item %s has been received with different type or value",
            name_sent);
        found = TRUE;
        break;
      }
    }
    fail_unless (found, "tag item %s is lost", name_sent);
  }

  gst_tag_list_free (received_tags);
  received_tags = NULL;
  gst_tag_list_free (sent_tags);

  gst_element_set_state (pipeline, GST_STATE_NULL);

  g_main_loop_unref (loop);
  g_object_unref (pipeline);
}
Example #9
static gboolean
gst_aravis_set_caps (GstBaseSrc *src, GstCaps *caps)
{
	GstAravis* gst_aravis = GST_ARAVIS(src);
	GstStructure *structure;
	ArvPixelFormat pixel_format;
	int height, width;
	int bpp, depth;
	const GValue *frame_rate;
	const char *caps_string;
	unsigned int i;
	guint32 fourcc;

	GST_LOG_OBJECT (gst_aravis, "Requested caps = %" GST_PTR_FORMAT, caps);

	arv_camera_stop_acquisition (gst_aravis->camera);

	if (gst_aravis->stream != NULL)
		g_object_unref (gst_aravis->stream);

	structure = gst_caps_get_structure (caps, 0);

	gst_structure_get_int (structure, "width", &width);
	gst_structure_get_int (structure, "height", &height);
	frame_rate = gst_structure_get_value (structure, "framerate");
	gst_structure_get_int (structure, "bpp", &bpp);
	gst_structure_get_int (structure, "depth", &depth);

	if (gst_structure_get_field_type (structure, "format") == G_TYPE_STRING) {
		const char *string;

		string = gst_structure_get_string (structure, "format");
		fourcc = GST_STR_FOURCC (string);
	} else if (gst_structure_get_field_type (structure, "format") == GST_TYPE_FOURCC) {
		gst_structure_get_fourcc (structure, "format", &fourcc);
	} else
		fourcc = 0;

	pixel_format = arv_pixel_format_from_gst_0_10_caps (gst_structure_get_name (structure), bpp, depth, fourcc);

	arv_camera_set_region (gst_aravis->camera, gst_aravis->offset_x, gst_aravis->offset_y, width, height);
	arv_camera_set_binning (gst_aravis->camera, gst_aravis->h_binning, gst_aravis->v_binning);
	arv_camera_set_pixel_format (gst_aravis->camera, pixel_format);

	if (frame_rate != NULL) {
		double dbl_frame_rate;

		dbl_frame_rate = (double) gst_value_get_fraction_numerator (frame_rate) /
			(double) gst_value_get_fraction_denominator (frame_rate);

		GST_DEBUG_OBJECT (gst_aravis, "Frame rate = %g Hz", dbl_frame_rate);
		arv_camera_set_frame_rate (gst_aravis->camera, dbl_frame_rate);

		if (dbl_frame_rate > 0.0)
			gst_aravis->buffer_timeout_us = MAX (GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT,
							     3e6 / dbl_frame_rate);
		else
			gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;
	} else
		gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;

	GST_DEBUG_OBJECT (gst_aravis, "Buffer timeout = %" G_GUINT64_FORMAT " µs", gst_aravis->buffer_timeout_us);

	GST_DEBUG_OBJECT (gst_aravis, "Actual frame rate = %g Hz", arv_camera_get_frame_rate (gst_aravis->camera));

	if(gst_aravis->gain_auto) {
		arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
		GST_DEBUG_OBJECT (gst_aravis, "Auto Gain = continuous");
	} else {
		if (gst_aravis->gain >= 0) {
			GST_DEBUG_OBJECT (gst_aravis, "Gain = %g", gst_aravis->gain);
			arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_OFF);
			arv_camera_set_gain (gst_aravis->camera, gst_aravis->gain);
		}
		GST_DEBUG_OBJECT (gst_aravis, "Actual gain = %g", arv_camera_get_gain (gst_aravis->camera));
	}

	if(gst_aravis->exposure_auto) {
		arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
		GST_DEBUG_OBJECT (gst_aravis, "Auto Exposure = continuous");
	} else {
		if (gst_aravis->exposure_time_us > 0.0) {
			GST_DEBUG_OBJECT (gst_aravis, "Exposure = %g µs", gst_aravis->exposure_time_us);
			arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_OFF);
			arv_camera_set_exposure_time (gst_aravis->camera, gst_aravis->exposure_time_us);
		}
		GST_DEBUG_OBJECT (gst_aravis, "Actual exposure = %g µs", arv_camera_get_exposure_time (gst_aravis->camera));
	}

	if (gst_aravis->fixed_caps != NULL)
		gst_caps_unref (gst_aravis->fixed_caps);

	caps_string = arv_pixel_format_to_gst_0_10_caps_string (pixel_format);
	if (caps_string != NULL) {
		GstStructure *structure;
		GstCaps *caps;

		caps = gst_caps_new_empty ();
		structure = gst_structure_from_string (caps_string, NULL);
		gst_structure_set (structure,
				   "width", G_TYPE_INT, width,
				   "height", G_TYPE_INT, height,
				   NULL);

		if (frame_rate != NULL)
			gst_structure_set_value (structure, "framerate", frame_rate);

		gst_caps_append_structure (caps, structure);

		gst_aravis->fixed_caps = caps;
	} else
		gst_aravis->fixed_caps = NULL;

	gst_aravis->payload = arv_camera_get_payload (gst_aravis->camera);
	gst_aravis->stream = arv_camera_create_stream (gst_aravis->camera, NULL, NULL);

	if (ARV_IS_GV_STREAM (gst_aravis->stream) && gst_aravis->packet_resend)
		g_object_set (gst_aravis->stream, "packet-resend", ARV_GV_STREAM_PACKET_RESEND_ALWAYS, NULL);
	else
		g_object_set (gst_aravis->stream, "packet-resend", ARV_GV_STREAM_PACKET_RESEND_NEVER, NULL);

	for (i = 0; i < GST_ARAVIS_N_BUFFERS; i++)
		arv_stream_push_buffer (gst_aravis->stream,
					arv_buffer_new (gst_aravis->payload, NULL));

	GST_LOG_OBJECT (gst_aravis, "Start acquisition");
	arv_camera_start_acquisition (gst_aravis->camera);

	gst_aravis->timestamp_offset = 0;
	gst_aravis->last_timestamp = 0;

	return TRUE;
}
Example #10
/**
 * pk_gst_parse_codec:
 **/
static PkGstCodecInfo *
pk_gst_parse_codec (const gchar *codec)
{
    GstStructure *s;
    PkGstCodecInfo *info = NULL;
    _cleanup_free_ gchar *caps = NULL;
    _cleanup_free_ gchar *type_name = NULL;
    _cleanup_strv_free_ gchar **split = NULL;
    _cleanup_strv_free_ gchar **ss = NULL;

    split = g_strsplit (codec, "|", -1);
    if (split == NULL || g_strv_length (split) != 5) {
        g_message ("PackageKit: not a GStreamer codec line");
        return NULL;
    }
    if (g_strcmp0 (split[0], "gstreamer") != 0) {
        g_message ("PackageKit: not a GStreamer codec request");
        return NULL;
    }
    if (g_strcmp0 (split[1], "0.10") != 0 &&
            g_strcmp0 (split[1], "1.0") != 0) {
        g_message ("PackageKit: not recognised GStreamer version");
        return NULL;
    }

    if (g_str_has_prefix (split[4], "uri") != FALSE) {
        /* split uri */
        ss = g_strsplit (split[4], " ", 2);
        info = g_new0 (PkGstCodecInfo, 1);
        info->app_name = g_strdup (split[2]);
        info->codec_name = g_strdup (split[3]);
        info->type_name = g_strdup (ss[0]);
        return info;
    }

    /* split */
    ss = g_strsplit (split[4], "-", 2);
    type_name = g_strdup (ss[0]);
    caps = g_strdup (ss[1]);

    s = gst_structure_from_string (caps, NULL);
    if (s == NULL) {
        g_message ("PackageKit: failed to parse caps: %s", caps);
        return NULL;
    }

    /* remove fields that are almost always just MIN-MAX of some sort
     * in order to make the caps look less messy */
    gst_structure_remove_field (s, "pixel-aspect-ratio");
    gst_structure_remove_field (s, "framerate");
    gst_structure_remove_field (s, "channels");
    gst_structure_remove_field (s, "width");
    gst_structure_remove_field (s, "height");
    gst_structure_remove_field (s, "rate");
    gst_structure_remove_field (s, "depth");
    gst_structure_remove_field (s, "clock-rate");
    gst_structure_remove_field (s, "bitrate");

    info = g_new0 (PkGstCodecInfo, 1);
    info->gstreamer_version = g_strdup (split[1]);
    info->app_name = g_strdup (split[2]);
    info->codec_name = g_strdup (split[3]);
    info->type_name = g_strdup (type_name);
    info->structure = s;
    return info;
}
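The codec line parsed above consists of five '|'-separated fields. A hypothetical input string, loosely following the GStreamer missing-plugin installer-detail convention (the application name, codec description and caps are made up for illustration):

/* Hypothetical caller; the input line is illustrative only. */
PkGstCodecInfo *info;

info = pk_gst_parse_codec ("gstreamer|1.0|totem|Vorbis decoder|decoder-audio/x-vorbis");
if (info != NULL)
    g_message ("parsed caps structure: %s", gst_structure_get_name (info->structure));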
Example #11
static gboolean
gst_aravis_set_caps (GstBaseSrc *src, GstCaps *caps)
{
    GstAravis* gst_aravis = GST_ARAVIS(src);
    GstStructure *structure;
    ArvPixelFormat pixel_format;
    int height, width;
    int bpp, depth;
    const GValue *frame_rate;
    const char *caps_string;
    unsigned int i;
    guint32 fourcc;

    GST_LOG_OBJECT (gst_aravis, "Requested caps = %" GST_PTR_FORMAT, caps);

    arv_camera_stop_acquisition (gst_aravis->camera);

    if (gst_aravis->stream != NULL)
        g_object_unref (gst_aravis->stream);

    structure = gst_caps_get_structure (caps, 0);

    gst_structure_get_int (structure, "width", &width);
    gst_structure_get_int (structure, "height", &height);
    frame_rate = gst_structure_get_value (structure, "framerate");
    gst_structure_get_fourcc (structure, "format", &fourcc);
    gst_structure_get_int (structure, "bpp", &bpp);
    gst_structure_get_int (structure, "depth", &depth);

    pixel_format = arv_pixel_format_from_gst_caps (gst_structure_get_name (structure), bpp, depth, fourcc);

    arv_camera_set_region (gst_aravis->camera, 0, 0, width, height);
    arv_camera_set_binning (gst_aravis->camera, gst_aravis->h_binning, gst_aravis->v_binning);
    arv_camera_set_pixel_format (gst_aravis->camera, pixel_format);

    if (frame_rate != NULL) {
        double dbl_frame_rate;

        dbl_frame_rate = (double) gst_value_get_fraction_numerator (frame_rate) /
                         (double) gst_value_get_fraction_denominator (frame_rate);

        GST_DEBUG_OBJECT (gst_aravis, "Frame rate = %g Hz", dbl_frame_rate);
        arv_camera_set_frame_rate (gst_aravis->camera, dbl_frame_rate);

        if (dbl_frame_rate > 0.0)
            gst_aravis->buffer_timeout_us = MAX (GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT,
                                                 3e6 / dbl_frame_rate);
        else
            gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;
    } else
        gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;

    GST_DEBUG_OBJECT (gst_aravis, "Buffer timeout = %Ld µs", gst_aravis->buffer_timeout_us);

    GST_DEBUG_OBJECT (gst_aravis, "Actual frame rate = %g Hz", arv_camera_get_frame_rate (gst_aravis->camera));

    GST_DEBUG_OBJECT (gst_aravis, "Gain       = %d", gst_aravis->gain);
    arv_camera_set_gain (gst_aravis->camera, gst_aravis->gain);
    GST_DEBUG_OBJECT (gst_aravis, "Actual gain       = %d", arv_camera_get_gain (gst_aravis->camera));

    GST_DEBUG_OBJECT (gst_aravis, "Exposure   = %g µs", gst_aravis->exposure_time_us);
    arv_camera_set_exposure_time (gst_aravis->camera, gst_aravis->exposure_time_us);
    GST_DEBUG_OBJECT (gst_aravis, "Actual exposure   = %g µs", arv_camera_get_exposure_time (gst_aravis->camera));

    if (gst_aravis->fixed_caps != NULL)
        gst_caps_unref (gst_aravis->fixed_caps);

    caps_string = arv_pixel_format_to_gst_caps_string (pixel_format);
    if (caps_string != NULL) {
        GstStructure *structure;
        GstCaps *caps;

        caps = gst_caps_new_empty ();
        structure = gst_structure_from_string (caps_string, NULL);
        gst_structure_set (structure,
                           "width", G_TYPE_INT, width,
                           "height", G_TYPE_INT, height,
                           NULL);

        if (frame_rate != NULL)
            gst_structure_set_value (structure, "framerate", frame_rate);

        gst_caps_append_structure (caps, structure);

        gst_aravis->fixed_caps = caps;
    } else
        gst_aravis->fixed_caps = NULL;

    gst_aravis->payload = arv_camera_get_payload (gst_aravis->camera);
    gst_aravis->stream = arv_camera_create_stream (gst_aravis->camera, NULL, NULL);

    for (i = 0; i < GST_ARAVIS_N_BUFFERS; i++)
        arv_stream_push_buffer (gst_aravis->stream,
                                arv_buffer_new (gst_aravis->payload, NULL));

    GST_LOG_OBJECT (gst_aravis, "Start acquisition");
    arv_camera_start_acquisition (gst_aravis->camera);

    gst_aravis->timestamp_offset = 0;
    gst_aravis->last_timestamp = 0;

    return TRUE;
}
Example #12
GstCaps* convert_videoformatsdescription_to_caps (const std::vector<tcam::VideoFormatDescription>& descriptions)
{
    GstCaps* caps = gst_caps_new_empty();

    for (const auto& desc : descriptions)
    {
        if (desc.get_fourcc() == 0)
        {
            tcam_info("Format has empty fourcc. Ignoring");
            continue;
        }

        const char* caps_string = tcam_fourcc_to_gst_1_0_caps_string(desc.get_fourcc());

        if (caps_string == nullptr)
        {
            tcam_warning("Format has empty caps string. Ignoring %s",
                         tcam::fourcc_to_description(desc.get_fourcc()));
            continue;
        }

        // tcam_error("Found '%s' pixel format string", caps_string);

        std::vector<struct tcam_resolution_description> res = desc.get_resolutions();

        for (const auto& r : res)
        {
            int min_width = r.min_size.width;
            int min_height = r.min_size.height;

            int max_width = r.max_size.width;
            int max_height = r.max_size.height;

            if (r.type == TCAM_RESOLUTION_TYPE_RANGE)
            {
                std::vector<struct tcam_image_size> framesizes = tcam::get_standard_resolutions(r.min_size,
                                                                                                r.max_size);

                // check if min/max are already in the vector.
                // some devices return std resolutions as max
                if (r.min_size != framesizes.front())
                {
                    framesizes.insert(framesizes.begin(), r.min_size);
                }

                if (r.max_size != framesizes.back())
                {
                    framesizes.push_back(r.max_size);
                }

                for (const auto& reso : framesizes)
                {
                    GstStructure* structure = gst_structure_from_string (caps_string, NULL);

                    std::vector<double> framerates = desc.get_framerates(reso);

                    if (framerates.empty())
                    {
                        // tcam_log(TCAM_LOG_WARNING, "No available framerates. Ignoring format.");
                        // free the structure created above before skipping this resolution
                        gst_structure_free(structure);
                        continue;
                    }

                    GValue fps_list = G_VALUE_INIT;
                    g_value_init(&fps_list, GST_TYPE_LIST);

                    for (const auto& f : framerates)
                    {
                        int frame_rate_numerator;
                        int frame_rate_denominator;
                        gst_util_double_to_fraction(f,
                                                    &frame_rate_numerator,
                                                    &frame_rate_denominator);

                        if ((frame_rate_denominator == 0) || (frame_rate_numerator == 0))
                        {
                            continue;
                        }

                        GValue fraction = G_VALUE_INIT;
                        g_value_init(&fraction, GST_TYPE_FRACTION);
                        gst_value_set_fraction(&fraction, frame_rate_numerator, frame_rate_denominator);
                        gst_value_list_append_value(&fps_list, &fraction);
                        g_value_unset(&fraction);
                    }


                    gst_structure_set (structure,
                                       "width", G_TYPE_INT, reso.width,
                                       "height", G_TYPE_INT, reso.height,
                                       NULL);

                    gst_structure_take_value(structure, "framerate", &fps_list);
                    gst_caps_append_structure (caps, structure);

                }

                // finally also add the range to allow unusual settings like 1920x96@90fps
                GstStructure* structure = gst_structure_from_string (caps_string, NULL);

                GValue w = G_VALUE_INIT;
                g_value_init(&w, GST_TYPE_INT_RANGE);
                gst_value_set_int_range(&w, min_width, max_width);

                GValue h = G_VALUE_INIT;
                g_value_init(&h, GST_TYPE_INT_RANGE);
                gst_value_set_int_range(&h, min_height, max_height);

                std::vector<double> fps = desc.get_frame_rates(r);

                if (fps.empty())
                {
                    // GST_ERROR("Could not find any framerates for format");
                    // free the range structure before skipping this format
                    gst_structure_free(structure);
                    continue;
                }

                int fps_min_num;
                int fps_min_den;
                int fps_max_num;
                int fps_max_den;
                gst_util_double_to_fraction(*std::min_element(fps.begin(), fps.end()),
                                            &fps_min_num,
                                            &fps_min_den);
                gst_util_double_to_fraction(*std::max_element(fps.begin(), fps.end()),
                                            &fps_max_num,
                                            &fps_max_den);

                GValue f = G_VALUE_INIT;
                g_value_init(&f, GST_TYPE_FRACTION_RANGE);

                gst_value_set_fraction_range_full(&f,
                                                  fps_min_num, fps_min_den,
                                                  fps_max_num, fps_max_den);

                gst_structure_set_value(structure, "width", &w);
                gst_structure_set_value(structure,"height", &h);
                gst_structure_set_value(structure,"framerate", &f);
                gst_caps_append_structure(caps, structure);
            }
            else
            {
                GstStructure* structure = gst_structure_from_string (caps_string, NULL);

                fill_structure_fixed_resolution(structure, desc, r);
                gst_caps_append_structure (caps, structure);
            }
        }

    }

    return caps;
}
Example #13
/*
 * Makes a pipeline in the form:
 * filesrc location=file ! demuxer ! fakesink
 *
 * And gets the tags that are posted on the bus to compare
 * with the tags in 'tag_str'
 */
static void
test_demux_tags (const gchar * tag_str, const gchar * demuxer,
    const gchar * file)
{
  GstElement *pipeline;
  GstBus *bus;
  GMainLoop *loop;
  GstTagList *sent_tags;
  gint i, j, n_recv, n_sent;
  const gchar *name_sent, *name_recv;
  const GValue *value_sent, *value_recv;
  gboolean found;
  gint comparison;
  GstElement *demux;
  gchar *launch_str;
  guint bus_watch = 0;

  GST_DEBUG ("testing tags : %s", tag_str);

  if (received_tags) {
    gst_tag_list_free (received_tags);
    received_tags = NULL;
  }

  launch_str = g_strdup_printf ("filesrc location=%s ! %s name=demux ! "
      "fakesink", file, demuxer);
  pipeline = gst_parse_launch (launch_str, NULL);
  g_free (launch_str);
  fail_unless (pipeline != NULL);

  demux = gst_bin_get_by_name (GST_BIN (pipeline), "demux");
  fail_unless (demux != NULL);

  loop = g_main_loop_new (NULL, TRUE);
  fail_unless (loop != NULL);

  bus = gst_element_get_bus (pipeline);
  fail_unless (bus != NULL);
  bus_watch = gst_bus_add_watch (bus, bus_handler, loop);
  gst_object_unref (bus);

  sent_tags = gst_structure_from_string (tag_str, NULL);
  fail_unless (sent_tags != NULL);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (loop);

  GST_DEBUG ("mainloop done : %p", received_tags);

  /* verify tags */
  fail_unless (received_tags != NULL);
  n_recv = gst_structure_n_fields (received_tags);
  n_sent = gst_structure_n_fields (sent_tags);
  fail_unless (n_recv >= n_sent);
  /* FIXME: compare taglist values */
  for (i = 0; i < n_sent; i++) {
    name_sent = gst_structure_nth_field_name (sent_tags, i);
    value_sent = gst_structure_get_value (sent_tags, name_sent);
    found = FALSE;
    for (j = 0; j < n_recv; j++) {
      name_recv = gst_structure_nth_field_name (received_tags, j);
      if (!strcmp (name_sent, name_recv)) {
        value_recv = gst_structure_get_value (received_tags, name_recv);
        comparison = gst_value_compare (value_sent, value_recv);
        if (comparison != GST_VALUE_EQUAL) {
          gchar *vs = g_strdup_value_contents (value_sent);
          gchar *vr = g_strdup_value_contents (value_recv);
          GST_DEBUG ("sent = %s:'%s', recv = %s:'%s'",
              G_VALUE_TYPE_NAME (value_sent), vs,
              G_VALUE_TYPE_NAME (value_recv), vr);
          g_free (vs);
          g_free (vr);
        }
        fail_unless (comparison == GST_VALUE_EQUAL,
            "tag item %s has been received with different type or value",
            name_sent);
        found = TRUE;
        break;
      }
    }
    fail_unless (found, "tag item %s is lost", name_sent);
  }

  gst_tag_list_free (received_tags);
  received_tags = NULL;
  gst_tag_list_free (sent_tags);

  gst_element_set_state (pipeline, GST_STATE_NULL);

  g_main_loop_unref (loop);
  g_object_unref (demux);
  g_object_unref (pipeline);
  g_source_remove (bus_watch);
}
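A hypothetical call of this helper, mirroring test_mux_tags further above; the demuxer name and file path are placeholders, and the taglist string again uses GstStructure syntax:

/* Hypothetical invocation; "qtdemux" and the file path are placeholders. */
test_demux_tags ("taglist,title=test-title,artist=test-artist",
    "qtdemux", "/tmp/xmp-mux-test.mov");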
Example #14
gint
main (gint argc, gchar ** argv)
{
  GstElement *bin;
  GstElement *src, *fmt, *enc, *sink;
  GstCaps *caps;
  GstStructure *prog;

  /* init gstreamer */
  gst_init (&argc, &argv);

  /* create a new bin to hold the elements */
  bin = gst_pipeline_new ("camera");

  /* create elements */
  if (!(sink = gst_element_factory_make ("multifilesink", NULL))) {
    GST_WARNING ("Can't create element \"multifilesink\"");
    return -1;
  }
  g_object_set (sink, "location", "image%02d.jpg", NULL);

  if (!(enc = gst_element_factory_make ("jpegenc", NULL))) {
    GST_WARNING ("Can't create element \"jpegenc\"");
    return -1;
  }

  if (!(fmt = gst_element_factory_make ("capsfilter", NULL))) {
    GST_WARNING ("Can't create element \"capsfilter\"");
    return -1;
  }
  caps =
      gst_caps_from_string
      ("video/x-raw, width=640, height=480, framerate=(fraction)15/1");
  g_object_set (fmt, "caps", caps, NULL);

  if (!(src = gst_element_factory_make ("v4l2src", NULL))) {
    GST_WARNING ("Can't create element \"v4l2src\"");
    return -1;
  }
  g_object_set (src, "queue-size", 1, NULL);

  /* add objects to the main bin */
  gst_bin_add_many (GST_BIN (bin), src, fmt, enc, sink, NULL);

  /* link elements */
  if (!gst_element_link_many (src, fmt, enc, sink, NULL)) {
    GST_WARNING ("Can't link elements");
    return -1;
  }

  /* program a pattern of events */
#if 0
  prog = gst_structure_from_string ("program"
      ", image00=(structure)\"image\\,contrast\\=0.0\\;\""
      ", image01=(structure)\"image\\,contrast\\=0.3\\;\""
      ", image02=(structure)\"image\\,contrast\\=1.0\\;\""
      ", image03=(structure)\"image\\,contrast\\=0.05\\;\";", NULL);
#endif
#if 1
  prog = gst_structure_from_string ("program"
      ", image00=(structure)\"image\\,brightness\\=1.0\\,contrast\\=0.0\\;\""
      ", image01=(structure)\"image\\,brightness\\=0.5\\,contrast\\=0.3\\;\""
      ", image02=(structure)\"image\\,brightness\\=0.25\\,contrast\\=1.0\\;\""
      ", image03=(structure)\"image\\,brightness\\=0.0\\,contrast\\=0.05\\;\";",
      NULL);
#endif
  set_program (GST_OBJECT (src), prog);
  g_object_set (src, "num-buffers", gst_structure_n_fields (prog), NULL);

  /* prepare playback */
  gst_element_set_state (bin, GST_STATE_PAUSED);

  /* play and wait */
  gst_element_set_state (bin, GST_STATE_PLAYING);

  /* mainloop and wait for eos */
  event_loop (bin);

  /* stop and cleanup */
  gst_element_set_state (bin, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (bin));
  return 0;
}
Example #15
void 
shmsrc_tilde_on_data (shmdata_any_reader_t *reader,
		      void *shmbuf,
		      void *data,
		      int data_size,
		      unsigned long long timestamp,
		      const char *type_description, 
		      void *user_data)
{
  t_shmsrc_tilde *x = (t_shmsrc_tilde *) user_data;
  
  //do not buffer audio if dsp is off; FIXME: should be optional
  if (!canvas_dspstate)
    {
      shmdata_any_reader_free (shmbuf);
      return;
    }

  /* printf ("data %p, data size %d, timestamp %llu, type descr %s\n",   */
  /* 	  data, data_size, timestamp, type_description);   */
  GstStructure *meta_data = gst_structure_from_string (type_description, NULL);
  if (meta_data == NULL) 
    { 
      shmdata_any_reader_free (shmbuf);
      //post ("metadata is NULL\n"); 
      return; 
    } 
  if (!g_str_has_prefix (gst_structure_get_name (meta_data), "audio/")) 
    { 
      shmdata_any_reader_free (shmbuf);
      //post ("not an audio stream\n"); 
      return; 
    } //should be "audio/... 

  t_shmsrc_tilde_buf *audio_buf = g_malloc0 (sizeof (t_shmsrc_tilde_buf));

  int channels = -1; 
  int samplerate = -1; 
  int width = -1; 
  gst_structure_get (meta_data,  
   		     "rate", G_TYPE_INT, &samplerate,  
   		     "channels", G_TYPE_INT, &channels,  
   		     "width", G_TYPE_INT, &width,  
   		     NULL); 

  gst_structure_free(meta_data);

  audio_buf->num_channels_in_buf = channels; 
  if (channels > x->x_num_outlets)
    audio_buf->num_channels_to_output = x->x_num_outlets;
  else if (channels < 0)
    audio_buf->num_channels_to_output = 0;
  else 
    audio_buf->num_channels_to_output = channels;
  
  audio_buf->num_unused_channels = channels - x->x_num_outlets;
  if (audio_buf->num_unused_channels < 0)
    audio_buf->num_unused_channels = 0;

  audio_buf->sample_rate = samplerate;
  //audio_buf->sample_size = width;
  audio_buf->remaining_samples = data_size / ((width/8) * channels);
  /* g_print ("data_size %d, width %d, channels %d, samplerate %d, remaining  samples %d, cur logicial date %f \n",   */
  /* 	   data_size,    */
  /* 	   width,    */
  /* 	   channels,    */
  /* 	   samplerate,  */
  /* 	   audio_buf->remaining_samples, */
  /* 	   clock_getlogicaltime());     */
   //g_print ("on data queue size %d\n", g_async_queue_length (x->x_audio_queue));
  double audio_buf_sample_duration =  (1.0 / samplerate) * (32.*441000.); //see TIMEUNITPERSEC in m_sched.c
  audio_buf->shm_buf = shmbuf;

  //double cur_date = clock_getlogicaltime();
  /* if (x->x_stream_data_date == -1.0) */
  /*   x->x_stream_data_date = clock_getlogicaltime(); */
  if (x->x_stream_sample_duration == -1.0)
    x->x_stream_sample_duration = audio_buf_sample_duration;

  /* else */
  /* 	{ */
  /* 	  double cur_stream_dur = (cur_date - x->x_stream_data_date) / audio_buf->remaining_samples; */
  /* 	  double max_deriv = 0.001;  //FIXME make this a param */
  /* 	  if (cur_stream_dur > audio_buf_sample_duration * (1.0 + max_deriv)) */
  /* 	    cur_stream_dur = audio_buf_sample_duration * (1.0 + max_deriv); */
  /* 	  else if (cur_stream_dur < audio_buf_sample_duration * (1.0 - max_deriv)) */
  /* 	    cur_stream_dur = audio_buf_sample_duration * (1.0 - max_deriv); */

  /* 	  x->x_stream_sample_duration = audio_buf_sample_duration; */
  /* 	} */
  /*     x->x_stream_sample_duration = ceil (x->x_stream_sample_duration); */
  /*     x->x_stream_data_date = cur_date; */

  /* g_print ("rate %d, channels %d, width %d num sample=%d\n", */
  /* 	    samplerate,  */
  /* 	    channels,  */
  /* 	    width,  */
  /* 	    data_size / ((width/8) *channels));  */

  //converting to float
  audio_buf->free_audio_data = FALSE;
  audio_buf->audio_data = (t_float *)data;
  if (width == 16)
    {
      //g_print ("converting\n");
      audio_buf->free_audio_data = TRUE;
      t_float *audio_converted =  g_malloc0 (sizeof (t_float) 
					     * audio_buf->num_channels_in_buf 
					     * audio_buf->remaining_samples);
      audio_buf->audio_data = audio_converted;
      
      int n = channels *  audio_buf->remaining_samples;
      while (n--)
	{
	  *audio_converted++ = (t_float)(*(gint16 *)data * 3.051850e-05);
	  data += sizeof(gint16); 
	}
    }
  else if (width == 8)
    {
      post ("8 bit audio not supported yet");
      /* drop the queue entry allocated above before bailing out */
      g_free (audio_buf);
      shmdata_any_reader_free (shmbuf);
      return;
    }
  
  audio_buf->current_pos = audio_buf->audio_data;
  g_async_queue_push (x->x_audio_queue, audio_buf);
}