GStreamerGWorld::GStreamerGWorld(GstElement* pipeline)
    : m_pipeline(pipeline)
{
    // XOverlay messages need to be handled synchronously.
    GRefPtr<GstBus> bus = webkitGstPipelineGetBus(GST_PIPELINE(m_pipeline));
#ifndef GST_API_VERSION_1
    gst_bus_set_sync_handler(bus.get(), gst_bus_sync_signal_handler, this);
#else
    gst_bus_set_sync_handler(bus.get(), gst_bus_sync_signal_handler, this, 0);
#endif
    g_signal_connect(bus.get(), "sync-message::element", G_CALLBACK(gstGWorldSyncMessageCallback), this);
}
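All of the snippets on this page revolve around the same call: installing a sync handler that the bus invokes from the streaming thread which posted the message. As a reference point, here is a minimal sketch using the GStreamer 1.0 signature (the handler and function names are hypothetical, not taken from any of the projects quoted here); the extra GDestroyNotify argument in 1.0 is the reason for the GST_API_VERSION_1 guards in the example above.

#include <gst/gst.h>

/* Runs in the streaming thread that posted the message, so it must not
 * block. Returning GST_BUS_PASS forwards the message to any asynchronous
 * watch or signal handlers as well. */
static GstBusSyncReply
example_sync_handler (GstBus *bus, GstMessage *message, gpointer user_data)
{
  if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ERROR)
    g_printerr ("error posted by %s\n", GST_OBJECT_NAME (GST_MESSAGE_SRC (message)));
  return GST_BUS_PASS;
}

static void
attach_sync_handler (GstElement *pipeline)
{
  GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  /* 1.0 signature: the last argument is a GDestroyNotify for user_data;
   * 0.10 omitted it, hence the version guards in the example above. */
  gst_bus_set_sync_handler (bus, example_sync_handler, NULL, NULL);
  gst_object_unref (bus);
}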
Example No. 2
gboolean gstreamer_run_pipeline(GMainLoop *loop, const char *s, StartupState state) {
	main_set_real_time_scheduling_policy();

	GError *error = NULL;
	pipeline = gst_parse_launch(s, &error);
	if (!pipeline) {
		printf("Error: Could not create gstreamer pipeline.\n");
		printf("Parse error: %s\n", error->message);
		return FALSE;
	}

	bus_quit_on_playing = FALSE;
	GstBus *bus;
	bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
	bus_watch_id = gst_bus_add_watch(bus, bus_callback, loop);
	if (main_have_gui())
#if GST_CHECK_VERSION(1, 0, 0)
		gst_bus_set_sync_handler(bus, (GstBusSyncHandler)bus_sync_handler, NULL, NULL);
#else
		gst_bus_set_sync_handler(bus, (GstBusSyncHandler)bus_sync_handler, NULL);
#endif
	gst_object_unref(bus);

	// Iterate over all elements of the pipeline to hook a handler
	// listing newly created pads.
	if (created_pads_list != NULL) {
		g_list_free(created_pads_list);
		created_pads_list = NULL;
	}
	GstIterator *iterator = gst_bin_iterate_elements(GST_BIN(pipeline));
	gst_iterator_foreach(iterator, for_each_pipeline_element, NULL);
	gst_iterator_free(iterator);

	stats_reset();

	gst_element_set_state(pipeline, GST_STATE_READY);

	state_change_to_playing_already_occurred = FALSE;

	if (state == STARTUP_PLAYING)
		gst_element_set_state(pipeline, GST_STATE_PLAYING);
	else
		gst_element_set_state(pipeline, GST_STATE_PAUSED);

	pipeline_description = s;
	end_of_stream = FALSE;

	inform_pipeline_destroyed_cb_list = NULL;
	return TRUE;
}
GStreamerGWorld::~GStreamerGWorld()
{
    exitFullscreen();

    GRefPtr<GstBus> bus = webkitGstPipelineGetBus(GST_PIPELINE(m_pipeline));
    g_signal_handlers_disconnect_by_func(bus.get(), reinterpret_cast<gpointer>(gstGWorldSyncMessageCallback), this);
#ifndef GST_API_VERSION_1
    gst_bus_set_sync_handler(bus.get(), 0, this);
#else
    gst_bus_set_sync_handler(bus.get(), 0, this, 0);
#endif

    m_pipeline = 0;
}
Example No. 4
static void
create_playbin (GthMediaViewerPage *self)
{
	GSettings *settings;
	GstBus    *bus;

	if (self->priv->playbin != NULL)
		return;

	self->priv->playbin = gst_element_factory_make ("playbin", "playbin");

	settings = g_settings_new (GTHUMB_GSTREAMER_TOOLS_SCHEMA);
	g_object_set (self->priv->playbin,
		      "volume", (double) g_settings_get_int (settings, PREF_GSTREAMER_TOOLS_VOLUME) / 100.0,
		      "force-aspect-ratio", TRUE,
		      NULL);
	g_object_unref (settings);

	bus = gst_pipeline_get_bus (GST_PIPELINE (self->priv->playbin));
	gst_bus_enable_sync_message_emission (bus);
	gst_bus_set_sync_handler (bus, (GstBusSyncHandler) set_playbin_window, self, NULL);
	gst_bus_add_signal_watch (bus);

	g_signal_connect (self->priv->playbin,
			  "notify::volume",
			  G_CALLBACK (playbin_notify_volume_cb),
			  self);
	g_signal_connect (bus,
			  "message",
			  G_CALLBACK (bus_message_cb),
			  self);
}
Example No. 5
static void
kms_recorder_endpoint_new_media_muxer (KmsRecorderEndpoint * self)
{
  GstBus *bus;

  kms_recorder_endpoint_create_base_media_muxer (self);

  g_signal_connect (self->priv->mux, "on-sink-added",
      G_CALLBACK (kms_recorder_endpoint_on_sink_added), self);

  kms_recorder_endpoint_update_media_stats (self);
  bus = kms_base_media_muxer_get_bus (self->priv->mux);
  gst_bus_set_sync_handler (bus, bus_sync_signal_handler, self, NULL);
  g_object_unref (bus);

  if (kms_recording_profile_supports_type (self->priv->profile,
          KMS_ELEMENT_PAD_TYPE_AUDIO)) {
    kms_recorder_endpoint_add_appsink (self, KMS_ELEMENT_PAD_TYPE_AUDIO, NULL,
        AUDIO_STREAM_NAME RECORDER_DEFAULT_SUFFIX, FALSE);
  }

  if (kms_recording_profile_supports_type (self->priv->profile,
          KMS_ELEMENT_PAD_TYPE_VIDEO)) {
    kms_recorder_endpoint_add_appsink (self, KMS_ELEMENT_PAD_TYPE_VIDEO, NULL,
        VIDEO_STREAM_NAME RECORDER_DEFAULT_SUFFIX, FALSE);
  }
}
int
lapsi_backend_init(CustomData *data,int* argc,char*** argv)
{
  GstBus *bus;

  /* Initialize our data structure */
  memset (data, 0, sizeof (*data));
  data->duration = GST_CLOCK_TIME_NONE;

  /* Create the elements */
  data->playbin = gst_element_factory_make ("playbin", "playbin");

  if (!data->playbin)
  {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Connect to interesting signals in playbin */
  bus = gst_element_get_bus (data->playbin);
  gst_bus_add_signal_watch (bus);

  /* Create the GUI */
  create_ui (data);

  /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
  gst_bus_set_sync_handler(bus,(GstBusSyncHandler)busSyncHandler,NULL,NULL);
  g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::eos", (GCallback)eos_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb,data);
  return 0;

}
static gboolean
gst_uri_downloader_set_uri (GstUriDownloader * downloader, const gchar * uri)
{
  GstPad *pad;

  if (!gst_uri_is_valid (uri))
    return FALSE;

  GST_DEBUG_OBJECT (downloader, "Creating source element for the URI:%s", uri);
  downloader->priv->urisrc = gst_element_make_from_uri (GST_URI_SRC, uri, NULL);
  if (!downloader->priv->urisrc)
    return FALSE;

  /* add a sync handler for the bus messages to detect errors in the download */
  gst_element_set_bus (GST_ELEMENT (downloader->priv->urisrc),
      downloader->priv->bus);
  gst_bus_set_sync_handler (downloader->priv->bus,
      gst_uri_downloader_bus_handler, downloader);

  pad = gst_element_get_static_pad (downloader->priv->urisrc, "src");
  if (!pad)
    return FALSE;
  gst_pad_link (pad, downloader->priv->pad);
  gst_object_unref (pad);
  return TRUE;
}
Example No. 8
void
acam_webcam_setup_devices (acam_webcam_device_s *acam_webcam_device)
{
	gboolean ok        = TRUE;
	GError  *tmp_error = NULL;

	acam_webcam_device->video_pipeline = gst_pipeline_new ("pipeline");

	/* Create all bins */
	acam_webcam_setup_create_video_display_bin (acam_webcam_device, &tmp_error);
	acam_webcam_setup_create_photo_save_bin (acam_webcam_device, &tmp_error);
	acam_webcam_setup_create_video_save_bin (acam_webcam_device, &tmp_error);

	if (tmp_error != NULL)
		g_print ("One or more needed gstreamer elements are missing\n");

	gst_bin_add_many (GST_BIN (acam_webcam_device->video_pipeline),
	    				acam_webcam_device->video_display_bin,
	    				acam_webcam_device->photo_save_bin,
	    				NULL);

	ok = gst_element_link (acam_webcam_device->video_display_bin, acam_webcam_device->photo_save_bin);
	
	acam_webcam_device->bus = gst_element_get_bus (acam_webcam_device->video_pipeline);
	gst_bus_add_signal_watch (acam_webcam_device->bus);

	g_signal_connect (G_OBJECT (acam_webcam_device->bus), "message", G_CALLBACK (acam_webcam_bus_message_cb), acam_webcam_device);
	
	gst_bus_set_sync_handler (acam_webcam_device->bus, (GstBusSyncHandler) acam_webcam_bus_sync_handler, acam_webcam_device);
}
Example No. 9
static GstBusSyncReply
gst_uri_downloader_bus_handler (GstBus * bus,
    GstMessage * message, gpointer data)
{
  GstUriDownloader *downloader = (GstUriDownloader *) (data);

  if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ERROR) {
    GError *err = NULL;
    gchar *dbg_info = NULL;
    gchar *new_error = NULL;

    gst_message_parse_error (message, &err, &dbg_info);
    GST_WARNING_OBJECT (downloader,
        "Received error: %s from %s, the download will be cancelled",
        err->message, GST_OBJECT_NAME (message->src));
    GST_DEBUG ("Debugging info: %s\n", (dbg_info) ? dbg_info : "none");

    if (dbg_info)
      new_error = g_strdup_printf ("%s: %s\n", err->message, dbg_info);
    if (new_error) {
      g_free (err->message);
      err->message = new_error;
    }

    if (!downloader->priv->err)
      downloader->priv->err = err;
    else
      g_error_free (err);

    g_free (dbg_info);

    /* remove the sync handler to avoid duplicated messages */
    gst_bus_set_sync_handler (downloader->priv->bus, NULL, NULL, NULL);

    /* stop the download */
    GST_OBJECT_LOCK (downloader);
    if (downloader->priv->download != NULL) {
      GST_DEBUG_OBJECT (downloader, "Stopping download");
      g_object_unref (downloader->priv->download);
      downloader->priv->download = NULL;
      downloader->priv->cancelled = TRUE;
      g_cond_signal (&downloader->priv->cond);
    }
    GST_OBJECT_UNLOCK (downloader);
  } else if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_WARNING) {
    GError *err = NULL;
    gchar *dbg_info = NULL;

    gst_message_parse_warning (message, &err, &dbg_info);
    GST_WARNING_OBJECT (downloader,
        "Received warning: %s from %s",
        err->message, GST_OBJECT_NAME (message->src));
    GST_DEBUG ("Debugging info: %s\n", (dbg_info) ? dbg_info : "none");
    g_error_free (err);
    g_free (dbg_info);
  }

  gst_message_unref (message);
  return GST_BUS_DROP;
}
static GstBusSyncReply
gst_uri_downloader_bus_handler (GstBus * bus,
    GstMessage * message, gpointer data)
{
  GstUriDownloader *downloader = (GstUriDownloader *) (data);

  if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ERROR ||
      GST_MESSAGE_TYPE (message) == GST_MESSAGE_WARNING) {
    GError *err = NULL;
    gchar *dbg_info = NULL;

    /* use the parse helper that matches the message type */
    if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ERROR)
      gst_message_parse_error (message, &err, &dbg_info);
    else
      gst_message_parse_warning (message, &err, &dbg_info);
    GST_WARNING_OBJECT (downloader,
        "Received error: %s from %s, the download will be cancelled",
        err->message, GST_OBJECT_NAME (message->src));
    GST_DEBUG ("Debugging info: %s\n", (dbg_info) ? dbg_info : "none");
    g_error_free (err);
    g_free (dbg_info);

    /* remove the sync handler to avoid duplicated messages */
    gst_bus_set_sync_handler (downloader->priv->bus, NULL, NULL);
    gst_uri_downloader_cancel (downloader);
  }

  gst_message_unref (message);
  return GST_BUS_DROP;
}
static void
kms_recorder_end_point_init (KmsRecorderEndPoint * self)
{
  GstBus *bus;

  self->priv = KMS_RECORDER_END_POINT_GET_PRIVATE (self);

  self->priv->loop = kms_loop_new ();

  self->priv->paused_time = G_GUINT64_CONSTANT (0);
  self->priv->paused_start = GST_CLOCK_TIME_NONE;

  /* Create internal pipeline */
  self->priv->pipeline = gst_pipeline_new ("recorder-pipeline");
  g_object_set (self->priv->pipeline, "async-handling", TRUE, NULL);
  g_cond_init (&self->priv->state_manager.cond);

  bus = gst_pipeline_get_bus (GST_PIPELINE (self->priv->pipeline));
  gst_bus_set_sync_handler (bus, bus_sync_signal_handler, self, NULL);
  g_object_unref (bus);

  self->priv->controller =
      kms_conf_controller_new (KMS_CONF_CONTROLLER_KMS_ELEMENT, self,
      KMS_CONF_CONTROLLER_PIPELINE, self->priv->pipeline, NULL);
  g_signal_connect (self->priv->controller, "matched-elements",
      G_CALLBACK (matched_elements_cb), self);
  g_signal_connect (self->priv->controller, "sink-required",
      G_CALLBACK (sink_required_cb), self);
  g_signal_connect (self->priv->controller, "sink-unrequired",
      G_CALLBACK (sink_unrequired_cb), self);
}
Example No. 12
int TGstEngine::load( const QUrl &url, int id )
{
    QUrl path = url;

    if (path.scheme().isEmpty())
        path.setScheme("file");

    qDebug() << "LOAD: " << path.toString() << " " << m_currentPlayer;

    if (!QFile::exists(url.path())) {
        m_currentPlayer = -1;

        kError() << "Cannot load: " << path.toString();
        return -1;
    }

    if (id < 0) {
        id = m_players.count();
        if (m_players.contains(id))
            id = KAlgorithm::random();
    }

    m_players.insert(id, createPlayInfo(path));

    m_currentPlayer = id;

    m_players[id].id = id;

    gst_bus_set_sync_handler(gst_pipeline_get_bus(GST_PIPELINE(m_players[id].player)), bus_cb, &m_players[id]);

    return id;
}
Example No. 13
void StreamPipeline::Init(const Song& song) {
  pipeline_ = gst_pipeline_new("pipeline");
  GstElement* uridecodebin = CreateElement("uridecodebin", pipeline_);
  qLog(Debug) << "Streaming:" << song.url();
  g_object_set(
      G_OBJECT(uridecodebin), "uri", song.url().toString().toUtf8().constData(), NULL);
  g_signal_connect(
      G_OBJECT(uridecodebin), "pad-added", G_CALLBACK(NewPadCallback), this);

  GError* error = NULL;
  convert_bin_ = gst_parse_bin_from_description(kPipeline, TRUE, &error);
  gst_bin_add(GST_BIN(pipeline_), convert_bin_);

  gst_element_set_state(uridecodebin, GST_STATE_PLAYING);

  app_sink_ = CreateElement("appsink", pipeline_);
  g_object_set(G_OBJECT(app_sink_), "emit-signals", TRUE, NULL);
  g_signal_connect(
      G_OBJECT(app_sink_), "new-buffer", G_CALLBACK(NewBufferCallback), this);

  qLog(Debug) << "Linking appsink:" << gst_element_link(convert_bin_, app_sink_);

  gst_bus_set_sync_handler(
      gst_pipeline_get_bus(GST_PIPELINE(pipeline_)), BusCallbackSync, this);
  bus_cb_id_ = gst_bus_add_watch(
      gst_pipeline_get_bus(GST_PIPELINE(pipeline_)), BusCallback, this);
}
Example No. 14
eServiceMP3::~eServiceMP3()
{
	// disconnect subtitle callback
	GstElement *subsink = gst_bin_get_by_name(GST_BIN(m_gst_playbin), "subtitle_sink");

	if (subsink)
	{
		g_signal_handler_disconnect (subsink, m_subs_to_pull_handler_id);
		gst_object_unref(subsink);
	}

	delete m_subtitle_widget;

	// disconnect sync handler callback
	gst_bus_set_sync_handler(gst_pipeline_get_bus (GST_PIPELINE (m_gst_playbin)), NULL, NULL);

	if (m_state == stRunning)
		stop();

	if (m_stream_tags)
		gst_tag_list_free(m_stream_tags);
	
	if (m_gst_playbin)
	{
		gst_object_unref (GST_OBJECT (m_gst_playbin));
		eDebug("eServiceMP3::destruct!");
	}
}
Example No. 15
static void
setup (void)
{
  GstTagSetter *setter;
  gchar *desc_str;
  GstCaps *filter_caps;
  GstBus *bus;
  gint i;

  GST_INFO ("init");

  main_loop = g_main_loop_new (NULL, TRUE);

  camera = gst_check_setup_element ("camerabin");

  setup_camerabin_elements (camera);

  g_signal_connect (camera, "image-done", G_CALLBACK (capture_done), main_loop);

  bus = gst_pipeline_get_bus (GST_PIPELINE (camera));
  gst_bus_add_watch (bus, (GstBusFunc) capture_bus_cb, main_loop);
  gst_bus_set_sync_handler (bus, bus_sync_callback, main_loop);
  gst_object_unref (bus);

  filter_caps = gst_caps_from_string ("video/x-raw-yuv,format=(fourcc)I420");
  g_object_set (G_OBJECT (camera), "filter-caps", filter_caps, NULL);
  gst_caps_unref (filter_caps);

  /* force a low framerate here to not timeout the tests because of the
   * encoders */
  g_signal_emit_by_name (camera, "set-video-resolution-fps", 320, 240, 5, 1,
      NULL);

  /* Set some default tags */
  setter = GST_TAG_SETTER (camera);
  desc_str = g_strdup_printf ("Created by %s", g_get_real_name ());

  gst_tag_setter_add_tags (setter, GST_TAG_MERGE_REPLACE,
      GST_TAG_DESCRIPTION, desc_str, NULL);
  g_free (desc_str);

  if (gst_element_set_state (GST_ELEMENT (camera), GST_STATE_PLAYING) ==
      GST_STATE_CHANGE_FAILURE) {
    GST_WARNING ("setting camerabin to PLAYING failed");
    gst_element_set_state (GST_ELEMENT (camera), GST_STATE_NULL);
    gst_object_unref (camera);
    camera = NULL;
  }

  /* create the taglists */
  for (i = 0; i < TAGLISTS_COUNT; i++) {
    taglists[i] = gst_tag_list_new_full (GST_TAG_ARTIST, "test-artist",
        GST_TAG_GEO_LOCATION_LONGITUDE, g_random_double_range (-180, 180),
        GST_TAG_GEO_LOCATION_LATITUDE, g_random_double_range (-90, 90),
        GST_TAG_GEO_LOCATION_ELEVATION, g_random_double_range (0, 3000), NULL);
  }

  GST_INFO ("init finished");
}
static void
setup_errors (GstElement *play)
{
	GstBus *bus;

	bus = gst_element_get_bus (play);
	gst_bus_set_sync_handler (bus, (GstBusSyncHandler) error_handler, play, NULL);
}
static void
thumb_app_set_error_handler (ThumbApp *app)
{
	GstBus *bus;

	bus = gst_element_get_bus (app->play);
	gst_bus_set_sync_handler (bus, (GstBusSyncHandler) error_handler, app->play, NULL);
}
GstEnginePipeline::~GstEnginePipeline() {
    if (pipeline_) {
        gst_bus_set_sync_handler(gst_pipeline_get_bus(GST_PIPELINE(pipeline_)), NULL, NULL);
        g_source_remove(bus_cb_id_);
        gst_element_set_state(pipeline_, GST_STATE_NULL);
        gst_object_unref(GST_OBJECT(pipeline_));
    }
}
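The eServiceMP3 and GstEnginePipeline destructors above (and eServiceMP3Record further down) clear the handler by passing NULL before the pipeline is released, so that no streaming thread can call back into an object that is mid-destruction. A condensed teardown sketch under the same assumptions (GStreamer 1.0 signature, a hypothetical watch id obtained from gst_bus_add_watch()):

static void
teardown_pipeline (GstElement *pipeline, guint bus_watch_id)
{
  GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));

  /* Passing NULL clears the currently installed sync handler. */
  gst_bus_set_sync_handler (bus, NULL, NULL, NULL);
  gst_object_unref (bus);

  if (bus_watch_id != 0)
    g_source_remove (bus_watch_id);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
}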
Example No. 19
GStreamerGWorld::GStreamerGWorld(GstElement* pipeline)
    : m_pipeline(pipeline)
{
    // XOverlay messages need to be handled synchronously.
    GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
    gst_bus_set_sync_handler(bus, gst_bus_sync_signal_handler, this);
    g_signal_connect(bus, "sync-message::element", G_CALLBACK(gstGWorldSyncMessageCallback), this);
    gst_object_unref(bus);
}
Example No. 20
static Eina_Bool
em_file_open(void *video,
             const char   *file)
{
   Emotion_Gstreamer_Video *ev = video;
   Eina_Strbuf *sbuf = NULL;
   const char *uri;

   if (!file) return EINA_FALSE;
   if (strstr(file, "://") == NULL)
     {
        sbuf = eina_strbuf_new();
        eina_strbuf_append(sbuf, "file://");
        if (strncmp(file, "./", 2) == 0)
          file += 2;
	if (strstr(file, ":/") != NULL)
	  { /* We absolutely need file:///C:/ under Windows, so adding it here */
             eina_strbuf_append(sbuf, "/");
	  }
	else if (*file != '/')
          {
             char tmp[PATH_MAX];

             if (getcwd(tmp, PATH_MAX))
               {
                  eina_strbuf_append(sbuf, tmp);
                  eina_strbuf_append(sbuf, "/");
               }
          }
        eina_strbuf_append(sbuf, file);
     }

   ev->play_started = 0;
   ev->pipeline_parsed = 0;

   uri = sbuf ? eina_strbuf_string_get(sbuf) : file;
   DBG("setting file to '%s'", uri);
   ev->pipeline = gstreamer_video_sink_new(ev, ev->obj, uri);
   if (sbuf) eina_strbuf_free(sbuf);

   if (!ev->pipeline)
     return EINA_FALSE;

   ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline));
   if (!ev->eos_bus)
     {
        ERR("could not get the bus");
        return EINA_FALSE;
     }

   gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev);

   ev->position = 0.0;

   return EINA_TRUE;
}
Example No. 21
int
main (int argc, char **argv)
{
  GstElement *pipeline;
  GstBus *bus;

#ifndef GST_DISABLE_PARSE
  GError *error = NULL;
#endif

  gst_init (&argc, &argv);

  if (argc != 2) {
    g_print ("Usage: %s \"pipeline description with launch format\"\n",
        argv[0]);
    g_print ("The pipeline should contain an element implementing XOverlay.\n");
    g_print ("Example: %s \"videotestsrc ! ximagesink\"\n", argv[0]);
    return -1;
  }
#ifdef GST_DISABLE_PARSE
  g_print ("GStreamer was built without pipeline parsing capabilities.\n");
  g_print
      ("Please rebuild GStreamer with pipeline parsing capabilities activated to use this example.\n");
  return 1;
#else
  pipeline = gst_parse_launch (argv[1], &error);
  if (error) {
    g_print ("Error while parsing pipeline description: %s\n", error->message);
    return -1;
  }
#endif

  loop = g_main_loop_new (NULL, FALSE);

  open_display ();

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, pipeline);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* We want to get out after */
  //g_timeout_add (500000, (GSourceFunc) terminate_playback, pipeline);
  g_timeout_add (10000, (GSourceFunc) pause_playback, pipeline);
  g_timeout_add (20000, (GSourceFunc) start_playback, pipeline);

  g_main_loop_run (loop);

  close_display ();

  g_main_loop_unref (loop);

  return 0;
}
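Example No. 21 above installs create_window as the sync handler because the video sink's window-handle request has to be answered before the sink falls back to creating its own window; an asynchronous watch would see the message too late. A hedged sketch of that pattern against the GStreamer 1.0 video-overlay API (the handle passed through user_data is a placeholder, e.g. an X11 XID):

#include <gst/gst.h>
#include <gst/video/videooverlay.h>

static GstBusSyncReply
window_handle_sync_handler (GstBus *bus, GstMessage *message, gpointer user_data)
{
  guintptr window_handle = (guintptr) user_data;

  if (!gst_is_video_overlay_prepare_window_handle_message (message))
    return GST_BUS_PASS;

  /* Hand the native window over to the element that asked for it. */
  gst_video_overlay_set_window_handle (
      GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (message)), window_handle);

  gst_message_unref (message);   /* GST_BUS_DROP means the handler owns the message */
  return GST_BUS_DROP;
}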
Example No. 22
static void
ubiquity_webcam_init (UbiquityWebcam *self) {
	UbiquityWebcamPrivate *priv;
	gint width = 172, height = 129;

	assert (width * 3 == height * 4); /* 4x3 ratio */

	priv = self->priv = UBIQUITY_WEBCAM_PRIVATE (self);

	gtk_orientable_set_orientation (GTK_ORIENTABLE (self),
					GTK_ORIENTATION_VERTICAL);
	gtk_box_set_spacing (GTK_BOX (self), 1);
	priv->drawing_area = gtk_drawing_area_new ();
	gtk_widget_set_size_request (priv->drawing_area, width, height);
	g_signal_connect (priv->drawing_area, "realize",
			G_CALLBACK(drawing_area_realized_cb), NULL);
	gtk_widget_set_double_buffered (priv->drawing_area, FALSE);

	priv->button = gtk_button_new ();
	gtk_button_set_label (GTK_BUTTON (priv->button), "Take Photo");

	gtk_box_pack_start (GTK_BOX (self), priv->drawing_area, TRUE, TRUE, 0);
	gtk_box_pack_start (GTK_BOX (self), priv->button, FALSE, FALSE, 0);

	priv->camerabin = gst_element_factory_make ("camerabin2" , "cam");
	priv->viewfinder_caps = gst_caps_new_simple ("video/x-raw-rgb",
		"width", G_TYPE_INT, 640,
		"height", G_TYPE_INT, 480, NULL);
	g_object_set (G_OBJECT (priv->camerabin),
		"viewfinder-caps", priv->viewfinder_caps, NULL);
    g_signal_new ("image-captured",
					UBIQUITY_TYPE_WEBCAM,
					G_SIGNAL_RUN_FIRST,
					0,
					NULL,
					NULL,
					g_cclosure_marshal_VOID__OBJECT,
					G_TYPE_NONE, 1,
					G_TYPE_STRING);
	if (!priv->camerabin) {
		g_print ("Failed to create camerabin.\n");
		return;
	}
	g_signal_connect (priv->button, "clicked",
			G_CALLBACK(button_clicked_cb), priv->camerabin);

	priv->bus = gst_element_get_bus (priv->camerabin);
	gst_bus_add_signal_watch (priv->bus);
	g_signal_connect (priv->bus, "message", G_CALLBACK (message_cb), self);
	gst_bus_set_sync_handler (priv->bus, (GstBusSyncHandler) window_id_cb, NULL);
	gst_object_ref (priv->bus);
	gst_object_ref (priv->camerabin);
}
static void
kms_recorder_endpoint_set_property (GObject * object, guint property_id,
    const GValue * value, GParamSpec * pspec)
{
  KmsRecorderEndpoint *self = KMS_RECORDER_ENDPOINT (object);

  KMS_ELEMENT_LOCK (KMS_ELEMENT (self));
  switch (property_id) {
    case PROP_DVR:
      self->priv->use_dvr = g_value_get_boolean (value);
      break;
    case PROP_PROFILE:{
      if (self->priv->profile == KMS_RECORDING_PROFILE_NONE) {
        self->priv->profile = g_value_get_enum (value);

        if (self->priv->profile != KMS_RECORDING_PROFILE_NONE) {
          GstElement *sink;
          GstBus *bus;

          sink = kms_recorder_endpoint_create_sink (self);
          self->priv->mux =
              kms_muxing_pipeline_new (KMS_MUXING_PIPELINE_PROFILE,
              self->priv->profile, KMS_MUXING_PIPELINE_SINK, sink, NULL);
          g_object_unref (sink);

          bus = kms_muxing_pipeline_get_bus (self->priv->mux);
          gst_bus_set_sync_handler (bus, bus_sync_signal_handler, self, NULL);
          g_object_unref (bus);

          if (kms_recording_profile_supports_type (self->priv->profile,
                  KMS_ELEMENT_PAD_TYPE_AUDIO)) {
            kms_recorder_endpoint_add_appsink (self,
                KMS_ELEMENT_PAD_TYPE_AUDIO);
          }

          if (kms_recording_profile_supports_type (self->priv->profile,
                  KMS_ELEMENT_PAD_TYPE_VIDEO)) {
            kms_recorder_endpoint_add_appsink (self,
                KMS_ELEMENT_PAD_TYPE_VIDEO);
          }
        }
      } else {
        GST_ERROR_OBJECT (self, "Profile can only be configured once");
      }

      break;
    }
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
  KMS_ELEMENT_UNLOCK (KMS_ELEMENT (self));
}
Example No. 24
eServiceMP3Record::~eServiceMP3Record()
{
	if (m_recording_pipeline)
	{
		// disconnect sync handler callback
		GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_recording_pipeline));
#if GST_VERSION_MAJOR < 1
		gst_bus_set_sync_handler(bus, NULL, NULL);
#else
		gst_bus_set_sync_handler(bus, NULL, NULL, NULL);
#endif
		gst_object_unref(bus);
	}

	if (m_state > stateIdle)
		stop();

	if (m_recording_pipeline)
	{
		gst_object_unref(GST_OBJECT(m_recording_pipeline));
	}
}
Example No. 25
static void
gst_uri_downloader_stop (GstUriDownloader * downloader)
{
  if (!downloader->urisrc)
    return;

  GST_DEBUG_OBJECT (downloader, "stopping source element %s",
      GST_ELEMENT_NAME (downloader->urisrc));

  gst_bus_set_sync_handler (downloader->bus, NULL, NULL, NULL);
  gst_pad_set_active (downloader->pad, FALSE);
  gst_bus_set_flushing (downloader->bus, TRUE);
  gst_element_set_state (downloader->urisrc, GST_STATE_READY);
}
Example No. 26
struct SimpleTestConference *
setup_simple_conference_full (
    gint id,
    gchar *conference_elem,
    gchar *cname,
    FsMediaType mediatype)
{
    struct SimpleTestConference *dat = g_new0 (struct SimpleTestConference, 1);
    GError *error = NULL;
    guint tos;
    GstBus *bus;
    GstStructure *s;

    dat->id = id;
    dat->cname = g_strdup (cname);

    dat->pipeline = gst_pipeline_new ("pipeline");
    fail_if (dat->pipeline == NULL);

    bus = gst_pipeline_get_bus (GST_PIPELINE (dat->pipeline));
    fail_if (bus == NULL);
    gst_bus_set_sync_handler (bus, default_sync_handler, dat, NULL);
    gst_object_unref (bus);

    dat->conference = gst_element_factory_make (conference_elem, NULL);
    fail_if (dat->conference == NULL, "Could not build %s", conference_elem);
    fail_unless (gst_bin_add (GST_BIN (dat->pipeline), dat->conference),
                 "Could not add conference to the pipeline");

    g_object_get (dat->conference, "sdes", &s, NULL);
    gst_structure_set (s, "cname", G_TYPE_STRING, cname, NULL);
    g_object_set (dat->conference, "sdes", s, NULL);
    gst_structure_free (s);

    dat->session = fs_conference_new_session (FS_CONFERENCE (dat->conference),
                   mediatype, &error);
    if (error)
        fail ("Error while creating new session (%d): %s",
              error->code, error->message);
    fail_if (dat->session == NULL, "Could not make session, but no GError!");

    g_object_set (dat->session, "tos", 2, NULL);
    g_object_get (dat->session, "tos", &tos, NULL);
    fail_unless (tos == 2);


    g_object_set_data (G_OBJECT (dat->conference), "dat", dat);

    return dat;
}
Example No. 27
static gboolean
gst_thumbnailer_play_start (GstElement   *play,
                            GCancellable *cancellable)
{
  GstBus     *bus;
  gboolean    terminate = FALSE;
  GstMessage *message;
  gboolean    async_received = FALSE;

  /* pause to prepare for seeking */
  gst_element_set_state (play, GST_STATE_PAUSED);

  bus = gst_element_get_bus (play);

  while (!terminate
         && !g_cancellable_is_cancelled (cancellable))
    {
      message = gst_bus_timed_pop_filtered (bus,
                                            GST_CLOCK_TIME_NONE,
                                            GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR);

      switch (GST_MESSAGE_TYPE (message))
        {
        case GST_MESSAGE_ASYNC_DONE:
          if (GST_MESSAGE_SRC (message) == GST_OBJECT (play))
            {
              async_received = TRUE;
              terminate = TRUE;
            }
          break;

        case GST_MESSAGE_ERROR:
          terminate = TRUE;
          break;

        default:
          break;
        }

      gst_message_unref (message);
    }

  /* setup the error handler */
  if (async_received)
    gst_bus_set_sync_handler (bus, gst_thumbnailer_error_handler, cancellable, NULL);

  gst_object_unref (bus);

  return async_received;
}
Example No. 28
// initiates gstreamer as a playbin pipeline
gstPlay *gst_binding_init (gulong xwin) {
	gstPlay *play = g_new0 (gstPlay, 1);
	
	gst_init (NULL, NULL);
	play->element = gst_element_factory_make ("playbin", "play");
	if (play->element == NULL) return NULL;
	play->xid = xwin;
	
	gst_bus_set_sync_handler (gst_pipeline_get_bus(GST_PIPELINE(play->element)), 
		gst_sync_watch, play, NULL);
	gst_bus_add_watch (gst_pipeline_get_bus(GST_PIPELINE(play->element)), 
		gst_async_watch, play);
	
	return play;
}
Example No. 29
	SourceObject::SourceObject (Category cat, QObject *parent)
	: QObject (parent)
#if GST_VERSION_MAJOR < 1
	, Dec_ (gst_element_factory_make ("playbin2", "play"))
#else
	, Dec_ (gst_element_factory_make ("playbin", "play"))
#endif
	, Path_ (nullptr)
	, IsSeeking_ (false)
	, LastCurrentTime_ (-1)
	, PrevSoupRank_ (0)
	, PopThread_ (new MsgPopThread (gst_pipeline_get_bus (GST_PIPELINE (Dec_)),
				this,
				cat == Category::Notification ? 0.05 : 1,
				BusDrainMutex_,
				BusDrainWC_))
	, OldState_ (SourceState::Stopped)
	{
		g_signal_connect (Dec_, "about-to-finish", G_CALLBACK (CbAboutToFinish), this);
		g_signal_connect (Dec_, "notify::source", G_CALLBACK (CbSourceChanged), this);

		qRegisterMetaType<GstMessage*> ("GstMessage*");
		qRegisterMetaType<GstMessage_ptr> ("GstMessage_ptr");

		qRegisterMetaType<AudioSource> ("AudioSource");

		auto timer = new QTimer (this);
		connect (timer,
				SIGNAL (timeout ()),
				this,
				SLOT (handleTick ()));
		timer->start (1000);

		gst_bus_set_sync_handler (gst_pipeline_get_bus (GST_PIPELINE (Dec_)),
				[] (GstBus *bus, GstMessage *msg, gpointer udata)
				{
					return static_cast<GstBusSyncReply> (static_cast<SourceObject*> (udata)->
								HandleSyncMessage (bus, msg));
				},
#if GST_VERSION_MAJOR < 1
				this);
#else
				this,
				nullptr);
#endif

		PopThread_->start (QThread::LowestPriority);
	}
Example No. 30
bool TrackAnalyser::prepare()
{
        GstElement *dec, *conv, *sink, *cutter, *audio, *analysis;
        GstPad *audiopad;
        GstCaps *caps;

        caps = gst_caps_new_simple ("audio/x-raw-int",
                                    "channels", G_TYPE_INT, 2, NULL);

        pipeline = gst_pipeline_new ("pipeline");
        bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));


        dec = gst_element_factory_make ("decodebin2", "decoder");
        g_signal_connect (dec, "new-decoded-pad", G_CALLBACK (cb_newpad_ta), this);
        gst_bin_add (GST_BIN (pipeline), dec);

        audio = gst_bin_new ("audiobin");
        conv = gst_element_factory_make ("audioconvert", "conv");
        audiopad = gst_element_get_static_pad (conv, "sink");
        analysis = gst_element_factory_make ("rganalysis", "analysis");
        cutter = gst_element_factory_make ("cutter", "cutter");
        sink = gst_element_factory_make ("fakesink", "sink");

        g_object_set (analysis, "message", TRUE, NULL);
        g_object_set (analysis, "num-tracks", 1, NULL);
        g_object_set (cutter, "threshold-dB", -25.0, NULL);

        gst_bin_add_many (GST_BIN (audio), conv, analysis, cutter, sink, NULL);
        gst_element_link (conv, analysis);
        gst_element_link_filtered (analysis, cutter, caps);
        gst_element_link (cutter, sink);
        gst_element_add_pad (audio, gst_ghost_pad_new ("sink", audiopad));

        gst_bin_add (GST_BIN (pipeline), audio);

        GstElement *l_src;
        l_src = gst_element_factory_make ("filesrc", "localsrc");
        gst_bin_add_many (GST_BIN (pipeline), l_src, NULL);
        gst_element_set_state (l_src, GST_STATE_NULL);
        gst_element_link ( l_src,dec);

        gst_object_unref (audiopad);

        gst_bus_set_sync_handler (bus, bus_cb, this);

        return (pipeline != NULL);
}