Example #1
void
initialize_pref_controls(void)
{
  GtkWidget *widget;
  GtkSpinButton *spin;
  GdkColor color;
  GtkTreeModel *model;

  diag_pref = glade_xml_get_widget (appdata.xml, "diag_pref");

  /* Updates controls from values of variables */
  widget = glade_xml_get_widget (appdata.xml, "node_radius_slider");
  gtk_adjustment_set_value (GTK_RANGE (widget)->adjustment,
			    log (pref.node_radius_multiplier) / log (10));
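  /* Emitting "changed" makes the slider re-read the adjustment settings. */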
  g_signal_emit_by_name (G_OBJECT (GTK_RANGE (widget)->adjustment),
			 "changed");
  widget = glade_xml_get_widget (appdata.xml, "link_width_slider");
  gtk_adjustment_set_value (GTK_RANGE (widget)->adjustment,
			    pref.link_node_ratio);
  g_signal_emit_by_name (G_OBJECT (GTK_RANGE (widget)->adjustment),
			 "changed");
  spin = GTK_SPIN_BUTTON (glade_xml_get_widget (appdata.xml, "averaging_spin"));
  gtk_spin_button_set_value (spin, pref.averaging_time);
  spin = GTK_SPIN_BUTTON (glade_xml_get_widget (appdata.xml, "refresh_spin"));
  gtk_spin_button_set_value (spin, pref.refresh_period);
  spin = GTK_SPIN_BUTTON (glade_xml_get_widget (appdata.xml, "node_to_spin"));
  gtk_spin_button_set_value (spin, pref.node_timeout_time/MILLI);
  spin = GTK_SPIN_BUTTON (glade_xml_get_widget (appdata.xml, "gui_node_to_spin"));
  gtk_spin_button_set_value (spin, pref.gui_node_timeout_time/MILLI);
  spin = GTK_SPIN_BUTTON (glade_xml_get_widget (appdata.xml, "proto_node_to_spin"));
  gtk_spin_button_set_value (spin, pref.proto_node_timeout_time/MILLI);

  spin = GTK_SPIN_BUTTON (glade_xml_get_widget (appdata.xml, "link_to_spin"));
  gtk_spin_button_set_value (spin, pref.link_timeout_time/MILLI);
  spin = GTK_SPIN_BUTTON (glade_xml_get_widget (appdata.xml, "gui_link_to_spin"));
  gtk_spin_button_set_value (spin, pref.gui_link_timeout_time/MILLI);
  spin = GTK_SPIN_BUTTON (glade_xml_get_widget (appdata.xml, "proto_link_to_spin"));
  gtk_spin_button_set_value (spin, pref.proto_link_timeout_time/MILLI);

  spin = GTK_SPIN_BUTTON (glade_xml_get_widget (appdata.xml, "proto_to_spin"));
  gtk_spin_button_set_value (spin, pref.proto_timeout_time/MILLI);

  widget = glade_xml_get_widget (appdata.xml, "diagram_only_toggle");
  gtk_toggle_button_set_active (GTK_TOGGLE_BUTTON (widget),
				pref.diagram_only);
  widget = glade_xml_get_widget (appdata.xml, "group_unk_check");
  gtk_toggle_button_set_active (GTK_TOGGLE_BUTTON (widget), pref.group_unk);
  widget = glade_xml_get_widget (appdata.xml, "name_res_check");
  gtk_toggle_button_set_active (GTK_TOGGLE_BUTTON (widget), pref.name_res);
  widget = glade_xml_get_widget (appdata.xml, "stack_level");
  gtk_combo_box_set_active(GTK_COMBO_BOX(widget), pref.stack_level);
  widget = glade_xml_get_widget (appdata.xml, "size_variable");
  gtk_combo_box_set_active(GTK_COMBO_BOX(widget), pref.node_size_variable);
  widget = glade_xml_get_widget (appdata.xml, "size_mode");
  gtk_combo_box_set_active(GTK_COMBO_BOX(widget), pref.size_mode);
  widget = glade_xml_get_widget (appdata.xml, "text_font");
  gtk_font_button_set_font_name(GTK_FONT_BUTTON(widget), pref.fontname);
  widget = glade_xml_get_widget (appdata.xml, "text_color");
  if (!gdk_color_parse(pref.text_color, &color))
    gdk_color_parse("#ffff00", &color);
  gtk_color_button_set_color(GTK_COLOR_BUTTON(widget), &color);

  widget = glade_xml_get_widget (appdata.xml, "filter_combo");
  model = gtk_combo_box_get_model(GTK_COMBO_BOX(widget));
  if (!model)
    {
      GtkListStore *list_store;
      list_store=gtk_list_store_new (1, G_TYPE_STRING);
      gtk_combo_box_set_model(GTK_COMBO_BOX(widget), GTK_TREE_MODEL(list_store));
    }

  widget = glade_xml_get_widget (appdata.xml, "center_combo");
  model = gtk_combo_box_get_model(GTK_COMBO_BOX(widget));
  if (!model)
    {
      GtkListStore *list_store;
      list_store=gtk_list_store_new (1, G_TYPE_STRING);
      gtk_combo_box_set_model(GTK_COMBO_BOX(widget), GTK_TREE_MODEL(list_store));
    }

  pref_to_color_list();		/* Updates the color preferences table with pref.colors */

  /* Connects signals */
  widget = glade_xml_get_widget (appdata.xml, "diag_pref");
  g_signal_connect (G_OBJECT (widget),
		    "delete_event",
		    GTK_SIGNAL_FUNC
		    (on_cancel_pref_button_clicked ), NULL);
  widget = glade_xml_get_widget (appdata.xml, "node_radius_slider");
  g_signal_connect (G_OBJECT (GTK_RANGE (widget)->adjustment),
		    "value_changed",
		    GTK_SIGNAL_FUNC
		    (on_node_radius_slider_adjustment_changed), NULL);
  widget = glade_xml_get_widget (appdata.xml, "link_width_slider");
  g_signal_connect (G_OBJECT (GTK_RANGE (widget)->adjustment),
		    "value_changed",
		    GTK_SIGNAL_FUNC
		    (on_link_width_slider_adjustment_changed), NULL);
  widget = glade_xml_get_widget (appdata.xml, "averaging_spin");
  g_signal_connect (G_OBJECT (GTK_SPIN_BUTTON (widget)->adjustment),
		    "value_changed",
		    GTK_SIGNAL_FUNC
		    (on_averaging_spin_adjustment_changed), NULL);
  widget = glade_xml_get_widget (appdata.xml, "refresh_spin");
  g_signal_connect (G_OBJECT (GTK_SPIN_BUTTON (widget)->adjustment),
		    "value_changed",
		    GTK_SIGNAL_FUNC
		    (on_refresh_spin_adjustment_changed),
		    glade_xml_get_widget (appdata.xml, "canvas1"));
  widget = glade_xml_get_widget (appdata.xml, "node_to_spin");
  g_signal_connect (G_OBJECT (GTK_SPIN_BUTTON (widget)->adjustment),
		    "value_changed",
		    GTK_SIGNAL_FUNC
		    (on_node_to_spin_adjustment_changed), NULL);
  widget = glade_xml_get_widget (appdata.xml, "gui_node_to_spin");
  g_signal_connect (G_OBJECT (GTK_SPIN_BUTTON (widget)->adjustment),
		    "value_changed",
		    GTK_SIGNAL_FUNC
		    (on_gui_node_to_spin_adjustment_changed), NULL);
  widget = glade_xml_get_widget (appdata.xml, "proto_node_to_spin");
  g_signal_connect (G_OBJECT (GTK_SPIN_BUTTON (widget)->adjustment),
		    "value_changed",
		    GTK_SIGNAL_FUNC
		    (on_proto_node_to_spin_adjustment_changed), NULL);
  widget = glade_xml_get_widget (appdata.xml, "link_to_spin");
  g_signal_connect (G_OBJECT (GTK_SPIN_BUTTON (widget)->adjustment),
		    "value_changed",
		    GTK_SIGNAL_FUNC
		    (on_link_to_spin_adjustment_changed), NULL);
  widget = glade_xml_get_widget (appdata.xml, "gui_link_to_spin");
  g_signal_connect (G_OBJECT (GTK_SPIN_BUTTON (widget)->adjustment),
		    "value_changed",
		    GTK_SIGNAL_FUNC
		    (on_gui_link_to_spin_adjustment_changed), NULL);
  widget = glade_xml_get_widget (appdata.xml, "proto_link_to_spin");
  g_signal_connect (G_OBJECT (GTK_SPIN_BUTTON (widget)->adjustment),
		    "value_changed",
		    GTK_SIGNAL_FUNC
		    (on_proto_link_to_spin_adjustment_changed), NULL);
  widget = glade_xml_get_widget (appdata.xml, "proto_to_spin");
  g_signal_connect (G_OBJECT (GTK_SPIN_BUTTON (widget)->adjustment),
		    "value_changed",
		    GTK_SIGNAL_FUNC
		    (on_proto_to_spin_adjustment_changed), NULL);
  widget = glade_xml_get_widget (appdata.xml, "stack_level");
  g_signal_connect (G_OBJECT (widget), 
                    "changed",
		    GTK_SIGNAL_FUNC (on_stack_level_changed), NULL);
  widget = glade_xml_get_widget (appdata.xml, "size_variable");
  g_signal_connect (G_OBJECT (widget), 
                    "changed",
		    GTK_SIGNAL_FUNC (on_size_variable_changed), NULL);
  widget = glade_xml_get_widget (appdata.xml, "size_mode");
  g_signal_connect (G_OBJECT (widget), 
                    "changed",
		    GTK_SIGNAL_FUNC (on_size_mode_changed), NULL);
  widget = glade_xml_get_widget (appdata.xml, "text_font");
  g_signal_connect (G_OBJECT (widget), 
                    "font_set",
		    GTK_SIGNAL_FUNC (on_text_font_changed), NULL);
  widget = glade_xml_get_widget (appdata.xml, "text_color");
  g_signal_connect (G_OBJECT (widget), 
                    "color_set",
		    GTK_SIGNAL_FUNC (on_text_color_changed), NULL);
}
Example #2
static void
bus_message (GstBus * bus, GstMessage * msg, KmsHttpEndpoint * self)
{
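  /* Translate the pipeline's end-of-stream message into the endpoint's own
   * "eos" signal. */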
  if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_EOS)
    g_signal_emit_by_name (G_OBJECT (self), "eos", 0);
}
Example #3
static void loadDone(WebKitWebFrame* frame, bool didSucceed)
{
    // FIXME: load-done is deprecated. Please remove when signal's been removed.
    g_signal_emit_by_name(frame, "load-done", didSucceed);
    notifyStatus(frame, WEBKIT_LOAD_FINISHED);
}
Example #4
/* Main entry point for gdk in 2.6 args are parsed
 */
GdkDisplay * gdk_display_open (const gchar *display_name)
{
  IDirectFB              *directfb;
  IDirectFBDisplayLayer  *layer;
  IDirectFBInputDevice   *keyboard;
  DFBResult               ret;

  int    argc = 0;
  char **argv = NULL;

  if (_gdk_display)
    {
      return GDK_DISPLAY_OBJECT(_gdk_display); /* single display only */
    }

  ret = DirectFBInit (&argc,&argv);
  if (ret != DFB_OK)
    {
      DirectFBError ("gdk_display_open: DirectFBInit", ret);
      return NULL;
    }

  ret = DirectFBCreate (&directfb);
  if (ret != DFB_OK)
    {
      DirectFBError ("gdk_display_open: DirectFBCreate", ret);
      return NULL;
    }

  _gdk_display = g_object_new (GDK_TYPE_DISPLAY_DFB, NULL);
  _gdk_display->directfb = directfb;

  ret = directfb->GetDisplayLayer (directfb, DLID_PRIMARY, &layer);
  if (ret != DFB_OK)
    {
      DirectFBError ("gdk_display_open: GetDisplayLayer", ret);
      directfb->Release (directfb);
      directfb = NULL;
      return NULL;
    }


  ret = directfb->GetInputDevice (directfb, DIDID_KEYBOARD, &keyboard);
  if (ret != DFB_OK)
    {
      DirectFBError ("gdk_display_open: GetInputDevice", ret);
      return NULL;
    }

  _gdk_display->layer = layer;
  _gdk_display->keyboard = keyboard;

  _gdk_directfb_keyboard_init ();

  _gdk_screen = g_object_new (GDK_TYPE_SCREEN, NULL);

  _gdk_visual_init ();
  _gdk_windowing_window_init ();

  gdk_screen_set_default_colormap (_gdk_screen,
                                   gdk_screen_get_system_colormap (_gdk_screen));
  _gdk_windowing_image_init ();

  _gdk_input_init ();
  _gdk_dnd_init ();

  _gdk_events_init ();
  layer->EnableCursor (layer, 1);

  g_signal_emit_by_name (gdk_display_manager_get (),
			 "display_opened", _gdk_display);

  return GDK_DISPLAY_OBJECT(_gdk_display);
}
Example #5
static void
on_file_downloaded (SummerWebBackend *web, gchar *saved_path, gchar *saved_data, GError *error, gpointer data)
{
	g_return_if_fail (SUMMER_IS_DOWNLOAD_YOUTUBE (data));
	g_return_if_fail (saved_data == NULL);
	SummerDownload *self = SUMMER_DOWNLOAD (data);
	SummerDownloadYoutubePrivate *priv = SUMMER_DOWNLOAD_YOUTUBE (self)->priv;
	
	if (g_error_matches (error, SUMMER_WEB_BACKEND_ERROR, SUMMER_WEB_BACKEND_ERROR_REMOTE)) {
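		/* The remote server rejected this format; fall back to the next
		 * quality level and retry. */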
		priv->quality--;
		if (priv->quality < 0) {
			GError *error = g_error_new (
				SUMMER_DOWNLOAD_ERROR,
				SUMMER_DOWNLOAD_ERROR_INPUT,
				"Download Failed");
			g_signal_emit_by_name (self, "download-error", error);
			g_object_unref (self);
			g_object_unref (web);
			return;
		}
		
		SummerItemData *item;
		g_object_get (self, "item", &item, NULL);
		summer_download_set_filename (self, g_strdup_printf ("%s.%s",
			summer_item_data_get_title (item),
			quality[priv->quality][1]));
		g_object_unref (item);

		GError *e = NULL;
		g_object_set (web, 
			"url", create_youtube_url (SUMMER_DOWNLOAD_YOUTUBE (self)),
			NULL);
		summer_web_backend_fetch (web, &e);
		if (e != NULL) {
			g_signal_emit_by_name (self, "download-error", e);
			g_object_unref (self);
		}
		return;
	} else if (error != NULL) {
		g_signal_emit_by_name (self, "download-error", error);
		g_object_unref (self);
		return;
	}

	GFile *src = g_file_new_for_path (saved_path);
	gchar *destpath = summer_download_get_save_path (self);
	GFile *dest = g_file_new_for_path (destpath);
	g_free (destpath);
	destpath = NULL;
	GError *e = NULL;

	gchar *save_dir = summer_download_get_save_dir (self);
	GFile *destdir = g_file_new_for_path (save_dir);
	g_free (save_dir);
	save_dir = NULL;
	if (!g_file_query_exists (destdir, NULL)) {
		if (!g_file_make_directory_with_parents (destdir, NULL, &e)) {
			GError *e2 = g_error_new (
				SUMMER_DOWNLOAD_ERROR,
				SUMMER_DOWNLOAD_ERROR_OUTPUT,
				"Couldn't create output directory: %s", e->message);
			g_signal_emit_by_name (self, "download-error", e2);
			g_object_unref (self);
			g_clear_error (&e);
			g_object_unref (src);
			g_object_unref (dest);
			g_object_unref (destdir);
			return;
		}
	}
	g_object_unref (destdir);

	if (!g_file_move (src, dest, G_FILE_COPY_OVERWRITE, NULL, NULL, NULL, &e)) {
		GError *e2 = g_error_new (
			SUMMER_DOWNLOAD_ERROR,
			SUMMER_DOWNLOAD_ERROR_OUTPUT,
			"Couldn't move download to it's final destination: %s",
			e->message);
		g_signal_emit_by_name (self, "download-error", e2);
		g_object_unref (self);
		g_clear_error (&e);
		g_object_unref (src);
		g_object_unref (dest);
		return;
	}
	g_object_unref (src);
	g_object_unref (dest);
	g_free (save_dir);

	SummerItemData *item;
	g_object_get (self, "item", &item, NULL);
	SummerFeedCache *cache = summer_feed_cache_get ();
	summer_feed_cache_add_new_item (cache, item);
	g_object_unref (G_OBJECT (cache));
	g_object_unref (item);

	g_object_unref (web);
	g_signal_emit_by_name (self, "download-complete");
	g_signal_emit_by_name (self, "download-done");
	g_object_unref (self);
}
Example #6
static void
entry_on_text_changed (GtkEditable * editable,
                       gpointer userdata)
{
  HildonTouchSelector *selector;
  HildonTouchSelectorEntryPrivate *priv;
  GtkTreeModel *model;
  GtkTreeIter iter;
  GtkTreeIter iter_suggested;
  GtkEntry *entry;
  const gchar *prefix;
  gchar *text;
  gboolean found = FALSE;
  gint text_column = -1;
  gchar *ascii_prefix;
  gint prefix_len;
  gboolean found_suggestion = FALSE;

  entry = GTK_ENTRY (editable);
  selector = HILDON_TOUCH_SELECTOR (userdata);
  priv = HILDON_TOUCH_SELECTOR_ENTRY_GET_PRIVATE (selector);

  text_column =
    hildon_touch_selector_entry_get_text_column (HILDON_TOUCH_SELECTOR_ENTRY (selector));

  prefix = gtk_entry_get_text (entry);

  if (prefix[0] == '\0') {
	  return;
  }

  model = hildon_touch_selector_get_model (selector, 0);

  if (!gtk_tree_model_get_iter_first (model, &iter)) {
    return;
  }

  if (priv->smart_match) {
    ascii_prefix = g_convert_with_iconv (prefix, -1, priv->converter, NULL, NULL, NULL);
    prefix_len = strlen (ascii_prefix);
  }

  do {
    gtk_tree_model_get (model, &iter, text_column, &text, -1);
    found = g_str_has_prefix (text, prefix);

    if (!found && !found_suggestion && priv->smart_match) {
      gchar *ascii_text = g_convert_with_iconv (text, -1, priv->converter, NULL, NULL, NULL);
      found_suggestion = !g_ascii_strncasecmp (ascii_text, ascii_prefix, prefix_len);
      if (found_suggestion) {
        iter_suggested = iter;
      }
      g_free (ascii_text);
    }

    g_free (text);
  } while (found != TRUE && gtk_tree_model_iter_next (model, &iter));

  g_signal_handler_block (selector, priv->signal_id);
  {
    /* We emit the HildonTouchSelector::changed signal because a change in the
       GtkEntry represents a change in current selection, and therefore, users
       should be notified. */
    if (found) {
      hildon_touch_selector_select_iter (selector, 0, &iter, TRUE);
    } else if (found_suggestion) {
      hildon_touch_selector_select_iter (selector, 0, &iter_suggested, TRUE);
    }
    g_signal_emit_by_name (selector, "changed", 0);
  }
  g_signal_handler_unblock (selector, priv->signal_id);

  if (priv->smart_match) {
    g_free (ascii_prefix);
  }
}
Example #7
static gboolean
vfunc_filter_keypress (GtkIMContext *context, GdkEventKey *event)
{
  GtkIMContextClass      *parent;
  GtkImContextMultipress *multipress_context;

  multipress_context = GTK_IM_CONTEXT_MULTIPRESS (context);

  if (event->type == GDK_KEY_PRESS)
    {
      KeySequence *possible;

      /* Check whether the current key is the same as previously entered, because
       * if it is not then we should accept the previous one, and start a new
       * character. */
      if (multipress_context->compose_count > 0
          && multipress_context->key_last_entered != event->keyval
          && multipress_context->tentative_match != NULL)
        {
          /* Accept the previously chosen character.  This wipes
           * the compose_count and key_last_entered. */
          accept_character (multipress_context,
                            multipress_context->tentative_match);
        } 

      /* Decide what character this key press would choose: */
      possible = g_hash_table_lookup (multipress_context->key_sequences,
                                      GUINT_TO_POINTER (event->keyval));
      if (possible != NULL)
        {
          if (multipress_context->compose_count == 0)
            g_signal_emit_by_name (multipress_context, "preedit-start");

          /* Check whether we are at the end of a compose sequence, with no more
           * possible characters.  Cycle back to the start if necessary. */
          if (multipress_context->compose_count >= possible->n_characters)
            multipress_context->compose_count = 0;

          /* Store the last key pressed in the compose sequence. */
          multipress_context->key_last_entered = event->keyval; 

          /* Get the possible match for this number of presses of the key.
           * compose_count starts at 1, so that 0 can mean not composing. */ 
          multipress_context->tentative_match =
            possible->characters[multipress_context->compose_count++];

          /* Indicate the current possible character.  This will cause our
           * vfunc_get_preedit_string() vfunc to be called, which will provide
           * the current possible character for the user to see. */
          g_signal_emit_by_name (multipress_context, "preedit-changed");

          /* Cancel any outstanding timeout, so we can start the timer again: */
          cancel_automatic_timeout_commit (multipress_context);

          /* Create a timeout that will cause the currently chosen character to
           * be committed, if nothing happens for a certain amount of time: */
          multipress_context->timeout_id =
            g_timeout_add_seconds (AUTOMATIC_COMPOSE_TIMEOUT,
                                   &on_timeout, multipress_context);

          return TRUE; /* key handled */
        }
      else
        {
          guint32 keyval_uchar;

          /* Just accept all other keypresses directly, but commit the
           * current preedit content first. */
          if (multipress_context->compose_count > 0
              && multipress_context->tentative_match != NULL)
            {
              accept_character (multipress_context,
                                multipress_context->tentative_match);
            }
          keyval_uchar = gdk_keyval_to_unicode (event->keyval);

          /* Convert to a string for accept_character(). */
          if (keyval_uchar != 0)
            {
              /* max length of UTF-8 sequence = 6 + 1 for NUL termination */
              gchar keyval_utf8[7];
              gint  length;

              length = g_unichar_to_utf8 (keyval_uchar, keyval_utf8);
              keyval_utf8[length] = '\0';

              accept_character (multipress_context, keyval_utf8);

              return TRUE; /* key handled */
            }
        }
    }

  parent = (GtkIMContextClass *)im_context_multipress_parent_class;

  /* The default implementation just returns FALSE, but it is generally
   * a good idea to call the base class implementation: */
  if (parent->filter_keypress)
    return (*parent->filter_keypress) (context, event);

  return FALSE;
}
Example #8
void GObjectEventListener::handleEvent(ScriptExecutionContext*, Event* event)
{
    gboolean handled = FALSE;
    WebKitDOMEvent* gobjectEvent = WEBKIT_DOM_EVENT(WebKit::kit(event));
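    // Re-emit the DOM event as a signal on the wrapped GObject; the signal's
    // boolean return value is collected in 'handled'.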
    g_signal_emit_by_name(m_object, m_signalName.utf8().data(), gobjectEvent, &handled);
}
Example #9
static void
update_dialog_ui (GdictSourceDialog *dialog)
{
  GdictSource *source;
  
  /* TODO - add code to update the contents of the dialog depending
   * on the action; if we are in _CREATE, no action is needed
   */
  switch (dialog->action)
    {
    case GDICT_SOURCE_DIALOG_VIEW:
    case GDICT_SOURCE_DIALOG_EDIT:
      if (!dialog->source_name)
	{
          g_warning ("Attempting to retrieve source, but no "
		     "source name has been defined.  Aborting...");
	  return;
	}
      
      source = gdict_source_loader_get_source (dialog->loader,
		      			       dialog->source_name);
      if (!source)
	{
          g_warning ("Attempting to retrieve source, but no "
		     "source named `%s' was found.  Aborting...",
		     dialog->source_name);
	  return;
	}
      
      g_object_ref (source);
      
      dialog->source = source;
      set_text_to_entry (dialog, "description_entry",
		         gdict_source_get_description (source));

      dialog->transport = gdict_source_get_transport (source);
      gtk_combo_box_set_active (GTK_COMBO_BOX (dialog->transport_combo),
                                (gint) dialog->transport);

      /* set the context for the database and strategy choosers */
      dialog->context = gdict_source_get_context (source);
      if (!dialog->context)
        {
          g_warning ("Attempting to retrieve the context, but "
                     "none was found for source `%s'.",
                     dialog->source_name);
          return;
        }
      
      set_transport_settings (dialog);

      gdict_database_chooser_set_context (GDICT_DATABASE_CHOOSER (dialog->db_chooser),
                                          dialog->context);
      gdict_database_chooser_refresh (GDICT_DATABASE_CHOOSER (dialog->db_chooser));
      gdict_strategy_chooser_set_context (GDICT_STRATEGY_CHOOSER (dialog->strat_chooser),
                                          dialog->context);
      gdict_strategy_chooser_refresh (GDICT_STRATEGY_CHOOSER (dialog->strat_chooser));
      break;
    case GDICT_SOURCE_DIALOG_CREATE:
      /* DICTD transport is default */
      gtk_combo_box_set_active (GTK_COMBO_BOX (dialog->transport_combo), 0);
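      /* Emit "changed" explicitly so the transport handler runs even when the
       * combo box was already on the first entry. */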
      g_signal_emit_by_name (dialog->transport_combo, "changed");
      break;
    default:
      g_assert_not_reached ();
      break;
    }
}
Example #10
static void
gst_camera_bin_stop_capture (GstCameraBin * camerabin)
{
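  /* Relay the request to the camera source's "stop-capture" action signal. */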
  if (camerabin->src)
    g_signal_emit_by_name (camerabin->src, "stop-capture", NULL);
}
Example #11
static void
gst_camera_bin_start_capture (GstCameraBin * camerabin)
{
  GST_DEBUG_OBJECT (camerabin, "Received start-capture");
  g_signal_emit_by_name (camerabin->src, "start-capture", NULL);
}
Example #12
static void
send_emblems_changed (void)
{
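    /* Broadcast the change through the shared Caja signaller object. */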
    g_signal_emit_by_name (caja_signaller_get_current (),
                           "emblems_changed");
}
Example #13
static void emit_seek (void * data, GObject * object)
{
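    /* aud_drct_get_time () is in milliseconds; the "seeked" signal expects
       microseconds, hence the * 1000. */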
    g_signal_emit_by_name (object, "seeked", (int64_t) aud_drct_get_time () * 1000);
}
Example #14
/*!
 \brief displays a live marker for the Y axis (horizontal Line)
 \param curve (MtxCurve *) pointer to curve
 */
void mtx_curve_set_y_marker_value (MtxCurve *curve, gfloat value)
{
	MtxCurvePrivate *priv = NULL;
	gint i = 0;
	gfloat x = 0.0;
	gfloat y = 0.0;
	gfloat d1 = 0.0;
	gfloat d2 = 0.0;
	gboolean get_peak_cross = FALSE;
	g_return_if_fail (MTX_IS_CURVE (curve));
	priv = MTX_CURVE_GET_PRIVATE(curve);

#if GTK_MINOR_VERSION >= 18
	if (!gtk_widget_is_sensitive(GTK_WIDGET(curve)))
		return;
#else
	if (!GTK_WIDGET_IS_SENSITIVE(GTK_WIDGET(curve)))
		return;
#endif
	if (priv->y_marker == value)
		return;
	/* IF marker is clamped beyond ranges, don't bother updating*/
	if (((value < priv->lowest_y) && (priv->y_marker_clamp == LOW)) || ((value > priv->highest_y) && (priv->y_marker_clamp == HIGH)))
		return;

	/* Filter out jitter to within 1% */
	if (fabs(value-priv->y_marker) < (fabs(priv->highest_y-priv->lowest_y)/100.0))
		return;

	g_object_freeze_notify (G_OBJECT (curve));
	if (value <priv->lowest_y)
	{
		priv->y_marker = priv->lowest_y;
		priv->y_marker_clamp = LOW;
	}
	else if (value > priv->highest_y)
	{
		priv->y_marker = priv->highest_y;
		priv->y_marker_clamp = HIGH;
	}
	else
	{
		priv->y_marker = value;
		priv->y_marker_clamp = NONE;
	}
	if (value > priv->peak_y_marker)
	{
		priv->peak_y_marker = value > priv->highest_y ? priv->highest_y:value;
		get_peak_cross = TRUE;
		if (priv->y_peak_timeout)
		{
			g_source_remove(priv->y_peak_timeout);
			priv->y_peak_timeout = -1;
		}
	}
	else
	{
		priv->y_draw_peak = TRUE;
		g_object_set_data(G_OBJECT(curve),"axis",GINT_TO_POINTER(_Y_));
		if (priv->y_peak_timeout <= 0)
			priv->y_peak_timeout = g_timeout_add(5000,(GSourceFunc)cancel_peak,(gpointer)curve);
	}
	for (i = 0;i<priv->num_points - 1;i++)
	{
		if (value < priv->coords[0].y)
		{
			priv->x_at_y_marker = priv->coords[0].x;
			if (0 != priv->marker_proximity_vertex)
			{
				priv->marker_proximity_vertex = 0;
				g_signal_emit_by_name((gpointer)curve, "marker-proximity");
			}
		}
		else if (value > priv->coords[priv->num_points-1].y)
		{
			priv->x_at_y_marker = priv->coords[priv->num_points-1].x;
			if (get_peak_cross)
				priv->peak_x_at_y_marker = priv->x_at_y_marker;
			if (priv->num_points-1 != priv->marker_proximity_vertex)
			{
				priv->marker_proximity_vertex = priv->num_points-1;
				g_signal_emit_by_name((gpointer)curve, "marker-proximity");
			}
		}
		else if ((value > priv->coords[i].y) && (value < priv->coords[i+1].y))
		{
			if (get_intersection(priv->coords[i].x,priv->coords[i].y,priv->coords[i+1].x,priv->coords[i+1].y,0,value,priv->w,value,&x,&y))
			{
				priv->x_at_y_marker = x;
				if (get_peak_cross)
					priv->peak_x_at_y_marker = x;
				d1 = sqrt(pow((priv->coords[i].x-x),2)+ pow((priv->coords[i].y-value),2));
				d2 = sqrt(pow((priv->coords[i+1].x-x),2)+ pow((priv->coords[i+1].y-value),2));
				if (d1 < d2)
				{
					if (i != priv->marker_proximity_vertex)
					{
						priv->marker_proximity_vertex = i;
						g_signal_emit_by_name((gpointer)curve, "marker-proximity");
					}
				}
				else
				{
					if (i+1 != priv->marker_proximity_vertex)
					{
						priv->marker_proximity_vertex = i+1;
						g_signal_emit_by_name((gpointer)curve, "marker-proximity");
					}
				}

			}
			else
				printf("couldn't find intersection\n");
			break;
		}
	}
	g_object_thaw_notify (G_OBJECT (curve));
	mtx_curve_redraw(curve,FALSE);
	return;
}
Example #15
static GstFlowReturn
new_sample_post_handler (GstElement * appsink, gpointer user_data)
{
  GstElement *appsrc = GST_ELEMENT (user_data);
  GstSample *sample = NULL;
  GstBuffer *buffer;
  GstFlowReturn ret;
  GstClockTime *base_time;

  g_signal_emit_by_name (appsink, "pull-sample", &sample);
  if (sample == NULL)
    return GST_FLOW_OK;

  buffer = gst_sample_get_buffer (sample);
  if (buffer == NULL) {
    ret = GST_FLOW_OK;
    goto end;
  }

  gst_buffer_ref (buffer);
  buffer = gst_buffer_make_writable (buffer);

  BASE_TIME_LOCK (GST_OBJECT_PARENT (appsrc));

  base_time =
      g_object_get_qdata (G_OBJECT (GST_OBJECT_PARENT (appsrc)),
      base_time_data_quark ());

  if (base_time == NULL) {
    GstClock *clock;

    clock = gst_element_get_clock (appsrc);
    base_time = g_slice_new0 (GstClockTime);

    g_object_set_qdata_full (G_OBJECT (GST_OBJECT_PARENT (appsrc)),
        base_time_data_quark (), base_time, release_gst_clock);
    *base_time =
        gst_clock_get_time (clock) - gst_element_get_base_time (appsrc);
    g_object_unref (clock);
    GST_DEBUG ("Setting base time to: %" G_GUINT64_FORMAT, *base_time);
  }

  if (GST_BUFFER_PTS_IS_VALID (buffer))
    buffer->pts += *base_time;
  if (GST_BUFFER_DTS_IS_VALID (buffer))
    buffer->dts += *base_time;

  BASE_TIME_UNLOCK (GST_OBJECT_PARENT (appsrc));

  /* Pass the buffer through appsrc element which is */
  /* placed in a different pipeline */
  g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);

  gst_buffer_unref (buffer);

  if (ret != GST_FLOW_OK) {
    /* something went wrong */
    GST_ERROR ("Could not send buffer to appsrc %s. Cause %s",
        GST_ELEMENT_NAME (appsrc), gst_flow_get_name (ret));
  }

end:
  if (sample != NULL)
    gst_sample_unref (sample);

  return ret;
}
Example #16
void CGstPlayback::AnalyzeStreams()
{
    return_assert(m_playbin);

    g_print("%s, begin", __func__);
    g_object_get (m_playbin, "n-video", &m_numVideo, NULL);
    g_object_get (m_playbin, "n-audio", &m_numAudio, NULL);
    g_object_get (m_playbin, "n-text", &m_numText, NULL);   

    for (gint i = 0; i < m_numVideo; i++) {
        GstTagList *tags = NULL;
        gchar *str = NULL;
        /* Retrieve the stream's video tags */
        g_signal_emit_by_name (m_playbin, "get-video-tags", i, &tags);
        if (tags) {
            g_print ("video stream %d:\n", i);
            gst_tag_list_get_string (tags, GST_TAG_VIDEO_CODEC, &str);
            g_print ("  codec: %s\n", str ? str : "unknown");
            g_free (str);
            gst_tag_list_free (tags);
        }
    }

    for (gint i = 0; i < m_numAudio; i++) {
        GstTagList *tags = NULL;
        gchar *str = NULL;
        guint rate = 0;
        /* Retrieve the stream's audio tags */
        g_signal_emit_by_name (m_playbin, "get-audio-tags", i, &tags);
        if (tags) {
            g_print ("audio stream %d:\n", i);
            if (gst_tag_list_get_string (tags, GST_TAG_AUDIO_CODEC, &str)) {
                g_print ("  codec: %s\n", str);
                g_free (str);
            }
            if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) {
                g_print ("  language: %s\n", str);
                g_free (str);
            }
            if (gst_tag_list_get_uint (tags, GST_TAG_BITRATE, &rate)) {
                g_print ("  bitrate: %d\n", rate);
            }
            gst_tag_list_free (tags);
        }
    }

    for (gint i = 0; i < m_numText; i++) {
        GstTagList *tags = NULL;
        gchar *str = NULL;
        /* Retrieve the stream's subtitle tags */
        g_signal_emit_by_name (m_playbin, "get-text-tags", i, &tags);
        if (tags) {
            g_print ("subtitle stream %d:\n", i);
            if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) {
                g_print ("  language: %s\n", str);
                g_free (str);
            }
            gst_tag_list_free (tags);
        }
    }

    g_object_get (GST_OBJECT(m_playbin), "current-video", &m_curVideo, NULL);
    g_object_get (GST_OBJECT(m_playbin), "current-audio", &m_curAudio, NULL);
    g_object_get (GST_OBJECT(m_playbin), "current-text", &m_curText, NULL);
}
Example #17
static void
pad_added_cb (GstElement * timeline, GstPad * pad, GESPipeline * self)
{
  OutputChain *chain;
  GESTrack *track;
  GstPad *sinkpad;
  GstCaps *caps;
  gboolean reconfigured = FALSE;

  caps = gst_pad_query_caps (pad, NULL);

  GST_DEBUG_OBJECT (self, "new pad %s:%s , caps:%" GST_PTR_FORMAT,
      GST_DEBUG_PAD_NAME (pad), caps);

  gst_caps_unref (caps);

  track = ges_timeline_get_track_for_pad (self->priv->timeline, pad);

  if (G_UNLIKELY (!track)) {
    GST_WARNING_OBJECT (self, "Couldn't find coresponding track !");
    return;
  }

  /* Don't connect track if it's not going to be used */
  if (track->type == GES_TRACK_TYPE_VIDEO &&
      !(self->priv->mode & TIMELINE_MODE_PREVIEW_VIDEO) &&
      !(self->priv->mode & TIMELINE_MODE_RENDER) &&
      !(self->priv->mode & TIMELINE_MODE_SMART_RENDER)) {
    GST_DEBUG_OBJECT (self, "Video track... but we don't need it. Not linking");
  }
  if (track->type == GES_TRACK_TYPE_AUDIO &&
      !(self->priv->mode & TIMELINE_MODE_PREVIEW_AUDIO) &&
      !(self->priv->mode & TIMELINE_MODE_RENDER) &&
      !(self->priv->mode & TIMELINE_MODE_SMART_RENDER)) {
    GST_DEBUG_OBJECT (self, "Audio track... but we don't need it. Not linking");
  }

  /* Get an existing chain or create it */
  if (!(chain = get_output_chain_for_track (self, track)))
    chain = new_output_chain_for_track (self, track);
  chain->srcpad = pad;

  /* Adding tee */
  chain->tee = gst_element_factory_make ("tee", NULL);
  gst_bin_add (GST_BIN_CAST (self), chain->tee);
  gst_element_sync_state_with_parent (chain->tee);

  /* Linking pad to tee */
  sinkpad = gst_element_get_static_pad (chain->tee, "sink");
  gst_pad_link_full (pad, sinkpad, GST_PAD_LINK_CHECK_NOTHING);
  gst_object_unref (sinkpad);

  /* Connect playsink */
  if (self->priv->mode & TIMELINE_MODE_PREVIEW) {
    const gchar *sinkpad_name;
    GstPad *tmppad;

    GST_DEBUG_OBJECT (self, "Connecting to playsink");

    switch (track->type) {
      case GES_TRACK_TYPE_VIDEO:
        sinkpad_name = "video_sink";
        break;
      case GES_TRACK_TYPE_AUDIO:
        sinkpad_name = "audio_sink";
        break;
      case GES_TRACK_TYPE_TEXT:
        sinkpad_name = "text_sink";
        break;
      default:
        GST_WARNING_OBJECT (self, "Can't handle tracks of type %d yet",
            track->type);
        goto error;
    }

    /* Request a sinkpad from playsink */
    if (G_UNLIKELY (!(sinkpad =
                gst_element_get_request_pad (self->priv->playsink,
                    sinkpad_name)))) {
      GST_ERROR_OBJECT (self, "Couldn't get a pad from the playsink !");
      goto error;
    }

    tmppad = gst_element_get_request_pad (chain->tee, "src_%u");
    if (G_UNLIKELY (gst_pad_link_full (tmppad, sinkpad,
                GST_PAD_LINK_CHECK_NOTHING) != GST_PAD_LINK_OK)) {
      GST_ERROR_OBJECT (self, "Couldn't link track pad to playsink");
      gst_object_unref (tmppad);
      goto error;
    }
    chain->blocked_pad = tmppad;
    GST_DEBUG_OBJECT (tmppad, "blocking pad");
    chain->probe_id = gst_pad_add_probe (tmppad,
        GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM, pad_blocked, NULL, NULL);

    GST_DEBUG ("Reconfiguring playsink");

    /* reconfigure playsink */
    g_signal_emit_by_name (self->priv->playsink, "reconfigure", &reconfigured);
    GST_DEBUG ("'reconfigure' returned %d", reconfigured);

    /* We still hold a reference on the sinkpad */
    chain->playsinkpad = sinkpad;
  }

  /* Connect to encodebin */
  if (self->priv->mode & (TIMELINE_MODE_RENDER | TIMELINE_MODE_SMART_RENDER)) {
    GstPad *tmppad;
    GST_DEBUG_OBJECT (self, "Connecting to encodebin");

    if (!chain->encodebinpad) {
      /* Check for unused static pads */
      sinkpad = get_compatible_unlinked_pad (self->priv->encodebin, pad);

      if (sinkpad == NULL) {
        GstCaps *caps = gst_pad_query_caps (pad, NULL);

        /* If no compatible static pad is available, request a pad */
        g_signal_emit_by_name (self->priv->encodebin, "request-pad", caps,
            &sinkpad);
        gst_caps_unref (caps);

        if (G_UNLIKELY (sinkpad == NULL)) {
          GST_ERROR_OBJECT (self, "Couldn't get a pad from encodebin !");
          goto error;
        }
      }
      chain->encodebinpad = sinkpad;
    }

    tmppad = gst_element_get_request_pad (chain->tee, "src_%u");
    if (G_UNLIKELY (gst_pad_link_full (tmppad,
                chain->encodebinpad,
                GST_PAD_LINK_CHECK_NOTHING) != GST_PAD_LINK_OK)) {
      GST_WARNING_OBJECT (self, "Couldn't link track pad to playsink");
      goto error;
    }
    gst_object_unref (tmppad);

  }

  /* If chain wasn't already present, insert it in list */
  if (!get_output_chain_for_track (self, track))
    self->priv->chains = g_list_append (self->priv->chains, chain);

  GST_DEBUG ("done");
  return;

error:
  {
    if (chain->tee) {
      gst_bin_remove (GST_BIN_CAST (self), chain->tee);
    }
    if (sinkpad)
      gst_object_unref (sinkpad);
    g_free (chain);
  }
}
Example #18
static gboolean scroll_handler(	GtkWidget *widget,
				GdkEvent *event,
				gpointer data )
{
	GdkEventScroll *scroll_event = (GdkEventScroll *)event;
	guint button = 0;
	gint count = 0;

	/* Only one axis will be used at a time to prevent accidental activation
	 * of commands bound to buttons associated with the other axis.
	 */
	if(ABS(scroll_event->delta_x) > ABS(scroll_event->delta_y))
	{
		count = (gint)ABS(scroll_event->delta_x);

		if(scroll_event->delta_x <= -1)
		{
			button = 6;
		}
		else if(scroll_event->delta_x >= 1)
		{
			button = 7;
		}
	}
	else
	{
		count = (gint)ABS(scroll_event->delta_y);

		if(scroll_event->delta_y <= -1)
		{
			button = 4;
		}
		else if(scroll_event->delta_y >= 1)
		{
			button = 5;
		}
	}

	if(button > 0)
	{
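		/* Synthesize a button-press event so commands bound to buttons 4-7
		 * also fire for smooth-scroll input. */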
		GdkEventButton btn_event;

		btn_event.type = scroll_event->type;
		btn_event.window = scroll_event->window;
		btn_event.send_event = scroll_event->send_event;
		btn_event.time = scroll_event->time;
		btn_event.x = scroll_event->x;
		btn_event.y = scroll_event->y;
		btn_event.axes = NULL;
		btn_event.state = scroll_event->state;
		btn_event.button = button;
		btn_event.device = scroll_event->device;
		btn_event.x_root = scroll_event->x_root;
		btn_event.y_root = scroll_event->y_root;

		for(gint i = 0; i < count; i++)
		{
			/* Not used */
			gboolean rc;

			g_signal_emit_by_name
				(widget, "button-press-event", &btn_event, &rc);
		}
	}

	return TRUE;
}
Example #19
/* Main Window Initialization */
void init_main_window(const gchar * glade_file)
{
	GladeXML *xml;
	GtkWidget *widget;
	GtkTextBuffer *txtbuf;
	char title[256];
	GtkStyle *style;

	xml = glade_xml_new(glade_file, "window1", NULL);
	if (!xml)
		g_error(_("GUI loading failed !\n"));
	glade_xml_signal_autoconnect(xml);

	main_wnd = glade_xml_get_widget(xml, "window1");
	hpaned = glade_xml_get_widget(xml, "hpaned1");
	vpaned = glade_xml_get_widget(xml, "vpaned1");
	tree1_w = glade_xml_get_widget(xml, "treeview1");
	tree2_w = glade_xml_get_widget(xml, "treeview2");
	text_w = glade_xml_get_widget(xml, "textview3");

	back_btn = glade_xml_get_widget(xml, "button1");
	gtk_widget_set_sensitive(back_btn, FALSE);

	widget = glade_xml_get_widget(xml, "show_name1");
	gtk_check_menu_item_set_active((GtkCheckMenuItem *) widget,
				       show_name);

	widget = glade_xml_get_widget(xml, "show_range1");
	gtk_check_menu_item_set_active((GtkCheckMenuItem *) widget,
				       show_range);

	widget = glade_xml_get_widget(xml, "show_data1");
	gtk_check_menu_item_set_active((GtkCheckMenuItem *) widget,
				       show_value);

	save_btn = glade_xml_get_widget(xml, "button3");
	save_menu_item = glade_xml_get_widget(xml, "save1");
	conf_set_changed_callback(conf_changed);

	style = gtk_widget_get_style(main_wnd);
	widget = glade_xml_get_widget(xml, "toolbar1");

#if 0	/* Use stock Gtk icons instead */
	replace_button_icon(xml, main_wnd->window, style,
			    "button1", (gchar **) xpm_back);
	replace_button_icon(xml, main_wnd->window, style,
			    "button2", (gchar **) xpm_load);
	replace_button_icon(xml, main_wnd->window, style,
			    "button3", (gchar **) xpm_save);
#endif
	replace_button_icon(xml, main_wnd->window, style,
			    "button4", (gchar **) xpm_single_view);
	replace_button_icon(xml, main_wnd->window, style,
			    "button5", (gchar **) xpm_split_view);
	replace_button_icon(xml, main_wnd->window, style,
			    "button6", (gchar **) xpm_tree_view);

#if 0
	switch (view_mode) {
	case SINGLE_VIEW:
		widget = glade_xml_get_widget(xml, "button4");
		g_signal_emit_by_name(widget, "clicked");
		break;
	case SPLIT_VIEW:
		widget = glade_xml_get_widget(xml, "button5");
		g_signal_emit_by_name(widget, "clicked");
		break;
	case FULL_VIEW:
		widget = glade_xml_get_widget(xml, "button6");
		g_signal_emit_by_name(widget, "clicked");
		break;
	}
#endif
	txtbuf = gtk_text_view_get_buffer(GTK_TEXT_VIEW(text_w));
	tag1 = gtk_text_buffer_create_tag(txtbuf, "mytag1",
					  "foreground", "red",
					  "weight", PANGO_WEIGHT_BOLD,
					  NULL);
	tag2 = gtk_text_buffer_create_tag(txtbuf, "mytag2",
					  /*"style", PANGO_STYLE_OBLIQUE, */
					  NULL);

	sprintf(title, _("BaThos v%s Configuration"),
		getenv("KERNELVERSION"));
	gtk_window_set_title(GTK_WINDOW(main_wnd), title);

	gtk_widget_show(main_wnd);
}
Example #20
static gboolean mpv_event_handler(gpointer data)
{
	GmpvMpv *mpv = data;
	gboolean done = !mpv;

	while(!done)
	{
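		/* Drain every queued mpv event and re-emit it as GObject signals. */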
		mpv_event *event =	mpv->mpv_ctx?
					mpv_wait_event(mpv->mpv_ctx, 0):
					NULL;

		if(!event)
		{
			done = TRUE;
		}
		else if(event->event_id == MPV_EVENT_PROPERTY_CHANGE)
		{
			mpv_event_property *prop = event->data;

			mpv_prop_change_handler(mpv, prop);

			g_signal_emit_by_name(	mpv,
						"mpv-prop-change",
						prop->name );
		}
		else if(event->event_id == MPV_EVENT_IDLE)
		{
			if(mpv->state.loaded)
			{
				mpv->state.loaded = FALSE;

				gmpv_mpv_set_property_flag
					(mpv, "pause", TRUE);
				gmpv_playlist_reset(mpv->playlist);
			}

			mpv->state.init_load = FALSE;
		}
		else if(event->event_id == MPV_EVENT_FILE_LOADED)
		{
			mpv->state.loaded = TRUE;
			mpv->state.init_load = FALSE;

			update_playlist(mpv);
		}
		else if(event->event_id == MPV_EVENT_END_FILE)
		{
			mpv_event_end_file *ef_event = event->data;

			mpv->state.init_load = FALSE;

			if(mpv->state.loaded)
			{
				mpv->state.new_file = FALSE;
			}

			if(ef_event->reason == MPV_END_FILE_REASON_ERROR)
			{
				const gchar *err;
				gchar *msg;

				err = mpv_error_string(ef_event->error);
				msg = g_strdup_printf
					(	_("Playback was terminated "
						"abnormally. Reason: %s."),
						err );

				gmpv_mpv_set_property_flag
					(mpv, "pause", TRUE);
				g_signal_emit_by_name(mpv, "mpv-error", msg);

				g_free(msg);
			}
		}
		else if(event->event_id == MPV_EVENT_VIDEO_RECONFIG)
		{
			if(mpv->state.new_file)
			{
				gmpv_mpv_opt_handle_autofit(mpv);
			}
		}
		else if(event->event_id == MPV_EVENT_PLAYBACK_RESTART)
		{
			g_signal_emit_by_name(mpv, "mpv-playback-restart");
		}
		else if(event->event_id == MPV_EVENT_LOG_MESSAGE)
		{
			log_handler(mpv, event->data);
		}
		else if(event->event_id == MPV_EVENT_SHUTDOWN
		|| event->event_id == MPV_EVENT_NONE)
		{
			done = TRUE;
		}

		if(event)
		{
			if(mpv->event_callback)
			{
				mpv->event_callback
					(event, mpv->event_callback_data);
			}

			if(mpv->mpv_ctx)
			{
				g_signal_emit_by_name
					(mpv, "mpv-event", event->event_id);
			}
			else
			{
				done = TRUE;
			}
		}
	}

	return FALSE;
}
Example #21
void
gnc_dense_cal_store_clear(GncDenseCalStore *model)
{
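    /* The "update" signal tells attached calendar views to redraw. */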
    model->num_real_marks = 0;
    g_signal_emit_by_name(model, "update", GUINT_TO_POINTER(1));
}
Example #22
void gmpv_mpv_initialize(GmpvMpv *mpv)
{
	GSettings *main_settings = g_settings_new(CONFIG_ROOT);
	gchar *config_dir = get_config_dir_path();
	gchar *mpvopt = NULL;
	gchar *current_vo = NULL;
	gchar *mpv_version = NULL;

	const struct
	{
		const gchar *name;
		const gchar *value;
	}
	options[] = {	{"osd-level", "1"},
			{"softvol", "yes"},
			{"force-window", "yes"},
			{"input-default-bindings", "yes"},
			{"audio-client-name", ICON_NAME},
			{"title", "${media-title}"},
			{"autofit-larger", "75%"},
			{"window-scale", "1"},
			{"pause", "no"},
			{"ytdl", "yes"},
			{"osd-bar", "no"},
			{"input-cursor", "no"},
			{"cursor-autohide", "no"},
			{"softvol-max", "100"},
			{"config", "yes"},
			{"screenshot-template", "gnome-mpv-shot%n"},
			{"config-dir", config_dir},
			{NULL, NULL} };

	g_assert(mpv->mpv_ctx);

	for(gint i = 0; options[i].name; i++)
	{
		g_debug(	"Applying default option --%s=%s",
				options[i].name,
				options[i].value );

		mpv_set_option_string(	mpv->mpv_ctx,
					options[i].name,
					options[i].value );
	}

	if(g_settings_get_boolean(main_settings, "mpv-config-enable"))
	{
		gchar *mpv_conf
			= g_settings_get_string
				(main_settings, "mpv-config-file");

		g_info("Loading config file: %s", mpv_conf);
		mpv_load_config_file(mpv->mpv_ctx, mpv_conf);

		g_free(mpv_conf);
	}

	if(g_settings_get_boolean(main_settings, "mpv-input-config-enable"))
	{
		gchar *input_conf
			= g_settings_get_string
				(main_settings, "mpv-input-config-file");

		g_info("Loading input config file: %s", input_conf);
		load_input_conf(mpv, input_conf);

		g_free(input_conf);
	}
	else
	{
		load_input_conf(mpv, NULL);
	}

	mpvopt = g_settings_get_string(main_settings, "mpv-options");

	g_debug("Applying extra mpv options: %s", mpvopt);

	/* Apply extra options */
	if(apply_args(mpv->mpv_ctx, mpvopt) < 0)
	{
		const gchar *msg
			= _("Failed to apply one or more MPV options.");

		g_signal_emit_by_name(mpv, "mpv-error", msg);
	}

	if(mpv->force_opengl || mpv->wid <= 0)
	{
		g_info("Forcing --vo=opengl-cb");
		mpv_set_option_string(mpv->mpv_ctx, "vo", "opengl-cb");

	}
	else
	{
		g_debug(	"Attaching mpv window to wid %#x",
				(guint)mpv->wid );

		mpv_set_option(mpv->mpv_ctx, "wid", MPV_FORMAT_INT64, &mpv->wid);
	}

	mpv_observe_property(mpv->mpv_ctx, 0, "aid", MPV_FORMAT_INT64);
	mpv_observe_property(mpv->mpv_ctx, 0, "chapters", MPV_FORMAT_INT64);
	mpv_observe_property(mpv->mpv_ctx, 0, "core-idle", MPV_FORMAT_FLAG);
	mpv_observe_property(mpv->mpv_ctx, 0, "fullscreen", MPV_FORMAT_FLAG);
	mpv_observe_property(mpv->mpv_ctx, 0, "pause", MPV_FORMAT_FLAG);
	mpv_observe_property(mpv->mpv_ctx, 0, "length", MPV_FORMAT_DOUBLE);
	mpv_observe_property(mpv->mpv_ctx, 0, "media-title", MPV_FORMAT_STRING);
	mpv_observe_property(mpv->mpv_ctx, 0, "playlist-pos", MPV_FORMAT_INT64);
	mpv_observe_property(mpv->mpv_ctx, 0, "track-list", MPV_FORMAT_NODE);
	mpv_observe_property(mpv->mpv_ctx, 0, "volume", MPV_FORMAT_DOUBLE);
	mpv_set_wakeup_callback(mpv->mpv_ctx, wakeup_callback, mpv);
	mpv_check_error(mpv_initialize(mpv->mpv_ctx));


	mpv_version = gmpv_mpv_get_property_string(mpv, "mpv-version");
	current_vo = gmpv_mpv_get_property_string(mpv, "current-vo");

	g_info("Using %s", mpv_version);

	if(current_vo && !GDK_IS_X11_DISPLAY(gdk_display_get_default()))
	{
		g_info(	"The chosen vo is %s but the display is not X11; "
			"forcing --vo=opengl-cb and resetting",
			current_vo );

		mpv->force_opengl = TRUE;
		mpv->state.paused = FALSE;

		gmpv_mpv_reset(mpv);
	}
	else
	{
		GSettings *win_settings;
		gdouble volume;

		win_settings = g_settings_new(CONFIG_WIN_STATE);
		volume = g_settings_get_double(win_settings, "volume")*100;

		g_debug("Setting volume to %f", volume);
		mpv_set_property(	mpv->mpv_ctx,
					"volume",
					MPV_FORMAT_DOUBLE,
					&volume );

		/* The vo should be opengl-cb if current_vo is NULL*/
		if(!current_vo)
		{
			mpv->opengl_ctx =	mpv_get_sub_api
						(	mpv->mpv_ctx,
							MPV_SUB_API_OPENGL_CB );
		}

		gmpv_mpv_opt_handle_msg_level(mpv);
		gmpv_mpv_opt_handle_fs(mpv);
		gmpv_mpv_opt_handle_geometry(mpv);

		mpv->force_opengl = FALSE;
		mpv->state.ready = TRUE;
		g_signal_emit_by_name(mpv, "mpv-init");

		g_clear_object(&win_settings);
	}

	g_clear_object(&main_settings);
	g_free(config_dir);
	g_free(mpvopt);
	mpv_free(current_vo);
	mpv_free(mpv_version);
}
Example #23
/*
 * get uri
 * create playbin
 * check that we have video
 * for each potential location
 *   check for cancel
 *   grab snapshot
 *   interesting?
 *   return if so, other spin loop
 *
 * for future, check metadata for artwork and don't reject non-video streams (see totem)
 *
 * TODO: all error paths leak
 */
static void
gst_thumbnailer_create (TumblerAbstractThumbnailer *thumbnailer,
                        GCancellable               *cancellable,
                        TumblerFileInfo            *info)
{
  /* These positions are taken from Totem */
  const double positions[] = {
    1.0 / 3.0,
    2.0 / 3.0,
    0.1,
    0.9,
    0.5
  };
  GstElement             *playbin;
  gint64                  duration;
  unsigned int            i;
  GstBuffer              *frame;
  GdkPixbuf              *shot = NULL;  /* stays NULL if no frame could be grabbed */
  TumblerThumbnail       *thumbnail;
  TumblerThumbnailFlavor *flavour;
  TumblerImageData        data;
  GError                 *error = NULL;

  g_return_if_fail (IS_GST_THUMBNAILER (thumbnailer));
  g_return_if_fail (cancellable == NULL || G_IS_CANCELLABLE (cancellable));
  g_return_if_fail (TUMBLER_IS_FILE_INFO (info));

  /* Check for early cancellation */
  if (g_cancellable_is_cancelled (cancellable))
    return;

  playbin = make_pipeline (info, cancellable);
  if (playbin == NULL) {
    /* TODO: emit an error, but the specification won't let me. */
    return;
  }

  duration = get_duration (playbin);

  /* Now we have a pipeline that we know has video and is paused, ready for
     seeking */
  for (i = 0; i < G_N_ELEMENTS (positions); i++) {
    /* Check if we've been cancelled */
    if (g_cancellable_is_cancelled (cancellable)) {
      gst_element_set_state (playbin, GST_STATE_NULL);
      g_object_unref (playbin);
      return;
    }

    LOG ("trying position %f", positions[i]);

    gst_element_seek_simple (playbin,
                             GST_FORMAT_TIME,
                             GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT,
                             (gint64)(positions[i] * duration));

    if (gst_element_get_state (playbin, NULL, NULL, 1 * GST_SECOND) == GST_STATE_CHANGE_FAILURE) {
      LOG ("Could not seek");
      return;
    }

    g_object_get (playbin, "frame", &frame, NULL);

    if (frame == NULL) {
      LOG ("No frame found!");
      continue;
    }

    thumbnail = tumbler_file_info_get_thumbnail (info);
    flavour = tumbler_thumbnail_get_flavor (thumbnail);
    /* This frees the buffer for us */
    shot = convert_buffer_to_pixbuf (frame, cancellable, flavour);
    g_object_unref (flavour);

    /* If it's not interesting, throw it away and try again*/
    if (is_interesting (shot)) {
      /* Got an interesting image, break out */
      LOG ("Found an interesting image");
      break;
    }

    /*
     * If we've still got positions to try, free the current uninteresting
     * shot. Otherwise we'll make do with what we have.
     */
    if (i + 1 < G_N_ELEMENTS (positions) && shot) {
      g_object_unref (shot);
      shot = NULL;
    }

    /* Spin mainloop so we can pick up the cancels */
    while (g_main_context_pending (NULL)) {
      g_main_context_iteration (NULL, FALSE);
    }
  }

  gst_element_set_state (playbin, GST_STATE_NULL);
  g_object_unref (playbin);

  if (shot) {
    data.data = gdk_pixbuf_get_pixels (shot);
    data.has_alpha = gdk_pixbuf_get_has_alpha (shot);
    data.bits_per_sample = gdk_pixbuf_get_bits_per_sample (shot);
    data.width = gdk_pixbuf_get_width (shot);
    data.height = gdk_pixbuf_get_height (shot);
    data.rowstride = gdk_pixbuf_get_rowstride (shot);
    data.colorspace = (TumblerColorspace) gdk_pixbuf_get_colorspace (shot);

    tumbler_thumbnail_save_image_data (thumbnail, &data,
                                       tumbler_file_info_get_mtime (info),
                                       NULL, &error);

    g_object_unref (shot);

    if (error != NULL) {
      g_signal_emit_by_name (thumbnailer, "error",
                             tumbler_file_info_get_uri (info),
                             error->code, error->message);
      g_error_free (error);
    } else {
      g_signal_emit_by_name (thumbnailer, "ready",
                             tumbler_file_info_get_uri (info));
    }
  }
}
Example #24
void cPlayback::FindAllPids(int *apids, unsigned int *ac3flags, unsigned int *numpida, std::string * language)
{ 
	lt_info( "%s:%s\n", FILENAME, __FUNCTION__);

	if(m_gst_playbin)
	{
		gint i, n_audio = 0;
		//GstStructure * structure = NULL;
		
		// get audio
		g_object_get (m_gst_playbin, "n-audio", &n_audio, NULL);
		printf("%s: %d audio\n", __FUNCTION__, n_audio);
		
		if(n_audio == 0)
			return;
		
		for (i = 0; i < n_audio; i++)
		{
			// apids
			apids[i]=i;
			
			GstPad * pad = 0;
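			// playbin's "get-audio-pad" action signal returns the pad for audio stream i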
			g_signal_emit_by_name (m_gst_playbin, "get-audio-pad", i, &pad);
			GstCaps * caps = gst_pad_get_negotiated_caps(pad);
			if (!caps)
				continue;
			
			GstStructure * structure = gst_caps_get_structure(caps, 0);
			//const gchar *g_type = gst_structure_get_name(structure);
		
			//if (!structure)
				//return atUnknown;
			//ac3flags[0] = 0;

			// ac3flags
			if ( gst_structure_has_name (structure, "audio/mpeg"))
			{
				gint mpegversion, layer = -1;
				
				if (!gst_structure_get_int (structure, "mpegversion", &mpegversion))
					//return atUnknown;
					ac3flags[i] = 0;

				switch (mpegversion)
				{
					case 1:
						/*
						{
							gst_structure_get_int (structure, "layer", &layer);
							if ( layer == 3 )
								return atMP3;
							else
								return atMPEG;
								ac3flags[0] = 4;
							break;
						}
						*/
						ac3flags[i] = 4;
						break;
					case 2:
						//return atAAC;
						ac3flags[i] = 5;
						break;
					case 4:
						//return atAAC;
						ac3flags[i] = 5;
						break;
					default:
						//return atUnknown;
						ac3flags[i] = 0;
						break;
				}
			}
			else if ( gst_structure_has_name (structure, "audio/x-ac3") || gst_structure_has_name (structure, "audio/ac3") )
				//return atAC3;
				ac3flags[i] = 1;
			else if ( gst_structure_has_name (structure, "audio/x-dts") || gst_structure_has_name (structure, "audio/dts") )
				//return atDTS;
				ac3flags[i] = 6;
			else if ( gst_structure_has_name (structure, "audio/x-raw-int") )
				//return atPCM;
				ac3flags[i] = 0;
			
			gst_caps_unref(caps);
		}
		
		// numpids
		*numpida=i;
	}
}
Example #25
static void
on_webpage_downloaded (SummerWebBackend *web, gchar *path, gchar *web_data,
	GError *error, gpointer data)
{
	g_return_if_fail (SUMMER_IS_DOWNLOAD_YOUTUBE (data));
	g_return_if_fail (web_data != NULL);
	SummerDownloadYoutube *self = SUMMER_DOWNLOAD_YOUTUBE (data);
	if (error != NULL) {
		GError *e = g_error_new (
			SUMMER_DOWNLOAD_ERROR,
			SUMMER_DOWNLOAD_ERROR_INPUT,
			"Could not download youtube webpage: %s", error->message);
		g_signal_emit_by_name (self, "download-error", e);
		g_clear_error (&error);
		g_object_unref (self);
		return;
	}
	GError *e = NULL;
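	/* The "t" token embedded in the watch page is required to build the
	 * direct video URL (see create_youtube_url). */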
	GRegex *t = g_regex_new (", \"t\": \"([^\"]+)\"", 0, 0, &e);
	if (e) {
		g_warning ("Error compiling regex: %s", e->message);
		g_clear_error (&e);
		g_object_unref (self);
		return;
	}
	GMatchInfo *info;
	g_regex_match (t, web_data, 0, &info);
	g_regex_unref (t);
	
	if (!g_match_info_matches (info)) {
		e = g_error_new (
			SUMMER_DOWNLOAD_ERROR,
			SUMMER_DOWNLOAD_ERROR_INPUT,
			"Not a youtube page");
		g_signal_emit_by_name (self, "download-error", e);
		g_object_unref (self);
		g_match_info_free (info);
		return;
	}
	self->priv->t = g_match_info_fetch (info, 1);
	self->priv->quality = NUM_FORMATS - 1;
	g_match_info_free (info);

	g_object_unref (web);
	gchar *save_dir;
	g_object_get (self, "tmp-dir", &save_dir, NULL);
	web = summer_web_backend_disk_new (
		create_youtube_url (self),
		save_dir);
	g_free (save_dir);

	SummerItemData *item;
	g_object_get (self, "item", &item, NULL);
	gchar *filename = g_strdup_printf ("%s.%s", 
		summer_item_data_get_title (item),
		quality[self->priv->quality][1]);
	g_object_set (self, "filename", filename, NULL);
	g_free (filename);
	g_object_unref (item);

	g_signal_connect (web, "download-chunk",
		G_CALLBACK (on_download_chunk), self);
	g_signal_connect (web, "download-complete", 
		G_CALLBACK (on_file_downloaded), self);
	g_signal_connect (web, "headers-parsed",
		G_CALLBACK (on_headers_parsed), self);

	summer_web_backend_fetch (web, &e);
	if (e != NULL) {
		g_signal_emit_by_name (self, "download-error", e);
		g_object_unref (self);
	}
}
Example #26
void eServiceMP3::gstBusCall(GstMessage *msg)
{
	if (!msg)
		return;
	gchar *sourceName;
	GstObject *source;
	source = GST_MESSAGE_SRC(msg);
	if (!GST_IS_OBJECT(source))
		return;
	sourceName = gst_object_get_name(source);
#if 0
	gchar *string;
	if (gst_message_get_structure(msg))
		string = gst_structure_to_string(gst_message_get_structure(msg));
	else
		string = g_strdup(GST_MESSAGE_TYPE_NAME(msg));
	eDebug("eTsRemoteSource::gst_message from %s: %s", sourceName, string);
	g_free(string);
#endif
	switch (GST_MESSAGE_TYPE (msg))
	{
		case GST_MESSAGE_EOS:
			m_event((iPlayableService*)this, evEOF);
			break;
		case GST_MESSAGE_STATE_CHANGED:
		{
			if(GST_MESSAGE_SRC(msg) != GST_OBJECT(m_gst_playbin))
				break;

			GstState old_state, new_state;
			gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
		
			if(old_state == new_state)
				break;
	
			eDebug("eServiceMP3::state transition %s -> %s", gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));
	
			GstStateChange transition = (GstStateChange)GST_STATE_TRANSITION(old_state, new_state);
	
			switch(transition)
			{
				case GST_STATE_CHANGE_NULL_TO_READY:
				{
				}	break;
				case GST_STATE_CHANGE_READY_TO_PAUSED:
				{
					GstElement *subsink = gst_bin_get_by_name(GST_BIN(m_gst_playbin), "subtitle_sink");
					if (subsink)
					{
#ifdef GSTREAMER_SUBTITLE_SYNC_MODE_BUG
						/* 
						 * HACK: disable sync mode for now; gstreamer suffers from a bug causing sparse streams to lose sync after pause/resume/skip
						 * see: https://bugzilla.gnome.org/show_bug.cgi?id=619434
						 * A side effect of using sync=false is that we receive subtitle buffers (far) ahead of their
						 * display time.
						 * Not too far ahead for subtitles contained in the media container.
						 * But for external srt files, we could receive all subtitles at once.
						 * And not just once, but after each pause/resume / skip.
						 * So as soon as gstreamer has been fixed to keep sync in sparse streams, sync needs to be re-enabled.
						 */
						g_object_set (G_OBJECT (subsink), "sync", FALSE, NULL);
#endif
#if 0
						/* we should not use ts-offset to sync with the decoder time, we have to do our own decoder timekeeping */
						g_object_set (G_OBJECT (subsink), "ts-offset", -2L * GST_SECOND, NULL);
						/* late buffers probably will not occur very often */
						g_object_set (G_OBJECT (subsink), "max-lateness", 0L, NULL);
						/* avoid prerolling (it might not be a good idea to preroll a sparse stream) */
						g_object_set (G_OBJECT (subsink), "async", TRUE, NULL);
#endif
						eDebug("eServiceMP3::subsink properties set!");
						gst_object_unref(subsink);
					}
					setAC3Delay(ac3_delay);
					setPCMDelay(pcm_delay);
				}	break;
				case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
				{
					if ( m_sourceinfo.is_streaming && m_streamingsrc_timeout )
						m_streamingsrc_timeout->stop();
				}	break;
				case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
				{
				}	break;
				case GST_STATE_CHANGE_PAUSED_TO_READY:
				{
				}	break;
				case GST_STATE_CHANGE_READY_TO_NULL:
				{
				}	break;
			}
			break;
		}
		case GST_MESSAGE_ERROR:
		{
			gchar *debug;
			GError *err;
			gst_message_parse_error (msg, &err, &debug);
			g_free (debug);
			eWarning("Gstreamer error: %s (%i) from %s", err->message, err->code, sourceName );
			if ( err->domain == GST_STREAM_ERROR )
			{
				if ( err->code == GST_STREAM_ERROR_CODEC_NOT_FOUND )
				{
					if ( g_strrstr(sourceName, "videosink") )
						m_event((iPlayableService*)this, evUser+11);
					else if ( g_strrstr(sourceName, "audiosink") )
						m_event((iPlayableService*)this, evUser+10);
				}
			}
			g_error_free(err);
			break;
		}
		case GST_MESSAGE_INFO:
		{
			gchar *debug;
			GError *inf;
	
			gst_message_parse_info (msg, &inf, &debug);
			g_free (debug);
			if ( inf->domain == GST_STREAM_ERROR && inf->code == GST_STREAM_ERROR_DECODE )
			{
				if ( g_strrstr(sourceName, "videosink") )
					m_event((iPlayableService*)this, evUser+14);
			}
			g_error_free(inf);
			break;
		}
		case GST_MESSAGE_TAG:
		{
			GstTagList *tags, *result;
			gst_message_parse_tag(msg, &tags);
	
			result = gst_tag_list_merge(m_stream_tags, tags, GST_TAG_MERGE_REPLACE);
			if (result)
			{
				if (m_stream_tags)
					gst_tag_list_free(m_stream_tags);
				m_stream_tags = result;
			}
	
			const GValue *gv_image = gst_tag_list_get_value_index(tags, GST_TAG_IMAGE, 0);
			if ( gv_image )
			{
				GstBuffer *buf_image;
				buf_image = gst_value_get_buffer (gv_image);
				int fd = open("/tmp/.id3coverart", O_CREAT|O_WRONLY|O_TRUNC, 0644);
				int ret = write(fd, GST_BUFFER_DATA(buf_image), GST_BUFFER_SIZE(buf_image));
				close(fd);
				eDebug("eServiceMP3::/tmp/.id3coverart %d bytes written ", ret);
				m_event((iPlayableService*)this, evUser+13);
			}
			gst_tag_list_free(tags);
			m_event((iPlayableService*)this, evUpdatedInfo);
			break;
		}
		case GST_MESSAGE_ASYNC_DONE:
		{
			if(GST_MESSAGE_SRC(msg) != GST_OBJECT(m_gst_playbin))
				break;

			GstTagList *tags;
			gint i, active_idx, n_video = 0, n_audio = 0, n_text = 0;

			g_object_get (m_gst_playbin, "n-video", &n_video, NULL);
			g_object_get (m_gst_playbin, "n-audio", &n_audio, NULL);
			g_object_get (m_gst_playbin, "n-text", &n_text, NULL);

			eDebug("eServiceMP3::async-done - %d video, %d audio, %d subtitle", n_video, n_audio, n_text);

			if ( n_video + n_audio <= 0 )
				stop();

			active_idx = 0;

			m_audioStreams.clear();
			m_subtitleStreams.clear();

			for (i = 0; i < n_audio; i++)
			{
				audioStream audio;
				gchar *g_codec, *g_lang;
				GstPad* pad = 0;
				g_signal_emit_by_name (m_gst_playbin, "get-audio-pad", i, &pad);
				GstCaps* caps = gst_pad_get_negotiated_caps(pad);
				if (!caps)
					continue;
				GstStructure* str = gst_caps_get_structure(caps, 0);
				const gchar *g_type = gst_structure_get_name(str);
				eDebug("AUDIO STRUCT=%s", g_type);
				audio.type = gstCheckAudioPad(str);
				g_codec = g_strdup(g_type);
				g_lang = g_strdup_printf ("und");
				g_signal_emit_by_name (m_gst_playbin, "get-audio-tags", i, &tags);
				if ( tags && gst_is_tag_list(tags) )
				{
					gst_tag_list_get_string(tags, GST_TAG_AUDIO_CODEC, &g_codec);
					gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &g_lang);
					gst_tag_list_free(tags);
				}
				audio.language_code = std::string(g_lang);
				audio.codec = std::string(g_codec);
				eDebug("eServiceMP3::audio stream=%i codec=%s language=%s", i, g_codec, g_lang);
				m_audioStreams.push_back(audio);
				g_free (g_lang);
				g_free (g_codec);
				gst_caps_unref(caps);
			}

			for (i = 0; i < n_text; i++)
			{	
				gchar *g_codec = NULL, *g_lang = NULL;
				g_signal_emit_by_name (m_gst_playbin, "get-text-tags", i, &tags);
				subtitleStream subs;

				g_lang = g_strdup_printf ("und");
				if ( tags && gst_is_tag_list(tags) )
				{
					gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &g_lang);
					gst_tag_list_get_string(tags, GST_TAG_SUBTITLE_CODEC, &g_codec);
					gst_tag_list_free(tags);
				}

				subs.language_code = std::string(g_lang);
				eDebug("eServiceMP3::subtitle stream=%i language=%s codec=%s", i, g_lang, g_codec);
				
				GstPad* pad = 0;
				g_signal_emit_by_name (m_gst_playbin, "get-text-pad", i, &pad);
				if ( pad )
					g_signal_connect (G_OBJECT (pad), "notify::caps", G_CALLBACK (gstTextpadHasCAPS), this);
				subs.type = getSubtitleType(pad, g_codec);

				m_subtitleStreams.push_back(subs);
				g_free (g_lang);
			}
			m_event((iPlayableService*)this, evUpdatedInfo);

			if ( m_errorInfo.missing_codec != "" )
			{
				if ( m_errorInfo.missing_codec.find("video/") == 0 || ( m_errorInfo.missing_codec.find("audio/") == 0 && getNumberOfTracks() == 0 ) )
					m_event((iPlayableService*)this, evUser+12);
			}
			break;
		}
		case GST_MESSAGE_ELEMENT:
		{
			if (const GstStructure *msgstruct = gst_message_get_structure(msg))
			{
				if ( gst_is_missing_plugin_message(msg) )
				{
					GstCaps *caps;
					gst_structure_get (msgstruct, "detail", GST_TYPE_CAPS, &caps, NULL); 
					std::string codec = (const char*) gst_caps_to_string(caps);
					gchar *description = gst_missing_plugin_message_get_description(msg);
					if ( description )
					{
						eDebug("eServiceMP3::m_errorInfo.missing_codec = %s", codec.c_str());
						m_errorInfo.error_message = "GStreamer plugin " + (std::string)description + " not available!\n";
						m_errorInfo.missing_codec = codec.substr(0,(codec.find_first_of(',')));
						g_free(description);
					}
					gst_caps_unref(caps);
				}
				else
				{
					const gchar *eventname = gst_structure_get_name(msgstruct);
					if ( eventname )
					{
						if (!strcmp(eventname, "eventSizeChanged") || !strcmp(eventname, "eventSizeAvail"))
						{
							gst_structure_get_int (msgstruct, "aspect_ratio", &m_aspect);
							gst_structure_get_int (msgstruct, "width", &m_width);
							gst_structure_get_int (msgstruct, "height", &m_height);
							if (strstr(eventname, "Changed"))
								m_event((iPlayableService*)this, evVideoSizeChanged);
						}
						else if (!strcmp(eventname, "eventFrameRateChanged") || !strcmp(eventname, "eventFrameRateAvail"))
						{
							gst_structure_get_int (msgstruct, "frame_rate", &m_framerate);
							if (strstr(eventname, "Changed"))
								m_event((iPlayableService*)this, evVideoFramerateChanged);
						}
						else if (!strcmp(eventname, "eventProgressiveChanged") || !strcmp(eventname, "eventProgressiveAvail"))
						{
							gst_structure_get_int (msgstruct, "progressive", &m_progressive);
							if (strstr(eventname, "Changed"))
								m_event((iPlayableService*)this, evVideoProgressiveChanged);
						}
					}
				}
			}
			break;
		}
		case GST_MESSAGE_BUFFERING:
		{
			GstBufferingMode mode;
			gst_message_parse_buffering(msg, &(m_bufferInfo.bufferPercent));
			gst_message_parse_buffering_stats(msg, &mode, &(m_bufferInfo.avgInRate), &(m_bufferInfo.avgOutRate), &(m_bufferInfo.bufferingLeft));
			m_event((iPlayableService*)this, evBuffering);
			break;
		}
		case GST_MESSAGE_STREAM_STATUS:
		{
			GstStreamStatusType type;
			GstElement *owner;
			gst_message_parse_stream_status (msg, &type, &owner);
			if ( type == GST_STREAM_STATUS_TYPE_CREATE && m_sourceinfo.is_streaming )
			{
				if ( GST_IS_PAD(source) )
					owner = gst_pad_get_parent_element(GST_PAD(source));
				else if ( GST_IS_ELEMENT(source) )
					owner = GST_ELEMENT(source);
				else
					owner = 0;
				if ( owner )
				{
					GstElementFactory *factory = gst_element_get_factory(GST_ELEMENT(owner));
					const gchar *name = gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(factory));
					if (!strcmp(name, "souphttpsrc"))
					{
						m_streamingsrc_timeout->start(HTTP_TIMEOUT*1000, true);
						g_object_set (G_OBJECT (owner), "timeout", HTTP_TIMEOUT, NULL);
						eDebug("eServiceMP3::GST_STREAM_STATUS_TYPE_CREATE -> setting timeout on %s to %is", name, HTTP_TIMEOUT);
					}
					
				}
				if ( GST_IS_PAD(source) )
					gst_object_unref(owner);
			}
			break;
		}
		default:
			break;
	}
	g_free (sourceName);
}
void scim_bridge_client_imcontext_commit (ScimBridgeClientIMContext *imcontext)
{
    g_signal_emit_by_name ((ScimBridgeClientIMContext*) imcontext, "commit", imcontext->commit_string);
}
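
This wrapper hands the pending commit string back to GTK+ by emitting the "commit" signal on the IM context (presumably the standard GtkIMContext signal). As a purely illustrative sketch of the receiving side, not part of scim-bridge itself, a handler connected to that signal receives the committed string:

/* Illustrative handler for GtkIMContext's "commit" signal. */
static void
on_commit (GtkIMContext *context, gchar *str, gpointer user_data)
{
    g_print ("committed text: %s\n", str);
}

/* usage (illustrative):
 *   g_signal_connect (im_context, "commit", G_CALLBACK (on_commit), NULL);
 */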
Example #28
0
/*
 * Runs the RTP pipeline.
 * @param p Pointer to the RTP pipeline.
 */
static void
rtp_pipeline_run (rtp_pipeline * p)
{
  GstFlowReturn flow_ret;
  GMainLoop *mainloop = NULL;
  GstBus *bus;
  gint i, j;

  /* Check parameters. */
  if (p == NULL) {
    return;
  }

  /* Create mainloop. */
  mainloop = g_main_loop_new (NULL, FALSE);
  if (!mainloop) {
    return;
  }

  /* Add bus callback. */
  bus = gst_pipeline_get_bus (GST_PIPELINE (p->pipeline));

  gst_bus_add_watch (bus, rtp_bus_callback, (gpointer) mainloop);
  gst_object_unref (bus);

  /* Set pipeline to PLAYING. */
  gst_element_set_state (p->pipeline, GST_STATE_PLAYING);

  /* Push custom event into the pipeline */
  if (p->custom_event) {
    GstPad *srcpad;

    /* Install a probe to drop the event after it has been serialized */
    srcpad = gst_element_get_static_pad (p->rtppay, "src");
    gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
        pay_event_probe_cb, p, NULL);
    gst_object_unref (srcpad);

    /* Install a probe to trace the deserialized event after depayloading */
    srcpad = gst_element_get_static_pad (p->rtpdepay, "src");
    gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
        depay_event_probe_cb, p, NULL);
    gst_object_unref (srcpad);
    /* Send the event */
    gst_element_send_event (p->appsrc, gst_event_ref (p->custom_event));
  }

  /* Push data into the pipeline */
  for (i = 0; i < LOOP_COUNT; i++) {
    const guint8 *data = p->frame_data;

    for (j = 0; j < p->frame_count; j++) {
      GstBuffer *buf;

      buf =
          gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
          (guint8 *) data, p->frame_data_size, 0, p->frame_data_size, NULL,
          NULL);

      g_signal_emit_by_name (p->appsrc, "push-buffer", buf, &flow_ret);
      fail_unless_equals_int (flow_ret, GST_FLOW_OK);
      data += p->frame_data_size;

      gst_buffer_unref (buf);
    }
  }

  g_signal_emit_by_name (p->appsrc, "end-of-stream", &flow_ret);

  /* Run mainloop. */
  g_main_loop_run (mainloop);

  /* Set pipeline to NULL. */
  gst_element_set_state (p->pipeline, GST_STATE_NULL);

  /* Release mainloop. */
  g_main_loop_unref (mainloop);

  fail_if (p->custom_event);
}
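
Note that g_main_loop_run() above only returns once the installed bus watch quits the loop. The actual rtp_bus_callback used by this test is not shown here; the sketch below is only a guess at its minimal shape, quitting the loop on EOS or error.

/* Hypothetical minimal bus watch; the real rtp_bus_callback may differ. */
static gboolean
rtp_bus_callback_sketch (GstBus * bus, GstMessage * message, gpointer data)
{
  GMainLoop *mainloop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR:
    case GST_MESSAGE_EOS:
      g_main_loop_quit (mainloop);
      break;
    default:
      break;
  }
  return TRUE;                  /* keep watching the bus */
}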
Example #29
0
void FrameLoaderClient::dispatchDidHandleOnloadEvents()
{
    g_signal_emit_by_name(getViewFromFrame(m_frame), "onload-event", m_frame);
}
static void
thunar_vfs_volume_manager_hal_device_added (LibHalContext *context,
                                            const gchar   *udi)
{
  ThunarVfsVolumeManagerHal *manager_hal = libhal_ctx_get_user_data (context);
  ThunarVfsVolumeHal        *volume_hal;
  LibHalVolume              *hv;
  LibHalDrive               *hd;
  const gchar               *drive_udi;

  _thunar_vfs_return_if_fail (THUNAR_VFS_IS_VOLUME_MANAGER_HAL (manager_hal));
  _thunar_vfs_return_if_fail (manager_hal->context == context);

  /* check if we have a volume here */
  hv = libhal_volume_from_udi (context, udi);

  /* HAL might want us to ignore this volume for some reason */
  if (G_UNLIKELY (hv != NULL && libhal_volume_should_ignore (hv)))
    {
      libhal_volume_free (hv);
      return;
    }

  /* emit the "device-added" signal (to support thunar-volman) */
  g_signal_emit_by_name (G_OBJECT (manager_hal), "device-added", udi);

  if (G_LIKELY (hv != NULL))
    {
      /* check if we have a mountable file system here */
      if (libhal_volume_get_fsusage (hv) == LIBHAL_VOLUME_USAGE_MOUNTABLE_FILESYSTEM)
        {
          /* determine the UDI of the drive to which this volume belongs */
          drive_udi = libhal_volume_get_storage_device_udi (hv);
          if (G_LIKELY (drive_udi != NULL))
            {
              /* determine the drive for the volume */
              hd = libhal_drive_from_udi (context, drive_udi);
              if (G_LIKELY (hd != NULL))
                {
                  /* check if we already have a volume object for the UDI */
                  volume_hal = thunar_vfs_volume_manager_hal_get_volume_by_udi (manager_hal, udi);
                  if (G_LIKELY (volume_hal == NULL))
                    {
                      /* no volume object for this UDI yet, so allocate a new one */
                      volume_hal = g_object_new (THUNAR_VFS_TYPE_VOLUME_HAL, NULL);
                      volume_hal->udi = g_strdup (udi);
                    }

                  /* update the volume object with the new data from the HAL volume/drive */
                  thunar_vfs_volume_hal_update (volume_hal, context, hv, hd);

                  /* add the volume object to our list if we allocated a new one */
                  if (g_list_find (THUNAR_VFS_VOLUME_MANAGER (manager_hal)->volumes, volume_hal) == NULL)
                    {
                      /* add the volume to the volume manager */
                      thunar_vfs_volume_manager_add (THUNAR_VFS_VOLUME_MANAGER (manager_hal), THUNAR_VFS_VOLUME (volume_hal));

                      /* release the reference on the volume */
                      g_object_unref (G_OBJECT (volume_hal));
                    }

                  /* release the HAL drive */
                  libhal_drive_free (hd);
                }
            }
        }

      /* release the HAL volume */
      libhal_volume_free (hv);
    }
  else
    {
      /* but maybe we have a floppy disk drive here */
      hd = libhal_drive_from_udi (context, udi);
      if (G_UNLIKELY (hd == NULL))
        return;

      /* check if we have a floppy disk drive */
      if (G_LIKELY (libhal_drive_get_type (hd) == LIBHAL_DRIVE_TYPE_FLOPPY))
        {
          /* check if we already have a volume object for the UDI */
          volume_hal = thunar_vfs_volume_manager_hal_get_volume_by_udi (manager_hal, udi);
          if (G_LIKELY (volume_hal == NULL))
            {
              /* no volume object for this UDI yet, so allocate a new one */
              volume_hal = g_object_new (THUNAR_VFS_TYPE_VOLUME_HAL, NULL);
              volume_hal->udi = g_strdup (udi);
            }

          /* update the volume object with the new data from the HAL volume/drive */
          thunar_vfs_volume_hal_update (volume_hal, context, NULL, hd);

          /* add the volume object to our list if we allocated a new one */
          if (g_list_find (THUNAR_VFS_VOLUME_MANAGER (manager_hal)->volumes, volume_hal) == NULL)
            {
              /* add the volume to the volume manager */
              thunar_vfs_volume_manager_add (THUNAR_VFS_VOLUME_MANAGER (manager_hal), THUNAR_VFS_VOLUME (volume_hal));

              /* release the reference on the volume */
              g_object_unref (G_OBJECT (volume_hal));
            }
        }

      /* release the HAL drive */
      libhal_drive_free (hd);
    }
}
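
The "device-added" emission near the top of this function passes the device UDI to whoever is listening (thunar-volman, per the comment). A minimal, purely illustrative listener could look like this; only the signal name and the UDI argument come from the code above.

/* Illustrative "device-added" listener; the handler name and connection
 * below are assumptions, not taken from thunar-volman. */
static void
on_device_added (gpointer volume_manager, const gchar *udi, gpointer user_data)
{
  g_message ("HAL device added: %s", udi);
}

/* usage (illustrative):
 *   g_signal_connect (manager, "device-added",
 *                     G_CALLBACK (on_device_added), NULL);
 */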