Example #1
void init_camera(int argc, char* argv[])
{
    gst_init (&argc, &argv);
    recognition_info.timer = g_timer_new();
}
Example #2
int main(int argc, char **argv)
{

#ifdef MEMLEAK_CHECK
	atexit(DumpUnfreed);
#endif

#ifdef OBJECT_DEBUG
	atexit(object_dump);
#endif

	gst_init(&argc, &argv);

	// set pythonpath if unset
	setenv("PYTHONPATH", eEnv::resolve("${libdir}/enigma2/python").c_str(), 0);
	printf("PYTHONPATH: %s\n", getenv("PYTHONPATH"));
	printf("DVB_API_VERSION %d DVB_API_VERSION_MINOR %d\n", DVB_API_VERSION, DVB_API_VERSION_MINOR);

	bsodLogInit();

	ePython python;
	eMain main;

#if 1
	ePtr<gMainDC> my_dc;
	gMainDC::getInstance(my_dc);

	//int double_buffer = my_dc->haveDoubleBuffering();

	ePtr<gLCDDC> my_lcd_dc;
	gLCDDC::getInstance(my_lcd_dc);


	/* ok, this is currently hardcoded for arabic. */
	/* some characters are wrong in the regular font, force them to use the replacement font */
	for (int i = 0x60c; i <= 0x66d; ++i)
		eTextPara::forceReplacementGlyph(i);
	eTextPara::forceReplacementGlyph(0xfdf2);
	for (int i = 0xfe80; i < 0xff00; ++i)
		eTextPara::forceReplacementGlyph(i);

	eWidgetDesktop dsk(my_dc->size());
	eWidgetDesktop dsk_lcd(my_lcd_dc->size());

	dsk.setStyleID(0);
	dsk_lcd.setStyleID(my_lcd_dc->size().width() == 96 ? 2 : 1);

/*	if (double_buffer)
	{
		eDebug("[MAIN] - double buffering found, enable buffered graphics mode.");
		dsk.setCompositionMode(eWidgetDesktop::cmBuffered);
	} */

	wdsk = &dsk;
	lcddsk = &dsk_lcd;

	dsk.setDC(my_dc);
	dsk_lcd.setDC(my_lcd_dc);

	dsk.setBackgroundColor(gRGB(0,0,0,0xFF));
#endif

		/* redrawing is done in an idle-timer, so we have to set the context */
	dsk.setRedrawTask(main);
	dsk_lcd.setRedrawTask(main);

	eDebug("[MAIN] Loading spinners...");

	{
		int i;
#define MAX_SPINNER 64
		ePtr<gPixmap> wait[MAX_SPINNER];
		for (i=0; i<MAX_SPINNER; ++i)
		{
			char filename[64];
			std::string rfilename;
			snprintf(filename, sizeof(filename), "${datadir}/enigma2/skin_default/spinner/wait%d.png", i + 1);
			rfilename = eEnv::resolve(filename);
			loadPNG(wait[i], rfilename.c_str());

			if (!wait[i])
			{
				if (!i)
					eDebug("[MAIN] failed to load %s: %m", rfilename.c_str());
				else
					eDebug("[MAIN] found %d spinner!\n", i);
				break;
			}
		}
		if (i)
			my_dc->setSpinner(eRect(ePoint(100, 100), wait[0]->size()), wait, i);
		else
			my_dc->setSpinner(eRect(100, 100, 0, 0), wait, 1);
	}

	gRC::getInstance()->setSpinnerDC(my_dc);

	eRCInput::getInstance()->keyEvent.connect(slot(keyEvent));

	printf("[MAIN] executing main\n");

	bsodCatchSignals();
	catchTermSignal();

	setIoPrio(IOPRIO_CLASS_BE, 3);

	/* start at full size */
	eVideoWidget::setFullsize(true);

	// python.execute("mytest", "__main__");
	python.execFile(eEnv::resolve("${libdir}/enigma2/python/mytest.py").c_str());

	/* restore both decoders to full size */
	eVideoWidget::setFullsize(true);

	if (exit_code == 5) /* python crash */
	{
		eDebug("[MAIN] (exit code 5)");
		bsodFatal(0);
	}

	dsk.paint();
	dsk_lcd.paint();

	{
		gPainter p(my_lcd_dc);
		p.resetClip(eRect(ePoint(0, 0), my_lcd_dc->size()));
		p.clear();
		p.flush();
	}

	return exit_code;
}
Example #3
int main (int argc, char **argv)
{
  SDL_SysWMinfo info;
  Display *gtkglext_display = NULL;
  Window gtkglext_window = 0;
  GLXContext gtkglext_gl_context = NULL;
  
  GstPipeline *pipeline = NULL;
  GstBus *bus = NULL;
  GstElement *glfilter = NULL;
  GstElement *fakesink = NULL;
  GstState state;
  GAsyncQueue *queue_input_buf = NULL;
  GAsyncQueue *queue_output_buf = NULL;

  gst_init (&argc, &argv);
  gtk_init (&argc, &argv);
  gtk_gl_init(&argc, &argv);
    
  gint major; 
  gint minor;
  gdk_gl_query_version(&major, &minor);
  g_print("\nOpenGL extension version - %d.%d\n", major, minor);
  /* Try double-buffered visual */

  GdkGLConfig* glconfig;
  // the static_cast below is required when compiling this as C++.
  glconfig = gdk_gl_config_new_by_mode(static_cast<GdkGLConfigMode>(GDK_GL_MODE_RGB | GDK_GL_MODE_DOUBLE));
  if (glconfig == NULL)
  {
      g_print("*** Cannot find the double-buffered visual.\n");
      g_print("*** Trying single-buffered visual.\n");
      /* Try single-buffered visual */
      glconfig = gdk_gl_config_new_by_mode(static_cast<GdkGLConfigMode>(GDK_GL_MODE_RGB));
      if (glconfig == NULL)
      {
          g_print ("*** No appropriate OpenGL-capable visual found.\n");
          exit(1);
      }
  }
  examine_gl_config_attrib(glconfig);

  // Main GTK window
  GtkWidget* window = gtk_window_new(GTK_WINDOW_TOPLEVEL);
  gtk_widget_set_size_request(window, 640, 480);
  gtk_window_set_title(GTK_WINDOW (window), "Toonloop 1.3 experimental");
  GdkGeometry geometry;
  geometry.min_width = 1;
  geometry.min_height = 1;
  geometry.max_width = -1;
  geometry.max_height = -1;
  gtk_window_set_geometry_hints(GTK_WINDOW(window), window, &geometry, GDK_HINT_MIN_SIZE);
  g_signal_connect(G_OBJECT(window), "delete-event", G_CALLBACK(on_delete_event), NULL);

  //area where the video is drawn
  GtkWidget* drawing_area = gtk_drawing_area_new();
  gtk_container_add(GTK_CONTAINER(window), drawing_area);

  /* Set OpenGL-capability to the widget. */
  gtk_widget_set_gl_capability(drawing_area, glconfig, NULL, TRUE, GDK_GL_RGBA_TYPE);

  /* Loop, drawing and checking events */
  g_signal_connect_after(G_OBJECT(drawing_area), "realize", G_CALLBACK(on_realize), NULL);
  g_signal_connect(G_OBJECT(drawing_area), "configure_event", G_CALLBACK(on_configure_event), NULL);
  g_signal_connect(G_OBJECT(drawing_area), "expose_event", G_CALLBACK(on_expose_event), NULL);

  gtk_widget_show_all(window);

 // ------------------ done with the GTK GUI


  /* retrieve and turn off gtkglext opengl context */
  SDL_VERSION (&info.version);
  SDL_GetWMInfo (&info);
  gtkglext_display = info.info.x11.display;
  gtkglext_window = info.info.x11.window;
  gtkglext_gl_context = glXGetCurrentContext ();
  glXMakeCurrent (gtkglext_display, None, 0);

  pipeline =
      GST_PIPELINE (gst_parse_launch
      ("videotestsrc ! video/x-raw-yuv, width=320, height=240, framerate=(fraction)30/1 ! "
          "glupload ! gleffects effect=5 ! fakesink sync=1", NULL));

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message::error", G_CALLBACK (end_stream_cb), NULL);
  g_signal_connect (bus, "message::warning", G_CALLBACK (end_stream_cb), NULL);
  g_signal_connect (bus, "message::eos", G_CALLBACK (end_stream_cb), NULL);
  gst_object_unref (bus);

  /* gtkglext_gl_context is an external OpenGL context with which gst-plugins-gl wants to share textures */
  glfilter = gst_bin_get_by_name (GST_BIN (pipeline), "gleffects0");
  g_object_set (G_OBJECT (glfilter), "external-opengl-context",
      gtkglext_gl_context, NULL);
  g_object_unref (glfilter);

  /* take the pipeline from NULL to PAUSED to make sure the gst opengl context is
   * created and shared with the gtkglext one */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PAUSED);
  state = GST_STATE_PAUSED;
  if (gst_element_get_state (GST_ELEMENT (pipeline), &state, NULL,
          GST_CLOCK_TIME_NONE) != GST_STATE_CHANGE_SUCCESS) {
    g_debug ("failed to pause pipeline\n");
    return -1;
  }

  /* make the gtkglext opengl context current again */
  glXMakeCurrent (gtkglext_display, gtkglext_window, gtkglext_gl_context);

  /* append a gst-gl texture to this queue when you no longer need it */
  fakesink = gst_bin_get_by_name (GST_BIN (pipeline), "fakesink0");
  g_object_set (G_OBJECT (fakesink), "signal-handoffs", TRUE, NULL);
  g_signal_connect (fakesink, "handoff", G_CALLBACK (on_gst_buffer), NULL);
  queue_input_buf = g_async_queue_new ();
  queue_output_buf = g_async_queue_new ();
  g_object_set_data (G_OBJECT (fakesink), "queue_input_buf", queue_input_buf);
  g_object_set_data (G_OBJECT (fakesink), "queue_output_buf", queue_output_buf);
  g_object_unref (fakesink);

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
  
  gtk_main();

  /* before deinitializing the gst-gl opengl context,
   * no shared context (here the gtkglext one) must be current
   */
  glXMakeCurrent (gtkglext_display, gtkglext_window, gtkglext_gl_context);

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
  g_object_unref (pipeline);

  /* turn the gtkglext opengl context off again */
  glXMakeCurrent (gtkglext_display, None, 0);

  /* make sure there is no pending gst gl buffer in the communication queues 
   * between gtkglext and gst-gl
   */
  while (g_async_queue_length (queue_input_buf) > 0) {
    GstBuffer *buf = (GstBuffer *) g_async_queue_pop (queue_input_buf);
    gst_buffer_unref (buf);
  }

  while (g_async_queue_length (queue_output_buf) > 0) {
    GstBuffer *buf = (GstBuffer *) g_async_queue_pop (queue_output_buf);
    gst_buffer_unref (buf);
  }

  return 0;
}
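The on_gst_buffer handoff callback wired to the fakesink above is defined elsewhere. A minimal sketch, assuming it only needs to keep a reference to the buffer and park it in the input queue for the GTK/GL side, could look like this:

/* Hypothetical sketch of the handoff callback referenced above: take a
 * reference on the buffer and push it into the input queue attached to the
 * fakesink, so the drawing code can consume it later. */
static void
on_gst_buffer (GstElement * fakesink, GstBuffer * buf, GstPad * pad,
    gpointer data)
{
  GAsyncQueue *queue_input_buf =
      (GAsyncQueue *) g_object_get_data (G_OBJECT (fakesink),
      "queue_input_buf");

  gst_buffer_ref (buf);         /* keep the buffer alive past the handoff */
  g_async_queue_push (queue_input_buf, buf);
}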
Example #4
gint
main (gint argc, gchar * argv[])
{

  gst_init (&argc, &argv);

  GST_DEBUG_CATEGORY_INIT (cat_default, "GST_Check_default", 0,
      "default category for this test");
  GST_DEBUG_CATEGORY_INIT (cat2, "GST_Check_2", 0,
      "second category for this test");
#ifndef GST_DISABLE_GST_DEBUG
  g_assert (gst_debug_remove_log_function (gst_debug_log_default) == 1);
#endif
  gst_debug_add_log_function (check_message, NULL);

  count = 0;
  GST_ERROR ("This is an error.");
  ++count;
  GST_WARNING ("This is a warning.");
  ++count;
  GST_INFO ("This is an info message.");
  ++count;
  GST_DEBUG ("This is a debug message.");
  ++count;
  GST_LOG ("This is a log message.");
  ++count;
  GST_CAT_ERROR (cat2, "This is an error with category.");
  ++count;
  GST_CAT_WARNING (cat2, "This is a warning with category.");
  ++count;
  GST_CAT_INFO (cat2, "This is an info message with category.");
  ++count;
  GST_CAT_DEBUG (cat2, "This is a debug message with category.");
  ++count;
  GST_CAT_LOG (cat2, "This is a log message with category.");
  count = -1;
  pipeline = gst_element_factory_make ("pipeline", "testelement");
  count = 10;
  GST_ERROR_OBJECT (pipeline, "This is an error with object.");
  ++count;
  GST_WARNING_OBJECT (pipeline, "This is a warning with object.");
  ++count;
  GST_INFO_OBJECT (pipeline, "This is an info message with object.");
  ++count;
  GST_DEBUG_OBJECT (pipeline, "This is a debug message with object.");
  ++count;
  GST_LOG_OBJECT (pipeline, "This is a log message with object.");
  ++count;
  GST_CAT_ERROR_OBJECT (cat2, pipeline,
      "This is an error with category and object.");
  ++count;
  GST_CAT_WARNING_OBJECT (cat2, pipeline,
      "This is a warning with category and object.");
  ++count;
  GST_CAT_INFO_OBJECT (cat2, pipeline,
      "This is an info message with category and object.");
  ++count;
  GST_CAT_DEBUG_OBJECT (cat2, pipeline,
      "This is a debug message with category and object.");
  ++count;
  GST_CAT_LOG_OBJECT (cat2, pipeline,
      "This is a log message with category and object.");
  count = -1;

#ifndef GST_DISABLE_GST_DEBUG
  g_assert (gst_debug_remove_log_function (check_message) == 1);
#endif

  return 0;
}
Example #5
int
main (int argc, char *argv[])
{
  gboolean print_all = FALSE;
  gboolean do_print_blacklist = FALSE;
  gboolean plugin_name = FALSE;
  gboolean print_aii = FALSE;
  gboolean uri_handlers = FALSE;
  gboolean check_exists = FALSE;
  gchar *min_version = NULL;
  guint minver_maj = GST_VERSION_MAJOR;
  guint minver_min = GST_VERSION_MINOR;
  guint minver_micro = 0;
#ifndef GST_DISABLE_OPTION_PARSING
  GOptionEntry options[] = {
    {"print-all", 'a', 0, G_OPTION_ARG_NONE, &print_all,
        N_("Print all elements"), NULL},
    {"print-blacklist", 'b', 0, G_OPTION_ARG_NONE, &do_print_blacklist,
        N_("Print list of blacklisted files"), NULL},
    {"print-plugin-auto-install-info", '\0', 0, G_OPTION_ARG_NONE, &print_aii,
        N_("Print a machine-parsable list of features the specified plugin "
              "or all plugins provide.\n                                       "
              "Useful in connection with external automatic plugin "
              "installation mechanisms"), NULL},
    {"plugin", '\0', 0, G_OPTION_ARG_NONE, &plugin_name,
        N_("List the plugin contents"), NULL},
    {"exists", '\0', 0, G_OPTION_ARG_NONE, &check_exists,
        N_("Check if the specified element or plugin exists"), NULL},
    {"atleast-version", '\0', 0, G_OPTION_ARG_STRING, &min_version,
        N_
          ("When checking if an element or plugin exists, also check that its "
              "version is at least the version specified"), NULL},
    {"uri-handlers", 'u', 0, G_OPTION_ARG_NONE, &uri_handlers,
          N_
          ("Print supported URI schemes, with the elements that implement them"),
        NULL},
    GST_TOOLS_GOPTION_VERSION,
    {NULL}
  };
  GOptionContext *ctx;
  GError *err = NULL;
#endif

  setlocale (LC_ALL, "");

#ifdef ENABLE_NLS
  bindtextdomain (GETTEXT_PACKAGE, LOCALEDIR);
  bind_textdomain_codeset (GETTEXT_PACKAGE, "UTF-8");
  textdomain (GETTEXT_PACKAGE);
#endif

  /* avoid glib warnings when inspecting deprecated properties */
  g_setenv ("G_ENABLE_DIAGNOSTIC", "0", FALSE);

  g_set_prgname ("gst-inspect-" GST_API_VERSION);

#ifndef GST_DISABLE_OPTION_PARSING
  ctx = g_option_context_new ("[ELEMENT-NAME | PLUGIN-NAME]");
  g_option_context_add_main_entries (ctx, options, GETTEXT_PACKAGE);
  g_option_context_add_group (ctx, gst_init_get_option_group ());
  if (!g_option_context_parse (ctx, &argc, &argv, &err)) {
    g_printerr ("Error initializing: %s\n", err->message);
    g_clear_error (&err);
    g_option_context_free (ctx);
    return -1;
  }
  g_option_context_free (ctx);
#else
  gst_init (&argc, &argv);
#endif

  gst_tools_print_version ();

  if (print_all && argc > 1) {
    g_printerr ("-a requires no extra arguments\n");
    return -1;
  }

  if (uri_handlers && argc > 1) {
    g_printerr ("-u requires no extra arguments\n");
    return -1;
  }

  /* --atleast-version implies --exists */
  if (min_version != NULL) {
    if (sscanf (min_version, "%u.%u.%u", &minver_maj, &minver_min,
            &minver_micro) < 2) {
      g_printerr ("Can't parse version '%s' passed to --atleast-version\n",
          min_version);
      return -1;
    }
    check_exists = TRUE;
  }

  if (check_exists) {
    int exit_code;

    if (argc == 1) {
      g_printerr ("--exists requires an extra command line argument\n");
      exit_code = -1;
    } else {
      if (!plugin_name) {
        GstPluginFeature *feature;

        feature = gst_registry_lookup_feature (gst_registry_get (), argv[1]);
        if (feature != NULL && gst_plugin_feature_check_version (feature,
                minver_maj, minver_min, minver_micro)) {
          exit_code = 0;
        } else {
          exit_code = 1;
        }
      } else {
        /* FIXME: support checking for plugins too */
        g_printerr ("Checking for plugins is not supported yet\n");
        exit_code = -1;
      }
    }
    return exit_code;
  }

  /* if no arguments, print out list of elements */
  if (uri_handlers) {
    print_all_uri_handlers ();
  } else if (argc == 1 || print_all) {
    if (do_print_blacklist)
      print_blacklist ();
    else {
      if (print_aii)
        print_all_plugin_automatic_install_info ();
      else
        print_element_list (print_all);
    }
  } else {
    /* else we try to get a factory */
    GstElementFactory *factory;
    GstPlugin *plugin;
    const char *arg = argv[argc - 1];
    int retval;

    if (!plugin_name) {
      factory = gst_element_factory_find (arg);

      /* if there's a factory, print out the info */
      if (factory) {
        retval = print_element_info (factory, print_all);
        gst_object_unref (factory);
      } else {
        retval = print_element_features (arg);
      }
    } else {
      retval = -1;
    }

    /* otherwise check if it's a plugin */
    if (retval) {
      plugin = gst_registry_find_plugin (gst_registry_get (), arg);

      /* if there is such a plugin, print out info */
      if (plugin) {
        if (print_aii) {
          print_plugin_automatic_install_info (plugin);
        } else {
          print_plugin_info (plugin);
          print_plugin_features (plugin);
        }
      } else {
        GError *error = NULL;

        if (g_file_test (arg, G_FILE_TEST_EXISTS)) {
          plugin = gst_plugin_load_file (arg, &error);

          if (plugin) {
            if (print_aii) {
              print_plugin_automatic_install_info (plugin);
            } else {
              print_plugin_info (plugin);
              print_plugin_features (plugin);
            }
          } else {
            g_printerr (_("Could not load plugin file: %s\n"), error->message);
            g_clear_error (&error);
            return -1;
          }
        } else {
          g_printerr (_("No such element or plugin '%s'\n"), arg);
          return -1;
        }
      }
    }
  }

  return 0;
}
Example #6
int main(int argc,char *argv[])
{
    GMainLoop *loop;
    GstElement *pipeline, *source, *decoder, *sink; /* pipeline elements */
    GstBus *bus;

    gst_init(&argc,&argv);
    loop = g_main_loop_new(NULL, FALSE); /* create the main loop; it starts running once g_main_loop_run() is called */

	if(signal(SIGUSR1, sig_add_volume) == SIG_ERR)
	{
		g_print("gst_play error: signal SIGUSR1 FAILED!\n");
		return -2;
	}
	if(signal(SIGUSR2, sig_dec_volume) == SIG_ERR)
	{
		g_print("gst_play error: signal SIGUSR2 FAILED!\n");
		return -2;
	}
	


    if(argc != 2)
    {
        g_printerr("Usage:%s <mp3 filename>\n",argv[0]);
        return -1;
    }
    /* create the pipeline and its elements */
    pipeline = gst_pipeline_new("audio-player");
    source = gst_element_factory_make("filesrc","file-source");
    decoder = gst_element_factory_make("mad","mad-decoder");
    sink = gst_element_factory_make("autoaudiosink","audio-output");

    if(!pipeline||!source||!decoder||!sink){
        g_printerr("One element could not be created.Exiting.\n");
        return -1;
    }
    /* set the source's "location" property, i.e. the file path */
    g_object_set(G_OBJECT(source),"location",argv[1],NULL);
    /* get the pipeline's message bus */
    bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
    /* add a bus message watch */
    gst_bus_add_watch(bus,bus_call,loop);
    gst_object_unref(bus);
    /* add the elements to the pipeline; the pipeline is a special bin that manages the data flow */
    gst_bin_add_many(GST_BIN(pipeline),source,decoder,sink,NULL);
    /* link the elements in order */
   gst_element_link_many(source,decoder,sink,NULL);
    /* start playback */
    gst_element_set_state(pipeline,GST_STATE_PLAYING);
	volume = gst_bin_get_by_name(GST_BIN(pipeline), "volume");
	if(NULL == volume)
	{
		g_print("No volume\n");
	}
    g_print("Running\n");
    /* run the main loop */
    g_main_loop_run(loop);
    g_print("Returned,stopping playback\n");
    gst_element_set_state(pipeline,GST_STATE_NULL);
    gst_object_unref(GST_OBJECT(pipeline));
    return 0;
}
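The bus_call watch installed on the pipeline's bus above is not shown. A minimal sketch, assuming it only has to quit the main loop on end-of-stream or on an error, could be:

/* Hypothetical sketch of the bus watch used above: stop the main loop when
 * the stream ends or an error is posted on the bus. */
static gboolean
bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
    GMainLoop *loop = (GMainLoop *) data;

    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_EOS:
        g_print("End of stream\n");
        g_main_loop_quit(loop);
        break;
    case GST_MESSAGE_ERROR: {
        GError *err = NULL;
        gchar *debug = NULL;

        gst_message_parse_error(msg, &err, &debug);
        g_printerr("Error: %s\n", err->message);
        g_clear_error(&err);
        g_free(debug);
        g_main_loop_quit(loop);
        break;
    }
    default:
        break;
    }
    return TRUE; /* keep watching the bus */
}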
Example #7
int
main (int argc, char **argv)
{
  GstElement *pipe, *crop;
  gint left, right;
  gint top, bottom;
  gint ldir, rdir;
  gint tdir, bdir;
  gint round, type, stop;

  gst_init (&argc, &argv);

  type = 0;
  stop = -1;

  if (argc > 1) {
    type = atoi (argv[1]);
    stop = type + 1;
  }

  while (TRUE) {
    GstMessage *message;

    pipe = make_pipeline (type);
    if (pipe == NULL)
      break;

    crop = gst_bin_get_by_name (GST_BIN (pipe), "crop");
    g_assert (crop);

    top = bottom = left = right = 0;
    tdir = bdir = 10;
    ldir = rdir = 10;

    for (round = 0; round < MAX_ROUND; round++) {
      g_print ("crop to %4d %4d %4d %4d (%d/%d)   \r", top, bottom, left, right,
          round, MAX_ROUND);

      g_object_set (crop, "top", top, "bottom", bottom, "left", left, "right",
          right, NULL);

      if (round == 0)
        gst_element_set_state (pipe, GST_STATE_PLAYING);

      top += tdir;
      if (top >= 80)
        tdir = -10;
      else if (top < 10)
        tdir = 10;

      bottom += bdir;
      if (bottom >= 60)
        bdir = -10;
      else if (bottom < 10)
        bdir = 10;

      left += ldir;
      if (left >= 100)
        ldir = -10;
      else if (left < 10)
        ldir = 10;

      right += rdir;
      if (right >= 80)
        rdir = -10;
      else if (right < 10)
        rdir = 10;

      message =
          gst_bus_poll (GST_ELEMENT_BUS (pipe), GST_MESSAGE_ERROR,
          50 * GST_MSECOND);
      if (message) {
        g_print ("got error           \n");

        gst_message_unref (message);
      }
    }
    g_print ("test %d done                    \n", type);

    gst_object_unref (crop);
    gst_element_set_state (pipe, GST_STATE_NULL);
    gst_object_unref (pipe);

    type++;
    if (type == stop)
      break;
  }
  return 0;
}
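make_pipeline is defined elsewhere and presumably builds a different pipeline per test type. A hypothetical, simplified sketch that ignores the type argument and always builds a videocrop pipeline might be:

/* Hypothetical sketch of make_pipeline: a fixed videotestsrc ! videocrop
 * pipeline. The real function presumably varies caps/elements per `type`. */
static GstElement *
make_pipeline (gint type)
{
  GError *error = NULL;
  GstElement *pipe;

  pipe = gst_parse_launch ("videotestsrc ! videocrop name=crop ! fakesink",
      &error);
  if (error != NULL) {
    g_printerr ("parse error: %s\n", error->message);
    g_clear_error (&error);
    return NULL;
  }
  return pipe;
}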
Example #8
int
main (int argc, char **argv)
{
  GMainLoop *loop;
  gint i;

  GstElement *audiotestsrc;
  GstElement *audioconvert1, *audioconvert2;
  GstElement *pitch;
  GstElement *sink;
  GstElement *pipeline;
  GstControlSource *cs;
  GstTimedValueControlSource *tvcs;

  if (argc != 2) {
    g_printerr ("Usage: %s <audiosink>\n", argv[0]);
    return 1;
  }

  /* initialize GStreamer */
  gst_init (&argc, &argv);

  loop = g_main_loop_new (NULL, FALSE);

  pipeline = gst_pipeline_new ("audio-player");
  audiotestsrc = gst_element_factory_make ("audiotestsrc", "audiotestsrc");
  g_assert (audiotestsrc != NULL);
  audioconvert1 = gst_element_factory_make ("audioconvert", "audioconvert1");
  g_assert (audioconvert1 != NULL);
  audioconvert2 = gst_element_factory_make ("audioconvert", "audioconvert2");
  g_assert (audioconvert2 != NULL);
  pitch = gst_element_factory_make ("pitch", "pitch");
  g_assert (pitch != NULL);
  sink = gst_element_factory_make (argv[1], "sink");
  g_assert (sink != NULL);

  gst_bin_add_many (GST_BIN (pipeline),
      audiotestsrc, audioconvert1, pitch, audioconvert2, sink, NULL);
  gst_element_link_many (audiotestsrc, audioconvert1, pitch, audioconvert2,
      sink, NULL);

  /* set up a controller */
  cs = gst_interpolation_control_source_new ();
  g_object_set (cs, "mode", GST_INTERPOLATION_MODE_LINEAR, NULL);

  gst_object_add_control_binding (pitch,
      gst_direct_control_binding_new (pitch, "pitch", cs));
  tvcs = (GstTimedValueControlSource *) cs;

  for (i = 0; i < 100; ++i) {
    if (i % 2)
      gst_timed_value_control_source_set (tvcs, i * GST_SECOND, 0.5);
    else
      gst_timed_value_control_source_set (tvcs, i * GST_SECOND, 1.5);
  }

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_print ("Running\n");
  g_main_loop_run (loop);

  /* clean up nicely */
  gst_object_unref (cs);
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));

  return 0;
}
Example #9
gint
main (gint argc, gchar * argv[])
{
  GThread *threads[MAX_THREADS];
  gint num_threads;
  gint t;
  GstBuffer *tmp;
  GstClockTime start, end;

  gst_init (&argc, &argv);
  g_mutex_init (&mutex);

  if (argc != 3) {
    g_print ("usage: %s <num_threads> <nbbuffers>\n", argv[0]);
    exit (-1);
  }

  num_threads = atoi (argv[1]);
  nbbuffers = atoi (argv[2]);

  if (num_threads <= 0 || num_threads > MAX_THREADS) {
    g_print ("number of threads must be between 0 and %d\n", MAX_THREADS);
    exit (-2);
  }

  if (nbbuffers <= 0) {
    g_print ("number of buffers must be greater than 0\n");
    exit (-3);
  }

  g_mutex_lock (&mutex);
  /* Let's just make sure the GstBufferClass is loaded ... */
  tmp = gst_buffer_new ();

  printf ("main(): Creating %d threads.\n", num_threads);
  for (t = 0; t < num_threads; t++) {
    GError *error = NULL;

    threads[t] = g_thread_try_new ("bufferstresstest", run_test,
        GINT_TO_POINTER (t), &error);

    if (error) {
      printf ("ERROR: g_thread_try_new() %s\n", error->message);
      g_clear_error (&error);
      exit (-1);
    }
  }

  /* Signal all threads to start */
  start = gst_util_get_timestamp ();
  g_mutex_unlock (&mutex);

  for (t = 0; t < num_threads; t++) {
    if (threads[t])
      g_thread_join (threads[t]);
  }

  end = gst_util_get_timestamp ();
  g_print ("*** total %" GST_TIME_FORMAT " - average %" GST_TIME_FORMAT
      "  - Done creating %" G_GUINT64_FORMAT " buffers\n",
      GST_TIME_ARGS (end - start),
      GST_TIME_ARGS ((end - start) / (num_threads * nbbuffers)),
      num_threads * nbbuffers);


  gst_buffer_unref (tmp);

  return 0;
}
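run_test is not included above. A plausible sketch, assuming each worker thread simply waits for main() to release the mutex and then allocates and frees nbbuffers buffers, is:

/* Hypothetical sketch of the worker thread: block on the shared mutex until
 * main() signals the start, then create and free the requested number of
 * buffers. `mutex` and `nbbuffers` are the globals used in main() above. */
static gpointer
run_test (gpointer data)
{
  gint threadid = GPOINTER_TO_INT (data);
  guint64 i;

  /* wait for the "go" signal from main() */
  g_mutex_lock (&mutex);
  g_mutex_unlock (&mutex);

  for (i = 0; i < nbbuffers; i++) {
    GstBuffer *buf = gst_buffer_new ();
    gst_buffer_unref (buf);
  }

  g_print ("thread %d done\n", threadid);
  return NULL;
}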
Example #10
int
main (int argc, char *argv[])
{
#if GST_GL_HAVE_WINDOW_X11
  XInitThreads ();
#endif
  gst_init (&argc, &argv);
  gtk_init (&argc, &argv);

  GstElement* pipeline = gst_pipeline_new ("pipeline");

  //window that contains an area where the video is drawn
  GtkWidget* window = gtk_window_new(GTK_WINDOW_TOPLEVEL);
  gtk_window_set_default_size (GTK_WINDOW (window), 640, 480);
  gtk_window_move (GTK_WINDOW (window), 300, 10);
  gtk_window_set_title (GTK_WINDOW (window), "gtkgstwidget");

  //window to control the states
  GtkWidget* window_control = gtk_window_new (GTK_WINDOW_TOPLEVEL);
  gtk_window_set_resizable (GTK_WINDOW (window_control), FALSE);
  gtk_window_move (GTK_WINDOW (window_control), 10, 10);
  GtkWidget* grid = gtk_grid_new ();
  gtk_container_add (GTK_CONTAINER (window_control), grid);

  //control state null
  GtkWidget* button_state_null = gtk_button_new_with_label ("GST_STATE_NULL");
  g_signal_connect (G_OBJECT (button_state_null), "clicked",
      G_CALLBACK (button_state_null_cb), pipeline);
  gtk_grid_attach (GTK_GRID (grid), button_state_null, 0, 1, 1, 1);
  gtk_widget_show (button_state_null);

  //control state ready
  GtkWidget* button_state_ready = gtk_button_new_with_label ("GST_STATE_READY");
  g_signal_connect (G_OBJECT (button_state_ready), "clicked",
      G_CALLBACK (button_state_ready_cb), pipeline);
  gtk_grid_attach (GTK_GRID (grid), button_state_ready, 0, 2, 1, 1);
  gtk_widget_show (button_state_ready);

  //control state paused
  GtkWidget* button_state_paused = gtk_button_new_with_label ("GST_STATE_PAUSED");
  g_signal_connect (G_OBJECT (button_state_paused), "clicked",
      G_CALLBACK (button_state_paused_cb), pipeline);
  gtk_grid_attach (GTK_GRID (grid), button_state_paused, 0, 3, 1, 1);
  gtk_widget_show (button_state_paused);

  //control state playing
  GtkWidget* button_state_playing = gtk_button_new_with_label ("GST_STATE_PLAYING");
  g_signal_connect (G_OBJECT (button_state_playing), "clicked",
      G_CALLBACK (button_state_playing_cb), pipeline);
  gtk_grid_attach (GTK_GRID (grid), button_state_playing, 0, 4, 1, 1);
  gtk_widget_show (button_state_playing);

  gtk_widget_show (grid);
  gtk_widget_show (window_control);

  //area where the video is drawn
  g_signal_connect(G_OBJECT(window), "delete-event", G_CALLBACK(destroy_cb), pipeline);

  //configure the pipeline
  GstElement* videosrc = gst_element_factory_make ("videotestsrc", "videotestsrc");
  GstElement* effect = gst_element_factory_make ("glfiltercube", "cube");
  GstElement* videosink = gst_element_factory_make ("gtkglsink", "gtkglsink");

  GtkWidget *area;
  g_object_get (videosink, "widget", &area, NULL);
  gtk_container_add (GTK_CONTAINER (window), area);

  gtk_widget_realize(area);

  GstCaps *caps = gst_caps_new_simple("video/x-raw",
                                      "width", G_TYPE_INT, 640,
                                      "height", G_TYPE_INT, 480,
                                      "format", G_TYPE_STRING, "BGRA",
                                      NULL);

  gst_bin_add_many (GST_BIN (pipeline), videosrc, effect, videosink, NULL);

  gboolean link_ok = gst_element_link (effect, videosink) ;
  if(!link_ok)
  {
      g_warning("Failed to link glfiltercube to gtkglsink!\n") ;
      return -1;
  }
  if (!gst_element_link_filtered (videosrc, effect, caps)) {
      g_warning("Failed to link viideotestsrc to glfiltercube!\n") ;
      return -1;
  }
  gst_caps_unref (caps);

  //set window id on this event
  GstBus* bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  g_signal_connect(bus, "message::error", G_CALLBACK(end_stream_cb), pipeline);
  g_signal_connect(bus, "message::warning", G_CALLBACK(end_stream_cb), pipeline);
  g_signal_connect(bus, "message::eos", G_CALLBACK(end_stream_cb), pipeline);
  gst_object_unref (bus);

  //start
  GstStateChangeReturn ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE)
  {
      g_print ("Failed to start up pipeline!\n");
      return -1;
  }

  gtk_widget_show_all (window);

  gtk_main();

  gst_deinit ();

  return 0;
}
Example #11
int main(int argc, char **argv)
{
#ifdef MEMLEAK_CHECK
	atexit(DumpUnfreed);
#endif

#ifdef OBJECT_DEBUG
	atexit(object_dump);
#endif

#ifdef HAVE_GSTREAMER
	gst_init(&argc, &argv);
#endif

	// set pythonpath if unset
	setenv("PYTHONPATH", LIBDIR "/enigma2/python", 0);
	printf("PYTHONPATH: %s\n", getenv("PYTHONPATH"));
	
	bsodLogInit();

	ePython python;
	eMain main;

#if 1
#ifdef WITH_SDL
	ePtr<gSDLDC> my_dc;
	gSDLDC::getInstance(my_dc);
#else
	ePtr<gFBDC> my_dc;
	gFBDC::getInstance(my_dc);
	
	int double_buffer = my_dc->haveDoubleBuffering();
#endif

	ePtr<gLCDDC> my_lcd_dc;
	gLCDDC::getInstance(my_lcd_dc);


		/* ok, this is currently hardcoded for arabic. */
			/* some characters are wrong in the regular font, force them to use the replacement font */
	for (int i = 0x60c; i <= 0x66d; ++i)
		eTextPara::forceReplacementGlyph(i);
	eTextPara::forceReplacementGlyph(0xfdf2);
	for (int i = 0xfe80; i < 0xff00; ++i)
		eTextPara::forceReplacementGlyph(i);

	eWidgetDesktop dsk(eSize(720, 576));
	eWidgetDesktop dsk_lcd(eSize(132, 64));
	
	dsk.setStyleID(0);
	dsk_lcd.setStyleID(1);
	
/*	if (double_buffer)
	{
		eDebug(" - double buffering found, enable buffered graphics mode.");
		dsk.setCompositionMode(eWidgetDesktop::cmBuffered);
	} */
	
	wdsk = &dsk;
	lcddsk = &dsk_lcd;

	dsk.setDC(my_dc);
	dsk_lcd.setDC(my_lcd_dc);

	ePtr<gPixmap> m_pm;
	loadPNG(m_pm, DATADIR "/enigma2/skin_default/pal.png");
	if (!m_pm)
	{
		eFatal("pal.png not found!");
	} else
		dsk.setPalette(*m_pm);

	dsk.setBackgroundColor(gRGB(0,0,0,0xFF));
#endif

		/* redrawing is done in an idle-timer, so we have to set the context */
	dsk.setRedrawTask(main);
	dsk_lcd.setRedrawTask(main);
	
	
	eDebug("Loading spinners...");
	
	{
		int i;
#define MAX_SPINNER 64
		ePtr<gPixmap> wait[MAX_SPINNER];
		for (i=0; i<MAX_SPINNER; ++i)
		{
			char filename[strlen(DATADIR) + 41];
			sprintf(filename, DATADIR "/enigma2/skin_default/spinner/wait%d.png", i + 1);
			loadPNG(wait[i], filename);
			
			if (!wait[i])
			{
				if (!i)
					eDebug("failed to load %s! (%m)", filename);
				else
					eDebug("found %d spinner!\n", i);
				break;
			}
		}
		if (i)
			my_dc->setSpinner(eRect(ePoint(100, 100), wait[0]->size()), wait, i);
		else
			my_dc->setSpinner(eRect(100, 100, 0, 0), wait, 1);
	}
	
	gRC::getInstance()->setSpinnerDC(my_dc);

	eRCInput::getInstance()->keyEvent.connect(slot(keyEvent));

#if defined(__sh__) 
	evfd * vfd = new evfd; 
	vfd->init(); 
	delete vfd; 
#endif 
	
	printf("executing main\n");
	
	bsodCatchSignals();

	setIoPrio(IOPRIO_CLASS_BE, 3);

//	python.execute("mytest", "__main__");
	python.execFile("/usr/lib/enigma2/python/mytest.py");

	if (exit_code == 5) /* python crash */
	{
		eDebug("(exit code 5)");
		bsodFatal(0);
	}
	
	dsk.paint();
	dsk_lcd.paint();

	{
		gPainter p(my_lcd_dc);
		p.resetClip(eRect(0, 0, 132, 64));
		p.clear();
	}

	return exit_code;
}
Example #12
/**
 * main:
 **/
int
main (int argc, gchar **argv)
{
    GOptionContext *context;
    guint i;
    guint len;
    gboolean ret;
    gchar **codecs = NULL;
    gint xid = 0;
    const gchar *suffix;
    gchar *resource;
    _cleanup_error_free_ GError *error = NULL;
    _cleanup_free_ gchar *desktop_id = NULL;
    _cleanup_free_ gchar *interaction = NULL;
    _cleanup_free_ gchar *startup_id = NULL;
    _cleanup_ptrarray_unref_ GPtrArray *array = NULL;
    _cleanup_strv_free_ gchar **resources = NULL;

    const GOptionEntry options[] = {
        { "transient-for", '\0', 0, G_OPTION_ARG_INT, &xid, "The XID of the parent window", NULL },
        { "desktop-id", '\0', 0, G_OPTION_ARG_STRING, &desktop_id, "The desktop ID of the calling application", NULL },
        { "interaction", '\0', 0, G_OPTION_ARG_STRING, &interaction, "Interaction mode specifying which UI elements should be shown", NULL },
        { "startup-notification-id", '\0', 0, G_OPTION_ARG_STRING, &startup_id, "The startup notification ID for focus stealing prevention", NULL },
        { G_OPTION_REMAINING, '\0', 0, G_OPTION_ARG_FILENAME_ARRAY, &codecs, "GStreamer install infos", NULL },
        { NULL }
    };

#if (GLIB_MAJOR_VERSION == 2 && GLIB_MINOR_VERSION < 35)
    g_type_init ();
#endif

    gst_init (&argc, &argv);

    context = g_option_context_new ("Install missing codecs");
    g_option_context_add_main_entries (context, options, NULL);

    if (!g_option_context_parse (context, &argc, &argv, &error)) {
        g_print ("%s\nRun '%s --help' to see a full list of available command line options.\n",
                 error->message, argv[0]);
        return GST_INSTALL_PLUGINS_ERROR;
    }
    if (codecs == NULL) {
        g_print ("Missing codecs information\n");
        g_print ("Run 'with --help' to see a full list of available command line options.\n");
        return GST_INSTALL_PLUGINS_ERROR;
    }

    /* this is our parent window */
    g_message ("PackageKit: xid = %i", xid);
    g_message ("PackageKit: desktop_id = %s", desktop_id);

    /* use a ()(64bit) suffix for 64 bit */
    suffix = pk_gst_get_arch_suffix ();

    array = g_ptr_array_new_with_free_func (g_free);
    len = g_strv_length (codecs);

    /* process argv */
    for (i = 0; i < len; i++) {
        PkGstCodecInfo *info;
        gchar *type;
        const gchar *gstreamer_version;

        info = pk_gst_parse_codec (codecs[i]);
        if (info == NULL) {
            g_message ("skipping %s", codecs[i]);
            continue;
        }

        /* gstreamer1 is the provide name used for the
         * first version of the new release */
        if (g_strcmp0 (info->gstreamer_version, "1.0") == 0)
            gstreamer_version = "1";
        else
            gstreamer_version = info->gstreamer_version;

        g_message ("PackageKit: Codec nice name: %s", info->codec_name);
        if (info->structure != NULL) {
            _cleanup_free_ gchar *s = NULL;
            s = pk_gst_structure_to_provide (info->structure);
            type = g_strdup_printf ("gstreamer%s(%s-%s)%s%s",
                                    gstreamer_version,
                                    info->type_name,
                                    gst_structure_get_name (info->structure),
                                    s, suffix);
            g_message ("PackageKit: structure: %s", type);
        } else {
            type = g_strdup_printf ("gstreamer%s(%s)%s",
                                    gstreamer_version,
                                    info->type_name,
                                    suffix);
            g_message ("PackageKit: non-structure: %s", type);
        }

        /* "encode" */
        resource = g_strdup_printf ("%s|%s", info->codec_name, type);
        g_ptr_array_add (array, resource);

        /* free codec structure */
        pk_gst_codec_free (info);
    }

    /* nothing parsed */
    if (array->len == 0) {
        g_message ("no codec lines could be parsed");
        return GST_INSTALL_PLUGINS_ERROR;
    }

    /* convert to a GStrv */
    resources = pk_ptr_array_to_strv (array);

    /* first try the new interface */
    ret = pk_gst_dbus_install_resources (resources, desktop_id, startup_id, interaction, &error);
    if (g_error_matches (error, G_DBUS_ERROR, G_DBUS_ERROR_UNKNOWN_METHOD)) {
        /* ... and if that fails, fall back to the compat interface */
        g_clear_error (&error);
        g_message ("PackageKit: falling back to compat dbus interface");
        ret = pk_gst_dbus_install_resources_compat (resources, xid, &error);
    }
    if (!ret) {
        /* use the error string to return a good GStreamer exit code */
        g_message ("PackageKit: Did not install codec: %s", error->message);
        if (g_strrstr (error->message, "did not agree to search") != NULL)
            return GST_INSTALL_PLUGINS_USER_ABORT;
        if (g_strrstr (error->message, "not all codecs were installed") != NULL)
            return GST_INSTALL_PLUGINS_PARTIAL_SUCCESS;
        return GST_INSTALL_PLUGINS_NOT_FOUND;
    }
    return GST_INSTALL_PLUGINS_SUCCESS;
}
Example #13
int main(int argc, char *argv[])
{
	GstElement *pipeline;
  	GstElement *src, *colorspace, *codec, *wrapper, *netsink;
  	gboolean status;
  	GstCaps *capsRaw;
  	gchar *params;
  	FILE *lf;
	int width, height;
	/* Initialize GStreamer */
	gst_init (&argc, &argv);
	loop = g_main_loop_new(NULL, FALSE);

	/* Create pipeline */
	pipeline = gst_pipeline_new("truba");
	//src = gst_element_factory_make("videotestsrc", "src");
	src = gst_element_factory_make("appsrc", "src");
	colorspace = gst_element_factory_make("ffmpegcolorspace", "colorspaceconverter");
	codec = gst_element_factory_make(CODEC, "codec");
	wrapper = gst_element_factory_make("rtph264pay", "wrapper");
	netsink = gst_element_factory_make("udpsink", "netsink");
	
	if (NULL == getenv("VIT_WIDTH")) {
		width = IMWIDTH;
	}
	else {
		width = atoi(getenv("VIT_WIDTH"));
	}

	if (NULL == getenv("VIT_HEIGHT")) {
		height = IMHEIGHT;
	}
	else {
		height = atoi(getenv("VIT_HEIGHT"));
	}

	/* Set up pipeline */
	capsRaw = gst_caps_new_simple(	"video/x-raw-gray",
					"bpp", G_TYPE_INT, 8,
					"depth", G_TYPE_INT, 8,
					"width", G_TYPE_INT, width,
					"height", G_TYPE_INT, height,
					"framerate", GST_TYPE_FRACTION, 25, 1,
					NULL);
	g_signal_connect(src, "need-data", G_CALLBACK(cb_need_data), NULL);
	g_object_set(G_OBJECT(src), "caps", capsRaw, NULL);
	g_object_set(G_OBJECT(src), "stream-type", 0, "format",
		         GST_FORMAT_TIME, NULL);
	if (NULL == getenv("VIT_HOST"))
		g_object_set(G_OBJECT(netsink), "host", HOST, NULL);
	else {	
		g_object_set(G_OBJECT(netsink), "host", getenv("VIT_HOST"), NULL);
		printf("Connected to host %s\n", getenv("VIT_HOST"));
	}
	g_object_set(G_OBJECT(netsink), "port", PORT, NULL);

	gst_bin_add_many(GST_BIN(pipeline), src, colorspace, codec, wrapper,
		             netsink, NULL);
	status = gst_element_link_many(src, colorspace, codec, wrapper, netsink,
		                           NULL);

	if(!status) {
		printf("Linking elements failed!\n");
	}
	else {
		printf("Linking elements succeed!\n");
	}

	params = NULL;

	/* Create lock file */
	lf = fopen(LOCKFILE, "w");
	fprintf(lf, "%d", getpid());
	fclose(lf);

	/* Setup signal handler */
	if (SIG_ERR == signal(SIGUSR1, _t_sigusr1))
	{
		printf("Failed to spoof signal handler\n");
	}

	//Run
	gst_element_set_state(pipeline, GST_STATE_PLAYING);
	g_main_loop_run (loop);
	gst_element_set_state(pipeline, GST_STATE_NULL);

	//Deinit
	gst_object_unref(GST_OBJECT(pipeline));
	g_main_loop_unref(loop);
	return 0;
}
Example #14
F::F ()
{
  gst_init (NULL, NULL);
  GST_DEBUG_CATEGORY_INIT (GST_CAT_DEFAULT, GST_DEFAULT_NAME, 0,
                           GST_DEFAULT_NAME);
}
Example #15
int main (int argc, char **argv)
{
	GstElement *bin, *filesrc, *decoder, *aconvin, *cmplxin, *tee;
	GstElement *cmplxfft, *teefft;
	GstElement *waterfall, *imagesink;
	GstElement *fshift, *filter, *teecor;
	GstElement *afc, *fakesink;
	GstElement *queue1, *queue2, *queue3, *queue4, *queue5, *queue6;
	GstElement *cmplxout, *aconvout, *audiosink;
	GstElement *polar, *bpskrcdem, *nrzikiss, *kisssink;
	GstCaps *caps;
	struct afc_context afc_context;

	gst_init (&argc, &argv);

	if (argc != 2) {
		g_print("No input file!\n");
		g_print("usage: %s <input file>\n", argv[0]);
		g_print("\n");
		g_print("Decoded KISS data will be send to stdout.\n");
		g_print("Debugging information will be send to stderr.\n");
		g_print("\n");
		exit (-1);
	}

	bin = gst_pipeline_new ("pipeline");
	g_assert (bin);

	filesrc = gst_element_factory_make("filesrc", "disk_source");
	g_assert(filesrc);
	g_object_set(G_OBJECT (filesrc), "location", argv[1], NULL);
	decoder = gst_element_factory_make ("wavparse", "decode");
	g_assert(decoder);
	aconvin = gst_element_factory_make("audioconvert", "aconvin");
	g_assert(aconvin);
 	cmplxin = gst_element_factory_make("iqcmplx", "cmplxin");
	g_assert(cmplxin);
	tee = gst_element_factory_make("tee", "teein");
	g_assert(tee);

	cmplxfft = gst_element_factory_make("cmplxfft", "cmplxfft");
	g_assert(cmplxfft);
	teefft = gst_element_factory_make("tee", "teefft");
	g_assert(teefft);
	queue1 = gst_element_factory_make("queue", "queue1");
	g_assert(queue1);
	queue2 = gst_element_factory_make("queue", "queue2");
	g_assert(queue2);
	queue3 = gst_element_factory_make("queue", "queue3");
	g_assert(queue3);
	queue4 = gst_element_factory_make("queue", "queue4");
	g_assert(queue4);
	queue5 = gst_element_factory_make("queue", "queue5");
	g_assert(queue5);
	queue6 = gst_element_factory_make("queue", "queue6");
	g_assert(queue6);

	waterfall = gst_element_factory_make("waterfall", "waterfall");
	g_assert(waterfall);
	imagesink = gst_element_factory_make("xvimagesink", "imagesink");
	g_assert(imagesink);

	afc = gst_element_factory_make("afc", "afc");
	g_assert(afc);
	fakesink = gst_element_factory_make("fakesink", "fakesink");
	g_assert(fakesink);
	
	fshift = gst_element_factory_make("iqfshift", "fshift");
	g_assert(fshift);
	filter = gst_element_factory_make("firblock", "filter");
	g_assert(filter);
	g_object_set(G_OBJECT(filter), "frequency", 1300, NULL);
	g_object_set(G_OBJECT(filter), "depth", 3, NULL);
	cmplxout = gst_element_factory_make("iqcmplx", "cmplxout");
	g_assert(cmplxout);
	teecor = gst_element_factory_make("tee", "teecor");
	g_assert(teecor);
	polar = gst_element_factory_make("iqpolar", "polar");
	g_assert(polar);
	bpskrcdem = gst_element_factory_make("bpskrcdem", "bpskrcdem");
	g_assert(bpskrcdem);
	nrzikiss = gst_element_factory_make("nrzikiss", "nrzikiss");
	g_assert(nrzikiss);
	kisssink = gst_element_factory_make("fakesink", "kisssink");
	g_assert(kisssink);
 
	aconvout = gst_element_factory_make("audioconvert", "aconvout");
	g_assert(aconvout);
	audiosink = gst_element_factory_make ("osssink", "play_audio");
	g_assert (audiosink);

	gst_bin_add_many (GST_BIN (bin), filesrc, decoder, aconvin, cmplxin,
	    tee, queue1, queue2, queue4,
	    cmplxfft, teefft,
	    queue3, waterfall, imagesink,
	    afc, fakesink,
	    fshift, filter, teecor,
	    queue5, polar, bpskrcdem, nrzikiss, kisssink,
	    queue6, cmplxout, aconvout, audiosink,
	    NULL);

	gst_element_link(filesrc, decoder);
	g_signal_connect(decoder, "pad-added", G_CALLBACK(new_pad), aconvin);
	gst_element_link_many(aconvin, cmplxin, tee, NULL);
	gst_element_link_many(tee, queue1, fshift, filter, teecor, NULL);
	gst_element_link_many(teecor, queue5, polar, bpskrcdem, NULL);
	caps = gst_caps_new_simple("application/x-raw-float",
	    "rate", G_TYPE_INT, 1200, NULL);
	gst_element_link_filtered(bpskrcdem, nrzikiss, caps);
	gst_caps_unref(caps);
	gst_element_link_many(nrzikiss, kisssink, NULL);
	gst_element_link_many(teecor, queue6, cmplxout, aconvout, audiosink, NULL);
	gst_element_link_many(tee, queue2, cmplxfft, teefft, NULL);
	gst_element_link_many(teefft, queue3, waterfall, imagesink, NULL);
	gst_element_link_many(teefft, queue4, afc, fakesink, NULL);

	afc_context.fshift = fshift;
	afc_context.waterfall = waterfall;
	afc_context.cnt = 0;
	gst_pad_add_buffer_probe(gst_element_get_pad(afc, "src"),
	    G_CALLBACK(new_afc_value), &afc_context);
	gst_pad_add_buffer_probe(gst_element_get_pad(kisssink, "sink"),
	    G_CALLBACK(new_kiss_data), NULL);

	gst_element_set_state (bin, GST_STATE_PLAYING);
	event_loop (bin);
	gst_element_set_state (bin, GST_STATE_NULL);
	fprintf(stderr, "Done\n");

	exit (0);
}
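The new_pad callback attached to wavparse's pad-added signal is not listed here. A minimal sketch, assuming it only links the new source pad to the sink pad of the audioconvert element passed as user data, could be:

/* Hypothetical sketch of the pad-added handler: link the freshly created
 * wavparse source pad to the sink pad of the element given as user data
 * (aconvin above). */
static void
new_pad (GstElement *element, GstPad *pad, gpointer data)
{
	GstElement *target = GST_ELEMENT (data);
	GstPad *sinkpad = gst_element_get_static_pad (target, "sink");

	if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)
		g_printerr ("failed to link wavparse to %s\n", GST_ELEMENT_NAME (target));
	gst_object_unref (sinkpad);
}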
Example #16
int main(int argc, char *argv[]) {
		CustomData data;
		GstBus *bus;
		GstMessage *msg;
		GstStateChangeReturn ret;
		gboolean terminate = FALSE;

		/* Initialize GStreamer */
		gst_init (&argc, &argv);

		/* Create the elements */
		data.source = gst_element_factory_make ("uridecodebin", "source");
		data.convert_audio = gst_element_factory_make ("audioconvert", "convert_audio");
		data.convert_video = gst_element_factory_make ("ffmpegcolorspace", "convert_video");
		data.sink_audio = gst_element_factory_make ("autoaudiosink", "sink_audio");
		data.sink_video = gst_element_factory_make ("autovideosink", "sink_video");

		/* Create the empty pipeline */
		data.pipeline = gst_pipeline_new ("test-pipeline");

		if (!data.pipeline || !data.source || !data.convert_video || !data.convert_audio || !data.sink_audio || !data.sink_video) {
				g_printerr ("Not all elements could be created.\n");
				return -1;
		}

		/* Build the pipeline. Note that we are NOT linking the source at this
		 * point. We will do it later. */
		gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert_audio, data.convert_video,  data.sink_video, data.sink_audio, NULL);
		if (!gst_element_link (data.convert_video, data.sink_video)) {
				g_printerr ("Elements could not be linked video.\n");
				gst_object_unref (data.pipeline);
				return -1;
		}
		if (!gst_element_link (data.convert_audio, data.sink_audio)) {
				g_printerr ("Elements could not be linked audio.\n");
				gst_object_unref (data.pipeline);
				return -1;
		}

		/* Set the URI to play */
		g_object_set (data.source, "uri", "http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);

		/* Connect to the pad-added signal */
		g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);

		/* Start playing */
		ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
		if (ret == GST_STATE_CHANGE_FAILURE) {
				g_printerr ("Unable to set the pipeline to the playing state.\n");
				gst_object_unref (data.pipeline);
				return -1;
		}

		/* Listen to the bus */
		bus = gst_element_get_bus (data.pipeline);
		do {
				msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
								GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

				/* Parse message */
				if (msg != NULL) {
						GError *err;
						gchar *debug_info;

						switch (GST_MESSAGE_TYPE (msg)) {
								case GST_MESSAGE_ERROR:
										gst_message_parse_error (msg, &err, &debug_info);
										g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
										g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
										g_clear_error (&err);
										g_free (debug_info);
										terminate = TRUE;
										break;
								case GST_MESSAGE_EOS:
										g_print ("End-Of-Stream reached.\n");
										terminate = TRUE;
										break;
								case GST_MESSAGE_STATE_CHANGED:
										/* We are only interested in state-changed messages from the pipeline */
										if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
												GstState old_state, new_state, pending_state;
												gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
												g_print ("Pipeline state changed from %s to %s:\n",
																gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
										}
										break;
								default:
										/* We should not reach here */
										g_printerr ("Unexpected message received.\n");
										break;
						}
						gst_message_unref (msg);
				}
		} while (!terminate);

		/* Free resources */
		gst_object_unref (bus);
		gst_element_set_state (data.pipeline, GST_STATE_NULL);
		gst_object_unref (data.pipeline);
		return 0;
}
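pad_added_handler and the CustomData struct live elsewhere. A sketch of the handler, assuming GStreamer 0.10 (as the ffmpegcolorspace element suggests) and a CustomData holding the elements created above, might look like this:

/* Hypothetical sketch of the pad-added handler: inspect the new pad's caps
 * and link it to the audio or video branch accordingly (0.10-style caps
 * query). */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
		GstCaps *caps = gst_pad_get_caps (new_pad);
		const gchar *type = gst_structure_get_name (gst_caps_get_structure (caps, 0));
		GstElement *target = NULL;
		GstPad *sink_pad;

		if (g_str_has_prefix (type, "audio/x-raw"))
				target = data->convert_audio;
		else if (g_str_has_prefix (type, "video/x-raw"))
				target = data->convert_video;

		if (target != NULL) {
				sink_pad = gst_element_get_static_pad (target, "sink");
				if (!gst_pad_is_linked (sink_pad))
						gst_pad_link (new_pad, sink_pad);
				gst_object_unref (sink_pad);
		}
		gst_caps_unref (caps);
}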
Example #17
void transcode_init(void) {
    gst_init(NULL, NULL);
}
Example #18
int start_streaming(rtsplink_t *vidout, const char *host, const int port) {
  // set up gstreamer
  GstStateChangeReturn ret;
  gboolean link_ok;
  if (!gst_initialized) {
    gst_init(NULL, NULL);
  }

  // create elements
  vidout->src = (void *)gst_element_factory_make("v4l2src", "src");
  vidout->enc = (void *)gst_element_factory_make("x264enc", "enc");
  vidout->mux = (void *)gst_element_factory_make("mpegtsmux", "mux");
  vidout->sink = (void *)gst_element_factory_make("tcpserversink", "sink");

  // modify the element's properties
  g_object_set((GstElement *)vidout->src, "device", "/dev/video0", NULL);
  gst_util_set_object_arg(G_OBJECT((GstElement *)vidout->enc), "tune", "zerolatency");
  gst_util_set_object_arg(G_OBJECT((GstElement *)vidout->enc), "pass", "quant");
  g_object_set((GstElement *)vidout->enc, "quantizer", 20, NULL);
  g_object_set((GstElement *)vidout->sink, "host", host, NULL);
  g_object_set((GstElement *)vidout->sink, "port", port, NULL);

  // create capabilites
  vidout->caps = (void *)gst_caps_new_simple("video/x-raw",
      "width", G_TYPE_INT, 640,
      "height", G_TYPE_INT, 480,
      NULL);

  // create pipeline
  printf("creating pipeline\n");
  vidout->pipeline = (void *)gst_pipeline_new("vidpipeline");
  if (!vidout->src ||
      !vidout->enc ||
      !vidout->mux ||
      !vidout->sink ||
      !vidout->pipeline) {
    g_printerr("not all elements created %p %p %p %p %p\n",
      vidout->src,
      vidout->enc,
      vidout->mux,
      vidout->sink,
      vidout->pipeline);
    memset(vidout, 0, sizeof(rtsplink_t)); // TODO
    return -1;
  }

  // build pipeline
  printf("building pipeline\n");
  gst_bin_add_many(
      GST_BIN((GstElement *)vidout->pipeline),
      (GstElement *)vidout->src,
      (GstElement *)vidout->enc,
      (GstElement *)vidout->mux,
      (GstElement *)vidout->sink,
      NULL);
  link_ok = gst_element_link_filtered(
      (GstElement *)vidout->src,
      (GstElement *)vidout->enc,
      (GstCaps *)vidout->caps);
  gst_caps_unref((GstCaps *)vidout->caps);
  if (link_ok != TRUE) {
    g_printerr("Source and encoder could not be linked\n");
    goto error;
  }
  link_ok = gst_element_link_many(
      (GstElement *)vidout->enc,
      (GstElement *)vidout->mux,
      (GstElement *)vidout->sink,
      NULL);
  if (link_ok != TRUE) {
    g_printerr("Encoder, mux, and sink could not be linked\n");
    goto error;
  }

  // start playing
  printf("playing\n");
  ret = gst_element_set_state((GstElement *)vidout->pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr("Unable to set the pipeline to the playing state\n");
    goto error;
  }
  return 0;

error:
  gst_object_unref((GstElement *)vidout->pipeline);
  memset(vidout, 0, sizeof(rtsplink_t));
  return -1;
}
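A hypothetical caller of start_streaming, assuming the rtsplink_t struct from the surrounding project and tearing the pipeline down again afterwards, might look like this:

/* Hypothetical usage sketch: start streaming and later shut the pipeline
 * down. rtsplink_t and gst_initialized come from the surrounding project. */
static void stream_for_a_while(const char *host, int port) {
  rtsplink_t vidout;

  if (start_streaming(&vidout, host, port) != 0) {
    fprintf(stderr, "could not start streaming\n");
    return;
  }

  /* ... serve clients for as long as needed ... */

  gst_element_set_state((GstElement *)vidout.pipeline, GST_STATE_NULL);
  gst_object_unref((GstElement *)vidout.pipeline);
}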
Example #19
int
main (int argc, char *argv[])
{
  GstElement *bin;
  GstElement *src, *spectrum, *audioconvert, *sink;
  GstBus *bus;
  GtkWidget *appwindow, *vbox, *widget;

  gst_init (&argc, &argv);
  gtk_init (&argc, &argv);

  bin = gst_pipeline_new ("bin");

  src = gst_element_factory_make ("audiotestsrc", "src");
  g_object_set (G_OBJECT (src), "wave", 0, NULL);

  spectrum = gst_element_factory_make ("spectrum", "spectrum");
  g_object_set (G_OBJECT (spectrum), "bands", spect_bands, "threshold", -80,
      "message", TRUE, NULL);

  audioconvert = gst_element_factory_make ("audioconvert", "audioconvert");

  sink = gst_element_factory_make (DEFAULT_AUDIOSINK, "sink");

  gst_bin_add_many (GST_BIN (bin), src, spectrum, audioconvert, sink, NULL);
  if (!gst_element_link_many (src, spectrum, audioconvert, sink, NULL)) {
    fprintf (stderr, "can't link elements\n");
    exit (1);
  }

  bus = gst_element_get_bus (bin);
  gst_bus_add_watch (bus, message_handler, NULL);
  gst_object_unref (bus);

  sync_clock = gst_pipeline_get_clock (GST_PIPELINE (bin));

  appwindow = gtk_window_new (GTK_WINDOW_TOPLEVEL);
  g_signal_connect (G_OBJECT (appwindow), "destroy",
      G_CALLBACK (on_window_destroy), NULL);
  vbox = gtk_vbox_new (FALSE, 6);

  widget = gtk_hscale_new_with_range (50.0, 20000.0, 10);
  gtk_scale_set_draw_value (GTK_SCALE (widget), TRUE);
  gtk_scale_set_value_pos (GTK_SCALE (widget), GTK_POS_TOP);
  gtk_range_set_value (GTK_RANGE (widget), 440.0);
  g_signal_connect (G_OBJECT (widget), "value-changed",
      G_CALLBACK (on_frequency_changed), (gpointer) src);
  gtk_box_pack_start (GTK_BOX (vbox), widget, FALSE, FALSE, 0);

  drawingarea = gtk_drawing_area_new ();
  gtk_widget_set_size_request (drawingarea, spect_bands, spect_height);
  g_signal_connect (G_OBJECT (drawingarea), "configure-event",
      G_CALLBACK (on_configure_event), (gpointer) spectrum);
  gtk_box_pack_start (GTK_BOX (vbox), drawingarea, TRUE, TRUE, 0);

  gtk_container_add (GTK_CONTAINER (appwindow), vbox);
  gtk_widget_show_all (appwindow);

  gst_element_set_state (bin, GST_STATE_PLAYING);
  gtk_main ();
  gst_element_set_state (bin, GST_STATE_NULL);

  gst_object_unref (sync_clock);
  gst_object_unref (bin);

  return 0;
}
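message_handler is not part of the listing. A reduced sketch, assuming it only needs to read the per-band magnitudes out of the spectrum element's messages (the real handler presumably also draws into drawingarea), could be:

/* Hypothetical sketch of the bus watch used above: pick out the spectrum
 * element's messages and read the magnitude list. */
static gboolean
message_handler (GstBus * bus, GstMessage * message, gpointer data)
{
  if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ELEMENT) {
    const GstStructure *s = gst_message_get_structure (message);

    if (gst_structure_has_name (s, "spectrum")) {
      const GValue *magnitudes = gst_structure_get_value (s, "magnitude");
      guint i;

      for (i = 0; i < spect_bands; ++i) {
        const GValue *mag = gst_value_list_get_value (magnitudes, i);
        g_print ("band %u: %.2f dB\n", i, g_value_get_float (mag));
      }
    }
  }
  return TRUE;
}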
Example #20
int main(int argc, char *argv[]) {
	CustomData data;
	GstStateChangeReturn ret;
	GstBus *bus;

	/* Initialize GTK */
	gtk_init (&argc, &argv);

	/* Initialize GStreamer */
	gst_init (&argc, &argv);

	/* Initialize our data structure */
	memset (&data, 0, sizeof (data));
	data.duration = GST_CLOCK_TIME_NONE;

	/* Create the elements */
	data.playbin2 = gst_element_factory_make ("playbin2", "playbin2");

	if (!data.playbin2) {
		g_printerr ("Not all elements could be created.\n");
		return -1;
	}

	/* Set the URI to play */
	g_object_set (data.playbin2, "uri", "http://docs.gstreamer.com/media/sintel_cropped_multilingual.webm", NULL);

	/* Connect to interesting signals in playbin2 */
	g_signal_connect (G_OBJECT (data.playbin2), "video-tags-changed", (GCallback) tags_cb, &data);
	g_signal_connect (G_OBJECT (data.playbin2), "audio-tags-changed", (GCallback) tags_cb, &data);
	g_signal_connect (G_OBJECT (data.playbin2), "text-tags-changed", (GCallback) tags_cb, &data);

	/* Create the GUI */
	create_ui (&data);

	/* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
	bus = gst_element_get_bus (data.playbin2);
	gst_bus_add_signal_watch (bus);
	g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, &data);
	g_signal_connect (G_OBJECT (bus), "message::eos", (GCallback)eos_cb, &data);
	g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, &data);
	g_signal_connect (G_OBJECT (bus), "message::application", (GCallback)application_cb, &data);
	gst_object_unref (bus);

	/* Start playing */
	ret = gst_element_set_state (data.playbin2, GST_STATE_PLAYING);
	if (ret == GST_STATE_CHANGE_FAILURE) {
		g_printerr ("Unable to set the pipeline to the playing state.\n");
		gst_object_unref (data.playbin2);
		return -1;
	}

	/* Register a function that GLib will call every second */
	g_timeout_add_seconds (1, (GSourceFunc)refresh_ui, &data);

	/* Start the GTK main loop. We will not regain control until gtk_main_quit is called. */
	gtk_main ();

	/* Free resources */
	gst_element_set_state (data.playbin2, GST_STATE_NULL);
	gst_object_unref (data.playbin2);
	return 0;
}
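refresh_ui, create_ui and the message callbacks are defined elsewhere. A cut-down sketch of the periodic refresh, assuming GStreamer 0.10 query semantics (matching playbin2) and a gint64 duration field in CustomData, might be:

/* Hypothetical sketch of the 1-second refresh: query the duration once it is
 * known and print the current position. The real function presumably updates
 * the GTK widgets instead of printing. */
static gboolean refresh_ui (CustomData *data) {
	GstFormat fmt = GST_FORMAT_TIME;
	gint64 current = -1;

	if (!GST_CLOCK_TIME_IS_VALID (data->duration))
		gst_element_query_duration (data->playbin2, &fmt, &data->duration);

	if (gst_element_query_position (data->playbin2, &fmt, &current))
		g_print ("Position %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r",
				GST_TIME_ARGS (current), GST_TIME_ARGS (data->duration));

	return TRUE; /* keep this timeout running */
}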
Example #21
int main (int   argc, char *argv[])
{
	int opt;
	int quiet = 0;

	/* Initialize the app_data structure */
	vc_init_data(&app_data);

	/* Parse command line */
	while ((opt = getopt(argc, argv, "dqs:h")) != -1) {
		switch (opt) {
		case 'd':
			/* Daemonize - do not open /dev/stdin */
			app_data.daemonize = 1;
			break;
		case 'q':
			/* Quiet - do not print startup messages */
			quiet = 1;
			break;
		case 's':
			/* Server IP address */
			strcpy(app_data.cfg.server_ip_addr, optarg);
			break;
		case 'h':
		default: /* '?' */
			vs_print_help();
			return 0;
		}
	}
	
	if(!strlen(app_data.cfg.server_ip_addr)){
		vs_print_help();
		return 0;
	}

	if(!quiet){
		printf("glivec - Gstreamer Live Example Client \n"
				"(C) John Weber, Avnet Electronics Marketing\n");
	}

	/* Initialize configuration data */
	app_data.cfg.rtp_recv_port  = DEFAULT_RX_RTP_PORT;
	app_data.cfg.rtcp_send_port = DEFAULT_TX_RTCP_PORT;
	app_data.cfg.rtcp_recv_port = DEFAULT_RX_RTCP_PORT;
	app_data.cfg.server_port    = DEFAULT_SERVER_PORT;

	printf( "Server IP address:           %s\n"
			"Server port:                 %d\n"
			"Sending RTP data on port:    %d\n"
			"Sending RTCP data on port:   %d\n"
			"Expecting RTCP data on port: %d\n",
			app_data.cfg.server_ip_addr,
			app_data.cfg.server_port,
			app_data.cfg.rtp_recv_port,
			app_data.cfg.rtcp_send_port,
			app_data.cfg.rtcp_recv_port);

	/* Setup the file descriptors for polling, starting with /dev/stdin */
	if(!app_data.daemonize){
		app_data.fds[FD_INDEX_STDIN].fd = open("/dev/stdin", O_RDONLY);
		if( app_data.fds[FD_INDEX_STDIN].fd == -1) {
			printf("Error opening /dev/stdin for reading\n");
			return -1;
		}
		app_data.fds[FD_INDEX_STDIN].events = POLLIN;
	}
	else {
		printf("glivec starting as background task.\n");
	}

	if(link_init_as_client((char*)app_data.cfg.server_ip_addr, app_data.cfg.server_port) < 0){
		debug_printf("%s: Failed to init link as client\n", __func__);
		return -1;
	}

	/* Initialization */
	gst_init (&argc, &argv);

	vc_sigint_setup();

	/* Main Loop */
	printf ("Running...\n");

	vc_mainloop(&app_data);

	/* Out of the main loop, clean up nicely */
	printf ("Returned, stopping playback\n");

	return vc_cleanup(&app_data);

}
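vc_mainloop() is not part of this listing. As a rough, standalone sketch of the kind of poll()-based loop that the file-descriptor setup above prepares for (the quit command and all names below are illustrative assumptions, not taken from glivec):

#include <poll.h>
#include <unistd.h>

/* Illustrative sketch only: loop on poll(), read single commands from stdin. */
static int vc_mainloop_sketch (struct pollfd *fds, int nfds, int stdin_index)
{
	char buf[64];

	for (;;) {
		int rc = poll (fds, nfds, 500 /* ms */);
		if (rc < 0)
			return -1;               /* poll error */
		if (rc == 0)
			continue;                /* timeout, nothing to do */

		if (fds[stdin_index].revents & POLLIN) {
			ssize_t n = read (fds[stdin_index].fd, buf, sizeof (buf) - 1);
			if (n > 0) {
				buf[n] = '\0';
				if (buf[0] == 'q')   /* e.g. 'q' quits the loop */
					break;
			}
		}
	}
	return 0;
}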
Beispiel #22
0
int
main (int   argc,
      char *argv[])
{
    GMainLoop *loop;

    GstElement *rtpPipe,
                *udpRtpRecv, *udpRtcpRecv, *udpRtpSend,
                *zrtp, *sinkRtp, *sinkRtcp,

                *udpRtpRecvVid, *udpRtcpRecvVid, *udpRtpSendVid,
                *zrtpVid, *sinkRtpVid, *sinkRtcpVid;
    GstBus *bus;

    /* Initialisation */
    gst_init (&argc, &argv);

    loop = g_main_loop_new (NULL, FALSE);

    /* Create gstreamer elements for the first stream, could be an audio stream */
    rtpPipe  = gst_pipeline_new ("rtp-recv");

    udpRtpRecv  = gst_element_factory_make("udpsrc", "udp-rtp-recv");
    udpRtcpRecv = gst_element_factory_make("udpsrc", "udp-rtcp-recv");
    udpRtpSend  = gst_element_factory_make("udpsink", "udp-rtp-send");

    zrtp        = gst_element_factory_make("zrtpfilter", "ZRTP");

    sinkRtp     = gst_element_factory_make("fakesink", "rtp-sink");
    sinkRtcp    = gst_element_factory_make("fakesink", "rtcp-sink");

    if (!rtpPipe || !udpRtpRecv || !udpRtcpRecv || !udpRtpSend || !zrtp || !sinkRtp || !sinkRtcp) {
        g_printerr ("One of first stream elements could not be created. Exiting.\n");
        return -1;
    }

    /* Create gstreamer elements for the second stream, could be a video stream */
    udpRtpRecvVid  = gst_element_factory_make("udpsrc", "udp-rtp-recv-vid");
    udpRtcpRecvVid = gst_element_factory_make("udpsrc", "udp-rtcp-recv-vid");
    udpRtpSendVid  = gst_element_factory_make("udpsink", "udp-rtp-send-vid");

    zrtpVid        = gst_element_factory_make("zrtpfilter", "ZRTP-Vid");

    sinkRtpVid     = gst_element_factory_make("fakesink", "rtp-sink-vid");
    sinkRtcpVid    = gst_element_factory_make("fakesink", "rtcp-sink-vid");

    if (!udpRtpRecvVid || !udpRtcpRecvVid || !udpRtpSendVid || !zrtpVid || !sinkRtpVid || !sinkRtcpVid) {
        g_printerr ("One of second stream elements could not be created. Exiting.\n");
        return -1;
    }

    /* Setup for receiver first RTP and RTCP stream, even port is RTP, odd port is RTCP */
    g_object_set(G_OBJECT(udpRtpRecv), "port", 5002, NULL);
    g_object_set(G_OBJECT(udpRtcpRecv), "port", 5003, NULL);

    /* UDP sink sends to localhost, port 5004 */
    g_object_set(G_OBJECT(udpRtpSend), "clients", "127.0.0.1:5004", NULL);
    g_object_set(G_OBJECT(udpRtpSend), "sync", FALSE, NULL);
    g_object_set(G_OBJECT(udpRtpSend), "async", FALSE, NULL);

    /* Setup the RTP and RTCP sinks after the ZRTP filter */
    g_object_set(G_OBJECT(sinkRtp), "sync", FALSE, NULL);
    g_object_set(G_OBJECT(sinkRtp), "async", FALSE, NULL);
    g_object_set(G_OBJECT(sinkRtp), "dump", TRUE, NULL);

    g_object_set(G_OBJECT(sinkRtcp), "sync", FALSE, NULL);
    g_object_set(G_OBJECT(sinkRtcp), "async", FALSE, NULL);
    g_object_set(G_OBJECT(sinkRtcp), "dump", TRUE, NULL);


    /* Setup receiver for second RTP and RTCP stream, even port is RTP, odd port is RTCP */
    g_object_set(G_OBJECT(udpRtpRecvVid), "port", 5012, NULL);
    g_object_set(G_OBJECT(udpRtcpRecvVid), "port", 5013, NULL);

    /* UDP sink sends to localhost, port 5014 */
    g_object_set(G_OBJECT(udpRtpSendVid), "clients", "127.0.0.1:5014", NULL);
    g_object_set(G_OBJECT(udpRtpSendVid), "sync", FALSE, NULL);
    g_object_set(G_OBJECT(udpRtpSendVid), "async", FALSE, NULL);

    /* Setup the RTP and RTCP sinks after the ZRTP filter */
    g_object_set(G_OBJECT(sinkRtpVid), "sync", FALSE, NULL);
    g_object_set(G_OBJECT(sinkRtpVid), "async", FALSE, NULL);
    g_object_set(G_OBJECT(sinkRtpVid), "dump", TRUE, NULL);

    g_object_set(G_OBJECT(sinkRtcpVid), "sync", FALSE, NULL);
    g_object_set(G_OBJECT(sinkRtcpVid), "async", FALSE, NULL);
    g_object_set(G_OBJECT(sinkRtcpVid), "dump", TRUE, NULL);



    /* Set the ZRTP cache name and initialize ZRTP with autosense mode ON; the
     * parameter to "initialize" is TRUE.
     *
     * Because this is an RTP receiver only, we do not send RTP and thus have no
     * SSRC data. Therefore set a local SSRC. For this demo program it is a fixed
     * value (0xdeadbeef); for real applications it should be a 32-bit random value.
     */
    g_object_set(G_OBJECT(zrtp), "cache-name", "gstZrtpCache.dat", NULL);
    g_object_set(G_OBJECT(zrtp), "local-ssrc", 0xdeadbeef, NULL);
    g_object_set(G_OBJECT(zrtp), "initialize", TRUE, NULL);

    /* NOTE: A slave multi-stream ZRTP plugin must not be 'enabled' during initialization;
     * therefore the parameter to "initialize" is FALSE.
     *
     * Use a different SSRC for the second ZRTP stream to avoid SSRC collisions.
     */
    g_object_set(G_OBJECT(zrtpVid), "cache-name", "gstZrtpCache.dat", NULL);
    g_object_set(G_OBJECT(zrtpVid), "local-ssrc", 0xdeadbeee, NULL);
    g_object_set(G_OBJECT(zrtpVid), "initialize", FALSE, NULL);

    /* we add a message handler */
    bus = gst_pipeline_get_bus(GST_PIPELINE(rtpPipe));
    gst_bus_add_watch(bus, bus_call, loop);
    gst_object_unref(bus);

    /* Set up the pipeline, we add all elements into the pipeline */
    gst_bin_add_many(GST_BIN(rtpPipe), udpRtpRecv, udpRtcpRecv, zrtp, sinkRtp, sinkRtcp, udpRtpSend,
                     udpRtpRecvVid, udpRtcpRecvVid, zrtpVid, sinkRtpVid, sinkRtcpVid, udpRtpSendVid, NULL);

    /* setup the RTP and RTCP receiver and the sender for ZRTP communication */
    gst_element_link_pads(udpRtpRecv, "src", zrtp, "recv_rtp_sink");
    gst_element_link_pads(zrtp, "recv_rtp_src", sinkRtp, "sink");

    gst_element_link_pads(udpRtcpRecv, "src", zrtp, "recv_rtcp_sink");
    gst_element_link_pads(zrtp, "recv_rtcp_src", sinkRtcp, "sink");

    gst_element_link_pads(zrtp, "send_rtp_src", udpRtpSend, "sink");


    /* setup the Video RTP and RTCP receiver and the sender for ZRTP communication */
    gst_element_link_pads(udpRtpRecvVid, "src", zrtpVid, "recv_rtp_sink");
    gst_element_link_pads(zrtpVid, "recv_rtp_src", sinkRtpVid, "sink");

    gst_element_link_pads(udpRtcpRecvVid, "src", zrtpVid, "recv_rtcp_sink");
    gst_element_link_pads(zrtpVid, "recv_rtcp_src", sinkRtcpVid, "sink");

    gst_element_link_pads(zrtpVid, "send_rtp_src", udpRtpSendVid, "sink");

    /* Connect the ZRTP callback (signal) functions.*/
    /* NOTE: The connect call to the status signal of the master ZRTP stream hands over the
     * slave's ZRTP plugin pointer. This enables the master to set the multi-stream
     * parameters when it detects that it has reached the 'zrtp_InfoSecureStateOn' status.
     */
    g_signal_connect(zrtp, "status",        G_CALLBACK(zrtp_statusInfoMaster), zrtpVid);
    g_signal_connect(zrtp, "sas",           G_CALLBACK(zrtp_sas), zrtp);
    g_signal_connect(zrtp, "algorithm",     G_CALLBACK(zrtp_algorithm), zrtp);
    g_signal_connect(zrtp, "negotiation",   G_CALLBACK(zrtp_negotiationFail), zrtp);
    g_signal_connect(zrtp, "security-off",  G_CALLBACK(zrtp_securityOff), zrtp);
    g_signal_connect(zrtp, "not-supported", G_CALLBACK(zrtp_notSupported), zrtp);

    /* Connect the slave ZRTP stream to a different status signal callback.
     */
    g_signal_connect(zrtpVid, "status",        G_CALLBACK(zrtp_statusInfo), zrtpVid);
    g_signal_connect(zrtpVid, "sas",           G_CALLBACK(zrtp_sas), zrtpVid);
    g_signal_connect(zrtpVid, "algorithm",     G_CALLBACK(zrtp_algorithm), zrtpVid);
    g_signal_connect(zrtpVid, "negotiation",   G_CALLBACK(zrtp_negotiationFail), zrtpVid);
    g_signal_connect(zrtpVid, "security-off",  G_CALLBACK(zrtp_securityOff), zrtpVid);
    g_signal_connect(zrtpVid, "not-supported", G_CALLBACK(zrtp_notSupported), zrtpVid);

    g_print("Starting ZRTP receive pipeline\n");
    gst_element_set_state(rtpPipe, GST_STATE_PLAYING);

    g_print("Receiving...\n");
    g_main_loop_run (loop);

    g_print("Exit main loop\n");

    g_print ("Deleting ZRTP pipe\n");
    gst_object_unref(GST_OBJECT(rtpPipe));

    return 0;
}
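The hand-over from the master to the slave ZRTP stream described in the comments above happens in zrtp_statusInfoMaster, which is not included here. The following is a pseudocode-style sketch only: the extra signal parameters and the way the multi-stream parameters are copied are assumptions and must be checked against the zrtpfilter plugin documentation.

/* Sketch only: the parameter names/types of the "status" signal are assumed. */
static void
zrtp_statusInfoMaster (GstElement *zrtpMaster, gint severity, gint subCode, gpointer userData)
{
    GstElement *zrtpSlave = GST_ELEMENT (userData);   /* zrtpVid, handed over at g_signal_connect() */

    if (subCode == zrtp_InfoSecureStateOn) {          /* master reached the secure state */
        /* At this point a real implementation would:
         *   1. read the multi-stream parameters from zrtpMaster,
         *   2. set them on zrtpSlave,
         *   3. enable the slave stream,
         * using g_object_get()/g_object_set() on the zrtpfilter properties
         * (property names intentionally omitted here). */
    }
}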
Beispiel #23
0
int
main (int   argc,
      char *argv[])
{
  GMainLoop *loop;
  GstElement *pipeline, *source, *demuxer, *decoder, *conv1, *conv2, *sink, *filtro_sepia;
  GstElement * hora;
  GstBus *bus;
   //Initialisation
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

	/* Check the input arguments */
	if (argc != 2) {
		g_printerr ("Usage: %s <Ogg/Vorbis filename>\n", argv[0]);
		return -1;
	}
   
	/* Create the GStreamer elements */
	pipeline = gst_pipeline_new ("test_pipeline");
	source = gst_element_factory_make ("filesrc", "file-source");
	demuxer = gst_element_factory_make ("oggdemux", "ogg-demuxer");
	decoder = gst_element_factory_make ("theoradec", "theoradec-decoder");
  hora     = gst_element_factory_make ("clockoverlay", "hora_actual");
  filtro_sepia = gst_element_factory_make ("coloreffects", "filtro-sepia");
	conv1 = gst_element_factory_make ("videoconvert", "converter1");
	conv2 = gst_element_factory_make ("videoconvert", "converter2");
	sink = gst_element_factory_make ("ximagesink", "video_out");
	if (!pipeline || !source || !demuxer || !decoder || !hora || !filtro_sepia || !conv1 || !conv2 || !sink) {
		g_printerr ("One element could not be created. Exiting.\n");
		return -1;
	}

	/* we set the input filename to the source element */
	g_object_set (G_OBJECT (source), "location", argv[1], NULL);
	
	// Change this property so the current time is displayed at a larger size
	g_object_set (G_OBJECT(hora), "auto-resize", FALSE, NULL);

	// Sepia filter
	g_object_set (G_OBJECT(filtro_sepia), "preset", 2, NULL);

   //Set up the pipeline
   //we add a message handler
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);
 
	/* add all the elements to the pipeline */
	/* file-source | ogg-demuxer | theoradec-decoder | hora_actual | converter | filtro_sepia | converter | video_out */
	gst_bin_add_many (GST_BIN (pipeline), source, demuxer, decoder, hora, conv1, filtro_sepia, conv2, sink, NULL);

	/* we link the elements together */
	/* file-source -> ogg-demuxer -> theoradec-decoder -> hora -> conv1 -> filtro_sepia -> conv2 -> video_out */
	gst_element_link (source, demuxer);
	gst_element_link_many (decoder, hora, conv1, filtro_sepia, conv2, sink, NULL);

	g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), decoder);

   //Set the pipeline to "playing" state
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
   //Iterate
  g_main_loop_run (loop);
  // Out of the main loop, clean up nicely
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (pipeline));
  return 0;
}	
Beispiel #24
0
int
main (int argc, char *argv[])
{
    int i, j;
    int num_channels;

    char buffer[20];

    GList *input_channels;        /* structure holding all the input channels */

    input_channel_t *channel_in;

    GstElement *main_bin;
    GstElement *adder;
    GstElement *audiosink;

    GstPad *pad;                  /* to request pads for the adder */

    gst_init (&argc, &argv);

    if (argc == 1) {
        g_print ("usage: %s <filename1> <filename2> <...>\n", argv[0]);
        exit (-1);
    }
    num_channels = argc - 1;

    /* set up output channel and main bin */

    /* create adder */
    adder = gst_element_factory_make ("adder", "adderel");

    /* create an audio sink */
    audiosink = gst_element_factory_make ("esdsink", "play_audio");

    /* create main bin */
    main_bin = gst_pipeline_new ("bin");

    /* add adder and audiosink to the bin */
    GST_INFO ("main: adding adder to bin");
    gst_bin_add (GST_BIN (main_bin), adder);
    GST_INFO ("main: adding audiosink to bin");
    gst_bin_add (GST_BIN (main_bin), audiosink);

    /* link adder and audiosink */

    gst_pad_link (gst_element_get_pad (adder, "src"),
                  gst_element_get_pad (audiosink, "sink"));

    /* start looping */
    input_channels = NULL;

    for (i = 1; i < argc; ++i) {
        printf ("Opening channel %d from file %s...\n", i, argv[i]);
        channel_in = create_input_channel (i, argv[i]);
        input_channels = g_list_append (input_channels, channel_in);

        if (i > 1)
            gst_element_set_state (main_bin, GST_STATE_PAUSED);
        gst_bin_add (GST_BIN (main_bin), channel_in->pipe);

        /* request pads and link to adder */
        GST_INFO ("requesting pad\n");
        pad = gst_element_get_request_pad (adder, "sink%d");
        printf ("\tGot new adder sink pad %s\n", gst_pad_get_name (pad));
        sprintf (buffer, "channel%d", i);
        gst_pad_link (gst_element_get_pad (channel_in->pipe, buffer), pad);

        /* register a volume envelope */
        printf ("\tregistering volume envelope...\n");

        /*
         * this is the volenv :
         * each song gets a slot of 5 seconds, with a 5 second fadeout
         * at the end of that, all audio streams play simultaneously
         * at a level ensuring no distortion
         * example for three songs :
         * song1 : starts at full level, plays 5 seconds, faded out at 10 seconds,
         *             sleep until 25, fade to end level at 30
         * song2 : starts silent, fades in at 5 seconds, full blast at 10 seconds,
         *             full level until 15, faded out at 20, sleep until 25, fade to end at 30
         * song3 : starts muted, fades in from 15, full at 20, until 25, fade to end level
         */

        if (i == 1) {
            /* first song gets special treatment for end style */
            env_register_cp (channel_in->volenv, 0.0, 1.0);
        } else {
            env_register_cp (channel_in->volenv, 0.0, 0.0000001);     /* start muted */
            env_register_cp (channel_in->volenv, i * 10.0 - 15.0, 0.0000001); /* start fade in */
            env_register_cp (channel_in->volenv, i * 10.0 - 10.0, 1.0);
        }
        env_register_cp (channel_in->volenv, i * 10.0 - 5.0, 1.0);  /* end of full level */

        if (i != num_channels) {
            env_register_cp (channel_in->volenv, i * 10.0, 0.0000001);        /* fade to black */
            env_register_cp (channel_in->volenv, num_channels * 10.0 - 5.0, 0.0000001);       /* start fade in */
        }
        env_register_cp (channel_in->volenv, num_channels * 10.0, 1.0 / num_channels);      /* to end level */

#ifndef GST_DISABLE_LOADSAVE
        gst_xml_write_file (GST_ELEMENT (main_bin), fopen ("mixer.xml", "w"));
#endif

        /* start playing */
        gst_element_set_state (main_bin, GST_STATE_PLAYING);

        /* write out the schedule */
        gst_scheduler_show (GST_ELEMENT_SCHEDULER (main_bin));
        playing = TRUE;

        j = 0;
        /*printf ("main: start iterating from 0"); */
        while (playing && j < 100) {
            /*      printf ("main: iterating %d\n", j); */
            gst_bin_iterate (GST_BIN (main_bin));
            /*fprintf(stderr,"after iterate()\n"); */
            ++j;
        }
    }
    printf ("main: all the channels are open\n");
    while (playing) {
        gst_bin_iterate (GST_BIN (main_bin));
        /*fprintf(stderr,"after iterate()\n"); */
    }
    /* stop the bin */
    gst_element_set_state (main_bin, GST_STATE_NULL);

    while (input_channels) {
        destroy_input_channel (input_channels->data);
        input_channels = g_list_next (input_channels);
    }
    g_list_free (input_channels);

    gst_object_unref (audiosink);

    gst_object_unref (main_bin);

    exit (0);
}
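To make the crossfade schedule described in the comment above concrete, here is a small standalone program (purely an illustration, not part of the mixer) that prints the control points the loop would register for three input files:

#include <stdio.h>

int main (void)
{
    int num_channels = 3;
    int i;

    for (i = 1; i <= num_channels; ++i) {
        printf ("song %d:\n", i);
        if (i == 1) {
            printf ("  %5.1f s -> level 1.00 (starts at full level)\n", 0.0);
        } else {
            printf ("  %5.1f s -> level ~0   (starts muted)\n", 0.0);
            printf ("  %5.1f s -> level ~0   (fade-in starts)\n", i * 10.0 - 15.0);
            printf ("  %5.1f s -> level 1.00 (fade-in done)\n", i * 10.0 - 10.0);
        }
        printf ("  %5.1f s -> level 1.00 (end of full level)\n", i * 10.0 - 5.0);
        if (i != num_channels) {
            printf ("  %5.1f s -> level ~0   (faded out)\n", i * 10.0);
            printf ("  %5.1f s -> level ~0   (final fade-in starts)\n", num_channels * 10.0 - 5.0);
        }
        printf ("  %5.1f s -> level %.2f (end level)\n",
                num_channels * 10.0, 1.0 / num_channels);
    }
    return 0;
}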
Beispiel #25
0
int
main (int   argc,
      char *argv[])
{
  GMainLoop *loop;

  GstElement *pipeline, *source, *demuxer, *decoder, *conv, *sink;
  GstBus *bus;

  /* Initialisation */
  gst_init (&argc, &argv);

  loop = g_main_loop_new (NULL, FALSE);


  /* Check input arguments */
  if (argc != 2) {
    g_printerr ("Usage: %s <Ogg/Vorbis filename>\n", argv[0]);
    return -1;
  }


  /* Create gstreamer elements */
  pipeline = gst_pipeline_new ("audio-player");
  source   = gst_element_factory_make ("filesrc",       "file-source");
  demuxer  = gst_element_factory_make ("oggdemux",      "ogg-demuxer");
  decoder  = gst_element_factory_make ("vorbisdec",     "vorbis-decoder");
  conv     = gst_element_factory_make ("audioconvert",  "converter");
  sink     = gst_element_factory_make ("autoaudiosink", "audio-output");

  if (!pipeline || !source || !demuxer || !decoder || !conv || !sink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  /* Set up the pipeline */

  /* we set the input filename to the source element */
  g_object_set (G_OBJECT (source), "location", argv[1], NULL);

  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* we add all elements into the pipeline */
  /* file-source | ogg-demuxer | vorbis-decoder | converter | audio-output */
  gst_bin_add_many (GST_BIN (pipeline),
                    source, demuxer, decoder, conv, sink, NULL);

  /* we link the elements together */
  /* file-source -> ogg-demuxer ~> vorbis-decoder -> converter -> audio-output */
  gst_element_link (source, demuxer);
  gst_element_link_many (decoder, conv, sink, NULL);
  g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), decoder);


  /* Set the pipeline to "playing" state*/
  system("clear");

  g_print ("Welcome to K++ Think Open. Here is the starting point for your application development - a very simple media player to play OGG/Vorbis audio files. Improve it and flaunt it!\nNow playing: %s\n", argv[1]);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Iterate */
  g_print ("Running... Wait patiently for the program to terminate after the file is played.\n");
  g_timeout_add (200, (GSourceFunc) cb_print_position, pipeline);
  g_main_loop_run (loop);


  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));

  g_main_loop_unref(loop);
  return 0;
}
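cb_print_position, registered with g_timeout_add above, is not shown. Below is a minimal sketch of such a position printer, assuming the GStreamer 1.0 query API (in 0.10 the GstFormat is passed by pointer instead); it returns TRUE so the 200 ms timeout stays installed.

static gboolean
cb_print_position (GstElement *pipeline)
{
  gint64 pos, len;

  /* query current position and total duration of the pipeline */
  if (gst_element_query_position (pipeline, GST_FORMAT_TIME, &pos)
      && gst_element_query_duration (pipeline, GST_FORMAT_TIME, &len)) {
    g_print ("Time: %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r",
        GST_TIME_ARGS (pos), GST_TIME_ARGS (len));
  }

  return TRUE; /* keep calling us */
}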
Beispiel #26
0
int
main (int argc,
      char *argv[])
{
    
  /* Initialisation */
  gst_init (&argc, &argv);

  GList *element_list = gst_element_factory_list_get_elements (GST_ELEMENT_FACTORY_TYPE_DEPAYLOADER, 
							       GST_RANK_NONE);
  GList *iter = element_list;
  while (iter != NULL)
    {
      g_print ("+++++\n");
      g_print ("%s -- ", gst_element_factory_get_longname ((GstElementFactory *)iter->data));
      g_print ("%s\n", gst_plugin_feature_get_name ((GstPluginFeature *)iter->data));
	 
      const GList *static_pads = 
	gst_element_factory_get_static_pad_templates ((GstElementFactory *)iter->data);
	 
      while (NULL != static_pads)
	{
	  GstStaticPadTemplate *pad = (GstStaticPadTemplate *)static_pads->data; 
	  //the following is EMPTY gchar *caps_str = gst_caps_to_string (&pad->static_caps.caps); 
	  //g_free (caps_str); 
	  /* g_print ("string: %s\n",  */
	  /* 	      pad->static_caps.string);  */
	  GstCaps *caps = gst_caps_from_string (pad->static_caps.string);
	  guint caps_size = gst_caps_get_size (caps);
	  if (! gst_caps_is_any (caps))
	    for (guint i = caps_size; i > 0; i--) 
	      {
		GstStructure *caps_struct = gst_caps_get_structure (caps, i-1);
		if (gst_structure_has_name (caps_struct,"application/x-rtp")) 
		  {
		    g_print ("string: %s\n",   
			     gst_structure_to_string (caps_struct));   
		    
		    {//payload 
		      const GValue *val = gst_structure_get_value (caps_struct, "payload");  
		      if (NULL != val) 
			{ 
			  //g_print ("payload struct type %s\n", G_VALUE_TYPE_NAME (val));  
			  if(GST_VALUE_HOLDS_INT_RANGE(val)) 
			    { 
			      g_print ("payload min %d\n", gst_value_get_int_range_min (val));  
			    } 
			  if (GST_VALUE_HOLDS_LIST(val)) 
			    { 
			      for (guint i = 0; i < gst_value_list_get_size (val); i++) 
				{ 
				  const GValue *item_val = gst_value_list_get_value (val, i); 
				  g_print ("payload list %d\n", g_value_get_int (item_val)); 
				} 
			    } 
			  if (G_VALUE_HOLDS_INT (val)) 
			    { 
			      g_print ("payload int %d\n", g_value_get_int (val)); 
			    } 
			} 
		    } 
		    { //encoding-name
		      const GValue *val = gst_structure_get_value (caps_struct, "encoding-name");  
		      if (NULL != val) 
			{
			  //g_print ("encoding-name struct type %s\n", G_VALUE_TYPE_NAME (val));  
			  if (GST_VALUE_HOLDS_LIST(val)) 
			    { 
			      for (guint i = 0; i < gst_value_list_get_size (val); i++) 
				{ 
				  const GValue *item_val = gst_value_list_get_value (val, i); 
				  g_print ("encoding-name list %s\n", g_value_get_string (item_val)); 
				} 
			    } 
			  if (G_VALUE_HOLDS_STRING (val)) 
			    { 
			      g_print ("encoding-name string %s\n", g_value_get_string (val)); 
			    } 
				      
			}
		    } 
		    {//media
		      const GValue *val = gst_structure_get_value (caps_struct, "media");  
		      if (NULL != val) 
			{
			  if (GST_VALUE_HOLDS_LIST(val)) 
			    { 
			      for (guint i = 0; i < gst_value_list_get_size (val); i++) 
				{ 
				  const GValue *item_val = gst_value_list_get_value (val, i); 
				  g_print ("media list %s\n", g_value_get_string (item_val)); 
				} 
			    } 
			  if (G_VALUE_HOLDS_STRING (val)) 
			    { 
			      g_print ("media string %s\n", g_value_get_string (val)); 
			    } 
				      
			}
		    } 

		    {//clock rate 
		      const GValue *val = gst_structure_get_value (caps_struct, "clock-rate");  
		      if (NULL != val) 
			{ 
			  //g_print ("payload struct type %s\n", G_VALUE_TYPE_NAME (val));  
			  if(GST_VALUE_HOLDS_INT_RANGE(val)) 
			    { 
			      g_print ("clock-rate min %d\n", gst_value_get_int_range_min (val));  
			    } 
			  if (GST_VALUE_HOLDS_LIST(val)) 
			    { 
			      for (guint i = 0; i < gst_value_list_get_size (val); i++) 
				{ 
				  const GValue *item_val = gst_value_list_get_value (val, i); 
				  g_print ("clock-rate list %d\n", g_value_get_int (item_val)); 
				} 
			    } 
			  if (G_VALUE_HOLDS_INT (val)) 
			    { 
			      g_print ("clock-rate int %d\n", g_value_get_int (val)); 
			    } 
			} 
		    } 

		    /* g_print ("\nencoding-name %s\n",   */
		    /* 	 gst_structure_get_string (caps_struct,  */
		    /* 				   "encoding-name"));  */
			
		  }
	      }
	  static_pads = g_list_next (static_pads); 
	  gst_caps_unref (caps);
	}
	 
      iter = g_list_next (iter);
    }
  gst_plugin_feature_list_free (element_list);
    
  return 0;
}
Beispiel #27
0
int
main (int argc, char *argv[])
{
  App *app = &s_app;
  GError *error = NULL;
  GstBus *bus;

  gst_init (&argc, &argv);

  GST_DEBUG_CATEGORY_INIT (appsrc_playbin_debug, "appsrc-playbin", 0,
      "appsrc playbin example");

  if (argc < 2) {
    g_print ("usage: %s <filename>\n", argv[0]);
    return -1;
  }

  /* try to open the file as an mmapped file */
  app->file = g_mapped_file_new (argv[1], FALSE, &error);
  if (error) {
    g_print ("failed to open file: %s\n", error->message);
    g_error_free (error);
    return -2;
  }
  /* get some vitals, this will be used to read data from the mmapped file and
   * feed it to appsrc. */
  app->length = g_mapped_file_get_length (app->file);
  app->data = (guint8 *) g_mapped_file_get_contents (app->file);
  app->offset = 0;

  /* create a mainloop to get messages */
  app->loop = g_main_loop_new (NULL, TRUE);

  app->playbin = gst_element_factory_make ("playbin2", NULL);
  g_assert (app->playbin);

  bus = gst_pipeline_get_bus (GST_PIPELINE (app->playbin));

  /* add watch for messages */
  gst_bus_add_watch (bus, (GstBusFunc) bus_message, app);

  /* set to read from appsrc */
  g_object_set (app->playbin, "uri", "appsrc://", NULL);

  /* get notification when the source is created so that we get a handle to it
   * and can configure it */
  g_signal_connect (app->playbin, "deep-notify::source",
      (GCallback) found_source, app);

  /* go to playing and wait in a mainloop. */
  gst_element_set_state (app->playbin, GST_STATE_PLAYING);

  /* this mainloop is stopped when we receive an error or EOS */
  g_main_loop_run (app->loop);

  GST_DEBUG ("stopping");

  gst_element_set_state (app->playbin, GST_STATE_NULL);

  /* free the file */
  g_mapped_file_free (app->file);

  gst_object_unref (bus);
  g_main_loop_unref (app->loop);

  return 0;
}
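Neither found_source nor the appsrc feeding code is shown above. The following is a minimal sketch of both, assuming the GStreamer 0.10 appsrc API and that the App structure also has a GstElement *appsrc member (an assumption; only file, data, length, offset, loop and playbin are visible in this listing).

#include <string.h>

/* Sketch: copy the next chunk of the mmapped file into a buffer and push it. */
static void
feed_data (GstElement *appsrc, guint size, App *app)
{
  GstBuffer *buffer;
  GstFlowReturn ret;
  guint len = size;

  if (app->offset >= app->length) {
    /* end of file reached, signal end-of-stream */
    g_signal_emit_by_name (appsrc, "end-of-stream", &ret);
    return;
  }

  if (app->offset + len > app->length)
    len = app->length - app->offset;

  buffer = gst_buffer_new_and_alloc (len);
  memcpy (GST_BUFFER_DATA (buffer), app->data + app->offset, len);
  app->offset += len;

  g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
  gst_buffer_unref (buffer);
}

/* Sketch: grab the appsrc that playbin2 created for the appsrc:// URI and
 * feed it on demand. */
static void
found_source (GObject *object, GObject *orig, GParamSpec *pspec, App *app)
{
  g_object_get (orig, pspec->name, &app->appsrc, NULL);
  g_signal_connect (app->appsrc, "need-data", G_CALLBACK (feed_data), app);
}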
Beispiel #28
0
gint main (gint argc, gchar *argv[])
{
    GtkWidget *area;
    gst_init (&argc, &argv);
    gtk_init (&argc, &argv);

    GstElement* pipeline = gst_pipeline_new ("pipeline");
    GstElement* videosrc  = gst_element_factory_make ("videotestsrc", "videotestsrc");
    GstElement* videosink = gst_element_factory_make ("glimagesink", "glimagesink");

    gst_bin_add_many (GST_BIN (pipeline), videosrc, videosink, NULL);

    gboolean link_ok = gst_element_link_many(videosrc, videosink, NULL) ;
    if(!link_ok)
    {
        g_warning("Failed to link an element!\n") ;
        return -1;
    }

    //set window id on this event
    GstBus* bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_bus_add_signal_watch (bus);
    g_signal_connect(bus, "message::error", G_CALLBACK(end_stream_cb), pipeline);
    g_signal_connect(bus, "message::warning", G_CALLBACK(end_stream_cb), pipeline);
    g_signal_connect(bus, "message::eos", G_CALLBACK(end_stream_cb), pipeline);

    gst_element_set_state(pipeline, GST_STATE_READY);

    area = gtk_drawing_area_new();
    gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, area, NULL);
    gst_object_unref (bus);

    //window that contains an area where the video is drawn
    GtkWidget* window = gtk_window_new(GTK_WINDOW_TOPLEVEL);
    gtk_widget_set_size_request (window, 640, 480);
    gtk_window_move (GTK_WINDOW (window), 300, 10);
    gtk_window_set_title (GTK_WINDOW (window), "glimagesink implements the GstVideoOverlay interface");
    GdkGeometry geometry;
    geometry.min_width = 1;
    geometry.min_height = 1;
    geometry.max_width = -1;
    geometry.max_height = -1;
    gtk_window_set_geometry_hints (GTK_WINDOW (window), window, &geometry, GDK_HINT_MIN_SIZE);

    //window to control the states
    GtkWidget* window_control = gtk_window_new (GTK_WINDOW_TOPLEVEL);
    geometry.min_width = 1;
    geometry.min_height = 1;
    geometry.max_width = -1;
    geometry.max_height = -1;
    gtk_window_set_geometry_hints (GTK_WINDOW (window_control), window_control, &geometry, GDK_HINT_MIN_SIZE);
    gtk_window_set_resizable (GTK_WINDOW (window_control), FALSE);
    gtk_window_move (GTK_WINDOW (window_control), 10, 10);
    GtkWidget* table = gtk_grid_new ();
    gtk_container_add (GTK_CONTAINER (window_control), table);

    //control state null
    GtkWidget* button_state_null = gtk_button_new_with_label ("GST_STATE_NULL");
    g_signal_connect (G_OBJECT (button_state_null), "clicked",
        G_CALLBACK (button_state_null_cb), pipeline);
    gtk_grid_attach (GTK_GRID (table), button_state_null, 0, 0, 1, 1);
    gtk_widget_show (button_state_null);

    //control state ready
    GtkWidget* button_state_ready = gtk_button_new_with_label ("GST_STATE_READY");
    g_signal_connect (G_OBJECT (button_state_ready), "clicked",
        G_CALLBACK (button_state_ready_cb), pipeline);
    gtk_grid_attach (GTK_GRID (table), button_state_ready, 0, 1, 1, 1);
    gtk_widget_show (button_state_ready);

    //control state paused
    GtkWidget* button_state_paused = gtk_button_new_with_label ("GST_STATE_PAUSED");
    g_signal_connect (G_OBJECT (button_state_paused), "clicked",
        G_CALLBACK (button_state_paused_cb), pipeline);
    gtk_grid_attach (GTK_GRID (table), button_state_paused, 0, 2, 1, 1);
    gtk_widget_show (button_state_paused);

    //control state playing
    GtkWidget* button_state_playing = gtk_button_new_with_label ("GST_STATE_PLAYING");
    g_signal_connect (G_OBJECT (button_state_playing), "clicked",
        G_CALLBACK (button_state_playing_cb), pipeline);
    gtk_grid_attach (GTK_GRID (table), button_state_playing, 0, 3, 1, 1);
    gtk_widget_show (button_state_playing);

    //change framerate
    GtkWidget* slider_fps = gtk_scale_new_with_range (GTK_ORIENTATION_VERTICAL,
        1, 30, 2);
    g_signal_connect (G_OBJECT (slider_fps), "format-value",
        G_CALLBACK (slider_fps_cb), pipeline);
    gtk_grid_attach (GTK_GRID (table), slider_fps, 1, 0, 1, 4);
    gtk_widget_show (slider_fps);

    gtk_widget_show (table);
    gtk_widget_show (window_control);

    //configure the pipeline
    g_signal_connect(G_OBJECT(window), "delete-event", G_CALLBACK(destroy_cb), pipeline);

    //area where the video is drawn
    gtk_container_add (GTK_CONTAINER (window), area);

    gtk_widget_realize(area);

    //needed when being in GST_STATE_READY, GST_STATE_PAUSED
    //or resizing/obscuring the window
    g_signal_connect(area, "draw", G_CALLBACK(draw_cb), videosink);

    gtk_widget_show_all (window);

    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    gtk_main();

    return 0;
}
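create_window and draw_cb are referenced above but not listed. Minimal sketches of both follow, assuming the GStreamer 1.0 GstVideoOverlay API and an X11 GDK backend (GDK_WINDOW_XID); the drawing area must already be realized when the handle is requested, which the gtk_widget_realize call above ensures.

#include <gst/video/videooverlay.h>
#include <gdk/gdkx.h>

/* Sketch: bus sync handler that hands the drawing area's X window to the sink. */
static GstBusSyncReply
create_window (GstBus *bus, GstMessage *message, GtkWidget *widget)
{
    if (!gst_is_video_overlay_prepare_window_handle_message (message))
        return GST_BUS_PASS;

    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (message)),
        GDK_WINDOW_XID (gtk_widget_get_window (widget)));

    gst_message_unref (message);
    return GST_BUS_DROP;
}

/* Sketch: ask the sink to repaint the last frame when the widget is redrawn
 * while the pipeline is in READY or PAUSED. */
static gboolean
draw_cb (GtkWidget *widget, cairo_t *cr, GstElement *videosink)
{
    gst_video_overlay_expose (GST_VIDEO_OVERLAY (videosink));
    return FALSE;
}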
Beispiel #29
0
int main( int argc, char *argv[] )
{

	/* Set default preferences and retrieve existing preferences */
	preferences_set_default_preferences();
	preferences_get_preferences_from_xml_file("preferences.xml");

	state = IDLE;

	/* Initialize libraries */
	gtk_init (&argc, &argv);
	gst_init(NULL, NULL);
	if(msrp_init(events_msrp) < 0) {
		printf("Error initializing the MSRP library...\n");
	}

	/* Display the main GUI */
	imsUA = create_imsUA ();
	gtk_widget_show (imsUA);

	/* Setup our IPTV window */
	videoWin = create_videoWin();
	gtk_window_set_decorated(GTK_WINDOW(videoWin),FALSE);

	/* Prepare a GST pipeline for the background video */
	backgroundVideoPipeline = NULL;




	if (initialise_eXosip() < 0)
	{
		fprintf(stderr, "Could not initialise - Is port 5060 in use?\n");
	}
	else
	{
		eXosip_set_user_agent("UCT IMS Client");

		presence_get_buddy_list_from_file("buddylist");

		/* Check for incoming eXosip events every 200 ms */
		g_timeout_add (200, get_exosip_events, imsUA);

		/* run the main GUI */
		gtk_main ();

		/* if registered to a proxy deregister on exit*/
		if(registered == REGISTERED)//if client is registered
		{
			ims_send_deregister_message ();

			sleep(1);

			eXosip_event_t *je;

			while((je = eXosip_event_wait(0,50)) != NULL)
			{
				if((je->type == EXOSIP_REGISTRATION_FAILURE) && ((je->response)->status_code == 401))
					ims_process_401(je);
			}

		}

		presence_write_buddy_list_to_file("buddylist");

		preferences_write_preferences_to_xml_file("preferences.xml");

		msrp_quit();
		eXosip_quit();
		return 0;
	}
}
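get_exosip_events, registered with g_timeout_add above, is not part of this listing. A rough sketch of the shape such a callback takes (the real UCT IMS Client dispatches many SIP event types; only the queue-draining pattern is shown, and anything beyond that is omitted on purpose):

/* Sketch: drain the eXosip event queue on every 200 ms tick. */
static gboolean get_exosip_events (gpointer user_data)
{
	eXosip_event_t *je;

	/* non-blocking: fetch whatever events arrived since the last tick */
	while ((je = eXosip_event_wait (0, 0)) != NULL)
	{
		switch (je->type)
		{
			/* handle EXOSIP_CALL_INVITE, EXOSIP_REGISTRATION_SUCCESS, ... */
			default:
				break;
		}
		eXosip_event_free (je);
	}

	return TRUE;	/* keep the timeout source installed */
}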
Beispiel #30
0
int
main (int   argc,
      char *argv[])
{
  GMainLoop *loop;

  GstElement *pipeline, *source, *demuxer, *parser, *decoder, *conv, *sink;
  GstBus *bus;
  guint bus_watch_id;

  /* Initialisation */
  gst_init (&argc, &argv);

  loop = g_main_loop_new (NULL, FALSE);


  /* Check input arguments */
  if (argc != 2) {
    g_printerr ("Usage: %s <AVI filename>\n", argv[0]);
    return -1;
  }


  /* Create gstreamer elements */
  pipeline = gst_pipeline_new ("avi-player");
  source   = gst_element_factory_make ("filesrc",       "file-source");
  demuxer  = gst_element_factory_make ("avidemux",      "a.i-demuxer");
  parser   = gst_element_factory_make ("h264parse",     "h264-parser");
  decoder  = gst_element_factory_make ("avdec_h264",    "h264-decoder");
  conv     = gst_element_factory_make ("videoconvert",  "converter");
  sink     = gst_element_factory_make ("sdlvideosink",  "video-output");

  if (!pipeline || !source || !demuxer || !parser || !decoder || !conv || !sink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  /* Set up the pipeline */

  /* we set the input filename to the source element */
  g_object_set (G_OBJECT (source), "location", argv[1], NULL);

  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* we add all elements into the pipeline */
  /* file-source | avi-demuxer | h264-parser | h264-decoder | converter | video-output */
  gst_bin_add_many (GST_BIN (pipeline),
                    source, demuxer, parser, decoder, conv, sink, NULL);

  /* we link the elements together */
  /* file-source -> avi-demuxer ~> h264-parser -> h264-decoder -> converter -> video-output */
  gst_element_link (source, demuxer);
  gst_element_link_many (parser, decoder, conv, sink, NULL);
  g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), parser);

  /* note that the demuxer will be linked to the parser dynamically.
     The reason is that AVI may contain various streams (for example
     audio and video). The source pad(s) will be created at run time,
     by the demuxer when it detects the number and nature of the streams.
     Therefore we connect a callback function which will be executed
     when the "pad-added" signal is emitted (see the sketch after this listing). */


  /* Set the pipeline to "playing" state*/
  g_print ("Now playing: %s\n", argv[1]);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);


  /* Iterate */
  g_print ("Running...\n");
  g_main_loop_run (loop);


  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);

  return 0;
}
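The on_pad_added callback used above (and in the earlier demuxer examples) is not included in the listing. A minimal sketch of the usual dynamic-pad handler, assuming the new pad should simply be linked to the sink pad of the element passed as user data (the h264 parser here); real code would also check the pad caps before linking, since the demuxer may expose audio pads as well.

/* Sketch: link the demuxer's newly created source pad to the target's sink pad. */
static void
on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
  GstElement *target = GST_ELEMENT (data);
  GstPad *sinkpad = gst_element_get_static_pad (target, "sink");

  if (!gst_pad_is_linked (sinkpad))
    gst_pad_link (pad, sinkpad);

  gst_object_unref (sinkpad);
}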