コード例 #1
0
ファイル: gstPlayRegion.c プロジェクト: guofengzh/gst-app-dev
/* Build the playback pipeline for a PlayRegion:
 *   uridecodebin -> audioconvert -> autoaudiosink
 * uridecodebin's source pads appear dynamically, so the converter/sink
 * chain is linked to the source later, in the "pad-added" callback. */
void
gst_PlayRegion_create_pipeline (GstPlayRegion * PlayRegion, const char *uri)
{
  GstElement *pipeline, *source, *conv, *sink;
  GstBus *bus;

  /* Create gstreamer elements.
   * Fix: the original created an extra pipeline ("my-pipeline") here that
   * was immediately overwritten by the one below and therefore leaked. */
  pipeline = gst_pipeline_new ("audio-player");
  source   = gst_element_factory_make ("uridecodebin", "src");
  conv     = gst_element_factory_make ("audioconvert",  "converter");
  sink     = gst_element_factory_make ("autoaudiosink", "audio-output");

  if (!pipeline || !source || !conv || !sink) {
    /* g_error() aborts the program, so the return below is a safety net. */
    g_error ("One element could not be created. Exiting.\n");
    return;
  }

  /* we set the input filename to the source element */
  g_object_set (G_OBJECT (source), "uri", uri, NULL);

  gst_bin_add_many (GST_BIN (pipeline), source, conv, sink, NULL);

  /* can't link src yet, it has no pads */
  gst_element_link_many (conv, sink, NULL);

  /* retrieves a pad from element by name (new reference; caller owns it
   * via PlayRegion->sinkpad) */
  GstPad *sinkpad = gst_element_get_static_pad (conv, "sink");

  PlayRegion->counter = 0;
  PlayRegion->prerolled = FALSE;

  /* for each pad block that is installed, we will increment
   * the counter. for each pad block that is signaled, we
   * decrement the counter. When the counter is 0 we post
   * an app message to tell the app that all pads are
   * blocked. Start with 1 that is decremented when no-more-pads
   * is signaled to make sure that we only post the message
   * after no-more-pads */
  g_atomic_int_set (&PlayRegion->counter, 1);

  g_signal_connect (source, "pad-added",
      (GCallback) cb_pad_added, PlayRegion);
  g_signal_connect (source, "no-more-pads",
      (GCallback) cb_no_more_pads, PlayRegion);

  PlayRegion->pipeline = pipeline;

  /* keep messages on the bus across state flushes so the watch sees them */
  gst_pipeline_set_auto_flush_bus (GST_PIPELINE (pipeline), FALSE);
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, gst_PlayRegion_handle_message, PlayRegion);

  PlayRegion->bus = bus;
  PlayRegion->source_element = source;
  PlayRegion->sink_element = sink;
  PlayRegion->sinkpad = sinkpad;
}
コード例 #2
0
ファイル: camerabin.c プロジェクト: spunktsch/svtplayer
/* Validate captured files by playing them with playbin
 * and checking that no errors occur. */
static gboolean
check_file_validity (const gchar * filename)
{
  GstBus *bus;
  GMainLoop *loop = g_main_loop_new (NULL, FALSE);
  GstElement *playbin = gst_element_factory_make ("playbin2", NULL);
  GstElement *fakevideo = gst_element_factory_make ("fakesink", NULL);
  GstElement *fakeaudio = gst_element_factory_make ("fakesink", NULL);
  /* NOTE(review): if make_test_file_name() returns an allocated string it
   * is leaked here -- confirm its ownership contract. */
  gchar *uri = g_strconcat ("file://", make_test_file_name (filename), NULL);

  GST_DEBUG ("checking uri: %s", uri);
  /* Route both streams to fakesinks; only bus errors matter here. */
  g_object_set (G_OBJECT (playbin), "uri", uri, "video-sink", fakevideo,
      "audio-sink", fakeaudio, NULL);

  bus = gst_pipeline_get_bus (GST_PIPELINE (playbin));
  gst_bus_add_watch (bus, (GstBusFunc) validity_bus_cb, loop);

  /* validity_bus_cb is expected to quit the loop on EOS or error. */
  gst_element_set_state (playbin, GST_STATE_PLAYING);
  g_main_loop_run (loop);
  gst_element_set_state (playbin, GST_STATE_NULL);

  g_free (uri);
  gst_object_unref (bus);
  gst_object_unref (playbin);
  /* Fix: the main loop was previously leaked on every call. */
  g_main_loop_unref (loop);

  return TRUE;
}
コード例 #3
0
/* Build a playbin-based pipeline for switchui and point it at uri.
 * Stores the pipeline, its bus (with a message watch attached), and a
 * new reference to the playbin in the GstSwitchUI struct. */
void
gst_switchui_create_pipeline_playbin (GstSwitchUI * switchui, const char *uri)
{
  GstElement *pipeline;
  GstElement *playbin;

  pipeline = gst_pipeline_new (NULL);

  /* Fix: the original tested a GError that nothing ever set (dead code);
   * check element creation directly instead. */
  playbin = gst_element_factory_make ("playbin", "source");
  if (playbin == NULL) {
    g_print ("failed to create playbin element\n");
    gst_object_unref (pipeline);
    return;
  }
  gst_bin_add (GST_BIN (pipeline), playbin);

  switchui->pipeline = pipeline;

  /* keep messages on the bus across state flushes */
  gst_pipeline_set_auto_flush_bus (GST_PIPELINE (pipeline), FALSE);
  switchui->bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (switchui->bus, gst_switchui_handle_message, switchui);

  /* gst_bin_get_by_name() returns a new reference to the playbin */
  switchui->source_element = gst_bin_get_by_name (GST_BIN (pipeline), "source");
  g_print ("source_element is %p\n", switchui->source_element);

  g_print ("setting uri to %s\n", uri);
  g_object_set (switchui->source_element, "uri", uri, NULL);
}
コード例 #4
0
ファイル: gstintertest.c プロジェクト: LCW523/gst-plugins-bad
/* Build a playbin2-based pipeline for intertest and point it at uri.
 * Falls back to the videotestsrc pipeline when no uri is given. */
void
gst_inter_test_create_pipeline_playbin (GstInterTest * intertest,
    const char *uri)
{
  GstElement *pipeline;
  GstElement *playbin;

  if (uri == NULL) {
    gst_inter_test_create_pipeline_vts (intertest);
    return;
  }

  pipeline = gst_pipeline_new (NULL);

  /* Fix: the original tested a GError that nothing ever set (dead code);
   * check element creation directly instead. */
  playbin = gst_element_factory_make ("playbin2", "source");
  if (playbin == NULL) {
    g_print ("failed to create playbin2 element\n");
    gst_object_unref (pipeline);
    return;
  }
  gst_bin_add (GST_BIN (pipeline), playbin);

  intertest->pipeline = pipeline;

  /* keep messages on the bus across state flushes */
  gst_pipeline_set_auto_flush_bus (GST_PIPELINE (pipeline), FALSE);
  intertest->bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (intertest->bus, gst_inter_test_handle_message, intertest);

  /* gst_bin_get_by_name() returns a new reference to the playbin */
  intertest->source_element =
      gst_bin_get_by_name (GST_BIN (pipeline), "source");
  g_print ("source_element is %p\n", intertest->source_element);

  g_print ("setting uri to %s\n", uri);
  g_object_set (intertest->source_element, "uri", uri, NULL);
}
コード例 #5
0
ファイル: utils.c プロジェクト: amipro/sphone
/* Start playback of the file at `path` through a playbin2 element.
 * Returns 0 on success (or if playback is already running), 1 on error. */
static int utils_gst_start(gchar *path)
{
	if(utils_gst_play)
		return 0;

	GstBus *bus;
	gchar *uri=g_filename_to_uri(path,NULL,NULL);

	if(!uri)
		return 1;

	utils_gst_play = gst_element_factory_make ("playbin2", "play");
	/* Fix: guard against element-creation failure; the original would
	 * dereference NULL below and leak uri. */
	if(!utils_gst_play){
		g_free(uri);
		return 1;
	}
	g_object_set (G_OBJECT (utils_gst_play), "uri", uri, NULL);

	bus = gst_pipeline_get_bus (GST_PIPELINE (utils_gst_play));
	gst_bus_add_watch (bus, utils_gst_bus_callback, NULL);
	gst_object_unref (bus);

	// Set audio routing
	utils_audio_route_save();
	utils_audio_route_set_play();

	gst_element_set_state (utils_gst_play, GST_STATE_PLAYING);
	g_free(uri);

	return 0;
}
コード例 #6
0
/* One-time init of the PCM playback path on Maemo: builds the global
 * appsrc -> volume -> pulsesink pipeline and attaches a bus watch.
 * All gst_* element handles here are file-scope globals. */
void pcm_play_dma_init(void)
{
    maemo_init_libplayback();

    /* run our private main loop on the context of the maemo main loop */
    GMainContext *ctx = g_main_loop_get_context(maemo_main_loop);
    pcm_loop = g_main_loop_new (ctx, true);

    gst_init (NULL, NULL);

    gst_pipeline = gst_pipeline_new ("rockbox");

    /* appsrc is fed by the player core; pulsesink does the output */
    gst_appsrc = gst_element_factory_make ("appsrc", NULL);
    gst_volume = gst_element_factory_make ("volume", NULL);
    gst_pulsesink = gst_element_factory_make ("pulsesink", NULL);

    /* Connect elements */
    gst_bin_add_many (GST_BIN (gst_pipeline),
                        gst_appsrc, gst_volume, gst_pulsesink, NULL);
    gst_element_link_many (gst_appsrc, gst_volume, gst_pulsesink, NULL);

    /* Connect to gstreamer bus of the pipeline.
     * NOTE(review): gst_bus (a new reference) is kept in a global and
     * never unreffed here -- presumably released at shutdown; confirm. */
    gst_bus = gst_pipeline_get_bus (GST_PIPELINE (gst_pipeline));
    bus_watch_id = gst_bus_add_watch (gst_bus, (GstBusFunc) gst_bus_message, NULL);

    maemo_configure_appsrc();
}
コード例 #7
0
/* Capture from the default video source, scale/convert to 800x480 RGB at
 * 15 fps, and display it.  Runs until async_bus_cb quits the loop. */
int main(int argc, char **argv){
	GMainLoop *loop;
	GstElement *pipeline, *video_src, *ffmpegcolorspace, *videoscale, *videorate, *video_caps, *ffmpegcolorspace2, *video_sink;
	GstCaps *caps;
	GstBus *bus;

	gst_init(&argc, &argv);

	pipeline = gst_pipeline_new("pipeline");
	g_assert(pipeline);

	bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
	g_assert(bus);

	video_src = gst_element_factory_make("autovideosrc", NULL);
	g_assert(video_src);

	ffmpegcolorspace = gst_element_factory_make("ffmpegcolorspace", NULL);
	g_assert(ffmpegcolorspace);

	videoscale = gst_element_factory_make("videoscale", NULL);
	g_assert(videoscale);

	videorate = gst_element_factory_make("videorate", NULL);
	g_assert(videorate);

	/* capsfilter pins the negotiated format between the two converters */
	video_caps = gst_element_factory_make("capsfilter", NULL);
	g_assert(video_caps);
	caps = gst_caps_new_simple(
		"video/x-raw-rgb",
		"width", G_TYPE_INT, 800,
		"height", G_TYPE_INT, 480,
		"framerate", GST_TYPE_FRACTION, 15, 1,
		"bpp", G_TYPE_INT, 24,
		"depth", G_TYPE_INT, 24,
		NULL
	);
	g_object_set(video_caps, "caps", caps, NULL);
	gst_caps_unref(caps);

	ffmpegcolorspace2 = gst_element_factory_make("ffmpegcolorspace", NULL);
	g_assert(ffmpegcolorspace2);

	video_sink = gst_element_factory_make("autovideosink", NULL);
	g_assert(video_sink);

	/* Fix: gst_bin_add_many() takes a GstBin*, so cast the pipeline. */
	gst_bin_add_many(GST_BIN(pipeline), video_src, ffmpegcolorspace, videoscale, videorate, video_caps, ffmpegcolorspace2, video_sink, NULL);
	gst_element_link_many(video_src, ffmpegcolorspace, videoscale, videorate, video_caps, ffmpegcolorspace2, video_sink, NULL);

	gst_element_set_state(pipeline, GST_STATE_PLAYING);

	loop = g_main_loop_new(NULL, FALSE);
	gst_bus_add_watch(bus, async_bus_cb, loop); // <--- Note that async_bus_cb is a function defined in libs/helpers.c !
	g_main_loop_run(loop);

	gst_element_set_state(pipeline, GST_STATE_NULL);
	gst_object_unref(bus);
	/* Fix: release the pipeline and the loop before exiting. */
	gst_object_unref(pipeline);
	g_main_loop_unref(loop);

	return 0;
}
コード例 #8
0
ファイル: PlayBin.c プロジェクト: hejinyi/GStreamer
/* Minimal playbin player: plays the URI given on the command line until
 * my_bus_callback quits the main loop (EOS or error). */
gint main(gint argc, gchar *argv[]) {
    GMainLoop *loop;
    GstElement *play;
    GstBus *bus;

    /* init GStreamer */
    gst_init (&argc, &argv);
    loop = g_main_loop_new (NULL, FALSE);

    /* make sure we have a URI */
    if (argc != 2) {
        g_print ("Usage: %s <URI>\n", argv[0]);
        return -1;
    }

    /* set up */
    play = gst_element_factory_make ("playbin", "play");
    g_object_set(G_OBJECT(play), "uri", argv[1], NULL);

    bus = gst_pipeline_get_bus (GST_PIPELINE (play));
    gst_bus_add_watch (bus, my_bus_callback, loop);

    /* the watch keeps its own reference; drop ours now */
    gst_object_unref (bus);

    gst_element_set_state (play, GST_STATE_PLAYING);

    /* now run */
    g_main_loop_run (loop);

    /* also clean up.
     * Fix: the main loop was previously leaked. */
    gst_element_set_state (play, GST_STATE_NULL);
    gst_object_unref (GST_OBJECT (play));
    g_main_loop_unref (loop);

    return 0;
}
コード例 #9
0
ファイル: gstreamer.c プロジェクト: jCoderZ/m3player
/* Initialise the m3player pipeline: souphttpsrc -> mad (MP3 decode) ->
 * alsasink.  The element handles (pipeline/source/filter/sink) and
 * mime_types are file-scope globals. */
void
gstreamer_init (GMainLoop *main_loop)
{
    GstBus *bus;

    gst_init (NULL, NULL);

    pipeline = gst_pipeline_new ("m3player-pipeline");

    source = gst_element_factory_make ("souphttpsrc", "source");
    filter = gst_element_factory_make ("mad", "filter");
    sink = gst_element_factory_make ("alsasink", "sink");

    /* Fix: warn if any element is missing (e.g. plugin not installed)
     * instead of passing NULL into gst_bin_add_many(). */
    if (!source || !filter || !sink) {
        g_warning ("Failed to create pipeline elements!");
    } else {
        gst_bin_add_many (GST_BIN (pipeline), source, filter, sink, NULL);

        if (!gst_element_link_many (source, filter, sink, NULL)) {
            g_warning ("Failed to link elements!");
        }
    }

    bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    g_debug("bus=%ld", (long int) bus);
    gst_bus_add_watch (bus, bus_call, main_loop);
    gst_object_unref (bus);

    mime_types = gstreamer_find_mime_types ();
}
コード例 #10
0
ファイル: tageditor.c プロジェクト: MrsZTP/rhythmcat
/* Create the hidden playbin used for tag reading, with both audio and
 * video routed to fakesinks.  Returns TRUE on success, FALSE if any
 * element could not be created. */
static gboolean rc_plugin_tag_reader_init()
{
    GstElement *audio_fakesink = NULL;
    GstElement *video_fakesink = NULL;
    GstBus *bus;
    tag_reader_bin = gst_element_factory_make("playbin2", NULL);
    if(tag_reader_bin==NULL)
        tag_reader_bin = gst_element_factory_make("playbin", NULL);
    if(tag_reader_bin==NULL) return FALSE;
    audio_fakesink = gst_element_factory_make("fakesink", NULL);
    video_fakesink = gst_element_factory_make("fakesink", NULL);
    if(audio_fakesink==NULL || video_fakesink==NULL)
    {
        if(audio_fakesink!=NULL) gst_object_unref(audio_fakesink);
        if(video_fakesink!=NULL) gst_object_unref(video_fakesink);
        gst_object_unref(tag_reader_bin);
        tag_reader_bin = NULL;
        /* Fix: the original fell through here and dereferenced the NULL
         * fakesinks below. */
        return FALSE;
    }
    g_object_set(G_OBJECT(video_fakesink), "sync", TRUE, NULL);
    g_object_set(G_OBJECT(audio_fakesink), "sync", TRUE, NULL);
    g_object_set(G_OBJECT(tag_reader_bin), "video-sink", video_fakesink, 
        "audio-sink", audio_fakesink, NULL);
    tag_reader_sink_pad = gst_element_get_static_pad(audio_fakesink, "sink");
    bus = gst_pipeline_get_bus(GST_PIPELINE(tag_reader_bin));
    gst_bus_add_watch(bus, (GstBusFunc)rc_plugin_tag_reader_bus_handler,
        NULL);
    gst_object_unref(bus);
    /* park the bin in READY so a subsequent uri change starts quickly */
    gst_element_set_state(tag_reader_bin, GST_STATE_NULL);
    gst_element_set_state(tag_reader_bin, GST_STATE_READY);
    return TRUE;
}
コード例 #11
0
///////////////////////////////////////////////////////////
// virtual
bool LLMediaImplGStreamer::navigateTo (const std::string urlIn)
{
	LL_DEBUGS("MediaImpl") << "Setting media URI: " << urlIn.c_str()
	    << LL_ENDL;

	if (mPump == NULL || mPlaybin == NULL)
	{
		return false;
	}

	setStatus( LLMediaBase::STATUS_NAVIGATING );

	// set URI
	g_object_set (G_OBJECT (mPlaybin), "uri", urlIn.c_str(), NULL);

	// get playbin's bus - perhaps this can/should be done in ctor
	GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (mPlaybin));
	if (!bus)
	{
		return false;
	}
	gst_bus_add_watch (bus, bus_callback, this);
	gst_object_unref (bus);

	mState = GST_STATE_READY;

	return true;
}
コード例 #12
0
ファイル: player-bak.c プロジェクト: zhoujianchun/gst-pro
/* Play the URI given on the command line with playbin2 inside a small
 * GTK window. */
int main(int argc, char** argv)
{
    GMainLoop *loop;
    GstElement *play;

    gst_init(&argc, &argv);
    gtk_init(&argc, &argv);

    /* Fix: argv[1] was used unconditionally; bail out when no URI given. */
    if (argc < 2) {
        g_print("Usage: %s <URI>\n", argv[0]);
        return -1;
    }

    GtkWidget *window = gtk_window_new(GTK_WINDOW_TOPLEVEL);
    gtk_widget_set_size_request(window, 200, 200);

    play = gst_element_factory_make("playbin2", "play");
    loop = g_main_loop_new(NULL, FALSE);

    g_object_set(G_OBJECT(play), "uri", argv[1], NULL);
    gst_bus_add_watch(gst_pipeline_get_bus(GST_PIPELINE(play)), bus_cb, loop);
    g_print("playing......\n");
    gst_element_set_state(play, GST_STATE_PLAYING);
    g_print("start g_main_loop_run\n");
    /* NOTE(review): g_main_loop_run(loop) is never actually called even
     * though the surrounding log lines suggest it should be, so playback
     * is torn down immediately.  Left as-is to preserve existing
     * behaviour -- confirm intent before enabling the loop. */
    g_print("g_main_loop_run return\n");
    gst_element_set_state(play, GST_STATE_NULL);
    gtk_widget_show_all(window);

    gtk_main();
    return 0;
}
コード例 #13
0
ファイル: streamer.c プロジェクト: mov-q/papaya
/* this is the first function where pipeline is built */
/* Builds the global streaming pipeline according to the compile-time
 * STREAM_TYPE and starts it playing.  `pipeline`, `loop` and `pipeclock`
 * are file-scope globals. */
void 
setup_streamer(int narg, char *sarg[])
{
    /* the message bus */
    GstBus *bus;

    /* this init the global sources structures */
    init_global_sources();
    init_global_outputs();

    pipeline = gst_pipeline_new("pipeline");
    bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    /* the watch holds its own bus reference, so ours is dropped here */
    gst_bus_add_watch (bus, cb_papaya_pipeline_bus, loop);
    gst_object_unref (bus);

    /* dispatch on the stream type chosen at build time; audio and video
     * variants each build their own element chain into `pipeline` */
    switch (STREAM_TYPE){
        case AUDIO_PLAYLIST:
        case AUDIO_LIVE:
            papaya_audio_tree_build();
            break;
        case VIDEO_PLAYLIST:
        case VIDEO_LIVE:
        case XORG_LIVE:
        case DVB_LIVE:
        case DV1394_LIVE:
            papaya_video_tree_build();
            break;
    }
    /* after having built the chain, let's
     * play some media */ 
    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    /* NOTE(review): gst_element_get_clock() returns a reference that is
     * kept in the global pipeclock -- confirm it is released at teardown. */
    pipeclock = gst_element_get_clock(pipeline);
}
コード例 #14
0
void StreamPipeline::Init(const Song& song) {
  pipeline_ = gst_pipeline_new("pipeline");
  GstElement* uridecodebin = CreateElement("uridecodebin", pipeline_);
  qLog(Debug) << "Streaming:" << song.url();
  g_object_set(
      G_OBJECT(uridecodebin), "uri", song.url().toString().toUtf8().constData(), NULL);
  g_signal_connect(
      G_OBJECT(uridecodebin), "pad-added", G_CALLBACK(NewPadCallback), this);

  GError* error = NULL;
  convert_bin_ = gst_parse_bin_from_description(kPipeline, TRUE, &error);
  gst_bin_add(GST_BIN(pipeline_), convert_bin_);

  gst_element_set_state(uridecodebin, GST_STATE_PLAYING);

  app_sink_ = CreateElement("appsink", pipeline_);
  g_object_set(G_OBJECT(app_sink_), "emit-signals", TRUE, NULL);
  g_signal_connect(
      G_OBJECT(app_sink_), "new-buffer", G_CALLBACK(NewBufferCallback), this);

  qLog(Debug) << "Linking appsink:" << gst_element_link(convert_bin_, app_sink_);

  gst_bus_set_sync_handler(
      gst_pipeline_get_bus(GST_PIPELINE(pipeline_)), BusCallbackSync, this);
  bus_cb_id_ = gst_bus_add_watch(
      gst_pipeline_get_bus(GST_PIPELINE(pipeline_)), BusCallback, this);
}
コード例 #15
0
ファイル: main.c プロジェクト: apeny/antkillerfarm_crazy
/* Initialise the media subsystem: build either the TCP or RTP pipeline
 * (chosen at compile time via TRANS_TYPE), register every configured
 * server with it, and attach a bus watch to the global playbin. */
void media_init()
{
  GstBus *bus;

  gst_init (NULL, NULL);
  //gst_debug_set_default_threshold(GST_LEVEL_MEMDUMP);

#if (TRANS_TYPE == TRANS_TYPE_TCP)
  gst_pipeline_tcp_init();
#else
  gst_pipeline_rtp_init();
#endif
  
  /* register each configured server with the transport-specific pipeline */
  int i;
  for (i = 0; i < SERVER_LIST_NUM; i++)
    {
#if (TRANS_TYPE == TRANS_TYPE_TCP)
      add_server_to_pipeline_tcp(control_service_data[i].server_ip);
#else
      add_server_to_pipeline_rtp(control_service_data[i].server_ip);
#endif
    }
  
  /* the watch keeps its own bus reference; ours is dropped right after */
  bus = gst_element_get_bus (gst_data.playbin);
  gst_bus_add_watch (bus, bus_call, NULL);
  g_object_unref (bus);
}
コード例 #16
0
ファイル: Graphtest.c プロジェクト: fusonmb/Spectrum-Analyzer
/* Run a spectrum-analysis demo pipeline:
 *   audiotestsrc (6 kHz sine) -> audioconvert -> spectrum -> fakesink
 * Spectrum results arrive as bus messages handled by message_handler.
 * Blocks in a GLib main loop until the handler quits it. */
int
spectrum_run (int argc, char *argv[])
{
  GstElement *bin;
  GstElement *src, *audioconvert, *spectrum, *sink;
  GstBus *bus;
  GstCaps *caps;
  GMainLoop *loop;

  gst_init (&argc, &argv);

//	g_print("Enter Upper Frequency Bound:");
//	scanf("%d" , &inputfreq);
//	AUDIOFREQ = (inputfreq * 2);
//	g_print("Enter Number of Frequncy Bands:");
//	scanf("%d" , &spect_bands);

  bin = gst_pipeline_new ("bin");

  /* wave=0 selects a sine wave */
  src = gst_element_factory_make ("audiotestsrc", "src");
  g_object_set (G_OBJECT (src), "wave", 0, "freq", 6000.0, NULL);
  audioconvert = gst_element_factory_make ("audioconvert", NULL);
  g_assert (audioconvert);

  spectrum = gst_element_factory_make ("spectrum", "spectrum");
  g_object_set (G_OBJECT (spectrum), "bands", spect_bands, "threshold", -80,
      "message", TRUE, "message-phase", TRUE, NULL);

  sink = gst_element_factory_make ("fakesink", "sink");
  g_object_set (G_OBJECT (sink), "sync", TRUE, NULL);

  gst_bin_add_many (GST_BIN (bin), src, audioconvert, spectrum, sink, NULL);

  /* force the sample rate so the spectrum bands cover the wanted range */
  caps = gst_caps_new_simple ("audio/x-raw-int",
      "rate", G_TYPE_INT, AUDIOFREQ, NULL);

  if (!gst_element_link (src, audioconvert) ||
      !gst_element_link_filtered (audioconvert, spectrum, caps) ||
      !gst_element_link (spectrum, sink)) {
    fprintf (stderr, "can't link elements\n");
    exit (1);
  }
  gst_caps_unref (caps);

  /* the watch keeps its own bus reference; drop ours now */
  bus = gst_element_get_bus (bin);
  gst_bus_add_watch (bus, message_handler, NULL);
  gst_object_unref (bus);

  gst_element_set_state (bin, GST_STATE_PLAYING);

  /* we need to run a GLib main loop to get the messages */
  loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (loop);

  gst_element_set_state (bin, GST_STATE_NULL);

  gst_object_unref (bin);
  /* Fix: the main loop was previously leaked. */
  g_main_loop_unref (loop);

  return 0;
}
コード例 #17
0
/* giosrc-mounting example: play the URI from the command line with
 * playbin under a GTK main loop; message_handler watches the bus. */
int
main (int argc, char *argv[])
{
  GstBus *message_bus;
  gint bus_watch_id;

  /* exactly one argument: the URI to play */
  if (argc != 2) {
    g_print ("usage: giosrc-mounting URI\n");
    return -1;
  }

  gst_init (NULL, NULL);
  gtk_init (NULL, NULL);

  /* playbin does all the heavy lifting; just hand it the URI */
  pipeline = gst_element_factory_make ("playbin", NULL);
  g_assert (pipeline);
  g_object_set (G_OBJECT (pipeline), "uri", argv[1], NULL);

  /* the watch holds its own bus reference, so release ours right away */
  message_bus = gst_element_get_bus (pipeline);
  bus_watch_id = gst_bus_add_watch (message_bus, message_handler, NULL);
  gst_object_unref (message_bus);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* spin the GTK main loop; bus messages are dispatched from here */
  gtk_main ();

  /* teardown: detach the watch, stop and release the pipeline */
  g_source_remove (bus_watch_id);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  return 0;
}
コード例 #18
0
	AudioAbstracterPrivate () {
		//pipeline = gst_pipeline_new ("mmls-pipeline");
		
		decoder = gst_element_factory_make ("playbin2", "decoder-bin");
		//sink = gst_element_factory_make ("alsasink", "audio-output");
		
		if (! decoder) {
			g_printerr ("GStreamer abstraction: Some elements could not be created.\n");
		}
		
		//gst_bin_add_many (GST_BIN (pipeline), decoder, sink, NULL);
		
		bus = gst_pipeline_get_bus (GST_PIPELINE (decoder));
		
		// Only play audio
		unsigned int flags = (1 << 1);
		
		g_object_set (G_OBJECT (decoder), "flags", flags, NULL);
		//g_object_set (G_OBJECT (decoder), "audio-sink", sink, NULL);
		
		bcd.element = decoder;
		
		gst_bus_add_watch (bus, bus_call, &bcd);
		gst_object_unref (bus);
	}
コード例 #19
0
ファイル: main.c プロジェクト: wwplaygh/gejengel
/* Play the file named on the command line with playbin2.
 * `filename`, `mainloop`, `playBin` and `bus` are file-scope globals. */
int main(int argc, char* argv[])
{
    gst_init(&argc, &argv);

    if (argc < 2)
    {
        printf("Usage: %s filename\n", argv[0]);
        return -1;
    }

    /* Fix: removed the dead `filename = argv[1];` assignment -- it was
     * unconditionally overwritten by the g_filename_to_uri() result. */
    mainloop = g_main_loop_new(NULL, FALSE);

    playBin = gst_element_factory_make ("playbin2", "player");
    g_assert(playBin);
    bus = gst_element_get_bus(playBin);
    g_assert(bus);

    gst_bus_add_watch(bus, onBusMessage, NULL);

    /* convert the path argument into a file:// URI for playbin */
    filename = g_filename_to_uri(argv[1], NULL, NULL);
    g_assert(filename);

    g_object_set(G_OBJECT (playBin), "uri", filename, NULL);

    setState(GST_STATE_PLAYING);

    /* onBusMessage is expected to quit the loop on EOS/error */
    g_main_loop_run(mainloop);

    return 0;
}
コード例 #20
0
ファイル: test-utils.c プロジェクト: cfoch/ges
/* Commit and play a GES timeline to completion (my_bus_callback quits
 * the loop on EOS/error).  Always returns TRUE. */
gboolean
play_timeline (GESTimeline * timeline)
{
  GstBus *bus;
  GESPipeline *pipeline;
  GMainLoop *loop = g_main_loop_new (NULL, FALSE);

  ges_timeline_commit (timeline);
  pipeline = ges_pipeline_new ();

  /* the watch keeps its own bus reference; drop ours now */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, (GstBusFunc) my_bus_callback, loop);
  gst_object_unref (bus);

  ges_pipeline_set_timeline (pipeline, timeline);
  /* block until the state change completes (timeout = -1) */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
  gst_element_get_state (GST_ELEMENT (pipeline), NULL, NULL, -1);

  g_main_loop_run (loop);

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
  gst_element_get_state (GST_ELEMENT (pipeline), NULL, NULL, -1);

  gst_object_unref (pipeline);
  /* Fix: the main loop was previously leaked. */
  g_main_loop_unref (loop);

  return TRUE;
}
コード例 #21
0
ファイル: gstapi.cpp プロジェクト: PeterXu/gst-mobile
/* Create the playbin element and attach the bus message handler.
 * Returns true when the playbin already exists (idempotent) or after
 * successful creation; the returnb_assert macros bail out on failure. */
bool CGstPlayback::Init()
{
    /* already initialised: nothing to do */
    returnv_assert(!m_playbin, true);

    g_print("%s, begin", __func__);

    m_playbin = gst_element_factory_make ("playbin", "playbin");
    returnb_assert(m_playbin);

    /* the watch keeps its own bus reference; ours is dropped below */
    GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE(m_playbin));
    returnb_assert(bus);
    gst_bus_add_watch (bus, (GstBusFunc)handle_message, this);
    gst_object_unref (bus);

    //m_audio_sink = gst_element_factory_make("autoaudiosink", NULL);
    //returnb_assert(m_audio_sink);
    //g_object_set (GST_OBJECT(m_playbin), "audio-sink", m_audio_sink, NULL);

    //m_video_sink = gst_element_factory_make("eglglessink", NULL);
    //returnb_assert(m_video_sink);
    //g_object_set (GST_OBJECT(m_playbin), "video-sink", m_video_sink, NULL);

    g_print("%s, end", __func__);

    return true;
}
コード例 #22
0
ファイル: helloworld.cpp プロジェクト: fei1700/apptest
/* Initialise the GStreamer playback backend.
 * NOTE(review): the early `return 0;` below deliberately short-circuits
 * the whole function (it reads like a debugging stub that disables the
 * backend) -- confirm whether the setup should be re-enabled. */
int MusicPlayer::gstinit(void)
{
	g_print("gst ");
	return 0;

	gst_init (NULL,NULL);
	g_print("init ");

	GstElement *audiosink, *aibin, *videosink, *auconv;
	GstPad *aupad, *vipad;
	GstBus *bus;

	m_playbin = gst_element_factory_make("zplaybin","playbin");
	if(m_playbin==NULL)
	{
		g_error("Could't create 'playbin' element\n");
		return -1;
	}

	/* periodically print the playback position */
	g_timeout_add(100,(GSourceFunc)print_position,GST_PIPELINE(m_playbin));

	/* the watch keeps its own bus reference; drop ours */
	bus=gst_pipeline_get_bus(GST_PIPELINE(m_playbin));
	gst_bus_add_watch(bus,playbin_bus_callback,NULL);
	gst_object_unref(bus);
	g_print("all initialized!\n");

	/* Fix: the function previously fell off the end without returning a
	 * value -- undefined behaviour when the caller uses the result. */
	return 0;
}
コード例 #23
0
/* Build the standalone Mplayer pipeline from the MPLAYER_* element
 * factories, load `pURL` into the source, and wire up bus/pad callbacks.
 * The demuxer's audio/video pads are linked dynamically in on_pad_added.
 * Always returns 0. */
static int load_elements (char* pURL)
{
    /* NOTE(review): frame_delay is set but never used here -- confirm
     * whether it belongs to code outside this function. */
    guint frame_delay = 0;

    /* Create gstreamer elements */
    pipeline = gst_pipeline_new ("Mplayer-Standalone");

    source  = gst_element_factory_make_or_warn (MPLAYER_SOURCE, NULL);
    decoder = gst_element_factory_make_or_warn (MPLAYER_DECODER, NULL);
    v_sink  = gst_element_factory_make_or_warn (MPLAYER_V_SINK, NULL);
    a_conv  = gst_element_factory_make_or_warn (MPLAYER_A_CONV, NULL);
    a_proc  = gst_element_factory_make_or_warn (MPLAYER_A_PROC, NULL);
    a_sink  = gst_element_factory_make_or_warn (MPLAYER_A_SINK, NULL);

    /* Set the source path to get the stream from */
    load_url (pURL);

    /* Add Elements to the pipelines */
    gst_bin_add_many (GST_BIN(pipeline), source, decoder, v_sink, a_conv, a_proc, a_sink, NULL);

    /* we link the elements together; the decoder's output pads are
     * linked to v_sink / a_conv later, from on_pad_added */
    gst_element_link (source, decoder);
    gst_element_link (a_conv, a_proc);
    gst_element_link (a_proc, a_sink);

    //listening for End Of Stream (EOS) events, etc.
    GstBus* bin_bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
    gst_bus_add_watch(bin_bus, bus_call, NULL);
    gst_object_unref(bin_bus);

    //will try to connect demux with viddec and audio on the fly
    g_signal_connect (decoder, "new-decoded-pad", G_CALLBACK (on_pad_added), NULL);
    g_signal_connect (decoder, "element-added", G_CALLBACK (on_auto_element_added),  NULL);
    return 0;
}
コード例 #24
0
ファイル: main.c プロジェクト: joojler/jrepo
/* appsrc demo: frames supplied by cb_need_data are pushed through
 * h264parse -> videoconvert -> xvimagesink until my_bus_callback quits
 * the main loop.
 * NOTE(review): feeding raw RGB16 caps into h264parse looks inconsistent
 * (h264parse expects video/x-h264) -- confirm the intended stream format. */
gint
main (gint   argc,
      gchar *argv[])
{
  GstElement *pipeline, *appsrc, *conv, *videosink, *h264parse;
  CustomData data;

  memset (&data, 0, sizeof (CustomData));
  /* init GStreamer */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* setup pipeline: appsrc -> h264parse -> videoconvert -> xvimagesink */
  pipeline = gst_pipeline_new ("pipeline");
  appsrc = gst_element_factory_make ("appsrc", "source");
  h264parse = gst_element_factory_make ("h264parse", "parse");
  conv = gst_element_factory_make ("videoconvert", "conv");
  videosink = gst_element_factory_make ("xvimagesink", "videosink");

  /* Describe the raw video format produced by cb_need_data.
   * Fix: "block" and "max-bytes" are appsrc *properties*, not caps
   * fields -- they were moved into the g_object_set() call below. */
  g_object_set (G_OBJECT (appsrc), "caps",
  		gst_caps_new_simple ("video/x-raw",
				     "format", G_TYPE_STRING, "RGB16",
				     "width", G_TYPE_INT, 384,
				     "height", G_TYPE_INT, 288,
				     "framerate", GST_TYPE_FRACTION, 10, 1,
				     NULL), NULL);
  gst_bin_add_many (GST_BIN (pipeline), appsrc, h264parse, conv, videosink, NULL);
  gst_element_link_many (appsrc, h264parse, conv, videosink, NULL);

  /* setup appsrc; "max-bytes" is a guint64 property, so the literal must
   * be cast for the varargs call */
  g_object_set (G_OBJECT (appsrc),
		"stream-type", 0,
		"format", GST_FORMAT_TIME,
		"block", TRUE,
		"max-bytes", (guint64) (2 * 1024 * 1024),
		NULL);
  g_signal_connect (appsrc, "need-data", G_CALLBACK (cb_need_data), NULL);

  /* play */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  gst_bus_add_watch (GST_ELEMENT_BUS (pipeline),
					my_bus_callback, 
					loop);
  g_main_loop_run (loop);

  /* clean up */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (pipeline));
  g_main_loop_unref (loop);

  return 0;
}
コード例 #25
0
/*
 * Runs the RTP pipeline.
 * @param p Pointer to the RTP pipeline.
 */
/* Pushes LOOP_COUNT passes of p->frame_count frames into the appsrc,
 * signals end-of-stream, then spins a main loop until rtp_bus_callback
 * quits it (EOS or error). */
static void
rtp_pipeline_run (rtp_pipeline * p)
{
  GstFlowReturn flow_ret;
  GMainLoop *mainloop = NULL;
  GstBus *bus;
  gint i, j;

  /* Check parameters. */
  if (p == NULL) {
    return;
  }

  /* Create mainloop. */
  mainloop = g_main_loop_new (NULL, FALSE);
  if (!mainloop) {
    return;
  }

  /* Add bus callback.  The watch keeps its own bus reference, so ours
   * is dropped immediately. */
  bus = gst_pipeline_get_bus (GST_PIPELINE (p->pipeline));

  gst_bus_add_watch (bus, rtp_bus_callback, (gpointer) mainloop);
  gst_object_unref (bus);

  /* Set pipeline to PLAYING. */
  gst_element_set_state (p->pipeline, GST_STATE_PLAYING);

  /* Push data into the pipeline */
  for (i = 0; i < LOOP_COUNT; i++) {
    const guint8 *data = p->frame_data;

    for (j = 0; j < p->frame_count; j++) {
      GstBuffer *buf;

      /* wrap the static frame data read-only; the NULL destroy-notify
       * means the buffer never tries to free it */
      buf =
          gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
          (guint8 *) data, p->frame_data_size, 0, p->frame_data_size, NULL,
          NULL);

      /* appsrc takes its own reference via push-buffer; ours is dropped
       * right after the flow return is checked */
      g_signal_emit_by_name (p->appsrc, "push-buffer", buf, &flow_ret);
      fail_unless_equals_int (flow_ret, GST_FLOW_OK);
      data += p->frame_data_size;

      gst_buffer_unref (buf);
    }
  }

  g_signal_emit_by_name (p->appsrc, "end-of-stream", &flow_ret);

  /* Run mainloop. */
  g_main_loop_run (mainloop);

  /* Set pipeline to NULL. */
  gst_element_set_state (p->pipeline, GST_STATE_NULL);

  /* Release mainloop. */
  g_main_loop_unref (mainloop);
}
コード例 #26
0
/* Build the SPICE video-decoding pipeline
 *   appsrc -> <codec decoder> -> videoconvert -> appsink (BGRx)
 * for the decoder's codec type (or the first configured decoder when
 * SPICE_GSTVIDEO_AUTO is set / the codec type is unknown), attach the
 * appsink sample callback and a bus watch, and start it playing.
 * Returns TRUE on success, FALSE on parse/state failure. */
static gboolean create_pipeline(SpiceGstDecoder *decoder)
{
    gchar *desc;
    gboolean auto_enabled;
    guint opt;
    GstAppSinkCallbacks appsink_cbs = { NULL };
    GError *err = NULL;
    GstBus *bus;

    /* pick the decoder entry from gst_opts: index 0 when auto mode is
     * forced or the codec type is invalid, otherwise the codec type */
    auto_enabled = (g_getenv("SPICE_GSTVIDEO_AUTO") != NULL);
    if (auto_enabled || !VALID_VIDEO_CODEC_TYPE(decoder->base.codec_type)) {
        SPICE_DEBUG("Trying %s for codec type %d %s",
                    gst_opts[0].dec_name, decoder->base.codec_type,
                    (auto_enabled) ? "(SPICE_GSTVIDEO_AUTO is set)" : "");
        opt = 0;
    } else {
        opt = decoder->base.codec_type;
    }

    /* - We schedule the frame display ourselves so set sync=false on appsink
     *   so the pipeline decodes them as fast as possible. This will also
     *   minimize the risk of frames getting lost when we rebuild the
     *   pipeline.
     * - Set max-bytes=0 on appsrc so it does not drop frames that may be
     *   needed by those that follow.
     */
    desc = g_strdup_printf("appsrc name=src is-live=true format=time max-bytes=0 block=true "
                           "%s ! %s ! videoconvert ! appsink name=sink "
                           "caps=video/x-raw,format=BGRx sync=false drop=false",
                           gst_opts[opt].dec_caps, gst_opts[opt].dec_name);
    SPICE_DEBUG("GStreamer pipeline: %s", desc);

    /* FATAL_ERRORS guarantees err is set whenever the pipeline is NULL */
    decoder->pipeline = gst_parse_launch_full(desc, NULL, GST_PARSE_FLAG_FATAL_ERRORS, &err);
    g_free(desc);
    if (!decoder->pipeline) {
        spice_warning("GStreamer error: %s", err->message);
        g_clear_error(&err);
        return FALSE;
    }

    /* grab the named endpoints for pushing/pulling frames */
    decoder->appsrc = GST_APP_SRC(gst_bin_get_by_name(GST_BIN(decoder->pipeline), "src"));
    decoder->appsink = GST_APP_SINK(gst_bin_get_by_name(GST_BIN(decoder->pipeline), "sink"));

    appsink_cbs.new_sample = new_sample;
    gst_app_sink_set_callbacks(decoder->appsink, &appsink_cbs, decoder, NULL);
    /* the watch keeps its own bus reference; drop ours */
    bus = gst_pipeline_get_bus(GST_PIPELINE(decoder->pipeline));
    gst_bus_add_watch(bus, handle_pipeline_message, decoder);
    gst_object_unref(bus);

    decoder->clock = gst_pipeline_get_clock(GST_PIPELINE(decoder->pipeline));

    if (gst_element_set_state(decoder->pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
        SPICE_DEBUG("GStreamer error: Unable to set the pipeline to the playing state.");
        free_pipeline(decoder);
        return FALSE;
    }

    return TRUE;
}
コード例 #27
0
/* Set up the media playback machinery: a pumpable GLib main loop, a
 * playbin element, its bus watch, and (unless LL_GSTREAMER_EXTERNAL is
 * set) the custom private-slvideo sink.  Returns false on any failure,
 * setting STATUS_ERROR first. */
bool
MediaPluginGStreamer010::load()
{
	if (!mDoneInit)
		return false; // error

	setStatus(STATUS_LOADING);

	DEBUGMSG("setting up media...");

	mIsLooping = false;
	mVolume = (float) 0.1234567; // minor hack to force an initial volume update

	// Create a pumpable main-loop for this media
	mPump = g_main_loop_new (NULL, FALSE);
	if (!mPump)
	{
		setStatus(STATUS_ERROR);
		return false; // error
	}

	// instantiate a playbin element to do the hard work
	mPlaybin = gst_element_factory_make ("playbin", "play");
	if (!mPlaybin)
	{
		setStatus(STATUS_ERROR);
		return false; // error
	}

	// get playbin's bus
	GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (mPlaybin));
	if (!bus)
	{
		setStatus(STATUS_ERROR);
		return false; // error
	}
	// the watch keeps its own bus reference, so ours is dropped below
	mBusWatchID = gst_bus_add_watch (bus,
					   llmediaimplgstreamer_bus_callback,
					   this);
	gst_object_unref (bus);

	if (NULL == getenv("LL_GSTREAMER_EXTERNAL")) {
		// instantiate a custom video sink
		mVideoSink =
			GST_SLVIDEO(gst_element_factory_make ("private-slvideo", "slvideo"));
		if (!mVideoSink)
		{
			WARNMSG("Could not instantiate private-slvideo element.");
			// todo: cleanup.
			setStatus(STATUS_ERROR);
			return false; // error
		}

		// connect the pieces
		g_object_set(mPlaybin, "video-sink", mVideoSink, NULL);
	}

	return true;
}
コード例 #28
0
ファイル: camerabin.c プロジェクト: PeterXu/gst-mobile
/* Test fixture: build a camerabin pipeline with test elements, attach
 * bus handlers, force a low-fps I420 video mode, seed default tags, set
 * the pipeline PLAYING, and pre-create the random tag lists used by the
 * test cases.  On state-change failure `camera` is released and NULLed
 * so the tests can detect the broken fixture. */
static void
setup (void)
{
  GstTagSetter *setter;
  gchar *desc_str;
  GstCaps *filter_caps;
  GstBus *bus;
  gint i;

  GST_INFO ("init");

  main_loop = g_main_loop_new (NULL, TRUE);

  camera = gst_check_setup_element ("camerabin");

  setup_camerabin_elements (camera);

  g_signal_connect (camera, "image-done", G_CALLBACK (capture_done), main_loop);

  /* async watch for normal messages plus a sync handler; the watch and
   * handler hold what they need, so our bus reference is dropped */
  bus = gst_pipeline_get_bus (GST_PIPELINE (camera));
  gst_bus_add_watch (bus, (GstBusFunc) capture_bus_cb, main_loop);
  gst_bus_set_sync_handler (bus, bus_sync_callback, main_loop);
  gst_object_unref (bus);

  filter_caps = gst_caps_from_string ("video/x-raw-yuv,format=(fourcc)I420");
  g_object_set (G_OBJECT (camera), "filter-caps", filter_caps, NULL);
  gst_caps_unref (filter_caps);

  /* force a low framerate here to not timeout the tests because of the
   * encoders */
  g_signal_emit_by_name (camera, "set-video-resolution-fps", 320, 240, 5, 1,
      NULL);

  /* Set some default tags */
  setter = GST_TAG_SETTER (camera);
  desc_str = g_strdup_printf ("Created by %s", g_get_real_name ());

  gst_tag_setter_add_tags (setter, GST_TAG_MERGE_REPLACE,
      GST_TAG_DESCRIPTION, desc_str, NULL);
  g_free (desc_str);

  if (gst_element_set_state (GST_ELEMENT (camera), GST_STATE_PLAYING) ==
      GST_STATE_CHANGE_FAILURE) {
    GST_WARNING ("setting camerabin to PLAYING failed");
    gst_element_set_state (GST_ELEMENT (camera), GST_STATE_NULL);
    gst_object_unref (camera);
    camera = NULL;
  }

  /* create the taglists */
  for (i = 0; i < TAGLISTS_COUNT; i++) {
    taglists[i] = gst_tag_list_new_full (GST_TAG_ARTIST, "test-artist",
        GST_TAG_GEO_LOCATION_LONGITUDE, g_random_double_range (-180, 180),
        GST_TAG_GEO_LOCATION_LATITUDE, g_random_double_range (-90, 90),
        GST_TAG_GEO_LOCATION_ELEVATION, g_random_double_range (0, 3000), NULL);
  }

  GST_INFO ("init finished");
}
コード例 #29
0
/* Attach my_bus_callback as a watch on the pipeline's message bus.
 * Stores the bus pointer and the watch id in `gst`.  Returns 0. */
int bus_watcher( Gst* gst )
{
		GstBus *pipeline_bus = gst_pipeline_get_bus( GST_PIPELINE( gst->pipeline) );
		gst->bus = pipeline_bus;
		gst->bus_watch_id = gst_bus_add_watch( pipeline_bus, my_bus_callback, gst );
		/* the watch holds its own reference; release the one we fetched */
		gst_object_unref( pipeline_bus );
		return 0;
}
コード例 #30
0
// One-shot GSource callback: make this pipeliner's main context the
// thread-default, then attach the bus watch so it dispatches there.
// Returns FALSE so the source is removed after running once.
gboolean GstPipeliner::push_thread_context(gpointer user_data) {
  auto context = static_cast<GstPipeliner*>(user_data);
  g_main_context_push_thread_default(context->main_loop_->get_main_context());

  // Fix: gst_pipeline_get_bus() returns a new reference that was leaked;
  // the watch keeps its own reference, so release ours after attaching.
  GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(context->get_pipeline()));
  gst_bus_add_watch(bus, bus_watch, user_data);
  gst_object_unref(bus);
  return FALSE;
}