Example #1
	void RgAnalyser::HandleErrorMsg (GstMessage *msg)
	{
		GError *gerror = nullptr;
		gchar *debug = nullptr;
		gst_message_parse_error (msg, &gerror, &debug);

		const auto& msgStr = QString::fromUtf8 (gerror->message);
		const auto& debugStr = QString::fromUtf8 (debug);

		const auto code = gerror->code;
		const auto domain = gerror->domain;

		g_error_free (gerror);
		g_free (debug);

		qWarning () << Q_FUNC_INFO
				<< domain
				<< code
				<< msgStr
				<< debugStr;


		if (IsDraining_)
			return;

		IsDraining_ = true;
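		// Drain any messages still queued on the bus so their handlers run before the pipeline is torn down.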
		const auto bus = gst_pipeline_get_bus (GST_PIPELINE (Pipeline_));
		while (const auto msg = gst_bus_timed_pop (bus, 0.01 * GST_SECOND))
			handleMessage (std::shared_ptr<GstMessage> (msg, gst_message_unref));
		gst_object_unref (bus);
		IsDraining_ = false;

		gst_element_set_state (Pipeline_, GST_STATE_NULL);
		PopThread_->Resume ();

		const auto trackInfoPos = std::find_if (Result_.Tracks_.begin (), Result_.Tracks_.end (),
				[this] (const TrackRgResult& info) { return info.TrackPath_ == CurrentPath_; });
		if (trackInfoPos == Result_.Tracks_.end ())
			Result_.Tracks_.append ({ CurrentPath_, 0, 0 });

		CheckFinish ();
	}
GST_END_TEST
GST_START_TEST (request_audio_src_pad_pending)
{
  GstElement *dummysrc;
  gchar *padname = NULL;
  GstBus *bus;

  loop = g_main_loop_new (NULL, TRUE);
  pipeline = gst_pipeline_new (__FUNCTION__);
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));

  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (bus_msg), pipeline);

  dummysrc = gst_element_factory_make ("dummysrc", NULL);
  g_signal_connect (dummysrc, "pad-added", G_CALLBACK (pad_added_delayed),
      &padname);

  gst_bin_add (GST_BIN (pipeline), dummysrc);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* request src pad using action */
  g_signal_emit_by_name (dummysrc, "request-new-pad",
      KMS_ELEMENT_PAD_TYPE_AUDIO, NULL, GST_PAD_SRC, &padname);
  fail_if (padname == NULL);

  GST_DEBUG ("Pad name %s", padname);
  g_object_set (G_OBJECT (dummysrc), "audio", TRUE, NULL);

  g_free (padname);

  g_timeout_add_seconds (4, print_timedout_pipeline, NULL);
  g_main_loop_run (loop);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_bus_remove_signal_watch (bus);
  g_object_unref (bus);
  g_object_unref (pipeline);
  g_main_loop_unref (loop);
}
Example #3
gint
main (gint argc, gchar * argv[])
{
  GstStateChangeReturn res;
  GstElement *player;
  GMainLoop *loop;
  GstBus *bus;

  gst_init (&argc, &argv);

  loop = g_main_loop_new (NULL, TRUE);

  player = gst_element_factory_make ("playbin", "player");
  g_assert (player);

  bus = gst_pipeline_get_bus (GST_PIPELINE (player));
  gst_bus_add_signal_watch (bus);

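  /* the detailed "message::*" signals deliver only the matching message type to each handler */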
  g_signal_connect (bus, "message::eos", G_CALLBACK (eos_cb), loop);
  g_signal_connect (bus, "message::error", G_CALLBACK (error_cb), loop);
  g_signal_connect (bus, "message::warning", G_CALLBACK (warning_cb), NULL);

  g_object_set (G_OBJECT (player), "uri", argv[1], NULL);

  res = gst_element_set_state (player, GST_STATE_PLAYING);
  if (res == GST_STATE_CHANGE_FAILURE) {
    g_print ("could not play\n");
    return -1;
  }

  g_timeout_add (UPDATE_INTERVAL, (GSourceFunc) update_scale, player);

  g_main_loop_run (loop);

  /* tidy up */
  gst_element_set_state (player, GST_STATE_NULL);
  gst_object_unref (player);
  gst_object_unref (bus);

  return 0;
}
Example #4
int main(int argc, char** argv)
{
    GMainLoop *loop;
    GstElement *play;

    gst_init(&argc, &argv);

    play = gst_element_factory_make("playbin2", "play");
	//play->set_properties("volume", 10);
    loop = g_main_loop_new(NULL, FALSE);

    g_object_set(G_OBJECT(play), "uri", argv[1], NULL);
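    /* dispatch bus messages to bus_cb on the default main context; the main loop is passed as user data */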
    gst_bus_add_watch(gst_pipeline_get_bus(GST_PIPELINE(play)), bus_cb, loop);
    g_print("playing......\n");
    gst_element_set_state(play, GST_STATE_PLAYING);
    g_print("start g_main_loop_run\n");
    g_main_loop_run(loop);
    g_print("g_main_loop_run return\n");
    gst_element_set_state(play, GST_STATE_NULL);
    return 0;
}
void CrowdDetectorFilterImpl::postConstructor ()
{
  GstBus *bus;
  std::shared_ptr<MediaPipelineImpl> pipe;

  FilterImpl::postConstructor ();

  pipe = std::dynamic_pointer_cast<MediaPipelineImpl> (getMediaPipeline() );

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipe->getPipeline() ) );

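  /* forward bus "message" signals to busMessage(); the shared_ptr passed last binds the handler to this object */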
  bus_handler_id = register_signal_handler (G_OBJECT (bus),
                   "message",
                   std::function <void (GstElement *, GstMessage *) >
                   (std::bind (&CrowdDetectorFilterImpl::busMessage, this,
                               std::placeholders::_2) ),
                   std::dynamic_pointer_cast<CrowdDetectorFilterImpl>
                   (shared_from_this() ) );

  g_object_unref (bus);
}
void MediaPlayerPrivate::createGSTPlayBin(String url)
{
    ASSERT(!m_playBin);
    m_playBin = gst_element_factory_make("playbin2", "play");

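    // Watch the playbin bus for messages; the signal watch holds its own bus reference, so the local ref is released right away.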
    GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(m_playBin));
    gst_bus_add_signal_watch(bus);
    g_signal_connect(bus, "message", G_CALLBACK(mediaPlayerPrivateMessageCallback), this);
    gst_object_unref(bus);

    g_object_set(G_OBJECT(m_playBin), "uri", url.utf8().data(), NULL);

    m_videoSink = webkit_video_sink_new();

    g_object_ref_sink(m_videoSink);
    g_object_set(m_playBin, "video-sink", m_videoSink, NULL);

    g_signal_connect(m_videoSink, "repaint-requested", G_CALLBACK(mediaPlayerPrivateRepaintCallback), this);

    setVolume(m_volume);
}
Example #7
void gstreamer_determine_video_dimensions(const char *uri, int *video_width,
int *video_height) {
	GMainLoop *loop = g_main_loop_new(NULL, FALSE);

	char *playbin_launch_str = malloc(strlen(uri) + 64);
	sprintf(playbin_launch_str, PLAYBIN_STR
		" uri=%s audio-sink=fakesink video-sink=fakesink", uri);
	GError *error2 = NULL;
	GstElement *playbin = gst_parse_launch(playbin_launch_str, &error2);
	if (error2) {
		printf("Error: Could not create gstreamer pipeline for identification.\n");
		printf("Parse error: %s\n", error2->message);
		exit(1);
	}

	playbin_pipeline = playbin;
	bus_quit_on_playing = TRUE;
	GstBus *playbin_bus = gst_pipeline_get_bus(GST_PIPELINE(playbin));
	guint type_find_bus_watch_id = gst_bus_add_watch(playbin_bus, bus_callback, loop);
	gst_object_unref(playbin_bus);

	gst_element_set_state(GST_ELEMENT(playbin), GST_STATE_READY);
	gst_element_set_state(GST_ELEMENT(playbin), GST_STATE_PLAYING);
	g_main_loop_run(loop);
	gst_element_set_state(GST_ELEMENT(playbin), GST_STATE_PAUSED);

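	/* ask playbin for its first video pad and read the negotiated width/height from its caps */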
	GstPad *pad = NULL;
	g_signal_emit_by_name(playbin, "get-video-pad", 0, &pad, NULL);
	GstCaps *caps = gst_pad_get_current_caps(pad);
	*video_width = g_value_get_int(gst_structure_get_value(
		gst_caps_get_structure(caps, 0), "width"));
	*video_height = g_value_get_int(gst_structure_get_value(
		gst_caps_get_structure(caps, 0), "height"));
	gst_caps_unref(caps);
	g_object_unref(pad);
	free(playbin_launch_str);

	gst_element_set_state(GST_ELEMENT(playbin), GST_STATE_NULL);
	gst_object_unref(GST_OBJECT(playbin));
	g_source_remove(type_find_bus_watch_id);
	g_main_loop_unref(loop);
}
Example #8
static void
gst_play_file(const char *filename){
 
	GMainLoop *loop;
	GstElement *pipeline;
	GstBus *bus;
	GstElement *source , *parser , *sink;
 
	loop = g_main_loop_new(NULL , TRUE);
 
	pipeline = gst_pipeline_new("audio-player");
 
	source = gst_element_factory_make("filesrc" , "source");
	parser = gst_element_factory_make("wavparse" , "parser");
	sink = gst_element_factory_make("alsasink" , "output");
 
	g_object_set(G_OBJECT(source) , "location"
			, filename , NULL);
 
	bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
	gst_bus_add_watch(bus , bus_watch , loop);
	g_object_unref(bus);
 
	gst_bin_add_many(GST_BIN(pipeline)
			, source , parser , sink , NULL);
 
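	/* wavparse creates its source pad dynamically, so the link to the sink is made in the pad-added callback */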
	g_signal_connect(parser
			, "pad-added" , G_CALLBACK(add_pad) , sink);
 
	if(! gst_element_link(source , parser)){
		g_warning("linke source to parser failed");
	}
 
	gst_element_set_state(pipeline , GST_STATE_PLAYING);
	printf("Start playing...\n");
	g_main_loop_run(loop);
	printf("Playing stopped!!!\n");
	gst_element_set_state(pipeline , GST_STATE_NULL);
	g_object_unref(pipeline);
}
Example #9
void
gst_inter_test_create_pipeline_vts (GstInterTest * intertest)
{
  GString *pipe_desc;
  GstElement *pipeline;
  GError *error = NULL;

  pipe_desc = g_string_new ("");

  g_string_append (pipe_desc, "videotestsrc name=source num-buffers=10000 ! ");
  g_string_append (pipe_desc,
      "video/x-raw-yuv,format=(fourcc)I420,width=320,height=240 ! ");
  g_string_append (pipe_desc, "timeoverlay ! ");
  g_string_append (pipe_desc, "intervideosink name=sink sync=true ");
  g_string_append (pipe_desc,
      "audiotestsrc samplesperbuffer=1600 num-buffers=100 ! ");
  g_string_append (pipe_desc, "interaudiosink ");

  if (verbose)
    g_print ("pipeline: %s\n", pipe_desc->str);

  pipeline = (GstElement *) gst_parse_launch (pipe_desc->str, &error);
  g_string_free (pipe_desc, TRUE);

  if (error) {
    g_print ("pipeline parsing error: %s\n", error->message);
    gst_object_unref (pipeline);
    return;
  }

  intertest->pipeline = pipeline;

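  /* keep messages queued on the bus instead of flushing them when the pipeline shuts down */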
  gst_pipeline_set_auto_flush_bus (GST_PIPELINE (pipeline), FALSE);
  intertest->bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (intertest->bus, gst_inter_test_handle_message, intertest);

  intertest->source_element =
      gst_bin_get_by_name (GST_BIN (pipeline), "source");
  intertest->sink_element = gst_bin_get_by_name (GST_BIN (pipeline), "sink");
}
int
main (int argc, char *argv[])
{
  GstElement *bin;
  GstBus *bus;

  gst_init (&argc, &argv);

  if (argc < 2) {
    g_print ("usage: %s <uri>\n", argv[0]);
    return -1;
  }

  /* create a new bin to hold the elements */
  bin = gst_element_factory_make ("playbin", "bin");
  g_assert (bin);
  g_object_set (bin, "uri", argv[1], NULL);

  bus = gst_pipeline_get_bus (GST_PIPELINE (bin));
  gst_bus_add_watch (bus, handle_message, bin);

  /* go to the PAUSED state and wait for preroll */
  g_message ("prerolling first frame");
  gst_element_set_state (bin, GST_STATE_PAUSED);
  gst_element_get_state (bin, NULL, NULL, -1);

  loop = g_main_loop_new (NULL, TRUE);
  g_main_loop_run (loop);

  g_message ("finished");

  /* stop the bin */
  gst_element_set_state (bin, GST_STATE_NULL);

  g_main_loop_unref (loop);
  gst_object_unref (bus);

  exit (0);
}
Example #11
static int mp_play_sound(TPMediaPlayer * mp, const char * uri)
{
    GstElement * playbin = gst_element_factory_make( "playbin" , "play" );

    GstBus * bus = gst_pipeline_get_bus( GST_PIPELINE( playbin ) );

    g_object_set( G_OBJECT( playbin ), "uri", uri, NULL );

    gst_bus_add_signal_watch( bus );

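    /* g_signal_connect_object disconnects these handlers automatically when the playbin is finalized */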
    g_signal_connect_object( bus, "message::error" , G_CALLBACK( play_sound_done ), playbin, G_CONNECT_AFTER );
    g_signal_connect_object( bus, "message::eos", G_CALLBACK( play_sound_done ), playbin, G_CONNECT_AFTER );

    gst_object_unref( GST_OBJECT( bus ) );

    if ( GST_STATE_CHANGE_FAILURE == gst_element_set_state( playbin, GST_STATE_PLAYING ) )
    {
        return 2;
    }

    return 0;
}
MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
{
    if (m_repaintHandler) {
        g_signal_handler_disconnect(m_videoSink.get(), m_repaintHandler);
        m_repaintHandler = 0;
    }

    g_mutex_clear(&m_sampleMutex);

    m_player = 0;

    if (m_volumeSignalHandler) {
        g_signal_handler_disconnect(m_volumeElement.get(), m_volumeSignalHandler);
        m_volumeSignalHandler = 0;
    }

    if (m_muteSignalHandler) {
        g_signal_handler_disconnect(m_volumeElement.get(), m_muteSignalHandler);
        m_muteSignalHandler = 0;
    }

#if USE(GSTREAMER_GL)
    g_cond_clear(&m_drawCondition);
    g_mutex_clear(&m_drawMutex);
#endif

    if (m_pipeline) {
        GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
        ASSERT(bus);
        g_signal_handlers_disconnect_by_func(bus.get(), reinterpret_cast<gpointer>(mediaPlayerPrivateNeedContextMessageCallback), this);
        gst_bus_disable_sync_message_emission(bus.get());
        m_pipeline.clear();
    }

#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
    if (client())
        client()->platformLayerWillBeDestroyed();
#endif
}
Example #13
eServiceMP3Record::~eServiceMP3Record()
{
	if (m_recording_pipeline)
	{
		// disconnect sync handler callback
		GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_recording_pipeline));
#if GST_VERSION_MAJOR < 1
		gst_bus_set_sync_handler(bus, NULL, NULL);
#else
		gst_bus_set_sync_handler(bus, NULL, NULL, NULL);
#endif
		gst_object_unref(bus);
	}

	if (m_state > stateIdle)
		stop();

	if (m_recording_pipeline)
	{
		gst_object_unref(GST_OBJECT(m_recording_pipeline));
	}
}
Example #14
gint
main (gint argc, gchar * argv[])
{
  GstElement *pipeline;
  GstBus *bus;
  GMainLoop *loop;

  /* init GStreamer */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* set up */
  pipeline = make_pipeline ();

  g_signal_connect (pipeline, "deep_notify",
      G_CALLBACK (gst_object_default_deep_notify), NULL);

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, my_bus_callback, loop);
  gst_object_unref (bus);

  g_print ("Starting pipeline\n");

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* add a timeout to cycle between the formats */
  g_timeout_add (1000, (GSourceFunc) do_switch, pipeline);

  /* now run */
  g_main_loop_run (loop);

  g_print ("Nulling pipeline\n");

  /* also clean up */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  return 0;
}
int
main (int argc, char *argv[])
{
  GError *error = NULL;
  GstBus *bus;

  gst_init (&argc, &argv);
  gtk_init (&argc, &argv);

  builder = gtk_builder_new ();
  if (!gtk_builder_add_from_file (builder, UI_FILE, &error)) {
    g_warning ("Error: %s", error->message);
    g_error_free (error);
    return 1;
  }

  camera = gst_element_factory_make ("camerabin", "camera");
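  /* handle bus messages both asynchronously (bus_callback) and synchronously from the posting thread (bus_sync_callback) */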
  bus = gst_pipeline_get_bus (GST_PIPELINE (camera));
  gst_bus_add_watch (bus, bus_callback, NULL);
  gst_bus_set_sync_handler (bus, bus_sync_callback, NULL, NULL);
  gst_object_unref (bus);

  if (!init_gtkwidgets_data ()) {
    goto error;
  }

  ui_main_window = GTK_WIDGET (gtk_builder_get_object (builder, "mainWindow"));
  gtk_builder_connect_signals (builder, NULL);
  gtk_widget_show_all (ui_main_window);

  gst_element_set_state (camera, GST_STATE_PLAYING);

  gtk_main ();

error:
  gst_element_set_state (camera, GST_STATE_NULL);
  gst_object_unref (camera);
  return 0;
}
Example #16
//BUILDER COMMENT. DO NOT REMOVE. auxcode begin
void Music::init(const string newName, AL::ALPtr<AL::ALBroker> parentBroker) {
	Component::init(newName, parentBroker);

	#ifdef WEBOTS
	return;
	#endif

	// init GStreamer
	gst_init (NULL, NULL);
  	loop = g_main_loop_new (NULL, FALSE);

  	// set up
  	play = gst_element_factory_make ("playbin2", "play");
	bus = gst_pipeline_get_bus (GST_PIPELINE (play));

	// set state
	gst_element_set_state (play, GST_STATE_READY);

	isSetFileMp3 = false;
	isPlayPress = false;
	isStopPress = false;
}
GST_END_TEST
GST_START_TEST (add_later)
{
  GMainLoop *loop = g_main_loop_new (NULL, TRUE);
  GstElement *pipeline = gst_pipeline_new (__FUNCTION__);
  GstElement *videosrc = gst_element_factory_make ("videotestsrc", NULL);
  GstElement *typefind = gst_element_factory_make ("typefind", NULL);
  GstElement *fakesink = gst_element_factory_make ("fakesink", "fakesink");
  GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));

  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (bus_msg), pipeline);

  g_object_set (G_OBJECT (fakesink), "sync", FALSE, "signal-handoffs", TRUE,
      NULL);
  g_signal_connect (G_OBJECT (fakesink), "handoff",
      G_CALLBACK (fakesink_hand_off), loop);

  g_signal_connect (G_OBJECT (typefind), "have-type", G_CALLBACK (type_found),
      pipeline);

  gst_bin_add_many (GST_BIN (pipeline), videosrc, typefind, fakesink, NULL);
  gst_element_link (videosrc, typefind);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  g_timeout_add_seconds (10, timeout_check, pipeline);

  mark_point ();
  g_main_loop_run (loop);
  mark_point ();

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_bus_remove_signal_watch (bus);
  g_object_unref (pipeline);
  g_object_unref (bus);
  g_main_loop_unref (loop);
}
Example #18
void Pipeline::create()
{
    qDebug("Loading video: %s", m_videoLocation.toAscii().data());

    gst_init (NULL, NULL);

#ifdef WIN32
    m_loop = g_main_loop_new (NULL, FALSE);
#endif
    m_pipeline = gst_pipeline_new ("pipeline");

    m_bus = gst_pipeline_get_bus (GST_PIPELINE (m_pipeline));
    gst_bus_add_watch (m_bus, (GstBusFunc) bus_call, this);
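    // create_window runs as a sync handler, i.e. in the thread that posts the message, before the regular bus watch sees it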
    gst_bus_set_sync_handler (m_bus, (GstBusSyncHandler) create_window, this);
    gst_object_unref (m_bus);

    GstElement* videosrc = gst_element_factory_make ("filesrc", "filesrc0");
    GstElement* decodebin = gst_element_factory_make ("decodebin", "decodebin0");
    m_glimagesink  = gst_element_factory_make ("glimagesink", "sink0");
    
    if (!videosrc || !decodebin || !m_glimagesink )
    {
        qDebug ("one element could not be found");
        return;
    }

    g_object_set(G_OBJECT(videosrc), "location", m_videoLocation.toAscii().data(), NULL);

    gst_bin_add_many (GST_BIN (m_pipeline), videosrc, decodebin, m_glimagesink, NULL);

    gst_element_link_pads (videosrc, "src", decodebin, "sink");

    g_signal_connect (decodebin, "new-decoded-pad", G_CALLBACK (cb_new_pad), m_glimagesink);

    GstPad* pad = gst_element_get_static_pad (m_glimagesink, "sink");
    g_signal_connect(pad, "notify::caps", G_CALLBACK(cb_video_size), this);
    gst_object_unref (pad);
}
static void
play_agnosticbin_video_passthrough (void)
{
  gboolean ret;
  GstElement *pipeline = gst_pipeline_new (NULL);
  GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  GstElement *agnosticbin = gst_element_factory_make ("agnosticbin", NULL);
  GstElement *videotestsrc = gst_element_factory_make ("videotestsrc", NULL);
  GstElement *fakesink = gst_element_factory_make ("fakesink", NULL);

  loop = g_main_loop_new (NULL, TRUE);

  g_object_set (G_OBJECT (pipeline), "async-handling", TRUE, NULL);
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (bus_msg), pipeline);

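  /* limit the source to 100 buffers so the stream reaches EOS after a bounded run */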
  g_object_set (G_OBJECT (videotestsrc), "num-buffers", 100, NULL);

  mark_point ();
  gst_bin_add_many (GST_BIN (pipeline), videotestsrc, agnosticbin, fakesink,
      NULL);
  mark_point ();
  ret = gst_element_link_many (videotestsrc, agnosticbin, fakesink, NULL);
  fail_unless (ret);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  mark_point ();
  g_main_loop_run (loop);
  mark_point ();

  gst_element_set_state (pipeline, GST_STATE_NULL);

  gst_bus_remove_signal_watch (bus);
  g_object_unref (bus);
  g_main_loop_unref (loop);
  g_object_unref (pipeline);
}
Example #20
void		ly_ppl_init		()
{
	GstElement* playbin=NULL;
	GstElement* equalizer=NULL;
	GstElement* convert=NULL;
	GstElement* volume=NULL;
	GstElement* audiosink=NULL;
	GstElement* fakesink=NULL;
	GstPad*     mpad;
	GstBus* bus=NULL;

	playbin=gst_element_factory_make("playbin", "playbin");
	equalizer=gst_element_factory_make("equalizer-10bands","equalizer");
	volume= gst_element_factory_make("volume","volume");
	convert=gst_element_factory_make("audioconvert","autoconvert");
	audiosink=gst_element_factory_make("autoaudiosink","autoaudiosink");
	fakesink=gst_element_factory_make("fakesink","fakesink");
	
	bus=gst_pipeline_get_bus(GST_PIPELINE(playbin));
	gst_element_set_state(playbin,GST_STATE_NULL);
	gst_bus_add_watch(bus,(GstBusFunc)ly_ppl_bus_cb, NULL);
	gst_object_unref(bus);
	
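	/* build the audio chain equalizer ! audioconvert ! volume ! autoaudiosink in a bin and expose the equalizer sink pad as a ghost pad */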
	ly_ppl_audio_bin=gst_bin_new("audio-bin");
	gst_bin_add_many(GST_BIN(ly_ppl_audio_bin),equalizer,convert,volume,audiosink,NULL);
	gst_element_link_many(equalizer,convert,volume,audiosink,NULL);
	mpad = gst_element_get_static_pad(equalizer, "sink");
	gst_element_add_pad(ly_ppl_audio_bin, gst_ghost_pad_new(NULL,mpad));
	g_object_set(G_OBJECT(playbin),"audio-sink",ly_ppl_audio_bin,NULL);
	
	ly_ppl_video_bin=gst_bin_new("video-bin");
	gst_bin_add_many(GST_BIN(ly_ppl_video_bin), fakesink,NULL);
	mpad = gst_element_get_static_pad(fakesink, "sink");
	gst_element_add_pad(ly_ppl_video_bin, gst_ghost_pad_new(NULL,mpad));
	g_object_set(G_OBJECT(playbin),"video-sink",ly_ppl_video_bin,NULL);
	
	ly_ppl_playbin=playbin;
}
Example #21
static void
start_source (const gchar * uri)
{
  GstElement *pipeline;
  GstElement *uridecodebin;
  GstBus *bus;

  pipeline = gst_pipeline_new (NULL);

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, master_bus_msg, pipeline);
  gst_object_unref (bus);

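  /* uridecodebin exposes decoded pads dynamically; the pad-added callback links them into the pipeline */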
  uridecodebin = gst_element_factory_make ("uridecodebin", NULL);
  g_object_set (uridecodebin, "uri", uri, NULL);
  g_signal_connect (uridecodebin, "pad-added", G_CALLBACK (on_pad_added),
      pipeline);
  g_signal_connect (uridecodebin, "autoplug-select",
      G_CALLBACK (on_autoplug_select), pipeline);

  gst_bin_add (GST_BIN (pipeline), uridecodebin);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
}
void
mirageaudio_canceldecode(MirageAudio *ma)
{
    if (GST_IS_ELEMENT(ma->pipeline)) {

        GstState state;
        gst_element_get_state(ma->pipeline, &state, NULL, 100*GST_MSECOND);

        if (state != GST_STATE_NULL) {
            g_mutex_lock(ma->decoding_mutex);

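            /* post an EOS message on the pipeline bus so the decode loop stops as if the stream had ended */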
            GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(ma->pipeline));
            GstMessage* eosmsg = gst_message_new_eos(GST_OBJECT(ma->pipeline));
            gst_bus_post(bus, eosmsg);
            g_print("libmirageaudio: EOS Message sent\n");
            gst_object_unref(bus);

            ma->invalidate = TRUE;

            g_mutex_unlock(ma->decoding_mutex);
        }
    }
}
void
ZBarFilter::init (std::shared_ptr<MediaPipeline> parent)
{
  element = gst_element_factory_make ("filterelement", NULL);

  g_object_set (element, "filter-factory", "zbar", NULL);
  g_object_ref (element);
  gst_bin_add (GST_BIN (parent->pipeline), element);
  gst_element_sync_state_with_parent (element);

  GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (parent->pipeline) );
  GstElement *zbar;

  g_object_get (G_OBJECT (element), "filter", &zbar, NULL);

  this->zbar = zbar;
  g_object_set (G_OBJECT (zbar), "qos", FALSE, NULL);

  bus_handler_id = g_signal_connect (bus, "message", G_CALLBACK (zbar_receive_message), this);
  g_object_unref (bus);
  // There is no need to reference zbar because its life cycle is the same as the filter's life cycle
  g_object_unref (zbar);
}
static void
kms_player_end_point_dispose (GObject * object)
{
  KmsPlayerEndPoint *self = KMS_PLAYER_END_POINT (object);

  g_clear_object (&self->priv->loop);

  if (self->priv->pipeline != NULL) {
    GstBus *bus;

    bus = gst_pipeline_get_bus (GST_PIPELINE (self->priv->pipeline));
    gst_bus_set_sync_handler (bus, NULL, NULL, NULL);
    g_object_unref (bus);

    gst_element_set_state (self->priv->pipeline, GST_STATE_NULL);
    gst_object_unref (GST_OBJECT (self->priv->pipeline));
    self->priv->pipeline = NULL;
  }

  /* clean up as possible. May be called multiple times */

  G_OBJECT_CLASS (kms_player_end_point_parent_class)->dispose (object);
}
Example #25
void
gst_switchui_create_pipeline (GstSwitchUI * switchui)
{
  GString *pipe_desc;
  GstElement *pipeline;
  GError *error = NULL;

  pipe_desc = g_string_new ("");

  g_string_append (pipe_desc, "videotestsrc name=source num-buffers=100 ! ");
  g_string_append (pipe_desc, "timeoverlay ! ");
  g_string_append (pipe_desc, "xvimagesink name=sink ");
  g_string_append (pipe_desc,
      "audiotestsrc samplesperbuffer=1600 num-buffers=100 ! ");
  g_string_append (pipe_desc, "alsasink ");

  if (verbose)
    g_print ("pipeline: %s\n", pipe_desc->str);

  pipeline = (GstElement *) gst_parse_launch (pipe_desc->str, &error);
  g_string_free (pipe_desc, TRUE);

  if (error) {
    g_print ("pipeline parsing error: %s\n", error->message);
    gst_object_unref (pipeline);
    return;
  }

  switchui->pipeline = pipeline;

  gst_pipeline_set_auto_flush_bus (GST_PIPELINE (pipeline), FALSE);
  switchui->bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (switchui->bus, gst_switchui_handle_message, switchui);

  switchui->source_element = gst_bin_get_by_name (GST_BIN (pipeline), "source");
  switchui->sink_element = gst_bin_get_by_name (GST_BIN (pipeline), "sink");
}
Example #26
int main(int argc, char **argv)
{
	GstElement *pipeline;
	GstElement *src;
	GstElement *decoder;
	GstElement *sink;
	int fi = 0;

	if(argc < 2) {
		g_printf("Usage: %s <file>\n", argv[0]);
		exit(1);
	}

	gst_init(NULL, NULL);

	pipeline = gst_pipeline_new("pipeline");
	src = gst_element_factory_make("filesrc", "src");
	decoder = gst_element_factory_make("dtdrdec", "decoder");
	sink = gst_element_factory_make("alsasink", "sink");

	gst_bin_add_many(GST_BIN(pipeline), src, decoder, sink, NULL);
	gst_element_link_many(src, decoder, sink, NULL);

	g_object_set(src, "location", argv[1], NULL);
	gst_bus_add_watch(gst_pipeline_get_bus(GST_PIPELINE(pipeline)), 
		bus_callback, NULL);

	gst_element_set_state(pipeline, GST_STATE_PLAYING);

	loop = g_main_loop_new(NULL, FALSE);
	g_main_loop_run(loop);

	gst_element_set_state(pipeline, GST_STATE_NULL);
	gst_object_unref(pipeline);

	exit(0);
}
Example #27
/*** block c  from ../../../docs/manual/highlevel-components.xml ***/
gint
main (gint   argc,
      gchar *argv[])
{
  GMainLoop *loop;
  GstElement *play;
  GstBus *bus;

  /* init GStreamer */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* make sure we have a URI */
  if (argc != 2) {
    g_print ("Usage: %s <URI>\n", argv[0]);
    return -1;
  }

  /* set up */
  play = gst_element_factory_make ("playbin", "play");
  g_object_set (G_OBJECT (play), "uri", argv[1], NULL);

  bus = gst_pipeline_get_bus (GST_PIPELINE (play));
  gst_bus_add_watch (bus, my_bus_callback, loop);
  gst_object_unref (bus);

  gst_element_set_state (play, GST_STATE_PLAYING);

  /* now run */
  g_main_loop_run (loop);

  /* also clean up */
  gst_element_set_state (play, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (play));

  return 0;
}
Example #28
AudioDestinationGStreamer::AudioDestinationGStreamer(AudioIOCallback& callback, float sampleRate)
    : m_callback(callback)
    , m_renderBus(2, framesToPull, false)
    , m_sampleRate(sampleRate)
    , m_isPlaying(false)
{
    m_pipeline = gst_pipeline_new("play");
    GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
    ASSERT(bus);
    gst_bus_add_signal_watch(bus);
    g_signal_connect(bus, "message", G_CALLBACK(messageCallback), this);
    gst_object_unref(bus);

    GstElement* webkitAudioSrc = reinterpret_cast<GstElement*>(g_object_new(WEBKIT_TYPE_WEB_AUDIO_SRC,
                                                                            "rate", sampleRate,
                                                                            "bus", &m_renderBus,
                                                                            "provider", &m_callback,
                                                                            "frames", framesToPull, NULL));

    GstElement* wavParser = gst_element_factory_make("wavparse", 0);

    m_wavParserAvailable = wavParser;
    ASSERT_WITH_MESSAGE(m_wavParserAvailable, "Failed to create GStreamer wavparse element");
    if (!m_wavParserAvailable)
        return;

#ifndef GST_API_VERSION_1
    g_signal_connect(wavParser, "pad-added", G_CALLBACK(onGStreamerWavparsePadAddedCallback), this);
#endif
    gst_bin_add_many(GST_BIN(m_pipeline), webkitAudioSrc, wavParser, NULL);
    gst_element_link_pads_full(webkitAudioSrc, "src", wavParser, "sink", GST_PAD_LINK_CHECK_NOTHING);

#ifdef GST_API_VERSION_1
    GRefPtr<GstPad> srcPad = adoptGRef(gst_element_get_static_pad(wavParser, "src"));
    finishBuildingPipelineAfterWavParserPadReady(srcPad.get());
#endif
}
void Pipeline::create()
{
    qDebug("Loading video: %s", m_videoLocation.toLatin1().data());

    gst_init (NULL, NULL);

#ifdef WIN32
    m_loop = g_main_loop_new (NULL, FALSE);
#endif
    m_pipeline = gst_pipeline_new ("pipeline");

    m_bus = gst_pipeline_get_bus (GST_PIPELINE (m_pipeline));
    gst_bus_add_watch (m_bus, (GstBusFunc) bus_call, this);
    gst_bus_set_sync_handler (m_bus, (GstBusSyncHandler) create_window, this, NULL);
    gst_object_unref (m_bus);

    GstElement* videosrc = gst_element_factory_make ("filesrc", "filesrc0");
    GstElement* decodebin = gst_element_factory_make ("decodebin", "decodebin0");
    m_glimagesink  = gst_element_factory_make ("glimagesink", "sink0");
    
    if (!videosrc || !decodebin || !m_glimagesink )
    {
        qDebug ("one element could not be found");
        return;
    }

    g_object_set(G_OBJECT(videosrc), "num-buffers", 800, NULL);
    g_object_set(G_OBJECT(videosrc), "location", m_videoLocation.toLatin1().data(), NULL);
    g_signal_connect(G_OBJECT(m_glimagesink), "client-reshape", G_CALLBACK (reshapeCallback), NULL);
    g_signal_connect(G_OBJECT(m_glimagesink), "client-draw", G_CALLBACK (drawCallback), NULL);

    gst_bin_add_many (GST_BIN (m_pipeline), videosrc, decodebin, m_glimagesink, NULL);

    gst_element_link_pads (videosrc, "src", decodebin, "sink");

    g_signal_connect (decodebin, "pad-added", G_CALLBACK (cb_new_pad), this);
}
static void
run_pipeline (GstElement * pipeline)
{
  GstMessage *msg;
  GstBus *bus;

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));

  gst_element_set_state (pipeline, GST_STATE_PAUSED);
  gst_element_get_state (pipeline, NULL, NULL, -1);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

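  /* pop messages until EOS or ERROR; element messages posted by multifilesink are collected for later checks */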
  while (1) {
    msg =
        gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
        GST_MESSAGE_EOS | GST_MESSAGE_ERROR | GST_MESSAGE_ELEMENT);

    fail_unless (msg != NULL);
    if (msg) {
      if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ELEMENT) {
        if (gst_message_has_name (msg, "GstMultiFileSink"))
          mfs_messages = g_list_append (mfs_messages, msg);
        else
          gst_message_unref (msg);

        continue;
      }

      fail_unless (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_EOS);
      gst_message_unref (msg);
    }
    break;
  }

  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
}