/* Initialize the playback backend: create the playbin element, build the
 * UI and attach bus signal handlers.
 * Returns 0 on success, -1 if the playbin element could not be created.
 * argc/argv are accepted for symmetry with gst_init-style entry points. */
int lapsi_backend_init(CustomData *data, int *argc, char ***argv)
{
  GstBus *bus;

  /* Initialize our data structure.
   * FIX: the original used sizeof (data) — the size of the pointer —
   * which only zeroed the first few bytes of the structure. */
  memset (data, 0, sizeof (*data));
  data->duration = GST_CLOCK_TIME_NONE;

  /* Create the elements */
  data->playbin = gst_element_factory_make ("playbin", "playbin");
  if (!data->playbin) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Connect to interesting signals in playbin */
  bus = gst_element_get_bus (data->playbin);
  gst_bus_add_signal_watch (bus);

  /* Create the GUI */
  create_ui (data);

  /* Instruct the bus to emit signals for each received message, and
   * connect to the interesting signals */
  gst_bus_set_sync_handler (bus, (GstBusSyncHandler) busSyncHandler, NULL, NULL);
  g_signal_connect (G_OBJECT (bus), "message::error",
      (GCallback) error_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::eos",
      (GCallback) eos_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::state-changed",
      (GCallback) state_changed_cb, data);

  /* FIX: the signal watch keeps its own reference to the bus; drop ours
   * so the bus is not leaked. */
  gst_object_unref (bus);

  return 0;
}
/* Run a blocking message loop on @pipe's bus until EOS, a warning or an
 * error is received.  Non-terminal messages are discarded. */
static void
event_loop (GstElement * pipe)
{
  GstBus *bus;
  GstMessage *message = NULL;

  bus = gst_element_get_bus (GST_ELEMENT (pipe));

  while (TRUE) {
    message = gst_bus_poll (bus, GST_MESSAGE_ANY, -1);
    g_assert (message != NULL);

    switch (message->type) {
      case GST_MESSAGE_EOS:
        gst_message_unref (message);
        gst_object_unref (bus);   /* FIX: bus reference was leaked */
        return;
      case GST_MESSAGE_WARNING:{
        GError *gerror;
        gchar *debug;

        /* FIX: warnings must be parsed with the warning API; calling
         * gst_message_parse_error() on a WARNING message fails its type
         * check and leaves gerror/debug uninitialized (UB when freed). */
        gst_message_parse_warning (message, &gerror, &debug);
        gst_object_default_error (GST_MESSAGE_SRC (message), gerror, debug);
        gst_message_unref (message);
        g_error_free (gerror);
        g_free (debug);
        gst_object_unref (bus);
        return;
      }
      case GST_MESSAGE_ERROR:{
        GError *gerror;
        gchar *debug;

        gst_message_parse_error (message, &gerror, &debug);
        gst_object_default_error (GST_MESSAGE_SRC (message), gerror, debug);
        gst_message_unref (message);
        g_error_free (gerror);
        g_free (debug);
        gst_object_unref (bus);
        return;
      }
      default:
        gst_message_unref (message);
        break;
    }
  }
}
/* Build the playback pipeline for @filename: a filesrc feeding a
 * decodebin, with pad-added/no-more-pads callbacks and a bus signal
 * watch attached.  Always returns TRUE (link failure aborts via
 * g_error). */
static gboolean
create_pipeline (AppInfo * info, const gchar * filename)
{
  GstElement *source;
  GstElement *decoder;
  GstBus *pipeline_bus;

  info->pipe = gst_pipeline_new ("pipeline");

  source = create_element ("filesrc");
  g_object_set (source, "location", filename, NULL);

  decoder = create_element ("decodebin");

  gst_bin_add_many (GST_BIN (info->pipe), source, decoder, NULL);
  if (!gst_element_link (source, decoder))
    g_error ("Can't link filesrc to decodebin");

  g_signal_connect (decoder, "pad-added", G_CALLBACK (new_decoded_pad), info);
  g_signal_connect (decoder, "no-more-pads", G_CALLBACK (no_more_pads), info);

  /* set up bus */
  pipeline_bus = gst_element_get_bus (info->pipe);
  gst_bus_add_signal_watch (pipeline_bus);
  g_signal_connect (pipeline_bus, "message", G_CALLBACK (bus_message_cb), info);
  gst_object_unref (pipeline_bus);

  return TRUE;
}
int main(int argc, char *argv[]) { CustomData data; GstBus *bus; /* Initialize cumstom data structure */ memset (&data, 0, sizeof (data)); data.b = 1; /* For waveform generation */ data.d = 1; /* Initialize GStreamer */ gst_init (&argc, &argv); /* Create the playbin2 element */ data.pipeline = gst_parse_launch ("playbin2 uri=appsrc://", NULL); g_signal_connect (data.pipeline, "source-setup", G_CALLBACK (source_setup), &data); /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */ bus = gst_element_get_bus (data.pipeline); gst_bus_add_signal_watch (bus); g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, &data); gst_object_unref (bus); /* Start playing the pipeline */ gst_element_set_state (data.pipeline, GST_STATE_PLAYING); /* Create a GLib Main Loop and set it to run */ data.main_loop = g_main_loop_new (NULL, FALSE); g_main_loop_run (data.main_loop); /* Free resources */ gst_element_set_state (data.pipeline, GST_STATE_NULL); gst_object_unref (data.pipeline); return 0; }
static void test_reset_on_seek (BT_TEST_ARGS) { BT_TEST_START; GST_INFO ("-- arrange --"); GstElement *p = gst_parse_launch ("buzztrax-test-audio-synth name=\"src\" ! fakesink async=false", NULL); BtTestAudioSynth *e = (BtTestAudioSynth *) gst_bin_get_by_name (GST_BIN (p), "src"); GstBus *bus = gst_element_get_bus (p); GST_INFO ("-- act --"); gst_element_set_state (p, GST_STATE_PAUSED); gst_element_get_state (p, NULL, NULL, GST_CLOCK_TIME_NONE); gst_element_seek (p, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH, GST_SEEK_TYPE_SET, GST_MSECOND * 100, GST_SEEK_TYPE_SET, GST_MSECOND * 200); gst_element_set_state (p, GST_STATE_PLAYING); gst_element_get_state (p, NULL, NULL, GST_CLOCK_TIME_NONE); gst_bus_poll (bus, GST_MESSAGE_EOS | GST_MESSAGE_ERROR, GST_CLOCK_TIME_NONE); GST_INFO ("-- assert --"); ck_assert_int_eq (e->num_disconts, 1); GST_INFO ("-- cleanup --"); gst_element_set_state (p, GST_STATE_NULL); gst_object_unref (e); gst_object_unref (p); BT_TEST_END; }
/* JNI entry point: parse and run the gst-launch-style description in
 * @launch until EOS or an error.  Returns 0 on success, -1 on failure
 * (bad string or unparsable pipeline). */
JNIEXPORT jint JNICALL
Java_gstJNI_pipelineLaunch (JNIEnv *env, jobject thisObj, jstring launch)
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;
  GError *e = NULL;

  /* Convert the JNI string to a char* */
  const char *inCStr = (*env)->GetStringUTFChars (env, launch, NULL);
  if (inCStr == NULL)
    return -1;                  /* OutOfMemoryError already thrown */

  /* Initialize GStreamer (no command-line arguments available here) */
  gst_init (NULL, NULL);

  pipeline = gst_parse_launch (inCStr, &e);

  /* The description has been consumed; release the JNI string now so
   * every exit path below is covered. */
  (*env)->ReleaseStringUTFChars (env, launch, inCStr);

  /* FIX: the parse error was ignored and a NULL pipeline would have
   * been dereferenced below. */
  if (e != NULL) {
    g_error_free (e);
    if (pipeline)
      gst_object_unref (pipeline);
    return -1;
  }
  if (pipeline == NULL)
    return -1;

  /* Start playing */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Free resources */
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  return 0;
}
/* Main method for the native code. This is executed on its own thread. */ static void *app_function (void *userdata) { JavaVMAttachArgs args; GstBus *bus; CustomData *data = (CustomData *)userdata; GSource *bus_source; GError *error = NULL; GST_DEBUG ("Creating pipeline in CustomData at %p", data); /* Create our own GLib Main Context and make it the default one */ data->context = g_main_context_new (); g_main_context_push_thread_default(data->context); /* Build pipeline */ data->pipeline = gst_parse_launch("videotestsrc ! warptv ! videoconvert ! autovideosink", &error); if (error) { gchar *message = g_strdup_printf("Unable to build pipeline: %s", error->message); g_clear_error (&error); set_ui_message(message, data); g_free (message); return NULL; } /* Set the pipeline to READY, so it can already accept a window handle, if we have one */ gst_element_set_state(data->pipeline, GST_STATE_READY); data->video_sink = gst_bin_get_by_interface(GST_BIN(data->pipeline), GST_TYPE_VIDEO_OVERLAY); if (!data->video_sink) { GST_ERROR ("Could not retrieve video sink"); return NULL; } /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */ bus = gst_element_get_bus (data->pipeline); bus_source = gst_bus_create_watch (bus); g_source_set_callback (bus_source, (GSourceFunc) gst_bus_async_signal_func, NULL, NULL); g_source_attach (bus_source, data->context); g_source_unref (bus_source); g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, data); g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, data); gst_object_unref (bus); /* Create a GLib Main Loop and set it to run */ GST_DEBUG ("Entering main loop... 
(CustomData:%p)", data); data->main_loop = g_main_loop_new (data->context, FALSE); check_initialization_complete (data); g_main_loop_run (data->main_loop); GST_DEBUG ("Exited main loop"); g_main_loop_unref (data->main_loop); data->main_loop = NULL; /* Free resources */ g_main_context_pop_thread_default(data->context); g_main_context_unref (data->context); gst_element_set_state (data->pipeline, GST_STATE_NULL); gst_object_unref (data->video_sink); gst_object_unref (data->pipeline); return NULL; }
static void test_audio_context_configures_buffer_size (BT_TEST_ARGS) { BT_TEST_START; GST_INFO ("-- arrange --"); GstElement *p = gst_parse_launch ("buzztrax-test-audio-synth name=\"src\" num-buffers=1 ! fakesink async=false", NULL); BtTestAudioSynth *e = (BtTestAudioSynth *) gst_bin_get_by_name (GST_BIN (p), "src"); GstBus *bus = gst_element_get_bus (p); GST_INFO ("-- act --"); gst_element_set_state (p, GST_STATE_READY); gst_element_set_context (p, ctx); gst_element_set_state (p, GST_STATE_PLAYING); gst_bus_poll (bus, GST_MESSAGE_EOS | GST_MESSAGE_ERROR, GST_CLOCK_TIME_NONE); GST_INFO ("-- assert --"); BufferFields *bf = get_buffer_info (e, 0); // sizeof(gint16) * (int)(0.5 + (44100 * (60.0 / 8)) / (120 * 4)) ck_assert_uint_eq (bf->size, 1378); GST_INFO ("-- cleanup --"); gst_element_set_state (p, GST_STATE_NULL); gst_object_unref (bus); gst_object_unref (e); gst_object_unref (p); BT_TEST_END; }
/*!
 * Destructor: stops playback, tears down the preview pipeline and
 * detaches the resource-policy signal handlers.
 */
AlertTonePreview::~AlertTonePreview ()
{
    SYS_DEBUG ("Stopping the playback.");

    if (m_gstVolume) {
        gst_object_unref (m_gstVolume);
        m_gstVolume = NULL;
    }

    GstBus *pipelineBus = gst_element_get_bus (m_gstPipeline);
    gst_bus_remove_signal_watch (pipelineBus);
    gst_object_unref (pipelineBus);

    gst_element_set_state (m_gstPipeline, GST_STATE_NULL);
    gst_object_unref (m_gstPipeline);
    m_gstPipeline = NULL;

#ifdef HAVE_LIBRESOURCEQT
    disconnect (resources, SIGNAL (resourcesGranted (QList<ResourcePolicy::ResourceType>)), this, SLOT (audioResourceAcquired ()));
    disconnect (resources, SIGNAL (lostResources ()), this, SLOT (audioResourceLost()));
    resources->release ();
#endif
}
/*! * \brief CvVideoWriter_GStreamer::close * ends the pipeline by sending EOS and destroys the pipeline and all * elements afterwards */ void CvVideoWriter_GStreamer::close() { if (pipeline) { gst_app_src_end_of_stream(GST_APP_SRC(source)); //wait for EOS to trickle down the pipeline. This will let all elements finish properly GstBus* bus = gst_element_get_bus(pipeline); GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS)); if(msg != NULL){ gst_message_unref(msg); g_object_unref(G_OBJECT(bus)); } gst_element_set_state (pipeline, GST_STATE_NULL); handleMessage(pipeline); gst_object_unref (GST_OBJECT (pipeline)); if (source) gst_object_unref (GST_OBJECT (source)); if (file) gst_object_unref (GST_OBJECT (file)); } }
/* Attach a bus watch and a periodic notification timeout (every
 * data->notify_time milliseconds) to the pipeline's GLib context.
 * Does nothing when notify_time is not positive. */
void set_notifyfunction(CustomData *data)
{
	GstBus *bus;
	GSource *bus_source;

	if (data->notify_time > 0) {
		bus = gst_element_get_bus(data->pipeline);
		bus_source = gst_bus_create_watch(bus);
		g_source_set_callback(bus_source,
				(GSourceFunc) gst_bus_async_signal_func, NULL, NULL);
		g_source_attach(bus_source, data->context);
		g_source_unref(bus_source);
		/* FIX: the watch holds its own reference to the bus; drop ours
		 * (it was leaked on every call). */
		gst_object_unref(bus);

		if (data->timeout_source) {
			g_source_destroy(data->timeout_source);
		}
		/* Register a function that GLib will call every notify_time
		 * milliseconds (the old comment claimed a fixed 4 Hz rate). */
		data->timeout_source = g_timeout_source_new(data->notify_time);
		g_source_set_callback(data->timeout_source,
				(GSourceFunc) gst_notify_time_cb, data, NULL);
		g_source_attach(data->timeout_source, data->context);
		g_source_unref(data->timeout_source);
	}
}
/*
 * run_pipeline:
 * @pipe: the pipeline to run
 * @desc: the description for use in messages
 * @events: is a mask of expected events
 * @tevent: is the expected terminal event.
 *
 * Drives @pipe to @target_state, then drains bus messages until the
 * terminal event arrives; any message outside @events is reported via
 * g_critical.  Finally the pipeline is shut down and both pipe and bus
 * references are released (ownership of @pipe is consumed).
 *
 * the poll call will time out after half a second.
 */
static void
run_pipeline (GstElement * pipe, const gchar * descr,
    GstMessageType events, GstMessageType tevent, GstState target_state)
{
  GstBus *bus;
  GstMessage *message;
  GstMessageType revent;
  GstStateChangeReturn ret;

  g_assert (pipe);
  bus = gst_element_get_bus (pipe);
  g_assert (bus);

  fail_if (gst_element_set_state (pipe, target_state) ==
      GST_STATE_CHANGE_FAILURE, "Could not set pipeline %s to playing", descr);

  /* wait (bounded) for the async state change to settle */
  ret = gst_element_get_state (pipe, NULL, NULL, 10 * GST_SECOND);
  if (ret == GST_STATE_CHANGE_ASYNC) {
    g_critical ("Pipeline '%s' failed to go to PAUSED fast enough", descr);
    goto done;
  } else if ((ret != GST_STATE_CHANGE_SUCCESS)
      && (ret != GST_STATE_CHANGE_NO_PREROLL)) {
    g_critical ("Pipeline '%s' failed to go into PAUSED state (%s)", descr,
        gst_element_state_change_return_get_name (ret));
    goto done;
  }

  while (1) {
    message = gst_bus_poll (bus, GST_MESSAGE_ANY, GST_SECOND / 2);

    /* always have to pop the message before getting back into poll */
    if (message) {
      revent = GST_MESSAGE_TYPE (message);
      gst_message_unref (message);
    } else {
      /* NULL means the half-second poll timed out */
      revent = GST_MESSAGE_UNKNOWN;
    }

    if (revent == tevent) {
      break;
    } else if (revent == GST_MESSAGE_UNKNOWN) {
      g_critical ("Unexpected timeout in gst_bus_poll, looking for %d: %s",
          tevent, descr);
      break;
    } else if (revent & events) {
      /* expected intermediate message; keep draining */
      continue;
    }
    g_critical
        ("Unexpected message received of type %d, '%s', looking for %d: %s",
        revent, gst_message_type_get_name (revent), tevent, descr);
  }

done:
  fail_if (gst_element_set_state (pipe, GST_STATE_NULL) ==
      GST_STATE_CHANGE_FAILURE, "Could not set pipeline %s to NULL", descr);
  gst_element_get_state (pipe, NULL, NULL, GST_CLOCK_TIME_NONE);
  gst_object_unref (pipe);

  /* flush pending messages before dropping the bus reference */
  gst_bus_set_flushing (bus, TRUE);
  gst_object_unref (bus);
}
int main (int argc, char *argv[]) { GstBus *bus; gint watch_id; if (argc != 2) { g_print ("usage: giosrc-mounting URI\n"); return -1; } gst_init (NULL, NULL); gtk_init (NULL, NULL); pipeline = gst_element_factory_make ("playbin", NULL); g_assert (pipeline); g_object_set (G_OBJECT (pipeline), "uri", argv[1], NULL); bus = gst_element_get_bus (pipeline); watch_id = gst_bus_add_watch (bus, message_handler, NULL); gst_object_unref (bus); gst_element_set_state (pipeline, GST_STATE_PLAYING); gtk_main (); g_source_remove (watch_id); gst_element_set_state (pipeline, GST_STATE_NULL); gst_object_unref (pipeline); return 0; }
static void run_main_loop_until_eos (void) { GstElement *bin = (GstElement *) check_gobject_get_object_property (song, "bin"); GMainLoop *main_loop = g_main_loop_new (NULL, FALSE); GstBus *bus = gst_element_get_bus (bin); gst_bus_add_signal_watch_full (bus, G_PRIORITY_HIGH); g_signal_connect (bus, "message::error", G_CALLBACK (message_received), (gpointer) main_loop); g_signal_connect (bus, "message::eos", G_CALLBACK (message_received), (gpointer) main_loop); gst_object_unref (bus); gst_object_unref (bin); // workaround for some muxers not accepting the seek and thus not going to eos // poll playback position 10 times a second // TODO(ensonic): fixed in 1.0? // basesrc elements do post EOS old_pos = -1; old_playing = FALSE; g_signal_connect (song, "notify::play-pos", G_CALLBACK (on_song_play_pos_notify), (gpointer) main_loop); g_signal_connect (song, "notify::is-playing", G_CALLBACK (on_song_is_playing_notify), (gpointer) main_loop); guint update_id = g_timeout_add_full (G_PRIORITY_HIGH, 1000 / 10, on_song_playback_update, NULL, NULL); bt_song_update_playback_position (song); GST_INFO ("running main_loop"); g_main_loop_run (main_loop); GST_INFO ("finished main_loop"); g_source_remove (update_id); }
int main(int argc, char* argv[]) { gst_init(&argc, &argv); if (argc < 2) { printf("Usage: %s filename\n", argv[0]); return -1; } filename = argv[1]; mainloop = g_main_loop_new(NULL, FALSE); playBin = gst_element_factory_make ("playbin2", "player"); g_assert(playBin); bus = gst_element_get_bus(playBin); g_assert(bus); gst_bus_add_watch(bus, onBusMessage, NULL); filename = g_filename_to_uri(argv[1], NULL, NULL); g_assert(filename); g_object_set(G_OBJECT (playBin), "uri", filename, NULL); setState(GST_STATE_PLAYING); g_main_loop_run(mainloop); return 0; }
static void test_initialized_with_audio_caps (BT_TEST_ARGS) { BT_TEST_START; GST_INFO ("-- arrange --"); GstElement *p = gst_parse_launch ("buzztrax-test-audio-synth name=\"src\" num-buffers=1 ! fakesink async=false", NULL); BtTestAudioSynth *e = (BtTestAudioSynth *) gst_bin_get_by_name (GST_BIN (p), "src"); GstBus *bus = gst_element_get_bus (p); GST_INFO ("-- act --"); gst_element_set_state (p, GST_STATE_PLAYING); gst_bus_poll (bus, GST_MESSAGE_EOS | GST_MESSAGE_ERROR, GST_CLOCK_TIME_NONE); GST_INFO ("-- assert --"); fail_unless (e->caps != NULL, NULL); gint i, cs = gst_caps_get_size (e->caps); fail_unless (cs > 0, NULL); for (i = 0; i < cs; i++) { fail_unless (gst_structure_has_name (gst_caps_get_structure (e->caps, i), "audio/x-raw"), NULL); } GST_INFO ("-- cleanup --"); gst_element_set_state (p, GST_STATE_NULL); gst_object_unref (e); gst_object_unref (p); BT_TEST_END; }
/* GObject property getter: exposes the playbin element and (when the
 * playbin exists) its bus. */
static void
impl_get_property (GObject *object,
		   guint prop_id,
		   GValue *value,
		   GParamSpec *pspec)
{
	RBPlayerGst *mp = RB_PLAYER_GST (object);

	switch (prop_id) {
	case PROP_PLAYBIN:
		g_value_set_object (value, mp->priv->playbin);
		break;
	case PROP_BUS:
		if (mp->priv->playbin) {
			GstBus *playbin_bus = gst_element_get_bus (mp->priv->playbin);
			g_value_set_object (value, playbin_bus);
			gst_object_unref (playbin_bus);
		}
		break;
	default:
		G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
		break;
	}
}
static void test_buffers_are_contigous (BT_TEST_ARGS) { BT_TEST_START; GST_INFO ("-- arrange --"); GstElement *p = gst_parse_launch ("buzztrax-test-audio-synth name=\"src\" num-buffers=2 ! fakesink async=false", NULL); BtTestAudioSynth *e = (BtTestAudioSynth *) gst_bin_get_by_name (GST_BIN (p), "src"); GstBus *bus = gst_element_get_bus (p); GST_INFO ("-- act --"); gst_element_set_state (p, GST_STATE_PLAYING); gst_bus_poll (bus, GST_MESSAGE_EOS | GST_MESSAGE_ERROR, GST_CLOCK_TIME_NONE); GST_INFO ("-- assert --"); BufferFields *bf0 = get_buffer_info (e, 0); BufferFields *bf1 = get_buffer_info (e, 1); ck_assert_uint64_eq (bf1->ts, bf0->ts + bf0->duration); ck_assert_uint64_eq (bf1->offset, bf0->offset + bf0->offset_end); GST_INFO ("-- cleanup --"); gst_element_set_state (p, GST_STATE_NULL); gst_object_unref (e); gst_object_unref (p); BT_TEST_END; }
int main(int argc, char *argv[]) { GstElement *pipeline; GstBus *bus; GstMessage *msg; /* Initialize GStreamer */ gst_init (&argc, &argv); /* Build the pipeline */ pipeline = gst_parse_launch ("playbin2 uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL); /* Start playing */ gst_element_set_state (pipeline, GST_STATE_PLAYING); /* Wait until error or EOS */ bus = gst_element_get_bus (pipeline); msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS); /* Free resources */ if (msg != NULL) gst_message_unref (msg); gst_object_unref (bus); gst_element_set_state (pipeline, GST_STATE_NULL); gst_object_unref (pipeline); return 0; }
static void test_position_query_time (BT_TEST_ARGS) { BT_TEST_START; GST_INFO ("-- arrange --"); GstElement *p = gst_parse_launch ("buzztrax-test-audio-synth name=\"src\" num-buffers=1 ! fakesink async=false", NULL); BtTestAudioSynth *e = (BtTestAudioSynth *) gst_bin_get_by_name (GST_BIN (p), "src"); GstBus *bus = gst_element_get_bus (p); GST_INFO ("-- act --"); gst_element_set_state (p, GST_STATE_PLAYING); gst_bus_poll (bus, GST_MESSAGE_EOS | GST_MESSAGE_ERROR, GST_CLOCK_TIME_NONE); GST_INFO ("-- assert --"); BufferFields *bf = get_buffer_info (e, 0); gint64 pos; gboolean res = gst_element_query_position ((GstElement *) e, GST_FORMAT_TIME, &pos); fail_unless (res, NULL); ck_assert_uint64_eq (bf->duration, pos); GST_INFO ("-- cleanup --"); gst_element_set_state (p, GST_STATE_NULL); gst_object_unref (e); gst_object_unref (p); BT_TEST_END; }
void media_init() { GstBus *bus; gst_init (NULL, NULL); //gst_debug_set_default_threshold(GST_LEVEL_MEMDUMP); #if (TRANS_TYPE == TRANS_TYPE_TCP) gst_pipeline_tcp_init(); #else gst_pipeline_rtp_init(); #endif int i; for (i = 0; i < SERVER_LIST_NUM; i++) { #if (TRANS_TYPE == TRANS_TYPE_TCP) add_server_to_pipeline_tcp(control_service_data[i].server_ip); #else add_server_to_pipeline_rtp(control_service_data[i].server_ip); #endif } bus = gst_element_get_bus (gst_data.playbin); gst_bus_add_watch (bus, bus_call, NULL); g_object_unref (bus); }
void MyGstreamer::start() { qDebug("Gstreamer Started..."); GstElement *pipeline; GstBus *bus; GstMessage *msg; /* Initialize GStreamer */ gst_init (0, 0); /* Build the pipeline */ pipeline = gst_parse_launch ("playbin2 uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL); /* Start playing */ gst_element_set_state (pipeline, GST_STATE_PLAYING); /* Wait until error or EOS */ bus = gst_element_get_bus (pipeline); msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS)); /* Free resources */ if (msg != NULL) gst_message_unref (msg); gst_object_unref (bus); gst_element_set_state (pipeline, GST_STATE_NULL); gst_object_unref (pipeline); }
/* GObject property getter: exposes the playbin element and (when the
 * playbin exists) its bus. */
static void
xmr_player_get_property(GObject *object,
			guint prop_id,
			GValue *value,
			GParamSpec *pspec)
{
	XmrPlayer *player = XMR_PLAYER(object);
	XmrPlayerPrivate *priv = player->priv;

	switch(prop_id)
	{
	case PROP_PLAYBIN:
		g_value_set_object (value, priv->playbin);
		break;

	case PROP_BUS:
		if (priv->playbin)
		{
			GstBus *playbin_bus = gst_element_get_bus (priv->playbin);
			g_value_set_object (value, playbin_bus);
			gst_object_unref (playbin_bus);
		}
		break;

	default:
		G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
		break;
	}
}
int spectrum_run (int argc, char *argv[]) { GstElement *bin; GstElement *src, *audioconvert, *spectrum, *sink; GstBus *bus; GstCaps *caps; GMainLoop *loop; gst_init (&argc, &argv); // g_print("Enter Upper Frequency Bound:"); // scanf("%d" , &inputfreq); // AUDIOFREQ = (inputfreq * 2); // g_print("Enter Number of Frequncy Bands:"); // scanf("%d" , &spect_bands); bin = gst_pipeline_new ("bin"); src = gst_element_factory_make ("audiotestsrc", "src"); g_object_set (G_OBJECT (src), "wave", 0, "freq", 6000.0, NULL); audioconvert = gst_element_factory_make ("audioconvert", NULL); g_assert (audioconvert); spectrum = gst_element_factory_make ("spectrum", "spectrum"); g_object_set (G_OBJECT (spectrum), "bands", spect_bands, "threshold", -80, "message", TRUE, "message-phase", TRUE, NULL); sink = gst_element_factory_make ("fakesink", "sink"); g_object_set (G_OBJECT (sink), "sync", TRUE, NULL); gst_bin_add_many (GST_BIN (bin), src, audioconvert, spectrum, sink, NULL); caps = gst_caps_new_simple ("audio/x-raw-int", "rate", G_TYPE_INT, AUDIOFREQ, NULL); if (!gst_element_link (src, audioconvert) || !gst_element_link_filtered (audioconvert, spectrum, caps) || !gst_element_link (spectrum, sink)) { fprintf (stderr, "can't link elements\n"); exit (1); } gst_caps_unref (caps); bus = gst_element_get_bus (bin); gst_bus_add_watch (bus, message_handler, NULL); gst_object_unref (bus); gst_element_set_state (bin, GST_STATE_PLAYING); /* we need to run a GLib main loop to get the messages */ loop = g_main_loop_new (NULL, FALSE); g_main_loop_run (loop); gst_element_set_state (bin, GST_STATE_NULL); gst_object_unref (bin); return 0; }
/* Build the webcam pipeline: create the display/photo/video bins, add
 * display+photo bins to the pipeline, link them, and attach both an
 * async signal watch and a sync handler to the pipeline bus.
 * Errors from bin creation are only reported via g_print; the function
 * continues regardless. */
void acam_webcam_setup_devices (acam_webcam_device_s *acam_webcam_device)
{
	/* NOTE(review): the link result is stored but never checked */
	gboolean ok = TRUE;
	GError *tmp_error = NULL;

	acam_webcam_device->video_pipeline = gst_pipeline_new ("pipeline");

	/* Create all bins */
	acam_webcam_setup_create_video_display_bin (acam_webcam_device, &tmp_error);
	acam_webcam_setup_create_photo_save_bin (acam_webcam_device, &tmp_error);
	acam_webcam_setup_create_video_save_bin (acam_webcam_device, &tmp_error);

	if (tmp_error != NULL)
		g_print ("One or more needed gstreamer elements are missing\n");

	/* the video_save_bin is created above but not added here */
	gst_bin_add_many (GST_BIN (acam_webcam_device->video_pipeline),
			acam_webcam_device->video_display_bin,
			acam_webcam_device->photo_save_bin, NULL);

	ok = gst_element_link (acam_webcam_device->video_display_bin,
			acam_webcam_device->photo_save_bin);

	acam_webcam_device->bus = gst_element_get_bus (acam_webcam_device->video_pipeline);
	gst_bus_add_signal_watch (acam_webcam_device->bus);
	g_signal_connect (G_OBJECT (acam_webcam_device->bus), "message",
			G_CALLBACK (acam_webcam_bus_message_cb), acam_webcam_device);
	/* NOTE(review): the 3-argument gst_bus_set_sync_handler matches the
	 * GStreamer 0.10 signature; 1.0 adds a GDestroyNotify argument —
	 * confirm which version this code targets. */
	gst_bus_set_sync_handler (acam_webcam_device->bus,
			(GstBusSyncHandler) acam_webcam_bus_sync_handler, acam_webcam_device);
}
/* Create a new GstVideoEditor: builds the main pipeline and the NLE
 * source and wires up the pad-added and bus-message callbacks.
 * Returns NULL (and sets @err) when the pipeline cannot be created. */
GstVideoEditor *
gst_video_editor_new (GError ** err)
{
  GstVideoEditor *editor;

  editor = g_object_new (GST_TYPE_VIDEO_EDITOR, NULL);

  editor->priv->main_pipeline = gst_pipeline_new ("main_pipeline");
  if (!editor->priv->main_pipeline) {
    g_set_error (err, GVE_ERROR, GST_ERROR_PLUGIN_LOAD,
        ("Failed to create a GStreamer Bin. "
            "Please check your GStreamer installation."));
    /* sink the floating ref so unref actually disposes the object */
    g_object_ref_sink (editor);
    g_object_unref (editor);
    return NULL;
  }

  /* Create elements */
  editor->priv->nle_source = gst_nle_source_new ();

  /* Listen for a "pad-added" to link the composition with the encoder tail */
  editor->priv->bus =
      gst_element_get_bus (GST_ELEMENT (editor->priv->main_pipeline));
  g_signal_connect (editor->priv->nle_source, "pad-added",
      G_CALLBACK (new_decoded_pad_cb), editor);

  /* Connect bus signals */
  gst_bus_add_signal_watch (editor->priv->bus);
  editor->priv->sig_bus_async =
      g_signal_connect (editor->priv->bus, "message",
      G_CALLBACK (gve_bus_message_cb), editor);

  return editor;
}
/* GObject finalize: release every resource the client owns — Avahi
 * handles, clock, HTTP/JSON helpers, the player (detaching its bus
 * watch first) and the owned strings — then chain up. */
static void
snra_client_finalize (GObject * object)
{
  SnraClient *client = (SnraClient *) object;

  if (client->avahi_sb)
    avahi_service_browser_free (client->avahi_sb);
  if (client->avahi_client)
    avahi_client_free (client->avahi_client);
  if (client->glib_poll)
    avahi_glib_poll_free (client->glib_poll);

  if (client->net_clock)
    gst_object_unref (client->net_clock);
  if (client->soup)
    g_object_unref (client->soup);
  if (client->json)
    g_object_unref (client->json);

  if (client->player) {
    GstBus *player_bus = gst_element_get_bus (client->player);
    gst_bus_remove_signal_watch (player_bus);
    gst_object_unref (player_bus);
    gst_object_unref (client->player);
  }

  g_free (client->server_host);
  g_free (client->connected_server);

  G_OBJECT_CLASS (snra_client_parent_class)->finalize (object);
}
/**
 * gst_validate_pipeline_monitor_new:
 * @pipeline: (transfer none): a #GstPipeline to run Validate on
 *
 * Creates a monitor for @pipeline, sets up its scenarios and hooks the
 * sync-message bus handler.  Returns NULL when the monitored object is
 * gone.
 */
GstValidatePipelineMonitor *
gst_validate_pipeline_monitor_new (GstPipeline * pipeline,
    GstValidateRunner * runner, GstValidateMonitor * parent)
{
  GstBus *pipeline_bus;
  const gchar *type_name;
  GstValidatePipelineMonitor *monitor =
      g_object_new (GST_TYPE_VALIDATE_PIPELINE_MONITOR, "object", pipeline,
      "validate-runner", runner, "validate-parent", parent,
      "pipeline", pipeline, NULL);

  if (GST_VALIDATE_MONITOR_GET_OBJECT (monitor) == NULL) {
    g_object_unref (monitor);
    return NULL;
  }

  gst_validate_pipeline_monitor_create_scenarios (GST_VALIDATE_BIN_MONITOR
      (monitor));

  pipeline_bus = gst_element_get_bus (GST_ELEMENT (pipeline));
  gst_bus_enable_sync_message_emission (pipeline_bus);
  g_signal_connect (pipeline_bus, "sync-message", (GCallback) _bus_handler,
      monitor);
  gst_object_unref (pipeline_bus);

  type_name = G_OBJECT_TYPE_NAME (pipeline);
  if (g_strcmp0 (type_name, "GstPlayBin") == 0)
    monitor->is_playbin = TRUE;
  else if (g_strcmp0 (type_name, "GstPlayBin3") == 0)
    monitor->is_playbin3 = TRUE;

  return monitor;
}
/* Run a blocking message loop on @pipe's bus until EOS, a warning, an
 * error or a step-done message is received. */
static void
event_loop (GstElement * pipe)
{
  GstBus *bus;
  GstMessage *message = NULL;

  bus = gst_element_get_bus (GST_ELEMENT (pipe));

  while (TRUE) {
    /* FIX: the arguments were swapped — the signature is
     * (bus, timeout, types).  Passing GST_MESSAGE_ANY as the timeout only
     * worked by accident of sign extension. */
    message =
        gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
        GST_MESSAGE_ANY);
    g_assert (message != NULL);

    switch (message->type) {
      case GST_MESSAGE_EOS:
        g_message ("got EOS");
        gst_message_unref (message);
        gst_object_unref (bus); /* FIX: bus reference was leaked */
        return;
      case GST_MESSAGE_WARNING:{
        GError *gerror;
        gchar *debug;

        /* FIX: warnings must be parsed with the warning API, not
         * gst_message_parse_error(), whose type check leaves gerror/debug
         * uninitialized for WARNING messages. */
        gst_message_parse_warning (message, &gerror, &debug);
        gst_object_default_error (GST_MESSAGE_SRC (message), gerror, debug);
        gst_message_unref (message);
        g_error_free (gerror);
        g_free (debug);
        gst_object_unref (bus);
        return;
      }
      case GST_MESSAGE_ERROR:{
        GError *gerror;
        gchar *debug;

        gst_message_parse_error (message, &gerror, &debug);
        gst_object_default_error (GST_MESSAGE_SRC (message), gerror, debug);
        gst_message_unref (message);
        g_error_free (gerror);
        g_free (debug);
        gst_object_unref (bus);
        return;
      }
      case GST_MESSAGE_STEP_DONE:{
        GstFormat format;
        guint64 amount;
        gdouble rate;
        gboolean flush, intermediate;
        guint64 duration;
        gboolean eos;

        gst_message_parse_step_done (message, &format, &amount, &rate,
            &flush, &intermediate, &duration, &eos);

        if (format == GST_FORMAT_DEFAULT) {
          g_message ("step done: %" GST_TIME_FORMAT " skipped in %"
              G_GUINT64_FORMAT " frames", GST_TIME_ARGS (duration), amount);
        } else {
          g_message ("step done: %" GST_TIME_FORMAT " skipped",
              GST_TIME_ARGS (duration));
        }
        /* FIX: the message (and bus) were leaked on this return path */
        gst_message_unref (message);
        gst_object_unref (bus);
        return;
      }
      default:
        gst_message_unref (message);
        break;
    }
  }
}
/*!
 * \brief handleMessage
 * Handles gstreamer bus messages. Mainly for debugging purposes and ensuring clean shutdown on error
 *
 * Drains every pending message from the pipeline's bus: missing-plugin
 * messages are reported to stderr, ERROR messages additionally drive the
 * pipeline to GST_STATE_NULL; everything else is consumed silently
 * (the diagnostic prints are intentionally commented out).
 */
void handleMessage(GstElement * pipeline)
{
    GError *err = NULL;
    gchar *debug = NULL;
    GstBus* bus = NULL;
    GstStreamStatusType tp;
    GstElement * elem = NULL;
    GstMessage* msg  = NULL;

    bus = gst_element_get_bus(pipeline);

    /* non-blocking drain: only process messages already queued */
    while(gst_bus_have_pending(bus)) {
        msg = gst_bus_pop(bus);

        //printf("Got %s message\n", GST_MESSAGE_TYPE_NAME(msg));

        if(gst_is_missing_plugin_message(msg))
        {
            //ERROR(1, "GStreamer: your gstreamer installation is missing a required plugin\n");
            fprintf(stderr, "GStreamer: your gstreamer installation is missing a required plugin\n");
        }
        else
        {
            switch (GST_MESSAGE_TYPE (msg)) {
            case GST_MESSAGE_STATE_CHANGED:
                /* declarations after a case label: valid C++, kept as-is */
                GstState oldstate, newstate, pendstate;
                gst_message_parse_state_changed(msg, &oldstate, &newstate, &pendstate);
                //fprintf(stderr, "state changed from %s to %s (pending: %s)\n", gst_element_state_get_name(oldstate),
                //                gst_element_state_get_name(newstate), gst_element_state_get_name(pendstate));
                break;
            case GST_MESSAGE_ERROR:
                gst_message_parse_error(msg, &err, &debug);

                //fprintf(stderr, "GStreamer Plugin: Embedded video playback halted; module %s reported: %s\n",
                //                gst_element_get_name(GST_MESSAGE_SRC (msg)), err->message);

                g_error_free(err);
                g_free(debug);

                /* shut the pipeline down cleanly on error */
                gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
                break;
            case GST_MESSAGE_EOS:
                //fprintf(stderr, "reached the end of the stream.");
                break;
            case GST_MESSAGE_STREAM_STATUS:
                gst_message_parse_stream_status(msg,&tp,&elem);
                //fprintf(stderr, "stream status: elem %s, %i\n", GST_ELEMENT_NAME(elem), tp);
                break;
            default:
                //fprintf(stderr, "unhandled message\n");
                break;
            }
        }
        gst_message_unref(msg);
    }

    gst_object_unref(GST_OBJECT(bus));
}