shmdata_any_reader_t * shmdata_any_reader_init (const char *socketName) { shmdata_any_reader_t *reader = (shmdata_any_reader_t *) g_malloc0 (sizeof (shmdata_any_reader_t)); reader->debug_ = SHMDATA_DISABLE_DEBUG; g_log_set_default_handler (shmdata_any_reader_log_handler, reader); reader->on_data_ = NULL; reader->on_data_user_data_ = NULL; reader->type_ = NULL; reader->data_caps_ = NULL; reader->do_absolute_ = FALSE; reader->run_gmainloop_ = TRUE; gst_init (NULL, NULL); reader->loop_ = g_main_loop_new (NULL, FALSE); reader->pipeline_ = gst_pipeline_new (NULL); if (reader->pipeline_ == NULL) g_critical ("cannot create gstreamer pipeline"); gst_element_set_state (reader->pipeline_, GST_STATE_PLAYING); return reader; }
/* Play a test tone at the given frequency through the default audio sink.
 * The pipeline is stopped (and presumably freed) by pipeline_stop after
 * LENGTH milliseconds. */
static void
play_sound (gdouble frequency)
{
  GstElement *source, *sink;
  GstElement *pipeline;

  pipeline = gst_pipeline_new ("note");
  source = gst_element_factory_make ("audiotestsrc", "source");
  sink = gst_element_factory_make ("autoaudiosink", "output");

  /* g_object_set / gst_bin_add_many on NULL elements emit criticals;
   * fail early if any element could not be created. */
  if (pipeline == NULL || source == NULL || sink == NULL) {
    g_warning ("play_sound: could not create GStreamer elements");
    if (source != NULL)
      gst_object_unref (source);
    if (sink != NULL)
      gst_object_unref (sink);
    if (pipeline != NULL)
      gst_object_unref (pipeline);
    return;
  }

  /* set frequency */
  g_object_set (source, "freq", frequency, NULL);

  gst_bin_add_many (GST_BIN (pipeline), source, sink, NULL);
  gst_element_link (source, sink);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* stop it after 500ms */
  g_timeout_add (LENGTH, (GSourceFunc) pipeline_stop, pipeline);
}
// Construct the GStreamer audio destination: a pipeline in which a custom
// WebKit audio source element pulls rendered audio (via m_callback into
// m_renderBus) and feeds a wavparse element. The remainder of the pipeline
// is attached once the wavparse src pad is available.
AudioDestinationGStreamer::AudioDestinationGStreamer(AudioIOCallback& callback, float sampleRate)
    : m_callback(callback)
    , m_renderBus(2, framesToPull, false)
    , m_sampleRate(sampleRate)
    , m_isPlaying(false)
{
    m_pipeline = gst_pipeline_new("play");
    GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
    ASSERT(bus);
    gst_bus_add_signal_watch(bus);
    g_signal_connect(bus, "message", G_CALLBACK(messageCallback), this);
    gst_object_unref(bus);

    // The source wraps m_renderBus and m_callback; "frames" controls how many
    // frames are pulled per cycle.
    GstElement* webkitAudioSrc = reinterpret_cast<GstElement*>(g_object_new(WEBKIT_TYPE_WEB_AUDIO_SRC,
                                                                            "rate", sampleRate,
                                                                            "bus", &m_renderBus,
                                                                            "provider", &m_callback,
                                                                            "frames", framesToPull, NULL));

    GstElement* wavParser = gst_element_factory_make("wavparse", 0);

    m_wavParserAvailable = wavParser;
    ASSERT_WITH_MESSAGE(m_wavParserAvailable, "Failed to create GStreamer wavparse element");
    if (!m_wavParserAvailable)
        return;

#ifndef GST_API_VERSION_1
    // GStreamer 0.10: wavparse exposes its src pad dynamically, so finish
    // building the pipeline from the pad-added callback.
    g_signal_connect(wavParser, "pad-added", G_CALLBACK(onGStreamerWavparsePadAddedCallback), this);
#endif
    gst_bin_add_many(GST_BIN(m_pipeline), webkitAudioSrc, wavParser, NULL);
    gst_element_link_pads_full(webkitAudioSrc, "src", wavParser, "sink", GST_PAD_LINK_CHECK_NOTHING);

#ifdef GST_API_VERSION_1
    // GStreamer 1.0: the pad is static, finish pipeline construction now.
    GRefPtr<GstPad> srcPad = adoptGRef(gst_element_get_static_pad(wavParser, "src"));
    finishBuildingPipelineAfterWavParserPadReady(srcPad.get());
#endif
}
int main(int argc, char **argv) { GstElement *pipeline; GstElement *src; GstElement *decoder; GstElement *sink; int fi = 0; if(argc < 2) { g_printf("Usage: %s <file>\n", argv[0]); exit(1); } gst_init(NULL, NULL); pipeline = gst_pipeline_new("pipeline"); src = gst_element_factory_make("filesrc", "src"); decoder = gst_element_factory_make("dtdrdec", "decoder"); sink = gst_element_factory_make("alsasink", "sink"); gst_bin_add_many(GST_BIN(pipeline), src, decoder, sink, NULL); gst_element_link_many(src, decoder, sink, NULL); g_object_set(src, "location", argv[1], NULL); gst_bus_add_watch(gst_pipeline_get_bus(GST_PIPELINE(pipeline)), bus_callback, NULL); gst_element_set_state(pipeline, GST_STATE_PLAYING); loop = g_main_loop_new(NULL, FALSE); g_main_loop_run(loop); gst_element_set_state(pipeline, GST_STATE_NULL); gst_object_unref(pipeline); exit(0); }
/* Build videotestsrc ! videoconvert ! cairooverlay ! videoconvert ! sink and
 * hook the cairooverlay draw/caps-changed signals up to overlay_state.
 * Returns the (unstarted) pipeline. */
static GstElement *
setup_gst_pipeline (CairoOverlayState * overlay_state)
{
  GstElement *pipeline;
  GstElement *cairo_overlay;
  GstElement *source, *adaptor1, *adaptor2, *sink;

  pipeline = gst_pipeline_new ("cairo-overlay-example");

  /* Adaptors needed because cairooverlay only supports ARGB data */
  source = gst_element_factory_make ("videotestsrc", "source");
  adaptor1 = gst_element_factory_make ("videoconvert", "adaptor1");
  cairo_overlay = gst_element_factory_make ("cairooverlay", "overlay");
  adaptor2 = gst_element_factory_make ("videoconvert", "adaptor2");
  sink = gst_element_factory_make ("ximagesink", "sink");
  if (sink == NULL)
    sink = gst_element_factory_make ("autovideosink", "sink");

  /* If failing, the element could not be created.
   * Previously only cairo_overlay was checked; a missing videotestsrc,
   * videoconvert or sink would crash later in bin-add/link. */
  g_assert (source);
  g_assert (adaptor1);
  g_assert (cairo_overlay);
  g_assert (adaptor2);
  g_assert (sink);

  /* Hook up the neccesary signals for cairooverlay */
  g_signal_connect (cairo_overlay, "draw",
      G_CALLBACK (draw_overlay), overlay_state);
  g_signal_connect (cairo_overlay, "caps-changed",
      G_CALLBACK (prepare_overlay), overlay_state);

  gst_bin_add_many (GST_BIN (pipeline), source, adaptor1,
      cairo_overlay, adaptor2, sink, NULL);

  if (!gst_element_link_many (source, adaptor1, cairo_overlay,
          adaptor2, sink, NULL)) {
    g_warning ("Failed to link elements!");
  }

  return pipeline;
}
// Build the playback pipeline: filesrc -> decodebin -> glimagesink.
// The decodebin source pad is linked later via the "pad-added" callback
// (cb_new_pad); GL reshape/draw are delegated to client callbacks.
void Pipeline::create()
{
    qDebug("Loading video: %s", m_videoLocation.toLatin1().data());
    gst_init (NULL, NULL);

#ifdef WIN32
    m_loop = g_main_loop_new (NULL, FALSE);
#endif

    m_pipeline = gst_pipeline_new ("pipeline");
    m_bus = gst_pipeline_get_bus (GST_PIPELINE (m_pipeline));
    gst_bus_add_watch (m_bus, (GstBusFunc) bus_call, this);
    // create_window is invoked synchronously on the streaming thread so the
    // video window can be prepared before the first frame.
    gst_bus_set_sync_handler (m_bus, (GstBusSyncHandler) create_window, this, NULL);
    gst_object_unref (m_bus);

    GstElement* videosrc = gst_element_factory_make ("filesrc", "filesrc0");
    GstElement* decodebin = gst_element_factory_make ("decodebin", "decodebin0");
    m_glimagesink = gst_element_factory_make ("glimagesink", "sink0");

    if (!videosrc || !decodebin || !m_glimagesink )
    {
        qDebug ("one element could not be found");
        return;
    }

    // Cap playback at 800 buffers and point the source at the video file.
    g_object_set(G_OBJECT(videosrc), "num-buffers", 800, NULL);
    g_object_set(G_OBJECT(videosrc), "location", m_videoLocation.toLatin1().data(), NULL);

    // GL hooks exposed by glimagesink.
    g_signal_connect(G_OBJECT(m_glimagesink), "client-reshape", G_CALLBACK (reshapeCallback), NULL);
    g_signal_connect(G_OBJECT(m_glimagesink), "client-draw", G_CALLBACK (drawCallback), NULL);

    gst_bin_add_many (GST_BIN (m_pipeline), videosrc, decodebin, m_glimagesink, NULL);

    // Only the static pads can be linked now; decodebin's src pad appears
    // dynamically and is connected in cb_new_pad.
    gst_element_link_pads (videosrc, "src", decodebin, "sink");
    g_signal_connect (decodebin, "pad-added", G_CALLBACK (cb_new_pad), this);
}
static void create_gst_pipline() { g_pipeline = gst_pipeline_new ("GtkGstreamer"); g_assert(g_pipeline); g_playbin = gst_element_factory_make ("playbin", "playbin0"); g_assert(g_playbin); g_videosink = gst_element_factory_make ("xvimagesink", "xvimagesink0") ; g_assert(g_videosink); /*Make sure: Every elements was created ok*/ if (!g_pipeline || !g_playbin || !g_videosink) { g_printerr ("One of the elements wasn't create... Exiting\n"); return; } g_object_set(g_playbin, "uri", g_filename, NULL); g_object_set(g_playbin, "video-sink", g_videosink, NULL); g_printf("####################### gstThreadMain watch bus!\n"); GstBus* bus = gst_pipeline_get_bus (GST_PIPELINE (g_pipeline)); gst_bus_add_watch (bus, bus_call, NULL); gst_object_unref (bus); g_printf("####################### gstThreadMain add link!\n"); /* Add Elements to the Bin */ gst_bin_add_many(GST_BIN (g_pipeline), g_playbin, NULL); gst_element_set_state (g_pipeline, GST_STATE_PAUSED); }
/* GObject instance init for OwrMediaRenderer: sets up the private struct,
 * creates a uniquely named pipeline slaved to the system clock and the
 * shared owr base time, and attaches its bus to the owr main context. */
static void owr_media_renderer_init(OwrMediaRenderer *renderer)
{
    OwrMediaRendererPrivate *priv;
    GstBus *bus;
    GSource *bus_source;
    gchar *bin_name;
    GstClock *system_clock;

    renderer->priv = priv = OWR_MEDIA_RENDERER_GET_PRIVATE(renderer);

    priv->media_type = DEFAULT_MEDIA_TYPE;
    priv->source = DEFAULT_SOURCE;
    priv->disabled = DEFAULT_DISABLED;

    priv->message_origin_bus_set = owr_message_origin_bus_set_new();

    bin_name = g_strdup_printf("media-renderer-%u", g_atomic_int_add(&unique_bin_id, 1));
    priv->pipeline = gst_pipeline_new(bin_name);
    g_free(bin_name);

    /* gst_system_clock_obtain() returns a reference owned by the caller and
     * gst_pipeline_use_clock() takes its own ref, so release ours — the
     * previous inline call leaked the clock reference. */
    system_clock = gst_system_clock_obtain();
    gst_pipeline_use_clock(GST_PIPELINE(priv->pipeline), system_clock);
    gst_object_unref(system_clock);

    /* Share a base time across all owr pipelines; disable start-time reset. */
    gst_element_set_base_time(priv->pipeline, _owr_get_base_time());
    gst_element_set_start_time(priv->pipeline, GST_CLOCK_TIME_NONE);

#ifdef OWR_DEBUG
    g_signal_connect(priv->pipeline, "deep-notify", G_CALLBACK(_owr_deep_notify), NULL);
#endif

    priv->sink = NULL;
    priv->src = NULL;

    /* Service the pipeline bus from the owr main context. */
    bus = gst_pipeline_get_bus(GST_PIPELINE(priv->pipeline));
    bus_source = gst_bus_create_watch(bus);
    g_source_set_callback(bus_source, (GSourceFunc) bus_call, priv->pipeline, NULL);
    g_source_attach(bus_source, _owr_get_main_context());
    g_source_unref(bus_source);
    /* the bus reference from gst_pipeline_get_bus() was previously leaked */
    gst_object_unref(bus);

    g_mutex_init(&priv->media_renderer_lock);
}
// Wrap a new GStreamer pipeline whose bus is serviced from the supplied
// GMainContext through a hand-rolled GSource (GstBusSource); bus messages
// are additionally routed through the caller-provided synchronous handler.
GstPipe::GstPipe(GMainContext *context,
                 GstBusSyncReply (*bus_sync_cb)(GstBus * /*bus*/, GstMessage *msg, gpointer user_data),
                 gpointer user_data)
    : pipeline_ (gst_pipeline_new(nullptr)),
      gmaincontext_(context),
      source_funcs_() {
  source_funcs_.prepare = source_prepare;
  source_funcs_.check = source_check;
  source_funcs_.dispatch = source_dispatch;
  source_funcs_.finalize = source_finalize;
  // Custom GSource sized to hold the bus pointer alongside the GSource header.
  source_ = g_source_new(&source_funcs_, sizeof(GstBusSource));
  reinterpret_cast<GstBusSource *>(source_)->bus =
      gst_pipeline_get_bus(GST_PIPELINE(pipeline_));
  g_source_attach(source_, gmaincontext_);
  // add a watch to a bus is not working,
  // (using g_idle_add from sync callback in GstPipeliner instead)
  gst_bus_set_sync_handler(reinterpret_cast<GstBusSource *>(source_)->bus,
                           bus_sync_cb, user_data, nullptr);
  reinterpret_cast<GstBusSource *>(source_)->inited = FALSE;
}
/* Build the muxing pipeline: two appsrc elements (video/audio) feeding an
 * encodebin, whose output goes to the sink. Injector probes are attached to
 * both sources before the pipeline is configured.
 * NOTE(review): self->priv->sink is presumably created inside
 * kms_muxing_pipeline_configure() before being added below — confirm. */
static void
kms_muxing_pipeline_prepare_pipeline (KmsMuxingPipeline * self)
{
  self->priv->pipeline = gst_pipeline_new (KMS_MUXING_PIPELINE_NAME);
  self->priv->videosrc = gst_element_factory_make ("appsrc", "videoSrc");
  self->priv->audiosrc = gst_element_factory_make ("appsrc", "audioSrc");
  self->priv->encodebin = gst_element_factory_make ("encodebin", NULL);

  kms_muxing_pipeline_add_injector_probe (self, self->priv->videosrc);
  kms_muxing_pipeline_add_injector_probe (self, self->priv->audiosrc);

  kms_muxing_pipeline_configure (self);

  gst_bin_add_many (GST_BIN (self->priv->pipeline), self->priv->videosrc,
      self->priv->audiosrc, self->priv->encodebin, self->priv->sink, NULL);

  if (!gst_element_link (self->priv->encodebin, self->priv->sink)) {
    GST_ERROR_OBJECT (self,
        "Could not link elements: %" GST_PTR_FORMAT ", %" GST_PTR_FORMAT,
        self->priv->encodebin, self->priv->sink);
  }

  /* Link against encodebin's request-pad templates ("video_%u"/"audio_%u"). */
  if (!gst_element_link_pads (self->priv->videosrc, "src",
          self->priv->encodebin, "video_%u")) {
    GST_ERROR_OBJECT (self,
        "Could not link elements: %" GST_PTR_FORMAT ", %" GST_PTR_FORMAT,
        self->priv->videosrc, self->priv->encodebin);
  }

  if (!gst_element_link_pads (self->priv->audiosrc, "src",
          self->priv->encodebin, "audio_%u")) {
    GST_ERROR_OBJECT (self,
        "Could not link elements: %" GST_PTR_FORMAT ", %" GST_PTR_FORMAT,
        self->priv->audiosrc, self->priv->encodebin);
  }
}
// Start decoding the audio source (in-memory data or a file path) so an
// AudioBus can be built from the decoded samples. Decoding is asynchronous:
// this only constructs the pipeline and moves it to PAUSED; the interesting
// work continues from the decodebin pad-added callback and the bus messages.
void AudioFileReader::decodeAudioForBusCreation()
{
    // Build the pipeline (giostreamsrc | filesrc) ! decodebin2
    // A deinterleave element is added once a src pad becomes available in decodebin.
    m_pipeline = gst_pipeline_new(0);

    GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline)));
    ASSERT(bus);
    gst_bus_add_signal_watch(bus.get());
    g_signal_connect(bus.get(), "message", G_CALLBACK(messageCallback), this);

    GstElement* source;
    if (m_data) {
        // In-memory decode: wrap the raw bytes in a GMemoryInputStream.
        ASSERT(m_dataSize);
        source = gst_element_factory_make("giostreamsrc", 0);
        GRefPtr<GInputStream> memoryStream = adoptGRef(g_memory_input_stream_new_from_data(m_data, m_dataSize, 0));
        g_object_set(source, "stream", memoryStream.get(), NULL);
    } else {
        source = gst_element_factory_make("filesrc", 0);
        g_object_set(source, "location", m_filePath, NULL);
    }

    m_decodebin = gst_element_factory_make("decodebin", "decodebin");
    g_signal_connect(m_decodebin.get(), "pad-added", G_CALLBACK(onGStreamerDecodebinPadAddedCallback), this);

    gst_bin_add_many(GST_BIN(m_pipeline), source, m_decodebin.get(), NULL);
    gst_element_link_pads_full(source, "src", m_decodebin.get(), "sink", GST_PAD_LINK_CHECK_NOTHING);

    // Catch errors here immediately, there might not be an error message if
    // we're unlucky.
    if (gst_element_set_state(m_pipeline, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE) {
        g_warning("Error: Failed to set pipeline to PAUSED");
        m_errorOccurred = true;
        g_main_loop_quit(m_loop.get());
    }
}
/* Test helper: run a playerendpoint against a remote webm URI inside its own
 * pipeline/main loop; the loop is quit from the endpoint's "eos" callback,
 * after which everything is torn down. */
static void
start_playerendpoint (void)
{
  GstElement *pipeline = gst_pipeline_new (NULL);
  GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  GstElement *playerendpoint = gst_element_factory_make ("playerendpoint", NULL);
  GMainLoop *loop = g_main_loop_new (NULL, TRUE);

  /* async-handling lets the pipeline absorb async state changes of children */
  g_object_set (G_OBJECT (pipeline), "async-handling", TRUE, NULL);

  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (bus_msg), pipeline);

  /* playerendpoint_eos quits the main loop */
  g_signal_connect (G_OBJECT (playerendpoint), "eos",
      G_CALLBACK (playerendpoint_eos), loop);
  g_object_set (G_OBJECT (playerendpoint), "uri",
      "http://ci.kurento.com/downloads/small.webm", NULL);

  gst_bin_add (GST_BIN (pipeline), playerendpoint);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  mark_point ();
  /* start playback through the endpoint's state property */
  g_object_set (G_OBJECT (playerendpoint), "state",
      KMS_URI_END_POINT_STATE_START, NULL);
  mark_point ();

  g_main_loop_run (loop);
  mark_point ();

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_bus_remove_signal_watch (bus);
  g_object_unref (bus);
  g_main_loop_unref (loop);
  g_object_unref (pipeline);
}
/**
 * Initialize the sound system.
 * Builds filesrc ! decodebin ! (volume ! audioconvert ! autoaudiosink);
 * the decodebin output is linked to the volume branch dynamically via the
 * "pad-added" callback (LinkElement). All elements are registered in the
 * eltset data list, and bus error/eos messages are routed to this object.
 */
void SoundSystem::InitSublayer() {
        GstElement *pipeline;
        GstElement *filesrc, *decode, *volume, *convert, *sink;
        GstBus *bus;

        gst_init(NULL, NULL);
        pipeline = gst_pipeline_new("sound-system");
        // the data list owns the pipeline reference and unrefs it on clear
        g_datalist_set_data_full(&eltset, "pipeline-element", pipeline,
                                 GDestroyNotify(gst_object_unref));
        filesrc = gst_element_factory_make("filesrc", "source");
        g_datalist_set_data(&eltset, "filesrc-element", filesrc);
        decode = gst_element_factory_make("decodebin", "decode");
        g_datalist_set_data(&eltset, "decode-element", decode);
        volume = gst_element_factory_make("volume", "volume");
        g_datalist_set_data(&eltset, "volume-element", volume);
        convert = gst_element_factory_make("audioconvert", "convert");
        g_datalist_set_data(&eltset, "convert-element", convert);
        sink = gst_element_factory_make("autoaudiosink", "output");
        g_datalist_set_data(&eltset, "output-element", sink);

        gst_bin_add_many(GST_BIN(pipeline), filesrc, decode, volume, convert,
                         sink, NULL);
        // static links only; decodebin -> volume is linked in LinkElement
        // once decodebin exposes a pad
        gst_element_link_many(filesrc, decode, NULL);
        gst_element_link_many(volume, convert, sink, NULL);
        g_signal_connect_swapped(decode, "pad-added",
                                 G_CALLBACK(LinkElement), &eltset);

        bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
        gst_bus_add_signal_watch(GST_BUS(bus));
        g_signal_connect_swapped(bus, "message::error",
                                 G_CALLBACK(ErrorMessageOccur), this);
        g_signal_connect_swapped(bus, "message::eos",
                                 G_CALLBACK(EosMessageOccur), this);
        gst_object_unref(bus);

        // apply the user-configured volume
        g_object_set(volume, "volume", progdt.volume, NULL);
}
GST_END_TEST
/* Check that agnosticbin handles a source that is linked late: the source is
 * attached after 1 s (link_source) while fakesink "handoff" proves buffers
 * flow; timeout_check aborts the test after 11 s if nothing arrived. */
GST_START_TEST (delay_stream)
{
  GMainLoop *loop = g_main_loop_new (NULL, TRUE);
  GstElement *pipeline = gst_pipeline_new (__FUNCTION__);
  GstElement *agnosticbin =
      gst_element_factory_make ("agnosticbin", "agnosticbin");
  GstElement *fakesink = gst_element_factory_make ("fakesink", NULL);
  GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));

  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (bus_msg), pipeline);

  /* handoff fires for every buffer that reaches the sink */
  g_object_set (G_OBJECT (fakesink), "async", FALSE, "sync", FALSE,
      "signal-handoffs", TRUE, NULL);
  g_signal_connect (G_OBJECT (fakesink), "handoff",
      G_CALLBACK (fakesink_hand_off), loop);

  gst_bin_add_many (GST_BIN (pipeline), agnosticbin, fakesink, NULL);
  gst_element_link (agnosticbin, fakesink);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  g_timeout_add_seconds (1, link_source, pipeline);
  g_timeout_add_seconds (11, timeout_check, pipeline);

  mark_point ();
  g_main_loop_run (loop);
  mark_point ();

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_bus_remove_signal_watch (bus);
  g_object_unref (pipeline);
  g_object_unref (bus);
  g_main_loop_unref (loop);
}
int main(int argc, char *argv[]) { GMainLoop *loop; GstBus *bus; /* initialize GStreamer */ gst_init(&argc, &argv); loop = g_main_loop_new(NULL, FALSE); /* check input arguments */ if (argc !=2 ) { g_print("Usage: %s <Ogg/Vorbis filename>\n", argv[0]); return -1; } /* create elements */ pipleline = gst_pipeline_new("audio-player"); source = gst_element_factory_make("filesrc", "file-source"); parser = gst_element_factory_make("oggdemux", "ogg-parser"); decoder = gst_element_factory_make("vorbisdec", "vorbis-decoder"); conv = gst_element_factory_make("audioconvert", "converter"); sink = gst_element_factory_make("alsasink", "alsa-output"); }
void Eyrie::record() { QVariant ret; if(recbin != NULL) { qDebug() << "Ending recording"; gst_element_set_state(recbin, GST_STATE_NULL); recbin = NULL; QMetaObject::invokeMethod(parent(), "reset", Q_RETURN_ARG(QVariant, ret)); return; } qDebug() << "Starting recording"; QMetaObject::invokeMethod(parent(), "setStatus", Q_RETURN_ARG(QVariant, ret), Q_ARG(QVariant, "")); recbin = gst_pipeline_new("pipeline"); GError *err = NULL; recbin = gst_parse_launch("autoaudiosrc ! level ! tee name=t t. ! queue ! audioconvert ! audioresample ! appsink name=asink caps=audio/x-raw-float,channels=1,rate=11025,width=32,endianness=1234 t. ! queue ! audioconvert ! monoscope ! videobalance saturation=0 ! videoflip method=6 ! ffmpegcolorspace ! xvimagesink name=overlay", &err); sink = gst_bin_get_by_name(GST_BIN(recbin), "asink"); GstAppSinkCallbacks appsink_cbs = { NULL, NULL, on_buffer, NULL }; gst_app_sink_set_callbacks(GST_APP_SINK(sink), &appsink_cbs, this, NULL); overlay = gst_bin_get_by_name(GST_BIN(recbin), "overlay"); gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(overlay), view->effectiveWinId()); gst_x_overlay_set_render_rectangle(GST_X_OVERLAY(overlay), 655, 140, 100, 200); gst_element_set_state(recbin, GST_STATE_PLAYING); attempts = 0; timer->start(10000); }
/* Test helper: build udpsrc ! fakesink, install a buffer probe on the
 * fakesink sink pad, start the pipeline and report the port udpsrc bound to
 * via *port. Returns the PLAYING pipeline (caller owns it). */
static GstElement *
build_recv_pipeline (GCallback havedata_handler, gpointer data, gint *port)
{
  GstElement *pipeline;
  GstElement *src;
  GstElement *sink;
  GstPad *pad = NULL;

  pipeline = gst_pipeline_new (NULL);
  src = gst_element_factory_make ("udpsrc", NULL);
  sink = gst_element_factory_make ("fakesink", NULL);
  g_object_set (sink, "sync", FALSE, NULL);

  ts_fail_unless (pipeline && src && sink, "Could not make pipeline(%p)"
      " or src(%p) or sink(%p)", pipeline, src, sink);

  gst_bin_add_many (GST_BIN (pipeline), src, sink, NULL);
  ts_fail_unless (gst_element_link (src, sink), "Could not link udpsrc"
      " and fakesink");

  pad = gst_element_get_static_pad (sink, "sink");
  gst_pad_add_buffer_probe (pad, havedata_handler, data);
  /* NOTE(review): pad already holds a ref from get_static_pad and this extra
   * ref is never released in this function — presumably intentional to keep
   * the pad alive for the probe in this test; confirm, otherwise both refs
   * leak. */
  gst_object_ref (pad);

  ts_fail_if (gst_element_set_state (pipeline, GST_STATE_PLAYING) ==
      GST_STATE_CHANGE_FAILURE, "Could not start recv pipeline");

  /* udpsrc chose a free port when starting; report it back to the caller */
  g_object_get (G_OBJECT (src), "port", port, NULL);

  return pipeline;
}
/* GObject instance init: create the internal pipeline holding a single
 * uridecodebin, route its bus messages through a synchronous handler, and
 * forward pad add/remove events to this endpoint. */
static void
kms_player_end_point_init (KmsPlayerEndPoint * self)
{
  GstElement *decodebin;
  GstBus *pipe_bus;

  self->priv = KMS_PLAYER_END_POINT_GET_PRIVATE (self);

  self->priv->loop = kms_loop_new ();
  self->priv->pipeline = gst_pipeline_new ("pipeline");

  decodebin = gst_element_factory_make ("uridecodebin", URIDECODEBIN);
  self->priv->uridecodebin = decodebin;
  gst_bin_add (GST_BIN (self->priv->pipeline), decodebin);

  /* Connect to signals: track pads as uridecodebin exposes/removes them. */
  g_signal_connect (decodebin, "pad-added", G_CALLBACK (pad_added), self);
  g_signal_connect (decodebin, "pad-removed", G_CALLBACK (pad_removed), self);

  /* Handle bus messages synchronously; drop our bus reference afterwards. */
  pipe_bus = gst_pipeline_get_bus (GST_PIPELINE (self->priv->pipeline));
  gst_bus_set_sync_handler (pipe_bus, bus_sync_signal_handler, self, NULL);
  g_object_unref (pipe_bus);
}
void LEDControl::initGstreamerTorch() { qDebug() << "GStreamer torch initialising"; gst_init(NULL, NULL); src = gst_element_factory_make("droidcamsrc", "src"); sink = gst_element_factory_make("droideglsink", "sink"); pipeline = gst_pipeline_new ("test-pipeline"); if (!src || !sink || !pipeline) { return; } // Build the pipeline gst_bin_add_many (GST_BIN (pipeline), src, sink, NULL); if (gst_element_link (src, sink) != TRUE) { qDebug() << "Elements could not be linked!"; gst_object_unref (pipeline); return; } g_object_set(G_OBJECT(src), "video-torch", 1, NULL); g_object_set(G_OBJECT(src), "mode", 2, NULL); gst_element_set_state(pipeline, GST_STATE_NULL); // if we get to here and haven't returned due to error, initialisation was successful qDebug() << "gstreamer initialised successfully"; m_gStreamerInitialised = true; }
/* Prepare a test context: videotestsrc ! viewfinderbin with a fakesink as
 * the viewfinder's video sink; optionally limit the number of buffers. */
static void
gstviewfinderbin_init_test_context (GstViewFinderBinTestContext * ctx,
    gint num_buffers)
{
  GstElement *sink;

  fail_unless (ctx != NULL);

  ctx->pipe = gst_pipeline_new ("pipeline");
  fail_unless (ctx->pipe != NULL);
  ctx->src = gst_element_factory_make ("videotestsrc", "src");
  fail_unless (ctx->src != NULL, "Failed to create videotestsrc element");
  sink = gst_element_factory_make ("fakesink", NULL);
  /* previously sink was passed to g_object_set/gst_object_unref unchecked */
  fail_unless (sink != NULL, "Failed to create fakesink element");
  ctx->vfbin = gst_element_factory_make ("viewfinderbin", "vfbin");
  fail_unless (ctx->vfbin != NULL, "Failed to create viewfinderbin element");
  /* the "video-sink" property takes its own reference, so drop ours */
  g_object_set (ctx->vfbin, "video-sink", sink, NULL);
  gst_object_unref (sink);

  if (num_buffers > 0)
    g_object_set (ctx->src, "num-buffers", num_buffers, NULL);

  fail_unless (gst_bin_add (GST_BIN (ctx->pipe), ctx->src));
  fail_unless (gst_bin_add (GST_BIN (ctx->pipe), ctx->vfbin));
  fail_unless (gst_element_link (ctx->src, ctx->vfbin));
}
/* Round-trip a tag string through
 * videotestsrc ! jpegenc ! metadatamux (exif) ! metadatademux ! fakesink
 * and verify every sent tag comes back with an equal value. received_tags is
 * a file-scope variable filled in by the bus handler. */
static void
test_tags (const gchar * tag_str)
{
  GstElement *pipeline;
  GstBus *bus;
  GMainLoop *loop;
  GstTagList *sent_tags;
  gint i, j, n_recv, n_sent;
  const gchar *name_sent, *name_recv;
  const GValue *value_sent, *value_recv;
  gboolean found, ok;
  gint comparison;
  GstElement *videotestsrc, *jpegenc, *metadatamux, *metadatademux, *fakesink;
  GstTagSetter *setter;

  GST_DEBUG ("testing tags : %s", tag_str);

  /* reset state left over from a previous run */
  if (received_tags) {
    gst_tag_list_free (received_tags);
    received_tags = NULL;
  }

  pipeline = gst_pipeline_new ("pipeline");
  fail_unless (pipeline != NULL);

  videotestsrc = gst_element_factory_make ("videotestsrc", "src");
  fail_unless (videotestsrc != NULL);
  /* a single buffer is enough to carry the tags through */
  g_object_set (G_OBJECT (videotestsrc), "num-buffers", 1, NULL);

  jpegenc = gst_element_factory_make ("jpegenc", "enc");
  /* jpegenc is optional in the test environment; skip instead of failing */
  if (jpegenc == NULL) {
    g_print ("Cannot test - jpegenc not available\n");
    return;
  }

  metadatamux = gst_element_factory_make ("metadatamux", "mux");
  g_object_set (G_OBJECT (metadatamux), "exif", TRUE, NULL);
  fail_unless (metadatamux != NULL);

  metadatademux = gst_element_factory_make ("metadatademux", "demux");
  fail_unless (metadatademux != NULL);

  fakesink = gst_element_factory_make ("fakesink", "sink");
  fail_unless (fakesink != NULL);

  gst_bin_add_many (GST_BIN (pipeline), videotestsrc, jpegenc, metadatamux,
      metadatademux, fakesink, NULL);
  ok = gst_element_link_many (videotestsrc, jpegenc, metadatamux,
      metadatademux, fakesink, NULL);
  fail_unless (ok == TRUE);

  loop = g_main_loop_new (NULL, TRUE);
  fail_unless (loop != NULL);

  bus = gst_element_get_bus (pipeline);
  fail_unless (bus != NULL);
  gst_bus_add_watch (bus, bus_handler, loop);
  gst_object_unref (bus);

  /* tags must be set while the muxer is in READY (before PLAYING) */
  gst_element_set_state (pipeline, GST_STATE_READY);

  setter = GST_TAG_SETTER (metadatamux);
  fail_unless (setter != NULL);
  sent_tags = gst_structure_from_string (tag_str, NULL);
  fail_unless (sent_tags != NULL);
  gst_tag_setter_merge_tags (setter, sent_tags, GST_TAG_MERGE_REPLACE);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  /* bus_handler quits the loop on EOS/error and fills received_tags */
  g_main_loop_run (loop);

  GST_DEBUG ("mainloop done : %p", received_tags);

  /* verify tags */
  fail_unless (received_tags != NULL);
  n_recv = gst_structure_n_fields (received_tags);
  n_sent = gst_structure_n_fields (sent_tags);
  /* we also get e.g. an exif binary block */
  fail_unless (n_recv >= n_sent);
  /* FIXME: compare taglits values */
  for (i = 0; i < n_sent; i++) {
    name_sent = gst_structure_nth_field_name (sent_tags, i);
    value_sent = gst_structure_get_value (sent_tags, name_sent);
    found = FALSE;
    /* look for the sent tag among the received ones */
    for (j = 0; j < n_recv; j++) {
      name_recv = gst_structure_nth_field_name (received_tags, j);
      if (!strcmp (name_sent, name_recv)) {
        value_recv = gst_structure_get_value (received_tags, name_recv);
        comparison = gst_value_compare (value_sent, value_recv);
        if (comparison != GST_VALUE_EQUAL) {
          /* log the mismatch before the assertion below trips */
          gchar *vs = g_strdup_value_contents (value_sent);
          gchar *vr = g_strdup_value_contents (value_recv);
          GST_DEBUG ("sent = %s:'%s', recv = %s:'%s'",
              G_VALUE_TYPE_NAME (value_sent), vs,
              G_VALUE_TYPE_NAME (value_recv), vr);
          g_free (vs);
          g_free (vr);
        }
        fail_unless (comparison == GST_VALUE_EQUAL,
            "tag item %s has been received with different type or value",
            name_sent);
        found = TRUE;
        break;
      }
    }
    fail_unless (found, "tag item %s is lost", name_sent);
  }

  gst_tag_list_free (received_tags);
  received_tags = NULL;
  gst_tag_list_free (sent_tags);

  gst_element_set_state (pipeline, GST_STATE_NULL);

  g_main_loop_unref (loop);
  g_object_unref (pipeline);
}
/*!
 * \brief OpenIMAJCapGStreamer::open Open the given file with gstreamer
 * \param type CvCapture type. One of CAP_GSTREAMER_*
 * \param filename Filename to open in case of CAP_GSTREAMER_FILE
 * \return boolean. Specifies if opening was succesful.
 *
 * In case of CAP_GSTREAMER_V4L(2), a pipelin is constructed as follows:
 *    v4l2src ! autoconvert ! appsink
 *
 *
 * The 'filename' parameter is not limited to filesystem paths, and may be one of the following:
 *
 *  - a normal filesystem path:
 *        e.g. video.avi or /path/to/video.avi or C:\\video.avi
 *  - an uri:
 *        e.g. file:///path/to/video.avi or rtsp:///path/to/stream.asf
 *  - a gstreamer pipeline description:
 *        e.g. videotestsrc ! videoconvert ! appsink
 *        the appsink name should be either 'appsink0' (the default) or 'opencvsink'
 *
 *  When dealing with a file, OpenIMAJCapGStreamer will not drop frames if the grabbing interval
 *  larger than the framerate period. (Unlike the uri or manual pipeline description, which assume
 *  a live source)
 *
 *  The pipeline will only be started whenever the first frame is grabbed. Setting pipeline properties
 *  is really slow if we need to restart the pipeline over and over again.
 *
 */
bool OpenIMAJCapGStreamer::open(const char* filename )
{
    if(!isInited) {
        //FIXME: threadsafety
        gst_init (NULL, NULL);
        isInited = true;
    }

    bool stream = false;
    bool manualpipeline = false;
    char *uri = NULL;
    uridecodebin = NULL;

    // test if we have a valid uri. If so, open it with an uridecodebin
    // else, we might have a file or a manual pipeline.
    // if gstreamer cannot parse the manual pipeline, we assume we were given and
    // ordinary file path.
    if(!gst_uri_is_valid(filename))
    {
        // NOTE(review): realpath() returns a malloc'd buffer which is
        // overwritten by the g_filename_to_uri() result below without being
        // freed — presumably a small one-off leak; confirm and free it.
        uri = realpath(filename, NULL);
        stream = false;
        if(uri)
        {
            uri = g_filename_to_uri(uri, NULL, NULL);
            if(!uri) {
                WARN("GStreamer: Error opening file\n");
                close();
                return false;
            }
        }
        else
        {
            // not a resolvable path: try to treat it as a pipeline description
            // NOTE(review): err is never freed on failure here.
            GError *err = NULL;
            uridecodebin = gst_parse_launch(filename, &err);
            if(!uridecodebin) {
                //fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
                //close();
                return false;
            }
            stream = true;
            manualpipeline = true;
        }
    } else {
        stream = true;
        uri = g_strdup(filename);
    }

    bool element_from_uri = false;
    if(!uridecodebin)
    {
        // At this writing, the v4l2 element (and maybe others too) does not support caps renegotiation.
        // This means that we cannot use an uridecodebin when dealing with v4l2, since setting
        // capture properties will not work.
        // The solution (probably only until gstreamer 1.2) is to make an element from uri when dealing with v4l2.
        gchar * protocol = gst_uri_get_protocol(uri);
        if (!strcasecmp(protocol , "v4l2"))
        {
            uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src", NULL);
            element_from_uri = true;
        }else{
            uridecodebin = gst_element_factory_make ("uridecodebin", NULL);
            g_object_set(G_OBJECT(uridecodebin),"uri",uri, NULL);
        }
        g_free(protocol);

        if(!uridecodebin) {
            //fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
            close();
            return false;
        }
    }

    if(manualpipeline)
    {
        // locate the appsink ('appsink*' or 'opencvsink') inside the
        // user-supplied pipeline
        GstIterator *it = NULL;
        it = gst_bin_iterate_sinks (GST_BIN(uridecodebin));

        gboolean done = FALSE;
        GstElement *element = NULL;
        gchar* name = NULL;
        GValue value = G_VALUE_INIT;

        while (!done)
        {
            switch (gst_iterator_next (it, &value))
            {
                case GST_ITERATOR_OK:
                    element = GST_ELEMENT (g_value_get_object (&value));
                    name = gst_element_get_name(element);
                    if (name){
                        if(strstr(name, "opencvsink") != NULL || strstr(name, "appsink") != NULL) {
                            sink = GST_ELEMENT ( gst_object_ref (element) );
                            done = TRUE;
                        }
                        g_free(name);
                    }
                    g_value_unset (&value);
                    break;
                case GST_ITERATOR_RESYNC:
                    gst_iterator_resync (it);
                    break;
                case GST_ITERATOR_ERROR:
                case GST_ITERATOR_DONE:
                    done = TRUE;
                    break;
            }
        }
        gst_iterator_free (it);

        if (!sink){
            //ERROR(1, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }
        pipeline = uridecodebin;
    }
    else
    {
        pipeline = gst_pipeline_new (NULL);
        // videoconvert (in 0.10: ffmpegcolorspace) automatically selects the correct colorspace
        // conversion based on caps.
        color = gst_element_factory_make(COLOR_ELEM, NULL);
        sink = gst_element_factory_make("appsink", NULL);

        gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL);

        if(element_from_uri) {
            // direct source element: static pads, link immediately
            if(!gst_element_link(uridecodebin, color)) {
                //ERROR(1, "GStreamer: cannot link color -> sink\n");
                gst_object_unref(pipeline);
                return false;
            }
        }else{
            // uridecodebin exposes pads dynamically; link in the callback
            g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);
        }

        if(!gst_element_link(color, sink)) {
            //ERROR(1, "GStreamer: cannot link color -> sink\n");
            gst_object_unref(pipeline);
            return false;
        }
    }

    //TODO: is 1 single buffer really high enough?
    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
    // live sources drop frames; files block instead
    gst_app_sink_set_drop (GST_APP_SINK(sink), stream);
    //do not emit signals: all calls will be synchronous and blocking
    gst_app_sink_set_emit_signals (GST_APP_SINK(sink), 0);

    // support 1 and 3 channel 8 bit data, as well as bayer (also 1 channel, 8bit)
    caps = gst_caps_from_string("video/x-raw, format=(string){BGR, GRAY8}; video/x-bayer,format=(string){rggb,bggr,grbg,gbrg}");
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
    gst_caps_unref(caps);

    //we do not start recording here just yet.
    // the user probably wants to set capture properties first, so start recording whenever the first frame is requested
    return true;
}
/* Feed audiotestsrc through audiorate while randomly dropping buffers (pad
 * probe, probability drop_probability) and injecting extras (test injector,
 * probability inject_probability), then verify audiorate's output is a
 * perfect stream: contiguous timestamps/offsets and sane buffer sizes. */
static void
do_perfect_stream_test (guint rate, guint width, gdouble drop_probability,
    gdouble inject_probability)
{
  GstElement *pipe, *src, *conv, *filter, *injector, *audiorate, *sink;
  GstMessage *msg;
  GstCaps *caps;
  GstPad *srcpad;
  GList *l, *bufs = NULL;
  GstClockTime next_time = GST_CLOCK_TIME_NONE;
  guint64 next_offset = GST_BUFFER_OFFSET_NONE;

  caps = gst_caps_new_simple ("audio/x-raw-int", "rate", G_TYPE_INT,
      rate, "width", G_TYPE_INT, width, NULL);

  GST_INFO ("-------- drop=%.0f%% caps = %" GST_PTR_FORMAT " ---------- ",
      drop_probability * 100.0, caps);

  /* sanity-check the test parameters themselves */
  g_assert (drop_probability >= 0.0 && drop_probability <= 1.0);
  g_assert (inject_probability >= 0.0 && inject_probability <= 1.0);
  g_assert (width > 0 && (width % 8) == 0);

  pipe = gst_pipeline_new ("pipeline");
  fail_unless (pipe != NULL);

  src = gst_element_factory_make ("audiotestsrc", "audiotestsrc");
  fail_unless (src != NULL);

  g_object_set (src, "num-buffers", 100, NULL);

  conv = gst_element_factory_make ("audioconvert", "audioconvert");
  fail_unless (conv != NULL);

  filter = gst_element_factory_make ("capsfilter", "capsfilter");
  fail_unless (filter != NULL);

  g_object_set (filter, "caps", caps, NULL);

  /* injector duplicates buffers with the configured probability */
  injector_inject_probability = inject_probability;

  injector = GST_ELEMENT (g_object_new (test_injector_get_type (), NULL));

  /* probe on the injector's src pad randomly drops buffers */
  srcpad = gst_element_get_pad (injector, "src");
  fail_unless (srcpad != NULL);
  gst_pad_add_buffer_probe (srcpad, G_CALLBACK (probe_cb), &drop_probability);
  gst_object_unref (srcpad);

  audiorate = gst_element_factory_make ("audiorate", "audiorate");
  fail_unless (audiorate != NULL);

  sink = gst_element_factory_make ("fakesink", "fakesink");
  fail_unless (sink != NULL);

  /* collect every output buffer into bufs via the handoff signal */
  g_object_set (sink, "signal-handoffs", TRUE, NULL);

  g_signal_connect (sink, "handoff", G_CALLBACK (got_buf), &bufs);

  gst_bin_add_many (GST_BIN (pipe), src, conv, filter, injector, audiorate,
      sink, NULL);
  gst_element_link_many (src, conv, filter, injector, audiorate, sink, NULL);

  fail_unless_equals_int (gst_element_set_state (pipe,
          GST_STATE_PLAYING), GST_STATE_CHANGE_ASYNC);

  /* wait for the state change to complete, then run to EOS */
  fail_unless_equals_int (gst_element_get_state (pipe, NULL, NULL, -1),
      GST_STATE_CHANGE_SUCCESS);

  msg = gst_bus_poll (GST_ELEMENT_BUS (pipe),
      GST_MESSAGE_EOS | GST_MESSAGE_ERROR, -1);
  fail_unless_equals_string (GST_MESSAGE_TYPE_NAME (msg), "eos");

  for (l = bufs; l != NULL; l = l->next) {
    GstBuffer *buf = GST_BUFFER (l->data);
    guint num_samples;

    fail_unless (GST_BUFFER_TIMESTAMP_IS_VALID (buf));
    fail_unless (GST_BUFFER_DURATION_IS_VALID (buf));
    fail_unless (GST_BUFFER_OFFSET_IS_VALID (buf));
    fail_unless (GST_BUFFER_OFFSET_END_IS_VALID (buf));

    GST_LOG ("buffer: ts=%" GST_TIME_FORMAT ", end_ts=%" GST_TIME_FORMAT
        " off=%" G_GINT64_FORMAT ", end_off=%" G_GINT64_FORMAT,
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf)),
        GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf));

    /* each buffer must start exactly where the previous one ended */
    if (GST_CLOCK_TIME_IS_VALID (next_time)) {
      fail_unless_equals_uint64 (next_time, GST_BUFFER_TIMESTAMP (buf));
    }
    if (next_offset != GST_BUFFER_OFFSET_NONE) {
      fail_unless_equals_uint64 (next_offset, GST_BUFFER_OFFSET (buf));
    }

    /* check buffer size for sanity */
    fail_unless_equals_int (GST_BUFFER_SIZE (buf) % (width / 8), 0);

    /* check there is actually as much data as there should be */
    num_samples = GST_BUFFER_OFFSET_END (buf) - GST_BUFFER_OFFSET (buf);
    fail_unless_equals_int (GST_BUFFER_SIZE (buf), num_samples * (width / 8));

    next_time = GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf);
    next_offset = GST_BUFFER_OFFSET_END (buf);
  }

  gst_message_unref (msg);
  gst_element_set_state (pipe, GST_STATE_NULL);
  gst_object_unref (pipe);

  g_list_foreach (bufs, (GFunc) gst_mini_object_unref, NULL);
  g_list_free (bufs);

  gst_caps_unref (caps);
}
/*
 * create_encoder_pipeline:
 * @encoder: encoder whose bins/links describe the pipeline to build.
 *
 * Assembles a GStreamer pipeline from the encoder's bin description:
 * adds every element, wires appsrc/appsink callbacks and an event probe
 * on sink-type elements, links the elements (with filter caps when
 * given) and installs the bus watch.
 *
 * Returns: 0 on success, 1 if an element could not be added.
 */
static gint
create_encoder_pipeline (Encoder *encoder)
{
  GstElement *pipeline, *element;
  Bin *bin;
  Link *link;
  GSList *bins, *links, *elements;
  GstElementFactory *element_factory;
  GType type;
  EncoderStream *stream;
  GstAppSrcCallbacks callbacks = {
    need_data_callback,
    NULL,
    NULL
  };
  GstAppSinkCallbacks encoder_appsink_callbacks = {
    NULL,
    NULL,
    new_sample_callback
  };
  GstCaps *caps;
  GstBus *bus;

  pipeline = gst_pipeline_new (NULL);

  /* add element to pipeline first. */
  bins = encoder->bins;
  while (bins != NULL) {
    bin = bins->data;
    elements = bin->elements;
    while (elements != NULL) {
      element = elements->data;
      if (!gst_bin_add (GST_BIN (pipeline), element)) {
        /* GST_ELEMENT_NAME avoids the string copy gst_element_get_name
         * would leak here. */
        GST_ERROR ("add element %s to bin %s error.",
            GST_ELEMENT_NAME (element), bin->name);
        return 1;
      }
      elements = g_slist_next (elements);
    }
    bins = g_slist_next (bins);
  }

  /* then links element. */
  bins = encoder->bins;
  while (bins != NULL) {
    bin = bins->data;

    /* hook up the appsrc feeding this bin, if its head element is one */
    element = bin->first;
    element_factory = gst_element_get_factory (element);
    type = gst_element_factory_get_element_type (element_factory);
    stream = NULL;
    if (g_strcmp0 ("GstAppSrc", g_type_name (type)) == 0) {
      GST_INFO ("Encoder appsrc found.");
      stream = encoder_get_stream (encoder, bin->name);
      gst_app_src_set_callbacks (GST_APP_SRC (element), &callbacks, stream,
          NULL);
    }

    /* hook up output callbacks / event probe on sink-type tail elements */
    element = bin->last;
    element_factory = gst_element_get_factory (element);
    type = gst_element_factory_get_element_type (element_factory);
    if ((g_strcmp0 ("GstAppSink", g_type_name (type)) == 0) ||
        (g_strcmp0 ("GstHlsSink", g_type_name (type)) == 0) ||
        (g_strcmp0 ("GstFileSink", g_type_name (type)) == 0)) {
      GstPad *pad;

      if (g_strcmp0 ("GstAppSink", g_type_name (type)) == 0) {
        GST_INFO ("Encoder appsink found.");
        gst_app_sink_set_callbacks (GST_APP_SINK (element),
            &encoder_appsink_callbacks, encoder, NULL);
      }
      pad = gst_element_get_static_pad (element, "sink");
      gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
          encoder_appsink_event_probe, encoder, NULL);
      /* gst_element_get_static_pad returns a new ref; release it */
      gst_object_unref (pad);
    }

    links = bin->links;
    while (links != NULL) {
      link = links->data;
      GST_INFO ("link element: %s -> %s", link->src_name, link->sink_name);
      if (link->caps != NULL) {
        caps = gst_caps_from_string (link->caps);
        gst_element_link_filtered (link->src, link->sink, caps);
        gst_caps_unref (caps);
      } else {
        gst_element_link (link->src, link->sink);
      }
      links = g_slist_next (links);
    }
    bins = g_slist_next (bins);
  }

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_callback, encoder);
  g_object_unref (bus);
  encoder->pipeline = pipeline;

  return 0;
}
int main(int argc, char *argv[]) { GstElement *pipeline, *audio_source, *tee, *audio_queue, *audio_convert, *audio_resample, *audio_sink; GstElement *video_queue, *visual, *video_convert, *video_sink; GstBus *bus; GstMessage *msg; GstPadTemplate *tee_src_pad_template; GstPad *tee_audio_pad, *tee_video_pad; GstPad *queue_audio_pad, *queue_video_pad; /* Initialize GStreamer */ gst_init (&argc, &argv); /* Create the elements */ audio_source = gst_element_factory_make ("audiotestsrc", "audio_source"); tee = gst_element_factory_make ("tee", "tee"); audio_queue = gst_element_factory_make ("queue", "audio_queue"); audio_convert = gst_element_factory_make ("audioconvert", "audio_convert"); audio_resample = gst_element_factory_make ("audioresample", "audio_resample"); audio_sink = gst_element_factory_make ("autoaudiosink", "audio_sink"); video_queue = gst_element_factory_make ("queue", "video_queue"); visual = gst_element_factory_make ("wavescope", "visual"); video_convert = gst_element_factory_make ("ffmpegcolorspace", "csp"); video_sink = gst_element_factory_make ("autovideosink", "video_sink"); /* Create the empty pipeline */ pipeline = gst_pipeline_new ("test-pipeline"); if (!pipeline || !audio_source || !tee || !audio_queue || !audio_convert || !audio_resample || !audio_sink || !video_queue || !visual || !video_convert || !video_sink) { g_printerr ("Not all elements could be created.\n"); return -1; } /* Configure elements */ g_object_set (audio_source, "freq", 215.0f, NULL); g_object_set (visual, "shader", 0, "style", 1, NULL); /* Link all elements that can be automatically linked because they have "Always" pads */ gst_bin_add_many (GST_BIN (pipeline), audio_source, tee, audio_queue, audio_convert, audio_resample, audio_sink, video_queue, visual, video_convert, video_sink, NULL); if (gst_element_link_many (audio_source, tee, NULL) != TRUE || gst_element_link_many (audio_queue, audio_convert, audio_resample, audio_sink, NULL) != TRUE || gst_element_link_many 
(video_queue, visual, video_convert, video_sink, NULL) != TRUE) { g_printerr ("Elements could not be linked.\n"); gst_object_unref (pipeline); return -1; } /* Manually link the Tee, which has "Request" pads */ tee_src_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (tee), "src%d"); tee_audio_pad = gst_element_request_pad (tee, tee_src_pad_template, NULL, NULL); g_print ("Obtained request pad %s for audio branch.\n", gst_pad_get_name (tee_audio_pad)); queue_audio_pad = gst_element_get_static_pad (audio_queue, "sink"); tee_video_pad = gst_element_request_pad (tee, tee_src_pad_template, NULL, NULL); g_print ("Obtained request pad %s for video branch.\n", gst_pad_get_name (tee_video_pad)); queue_video_pad = gst_element_get_static_pad (video_queue, "sink"); if (gst_pad_link (tee_audio_pad, queue_audio_pad) != GST_PAD_LINK_OK || gst_pad_link (tee_video_pad, queue_video_pad) != GST_PAD_LINK_OK) { g_printerr ("Tee could not be linked.\n"); gst_object_unref (pipeline); return -1; } gst_object_unref (queue_audio_pad); gst_object_unref (queue_video_pad); /* Start playing the pipeline */ gst_element_set_state (pipeline, GST_STATE_PLAYING); /* Wait until error or EOS */ bus = gst_element_get_bus (pipeline); msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS); /* Release the request pads from the Tee, and unref them */ gst_element_release_request_pad (tee, tee_audio_pad); gst_element_release_request_pad (tee, tee_video_pad); gst_object_unref (tee_audio_pad); gst_object_unref (tee_video_pad); /* Free resources */ if (msg != NULL) gst_message_unref (msg); gst_object_unref (bus); gst_element_set_state (pipeline, GST_STATE_NULL); gst_object_unref (pipeline); return 0; }
int main (int argc, char **argv) { GOptionEntry options[] = { {"effects", 'e', 0, G_OPTION_ARG_STRING, &opt_effects, "Effects to use (comma-separated list of element names)", NULL}, {NULL} }; GOptionContext *ctx; GError *err = NULL; GMainLoop *loop; GstElement *src, *q1, *q2, *effect, *filter1, *filter2, *sink; gchar **effect_names, **e; ctx = g_option_context_new (""); g_option_context_add_main_entries (ctx, options, GETTEXT_PACKAGE); g_option_context_add_group (ctx, gst_init_get_option_group ()); if (!g_option_context_parse (ctx, &argc, &argv, &err)) { g_print ("Error initializing: %s\n", err->message); g_option_context_free (ctx); g_clear_error (&err); return 1; } g_option_context_free (ctx); GST_FIXME ("Multiple things to check/fix, see source code"); if (opt_effects != NULL) effect_names = g_strsplit (opt_effects, ",", -1); else effect_names = g_strsplit (DEFAULT_EFFECTS, ",", -1); for (e = effect_names; e != NULL && *e != NULL; ++e) { GstElement *el; el = gst_element_factory_make (*e, NULL); if (el) { g_print ("Adding effect '%s'\n", *e); g_queue_push_tail (&effects, el); } } pipeline = gst_pipeline_new ("pipeline"); src = gst_element_factory_make ("videotestsrc", NULL); g_object_set (src, "is-live", TRUE, NULL); filter1 = gst_element_factory_make ("capsfilter", NULL); gst_util_set_object_arg (G_OBJECT (filter1), "caps", "video/x-raw, width=320, height=240, " "format={ I420, YV12, YUY2, UYVY, AYUV, Y41B, Y42B, " "YVYU, Y444, v210, v216, NV12, NV21, UYVP, A420, YUV9, YVU9, IYU1 }"); q1 = gst_element_factory_make ("queue", NULL); blockpad = gst_element_get_static_pad (q1, "src"); conv_before = gst_element_factory_make ("videoconvert", NULL); effect = g_queue_pop_head (&effects); cur_effect = effect; conv_after = gst_element_factory_make ("videoconvert", NULL); q2 = gst_element_factory_make ("queue", NULL); filter2 = gst_element_factory_make ("capsfilter", NULL); gst_util_set_object_arg (G_OBJECT (filter2), "caps", "video/x-raw, width=320, height=240, " 
"format={ RGBx, BGRx, xRGB, xBGR, RGBA, BGRA, ARGB, ABGR, RGB, BGR }"); sink = gst_element_factory_make ("ximagesink", NULL); gst_bin_add_many (GST_BIN (pipeline), src, filter1, q1, conv_before, effect, conv_after, q2, sink, NULL); gst_element_link_many (src, filter1, q1, conv_before, effect, conv_after, q2, sink, NULL); gst_element_set_state (pipeline, GST_STATE_PLAYING); loop = g_main_loop_new (NULL, FALSE); gst_bus_add_watch (GST_ELEMENT_BUS (pipeline), bus_cb, loop); g_timeout_add_seconds (1, timeout_cb, loop); g_main_loop_run (loop); gst_element_set_state (pipeline, GST_STATE_NULL); gst_object_unref (pipeline); return 0; }
/*
 * Builds the analysis pipeline for @file: filesrc ! decodebin, plus an
 * "audio" bin (audioconvert ! float capsfilter ! audioresample ! mono
 * capsfilter ! fakesink) that decodebin is connected to dynamically via
 * mirageaudio_cb_newpad.  Samples are delivered through the fakesink
 * handoff signal.  Finally prerolls to PAUSED to learn the sample rate.
 */
void
mirageaudio_initgstreamer(MirageAudio *ma, const gchar *file)
{
    GstElement *filesrc;
    GstElement *decoder;
    GstElement *convert;
    GstElement *resample;
    GstElement *capsfilter_float;
    GstElement *capsfilter_mono;
    GstElement *fakesink;
    GstCaps *caps_float;
    GstCaps *caps_mono;
    GstPad *ghost_target;
    GstClockTime max_wait;

    // Gstreamer decoder setup
    ma->pipeline = gst_pipeline_new("pipeline");

    // decoder
    filesrc = gst_element_factory_make("filesrc", "source");
    g_object_set(G_OBJECT(filesrc), "location", file, NULL);
    decoder = gst_element_factory_make("decodebin", "decoder");
    g_signal_connect(decoder, "pad-added",
            G_CALLBACK(mirageaudio_cb_newpad), ma);
    gst_bin_add_many(GST_BIN(ma->pipeline), filesrc, decoder, NULL);
    gst_element_link(filesrc, decoder);

    // audio conversion
    ma->audio = gst_bin_new("audio");

    convert = gst_element_factory_make("audioconvert", "conv");
    caps_float = gst_caps_new_simple("audio/x-raw",
            "format", G_TYPE_STRING, GST_AUDIO_NE(F32),
            NULL);
    capsfilter_float = gst_element_factory_make("capsfilter", "cfilt_float");
    g_object_set(G_OBJECT(capsfilter_float), "caps", caps_float, NULL);
    gst_caps_unref(caps_float);

    resample = gst_element_factory_make("audioresample", "resample");
    caps_mono = gst_caps_new_simple("audio/x-raw",
            "format", G_TYPE_STRING, GST_AUDIO_NE(F32),
            "channels", G_TYPE_INT, 1,
            NULL);
    capsfilter_mono = gst_element_factory_make("capsfilter", "cfilt_resample");
    g_object_set(G_OBJECT(capsfilter_mono), "caps", caps_mono, NULL);
    gst_caps_unref(caps_mono);

    // samples are pulled out via the handoff signal
    fakesink = gst_element_factory_make("fakesink", "sink");
    g_object_set(G_OBJECT(fakesink), "signal-handoffs", TRUE, NULL);
    g_signal_connect(fakesink, "handoff",
            G_CALLBACK(mirageaudio_cb_have_data), ma);

    gst_bin_add_many(GST_BIN(ma->audio), convert, resample,
            capsfilter_mono, capsfilter_float, fakesink, NULL);
    gst_element_link_many(convert, capsfilter_float, resample,
            capsfilter_mono, fakesink, NULL);

    // expose the converter's sink pad on the bin so decodebin can link
    ghost_target = gst_element_get_static_pad(convert, "sink");
    gst_element_add_pad(ma->audio, gst_ghost_pad_new("sink", ghost_target));
    gst_object_unref(ghost_target);
    gst_bin_add(GST_BIN(ma->pipeline), ma->audio);

    // Get sampling rate of audio file
    max_wait = 1 * GST_SECOND;
    if (gst_element_set_state(ma->pipeline, GST_STATE_READY) ==
            GST_STATE_CHANGE_ASYNC) {
        gst_element_get_state(ma->pipeline, NULL, NULL, max_wait);
    }
    if (gst_element_set_state(ma->pipeline, GST_STATE_PAUSED) ==
            GST_STATE_CHANGE_ASYNC) {
        gst_element_get_state(ma->pipeline, NULL, NULL, max_wait);
    }
}
int main (int argc, char *argv[]) { GtkWidget *window, *window_control; GtkWidget *button_state_null, *button_state_ready; GtkWidget *button_state_paused, *button_state_playing; GtkWidget *grid, *area; GstElement *pipeline; GstElement *videosrc, *videosink; GstStateChangeReturn ret; GstCaps *caps; GstBus *bus; gst_init (&argc, &argv); gtk_init (&argc, &argv); pipeline = gst_pipeline_new ("pipeline"); //window that contains an area where the video is drawn window = gtk_window_new (GTK_WINDOW_TOPLEVEL); gtk_window_set_default_size (GTK_WINDOW (window), 640, 480); gtk_window_move (GTK_WINDOW (window), 300, 10); gtk_window_set_title (GTK_WINDOW (window), "gtkgstwidget"); //window to control the states window_control = gtk_window_new (GTK_WINDOW_TOPLEVEL); gtk_window_set_resizable (GTK_WINDOW (window_control), FALSE); gtk_window_move (GTK_WINDOW (window_control), 10, 10); grid = gtk_grid_new (); gtk_container_add (GTK_CONTAINER (window_control), grid); //control state null button_state_null = gtk_button_new_with_label ("GST_STATE_NULL"); g_signal_connect (G_OBJECT (button_state_null), "clicked", G_CALLBACK (button_state_null_cb), pipeline); gtk_grid_attach (GTK_GRID (grid), button_state_null, 0, 1, 1, 1); gtk_widget_show (button_state_null); //control state ready button_state_ready = gtk_button_new_with_label ("GST_STATE_READY"); g_signal_connect (G_OBJECT (button_state_ready), "clicked", G_CALLBACK (button_state_ready_cb), pipeline); gtk_grid_attach (GTK_GRID (grid), button_state_ready, 0, 2, 1, 1); gtk_widget_show (button_state_ready); //control state paused button_state_paused = gtk_button_new_with_label ("GST_STATE_PAUSED"); g_signal_connect (G_OBJECT (button_state_paused), "clicked", G_CALLBACK (button_state_paused_cb), pipeline); gtk_grid_attach (GTK_GRID (grid), button_state_paused, 0, 3, 1, 1); gtk_widget_show (button_state_paused); //control state playing button_state_playing = gtk_button_new_with_label ("GST_STATE_PLAYING"); g_signal_connect (G_OBJECT 
(button_state_playing), "clicked", G_CALLBACK (button_state_playing_cb), pipeline); gtk_grid_attach (GTK_GRID (grid), button_state_playing, 0, 4, 1, 1); gtk_widget_show (button_state_playing); gtk_widget_show (grid); gtk_widget_show (window_control); g_signal_connect (G_OBJECT (window), "delete-event", G_CALLBACK (destroy_cb), pipeline); //configure the pipeline videosrc = gst_element_factory_make ("videotestsrc", "videotestsrc"); videosink = gst_element_factory_make ("gtksink", "gtksink"); g_object_get (videosink, "widget", &area, NULL); gtk_container_add (GTK_CONTAINER (window), area); g_object_unref (area); gtk_widget_realize (area); caps = gst_caps_new_simple ("video/x-raw", "width", G_TYPE_INT, 640, "height", G_TYPE_INT, 480, "format", G_TYPE_STRING, "BGRA", NULL); gst_bin_add_many (GST_BIN (pipeline), videosrc, videosink, NULL); if (!gst_element_link_filtered (videosrc, videosink, caps)) { gst_caps_unref (caps); g_warning ("Failed to link videosrc to glfiltercube!\n"); return -1; } gst_caps_unref (caps); //set window id on this event bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); g_signal_connect (bus, "message::error", G_CALLBACK (end_stream_cb), pipeline); g_signal_connect (bus, "message::warning", G_CALLBACK (end_stream_cb), pipeline); g_signal_connect (bus, "message::eos", G_CALLBACK (end_stream_cb), pipeline); gst_object_unref (bus); //start ret = gst_element_set_state (pipeline, GST_STATE_PLAYING); if (ret == GST_STATE_CHANGE_FAILURE) { g_print ("Failed to start up pipeline!\n"); return -1; } gtk_widget_show_all (window); gtk_main (); gst_deinit (); return 0; }
/*
 * build_pipeline:
 * @gstreamer: the NscGStreamer whose private pipeline to (re)build.
 *
 * Constructs filesrc ! decodebin ! encoder ! filesink, replacing any
 * previous pipeline.  On failure each step records the reason in
 * priv->construct_error and returns early; on success clears
 * priv->rebuild_pipeline.
 */
static void
build_pipeline (NscGStreamer *gstreamer)
{
	NscGStreamerPrivate *priv;
	GstBus              *bus;

	g_return_if_fail (NSC_IS_GSTREAMER (gstreamer));

	priv = NSC_GSTREAMER_GET_PRIVATE (gstreamer);

	if (priv->pipeline != NULL) {
		gst_object_unref (GST_OBJECT (priv->pipeline));
	}

	priv->pipeline = gst_pipeline_new ("pipeline");
	bus = gst_element_get_bus (priv->pipeline);
	gst_bus_add_signal_watch (bus);

	/* Connect the signals we want to listen to on the bus */
	g_signal_connect (G_OBJECT (bus), "message::error",
			  G_CALLBACK (error_cb), gstreamer);
	g_signal_connect (G_OBJECT (bus), "message::eos",
			  G_CALLBACK (eos_cb), gstreamer);
	/* The signal watch holds its own reference; drop ours. */
	gst_object_unref (bus);

	/* Read from disk */
	priv->filesrc = gst_element_factory_make (FILE_SOURCE, "file_src");
	if (priv->filesrc == NULL) {
		g_set_error (&priv->construct_error,
			     NSC_ERROR, NSC_ERROR_INTERNAL_ERROR,
			     _("Could not create GStreamer file input"));
		return;
	}

	/* Decode */
	priv->decode = gst_element_factory_make ("decodebin", "decode");
	if (priv->decode == NULL) {
		/* was a copy/paste of the filesrc message ("file input") */
		g_set_error (&priv->construct_error,
			     NSC_ERROR, NSC_ERROR_INTERNAL_ERROR,
			     _("Could not create GStreamer file decoder"));
		return;
	}

	/* Encode */
	priv->encode = build_encoder (gstreamer);
	if (priv->encode == NULL) {
		g_set_error (&priv->construct_error,
			     NSC_ERROR, NSC_ERROR_INTERNAL_ERROR,
			     _("Could not create GStreamer encoders for %s"),
			     gm_audio_profile_get_name (priv->profile));
		return;
	}

	/* Decodebin uses dynamic pads, so lets set up a callback. */
	g_signal_connect (G_OBJECT (priv->decode), "new-decoded-pad",
			  G_CALLBACK (connect_decodebin_cb),
			  priv->encode);

	/* Write to disk */
	priv->filesink = gst_element_factory_make (FILE_SINK, "file_sink");
	if (priv->filesink == NULL) {
		g_set_error (&priv->construct_error,
			     NSC_ERROR, NSC_ERROR_INTERNAL_ERROR,
			     _("Could not create GStreamer file output"));
		return;
	}

	/*
	 * TODO: Eventually, we should ask the user if they want to
	 * overwrite any existing file.
	 */
	g_signal_connect (G_OBJECT (priv->filesink), "allow-overwrite",
			  G_CALLBACK (just_say_yes), gstreamer);

	/* Add the elements to the pipeline */
	gst_bin_add_many (GST_BIN (priv->pipeline),
			  priv->filesrc, priv->decode,
			  priv->encode, priv->filesink,
			  NULL);

	/* Link filessrc and decoder */
	if (!gst_element_link_many (priv->filesrc, priv->decode, NULL)) {
		g_set_error (&priv->construct_error,
			     NSC_ERROR, NSC_ERROR_INTERNAL_ERROR,
			     _("Could not link pipeline"));
		return;
	}

	/* Link the rest */
	if (!gst_element_link (priv->encode, priv->filesink)) {
		g_set_error (&priv->construct_error,
			     NSC_ERROR, NSC_ERROR_INTERNAL_ERROR,
			     _("Could not link pipeline"));
		return;
	}

	priv->rebuild_pipeline = FALSE;
}
/*
 * Caps-negotiation benchmark: builds a tree-shaped pipeline of fan-in
 * nodes (depth -d, children per node -c, flavour -f audio|video), then
 * measures how long construction and reaching PAUSED take.
 */
gint
main (gint argc, gchar * argv[])
{
  GstBin *bin;
  GstClockTime start, end;
  GstElement *sink, *new_sink;

  /* default parameters */
  gint depth = 4;
  gint children = 3;
  gint flavour = FLAVOUR_AUDIO;
  const gchar *flavour_str = "audio";

  gst_init (&argc, &argv);

  /* check command line options; start at 1 so that argv[0] (the program
   * name) is not misread as an option */
  {
    gint arg;

    for (arg = 1; arg < argc; arg++) {
      if (!strcmp (argv[arg], "-d")) {
        arg++;
        if (arg < argc)
          depth = atoi (argv[arg]);
      } else if (!strcmp (argv[arg], "-c")) {
        arg++;
        if (arg < argc)
          children = atoi (argv[arg]);
      } else if (!strcmp (argv[arg], "-f")) {
        arg++;
        if (arg < argc) {
          flavour_str = argv[arg];
          switch (*flavour_str) {
            case 'a':
              flavour = FLAVOUR_AUDIO;
              break;
            case 'v':
              flavour = FLAVOUR_VIDEO;
              break;
            default:
              break;
          }
        }
      }
    }
  }

  /* build pipeline */
  g_print ("building %s pipeline with depth = %d and children = %d\n",
      flavour_str, depth, children);
  start = gst_util_get_timestamp ();
  bin = GST_BIN (gst_pipeline_new ("pipeline"));
  sink = gst_element_factory_make ("fakesink", NULL);
  gst_bin_add (bin, sink);
  if (!create_node (bin, sink, "sink", &new_sink, children, flavour)) {
    goto Error;
  }
  if (!create_nodes (bin, new_sink, depth, children, flavour)) {
    goto Error;
  }
  end = gst_util_get_timestamp ();
  /* num-threads = num-sources = pow (children, depth) */
  g_print ("%" GST_TIME_FORMAT " built pipeline with %d elements\n",
      GST_TIME_ARGS (end - start), GST_BIN_NUMCHILDREN (bin));

  /* meassure */
  g_print ("starting pipeline\n");
  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_READY);
  GST_DEBUG_BIN_TO_DOT_FILE (bin, GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE, "capsnego");
  start = gst_util_get_timestamp ();
  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_PAUSED);
  event_loop (GST_ELEMENT (bin), start);
  end = gst_util_get_timestamp ();
  g_print ("%" GST_TIME_FORMAT " reached paused\n",
      GST_TIME_ARGS (end - start));

  /* clean up */
Error:
  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_NULL);
  gst_object_unref (bin);
  return 0;
}