/** * rb_gst_create_filter_bin: * * Creates an initial bin to use for dynamically plugging filter elements into the * pipeline. * * Return value: filter bin */ GstElement * rb_gst_create_filter_bin () { GstElement *bin; GstElement *audioconvert; GstElement *identity; GstPad *pad; bin = gst_bin_new ("filterbin"); audioconvert = gst_element_factory_make ("audioconvert", "filteraudioconvert"); identity = gst_element_factory_make ("identity", "filteridentity"); gst_bin_add_many (GST_BIN (bin), audioconvert, identity, NULL); gst_element_link (audioconvert, identity); pad = gst_element_get_static_pad (audioconvert, "sink"); gst_element_add_pad (bin, gst_ghost_pad_new ("sink", pad)); gst_object_unref (pad); pad = gst_element_get_static_pad (identity, "src"); gst_element_add_pad (bin, gst_ghost_pad_new ("src", pad)); gst_object_unref (pad); return bin; }
// Constructs the audio output bin used as the pipeline's audio sink:
//   equalizer-3bands -> volume -> audioconvert -> autoaudiosink
// wrapped in a GstBin exposed through a single ghost "sink" pad, then
// restores the persisted volume and mute state from XmlSettingsManager.
Output::Output (QObject *parent)
: QObject (parent)
, Bin_ (gst_bin_new ("audio_sink_bin"))
, Equalizer_ (gst_element_factory_make ("equalizer-3bands", "equalizer"))
, Volume_ (gst_element_factory_make ("volume", "volume"))
, Converter_ (gst_element_factory_make ("audioconvert", "convert"))
, Sink_ (gst_element_factory_make ("autoaudiosink", "audio_sink"))
, SaveVolumeScheduled_ (false)
{
	gst_bin_add_many (GST_BIN (Bin_), Equalizer_, Volume_, Converter_, Sink_, nullptr);
	gst_element_link_many (Equalizer_, Volume_, Converter_, Sink_, nullptr);

	// Expose the equalizer's sink pad as the bin's "sink" ghost pad;
	// activate it and drop our reference to the target pad (the ghost
	// pad keeps its own).
	auto pad = gst_element_get_static_pad (Equalizer_, "sink");
	auto ghostPad = gst_ghost_pad_new ("sink", pad);
	gst_pad_set_active (ghostPad, TRUE);
	gst_element_add_pad (Bin_, ghostPad);
	gst_object_unref (pad);

	// Observe volume/mute changes made on the element itself so they can
	// be mirrored back (handlers are static callbacks taking `this`).
	g_signal_connect (Volume_, "notify::volume", G_CALLBACK (CbVolumeChanged), this);
	g_signal_connect (Volume_, "notify::mute", G_CALLBACK (CbMuteChanged), this);

	// Restore persisted volume; defaults to 1 (unity gain).
	const auto volume = XmlSettingsManager::Instance ()
			.Property ("AudioVolume", 1).toDouble ();
	setVolume (volume);

	// Restore persisted mute flag directly on the volume element.
	const auto isMuted = XmlSettingsManager::Instance ()
			.Property ("AudioMuted", false).toBool ();
	g_object_set (G_OBJECT (Volume_), "mute", static_cast<gboolean> (isMuted), nullptr);
}
// Builds (lazily, on first call) a bin wrapping the renderer's video sink
// behind an ffmpegcolorspace converter, exposed via a "sink" ghost pad.
// Returns the cached bin with an extra reference for the caller.
GstElement *buildElement()
{
#ifdef Q_WS_MAEMO_5
    // On Maemo 5 the renderer's sink is used directly, with no wrapper bin.
    // NOTE(review): this early return also skips precessNewStream() and the
    // extra ref taken below — confirm callers expect that on this platform.
    return m_element = m_videoRenderer->videoSink();
#endif
    if (m_bin == NULL) {
        GstBin * bin = GST_BIN(gst_bin_new(NULL));
        m_colorspace = gst_element_factory_make("ffmpegcolorspace", NULL);
        m_element = m_videoRenderer->videoSink();
        gst_bin_add(bin, m_colorspace);
        gst_bin_add(bin, m_element);
        gst_element_link(m_colorspace, m_element);
        // add ghostpads: expose the colorspace's sink pad on the bin;
        // the ghost pad holds its own ref, so release ours.
        GstPad *pad = gst_element_get_static_pad(m_colorspace, "sink");
        gst_element_add_pad(GST_ELEMENT(bin), gst_ghost_pad_new("sink", pad));
        gst_object_unref(GST_OBJECT(pad));
        m_bin = GST_ELEMENT(bin);
    }
    // (sic) renderer hook for a new stream — name typo is in the external API
    m_videoRenderer->precessNewStream();
    // Hand out an extra reference so the caller's eventual unref does not
    // destroy the cached bin.
    gst_object_ref(GST_OBJECT(m_bin));
    return m_bin;
}
/* a silence bin always run on an adderbin channel
 * this _should_ keep stream continuity while adding
 * removing sources
 *
 * Returns: (transfer full): a new bin (silence source -> raw audio
 * transform) exposing the transform's src pad as a "src" ghost pad.
 */
GstElement* get_silence_bin()
{
	GstElement *silbin;
	GstElement *atestsrc;
	GstElement *raw_trans;
	GstPad *target;
	GstPad *testpad;

	silbin = gst_bin_new(NULL);

	atestsrc = get_silence_source();
	gst_bin_add(GST_BIN(silbin), atestsrc);
	/* wave=4 selects the "silence" waveform on audiotestsrc */
	g_object_set(G_OBJECT(atestsrc), "wave", 4, NULL);

	raw_trans = get_raw_audio_trans();
	gst_bin_add(GST_BIN(silbin), raw_trans);
	gst_element_link(atestsrc, raw_trans);

	/* BUG FIX: gst_element_get_static_pad() returns a new reference and the
	 * ghost pad takes its own ref on the target, so the original inline call
	 * leaked one pad reference.  Keep the pad in a local and unref it. */
	target = gst_element_get_static_pad(raw_trans, "src");
	testpad = gst_ghost_pad_new("src", target);
	gst_object_unref(target);
	gst_element_add_pad(silbin, testpad);

	return silbin;
}
// Builds the logo input chain (filesrc -> pngdec -> imagefreeze -> queue2 ->
// ffmpegcolorspace) inside bin_logo, adds it to `pipeline`, points the file
// source at `nomfitxer` and links the chain.
void EntradaLogo::crea(GstElement *pipeline, QString nomfitxer)
{
	// Create the file-input elements (names in Catalan kept for consistency
	// with the rest of the project).
	bin_logo = gst_bin_new ("bin_logo");
	source = gst_element_factory_make ("filesrc", "fitxer_logo");
	dec = gst_element_factory_make ("pngdec", "decoder_logo");
	imagefreeze = gst_element_factory_make("imagefreeze", "imagefreeze_logo");
	queue = gst_element_factory_make("queue2", "queue_logo");
	conv_logo = gst_element_factory_make("ffmpegcolorspace", "color_conv_logo");

	// Verify every element could be created.
	if(!bin_logo || !source || !dec || !imagefreeze || !queue || !conv_logo){
		g_printerr ("Un dels elements de l'entrada de logo no s'ha pogut crear. Sortint.\n");
	}

	gst_bin_add_many (GST_BIN (bin_logo), source, dec, imagefreeze, queue, conv_logo, NULL);

	// BUG FIX: the original stored c_str() of the *temporary* returned by
	// toStdString(); that temporary was destroyed at the end of the statement,
	// so the pointer passed to g_object_set() in the next statement dangled.
	// Keep the std::string alive across the call (g_object_set copies the
	// string into the property, so a local is sufficient).
	const std::string nom_fitxer = nomfitxer.toStdString();
	g_object_set (G_OBJECT (source), "location", nom_fitxer.c_str(), NULL);

	// Add the logo bin to the target pipeline and link the chain.
	gst_bin_add_many (GST_BIN (pipeline), bin_logo, NULL);
	gst_element_link_many(source, dec, imagefreeze, queue, conv_logo, NULL);
}
/* Test fixture helper: builds a bin containing vp8enc ("encoder") linked to
 * vp8dec ("decoder"), ghost-pads the encoder's sink and decoder's src onto
 * the bin, attaches gst_check src/sink pads (the file-global `srcpad`,
 * `sinkpad`, `srctemplate`, `sinktemplate` and `buffers` come from the
 * gst_check harness), pushes stream-start/caps/segment events and sets the
 * bin to PLAYING.  `src_caps_str` optionally provides the input caps. */
static GstElement *
setup_vp8dec (const gchar * src_caps_str)
{
  GstElement *bin;
  GstElement *vp8enc, *vp8dec;
  GstCaps *srccaps = NULL;
  GstBus *bus;
  GstPad *ghostpad, *targetpad;

  if (src_caps_str) {
    srccaps = gst_caps_from_string (src_caps_str);
    fail_unless (srccaps != NULL);
  }

  bin = gst_bin_new ("bin");

  vp8enc = gst_check_setup_element ("vp8enc");
  fail_unless (vp8enc != NULL);
  vp8dec = gst_check_setup_element ("vp8dec");
  fail_unless (vp8dec != NULL);

  /* fixed names so test bodies can look the elements up by name */
  g_object_set (vp8enc, "name", "encoder", NULL);
  g_object_set (vp8dec, "name", "decoder", NULL);

  gst_bin_add_many (GST_BIN (bin), vp8enc, vp8dec, NULL);
  fail_unless (gst_element_link_pads (vp8enc, "src", vp8dec, "sink"));

  /* ghost the encoder's sink pad onto the bin */
  targetpad = gst_element_get_static_pad (vp8enc, "sink");
  fail_unless (targetpad != NULL);
  ghostpad = gst_ghost_pad_new ("sink", targetpad);
  fail_unless (ghostpad != NULL);
  gst_element_add_pad (bin, ghostpad);
  gst_object_unref (targetpad);

  /* and the decoder's src pad */
  targetpad = gst_element_get_static_pad (vp8dec, "src");
  fail_unless (targetpad != NULL);
  ghostpad = gst_ghost_pad_new ("src", targetpad);
  fail_unless (ghostpad != NULL);
  gst_element_add_pad (bin, ghostpad);
  gst_object_unref (targetpad);

  /* wire the harness pads up to the bin and activate them */
  srcpad = gst_check_setup_src_pad (bin, &srctemplate);
  sinkpad = gst_check_setup_sink_pad (bin, &sinktemplate);
  gst_pad_set_active (srcpad, TRUE);
  gst_pad_set_active (sinkpad, TRUE);
  gst_check_setup_events (srcpad, bin, srccaps, GST_FORMAT_TIME);

  bus = gst_bus_new ();
  gst_element_set_bus (bin, bus);

  fail_unless (gst_element_set_state (bin,
          GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE,
      "could not set to playing");

  if (srccaps)
    gst_caps_unref (srccaps);

  /* reset the harness's captured-buffer list for this test */
  buffers = NULL;
  return bin;
}
// setups audio visualization // a modified version of totem's bacon video widget static void setup_vis (gstPlay *play) { if (play->xid == 0) return; GstElement *vis_bin = NULL; GstElement *vis_element = NULL; GstElement *vis_capsfilter = NULL; GstPad *pad = NULL; GstElementFactory *fac = NULL; fac = setup_vis_find_factory (play->vis_name); if (fac == NULL) goto beach; //cant find the visualisation vis_element = gst_element_factory_create (fac, "vis_element"); if (!GST_IS_ELEMENT (vis_element)) goto beach; //cant create visualisation element vis_capsfilter = gst_element_factory_make ("capsfilter", "vis_capsfilter"); if (!GST_IS_ELEMENT (vis_capsfilter)) { gst_object_unref (vis_element); goto beach; //cant create visualisation capsfilter element } vis_bin = gst_bin_new ("vis_bin"); if (!GST_IS_ELEMENT (vis_bin)) { gst_object_unref (vis_element); gst_object_unref (vis_capsfilter); goto beach; //cant create visualisation bin } gst_bin_add_many (GST_BIN (vis_bin), vis_element, vis_capsfilter, NULL); // sink ghostpad pad = gst_element_get_static_pad (vis_element, "sink"); gst_element_add_pad (vis_bin, gst_ghost_pad_new ("sink", pad)); gst_object_unref (pad); // source ghostpad, link with vis_element pad = gst_element_get_static_pad (vis_capsfilter, "src"); gst_element_add_pad (vis_bin, gst_ghost_pad_new ("src", pad)); gst_element_link_pads (vis_element, "src", vis_capsfilter, "sink"); gst_object_unref (pad); beach: g_object_set (play->element, "vis-plugin", vis_bin, NULL); return; }
void Osd::init() { videosink = gst_element_factory_make("autovideosink", "video1"); overlay = gst_element_factory_make("textoverlay", "overlay1"); time = gst_element_factory_make("timeoverlay", "time1"); bin = gst_bin_new("osd"); gst_bin_add_many(GST_BIN(bin), videosink, time, overlay, nullptr); gst_element_link_many(overlay, time, videosink, nullptr); GstPad *pad = gst_element_get_static_pad(overlay, "video_sink"); GstPad *ghostpad = gst_ghost_pad_new("sink", pad); gst_element_add_pad(bin, ghostpad); gst_object_unref(GST_OBJECT(pad)); g_object_set(G_OBJECT(overlay), "text", "", nullptr); g_object_set(G_OBJECT(overlay), "silent", false, nullptr); #if GST_VERSION_MAJOR == 1 g_object_set(G_OBJECT(overlay), "valignment", "top", nullptr); g_object_set(G_OBJECT(overlay), "halignment", "right", nullptr); #else g_object_set(G_OBJECT(overlay), "valign", "top", nullptr); g_object_set(G_OBJECT(overlay), "halign", "right", nullptr); #endif g_object_set(G_OBJECT(overlay), "shaded-background", true, nullptr); g_object_set(G_OBJECT(time), "silent", !timeVisible, nullptr); g_object_set(G_OBJECT(pipeline), "video-sink", bin, nullptr); elements << overlay << time; timer.setSingleShot(true); connect(&timer, SIGNAL(timeout()), SLOT(clear())); }
// Regression test (GStreamer 0.10 API): wrapping an already-owned GstObject
// with takeOwnership == false must not change its reference count.
// gst_bin_new() returns a floating reference; gst_object_ref() +
// gst_object_sink() turn it into exactly one real reference, which the
// wrapper is expected to share, leaving the count at 1.
void RefPointerTest::refTest1()
{
    GstObject *bin = GST_OBJECT(gst_object_ref(GST_OBJECT(gst_bin_new(NULL))));
    gst_object_sink(bin);
    QGst::ObjectPtr object = QGst::ObjectPtr::wrap(bin, false);
    QCOMPARE(GST_OBJECT_REFCOUNT_VALUE(bin), 1);
}
/* Builds the renderer bin for an image renderer: a single appsink (stored in
 * priv->appsink) reachable through an active "sink" ghost pad.  The bin gets
 * a unique name derived from a global counter.  Must only be called once per
 * renderer (asserts that no appsink exists yet). */
static GstElement *owr_image_renderer_get_element(OwrMediaRenderer *renderer)
{
    OwrImageRenderer *self;
    OwrImageRendererPrivate *priv;
    GstElement *bin, *appsink;
    GstPad *target_pad, *ghost_pad;
    gchar *name;

    g_assert(renderer);

    self = OWR_IMAGE_RENDERER(renderer);
    priv = self->priv;

    g_assert(!priv->appsink);

    name = g_strdup_printf("image-renderer-bin-%u", g_atomic_int_add(&unique_bin_id, 1));
    bin = gst_bin_new(name);
    g_free(name);

    appsink = gst_element_factory_make("appsink", "image-renderer-appsink");
    g_assert(appsink);
    priv->appsink = appsink;

    /* keep only the most recent buffer, drop on overflow, no last-sample */
    g_object_set(appsink, "max-buffers", 1, "drop", TRUE, "qos", TRUE,
        "enable-last-sample", FALSE, NULL);
    gst_bin_add_many(GST_BIN(bin), appsink, NULL);

    target_pad = gst_element_get_static_pad(appsink, "sink");
    g_assert(target_pad);
    ghost_pad = gst_ghost_pad_new("sink", target_pad);
    gst_pad_set_active(ghost_pad, TRUE);
    gst_element_add_pad(bin, ghost_pad);
    gst_object_unref(target_pad);

    return bin;
}
// Builds a ReplayGain analysis pipeline: a playbin whose audio sink is a bin
// of audioconvert -> audioresample -> rganalysis -> fakesink, plus a worker
// thread that pops bus messages.  `paths` is the list of files to analyse;
// rganalysis is told the track count up front so it can emit album gain.
RgAnalyser::RgAnalyser (const QStringList& paths, QObject *parent)
: QObject { parent }
, Paths_ { paths }
#if GST_VERSION_MAJOR < 1
, Pipeline_ (gst_element_factory_make ("playbin2", nullptr))
#else
, Pipeline_ (gst_element_factory_make ("playbin", nullptr))
#endif
, SinkBin_ { gst_bin_new (nullptr) }
, AConvert_ { gst_element_factory_make ("audioconvert", nullptr) }
, AResample_ { gst_element_factory_make ("audioresample", nullptr) }
, RGAnalysis_ { gst_element_factory_make ("rganalysis", nullptr) }
, Fakesink_ { gst_element_factory_make ("fakesink", nullptr) }
, PopThread_ { new LightPopThread { gst_pipeline_get_bus (GST_PIPELINE (Pipeline_)), this } }
{
	// needed so GstMessage_ptr can cross thread boundaries in queued signals
	qRegisterMetaType<GstMessage_ptr> ("GstMessage_ptr");

	gst_bin_add_many (GST_BIN (SinkBin_), AConvert_, AResample_, RGAnalysis_, Fakesink_, nullptr);
	gst_element_link_many (AConvert_, AResample_, RGAnalysis_, Fakesink_, nullptr);

	// Ghost the converter's sink pad so playbin can link to the bin;
	// drop our reference to the target pad afterwards.
	auto pad = gst_element_get_static_pad (AConvert_, "sink");
	auto ghostPad = gst_ghost_pad_new ("sink", pad);
	gst_pad_set_active (ghostPad, TRUE);
	gst_element_add_pad (SinkBin_, ghostPad);
	gst_object_unref (pad);

	g_object_set (GST_OBJECT (RGAnalysis_), "num-tracks", paths.size (), nullptr);
	g_object_set (GST_OBJECT (Pipeline_), "audio-sink", SinkBin_, nullptr);

	// Queue the first track (or finish immediately if `paths` is empty),
	// then start draining the bus.
	CheckFinish ();
	PopThread_->start ();
}
// Returns the element used for audio preview: the factory-provided element
// when a preview factory is set, otherwise a fakesink (the scope-visualizer
// bin below is kept as a disabled alternative behind `#if 1`).
GstElement *QGstreamerCaptureSession::buildAudioPreview()
{
    GstElement *previewElement = 0;

    if (m_audioPreviewFactory) {
        previewElement = m_audioPreviewFactory->buildElement();
    } else {
#if 1
        // Default: discard audio silently.
        previewElement = gst_element_factory_make("fakesink", "audio-preview");
#else
        // Disabled alternative: render a libvisual scope into an ximagesink.
        GstElement *bin = gst_bin_new("audio-preview-bin");
        GstElement *visual = gst_element_factory_make("libvisual_lv_scope", "audio-preview");
        GstElement *sink = gst_element_factory_make("ximagesink", NULL);

        gst_bin_add_many(GST_BIN(bin), visual, sink,  NULL);
        gst_element_link_many(visual,sink, NULL);

        // add ghostpads (note the non-standard pad name "audiosink")
        GstPad *pad = gst_element_get_static_pad(visual, "sink");
        Q_ASSERT(pad);
        gst_element_add_pad(GST_ELEMENT(bin), gst_ghost_pad_new("audiosink", pad));
        gst_object_unref(GST_OBJECT(pad));

        previewElement = bin;
#endif
    }

    return previewElement;
}
void EntradaAudio::crea(int k, GstElement *pipeline) { //Elements de font d'entrada d'àudio QString sbin("bin_audio_%1"), ssource_a("audio_source_%1"), squeue("audio_queue_%1"), svolumen_m("volumen_mix_%1"); bin = gst_bin_new ((char*)sbin.arg(k).toStdString().c_str()); source = gst_element_factory_make("audiotestsrc", (char*)ssource_a.arg(k).toStdString().c_str()); queue_mix = gst_element_factory_make("queue2", (char*)squeue.arg(k).toStdString().c_str()); volume_mix = gst_element_factory_make("volume", (char*)svolumen_m.arg(k).toStdString().c_str()); //Comprovem que s'han pogut crear tots els elements d'entrada if (!source || !queue_mix || !volume_mix){ g_printerr ("Un dels elements comuns no s'ha pogut crear. Sortint.\n"); } //Canvi de les propietats d'alguns elements g_object_set (G_OBJECT (source), "wave",4, NULL); g_object_set (G_OBJECT (volume_mix), "volume",0, NULL); //Afegim tots els elements al bin_font corresponent gst_bin_add_many (GST_BIN (bin), source, queue_mix, volume_mix, NULL); //Afegim els bin_font al pipeline gst_bin_add (GST_BIN (pipeline), bin); //Linkem els elements gst_element_link_many (source, queue_mix, volume_mix,NULL); }
/**************************************************** * GstElement vmetods * ****************************************************/ static GstPad * _request_new_pad (GstElement * element, GstPadTemplate * templ, const gchar * name, const GstCaps * caps) { GstPad *audioresample_srcpad, *audioconvert_sinkpad, *tmpghost; GstPad *ghost; GstElement *audioconvert, *audioresample; PadInfos *infos = g_slice_new0 (PadInfos); GESSmartAdder *self = GES_SMART_ADDER (element); infos->adder_pad = gst_element_request_pad (self->adder, templ, NULL, caps); if (infos->adder_pad == NULL) { GST_WARNING_OBJECT (element, "Could not get any pad from GstAdder"); return NULL; } infos->self = self; infos->bin = gst_bin_new (NULL); audioconvert = gst_element_factory_make ("audioconvert", NULL); audioresample = gst_element_factory_make ("audioresample", NULL); gst_bin_add_many (GST_BIN (infos->bin), audioconvert, audioresample, NULL); gst_element_link_many (audioconvert, audioresample, NULL); audioconvert_sinkpad = gst_element_get_static_pad (audioconvert, "sink"); tmpghost = GST_PAD (gst_ghost_pad_new (NULL, audioconvert_sinkpad)); gst_object_unref (audioconvert_sinkpad); gst_pad_set_active (tmpghost, TRUE); gst_element_add_pad (GST_ELEMENT (infos->bin), tmpghost); gst_bin_add (GST_BIN (self), infos->bin); ghost = gst_ghost_pad_new (NULL, tmpghost); gst_pad_set_active (ghost, TRUE); if (!gst_element_add_pad (GST_ELEMENT (self), ghost)) goto could_not_add; audioresample_srcpad = gst_element_get_static_pad (audioresample, "src"); tmpghost = GST_PAD (gst_ghost_pad_new (NULL, audioresample_srcpad)); gst_object_unref (audioresample_srcpad); gst_pad_set_active (tmpghost, TRUE); gst_element_add_pad (GST_ELEMENT (infos->bin), tmpghost); gst_pad_link (tmpghost, infos->adder_pad); LOCK (self); g_hash_table_insert (self->pads_infos, ghost, infos); UNLOCK (self); GST_DEBUG_OBJECT (self, "Returning new pad %" GST_PTR_FORMAT, ghost); return ghost; could_not_add: { GST_ERROR_OBJECT (self, "could not add 
pad"); destroy_pad (infos); return NULL; } }
//static BinPtr Bin::create(const char *name) { GstElement *bin = gst_bin_new(name); if (bin) { gst_object_ref_sink(bin); } return BinPtr::wrap(GST_BIN(bin), false); }
/* Pad-block callback (GStreamer 0.10): actually inserts the tee branch for
 * `op->element` now that the upstream pad is safely blocked.  Wraps the
 * element in an async-handled bin (queue -> audioconvert -> element), links
 * it after op->fixture, then either unblocks the pipeline (if `blocked`) or
 * just brings the new bin to PAUSED.  pipeline_op_done() consumes the extra
 * ghostpad ref taken before each call. */
static void
really_add_tee (GstPad *pad, gboolean blocked, RBGstPipelineOp *op)
{
	GstElement *queue;
	GstElement *audioconvert;
	GstElement *bin;
	GstElement *parent_bin;
	GstPad *sinkpad;
	GstPad *ghostpad;

	rb_debug ("really adding tee %p", op->element);

	/* set up containing bin */
	bin = gst_bin_new (NULL);
	queue = gst_element_factory_make ("queue", NULL);
	audioconvert = gst_element_factory_make ("audioconvert", NULL);

	/* The bin contains elements that change state asynchronously
	 * and not as part of a state change in the entire pipeline.
	 */
	g_object_set (bin, "async-handling", TRUE, NULL);

	/* keep latency through the tee branch low */
	g_object_set (queue, "max-size-buffers", 3, NULL);

	gst_bin_add_many (GST_BIN (bin), queue, audioconvert, op->element, NULL);
	gst_element_link_many (queue, audioconvert, op->element, NULL);

	/* add ghost pad */
	sinkpad = gst_element_get_static_pad (queue, "sink");
	ghostpad = gst_ghost_pad_new ("sink", sinkpad);
	gst_element_add_pad (bin, ghostpad);
	gst_object_unref (sinkpad);

	/* add it into the pipeline */
	parent_bin = GST_ELEMENT_PARENT (op->fixture);
	gst_bin_add (GST_BIN (parent_bin), bin);
	gst_element_link (op->fixture, bin);

	/* if we're supposed to be playing, unblock the sink */
	if (blocked) {
		rb_debug ("unblocking pad after adding tee");

		gst_element_set_state (parent_bin, GST_STATE_PLAYING);
		/* extra ref is released by pipeline_op_done */
		gst_object_ref (ghostpad);
		gst_pad_set_blocked_async (pad,
					   FALSE,
					   (GstPadBlockCallback)pipeline_op_done,
					   ghostpad);
	} else {
		gst_element_set_state (bin, GST_STATE_PAUSED);
		gst_object_ref (ghostpad);
		pipeline_op_done (NULL, FALSE, ghostpad);
	}

	_rb_player_gst_tee_emit_tee_inserted (RB_PLAYER_GST_TEE (op->player), op->element);

	free_pipeline_op (op);
}
/***
 * Creates an instance of a playbin with "audio-src" and
 * "video-src" ghost pads to allow redirected output streams.
 *
 * ### This function is probably not required now that MediaObject is based
 *     on decodebin directly.
 */
GstElement* GstHelper::createPluggablePlaybin()
{
    GstElement *playbin = 0;
    //init playbin and add to our pipeline
    playbin = gst_element_factory_make("playbin2", NULL);

    //Create an identity element to redirect sound
    GstElement *audioSinkBin =  gst_bin_new (NULL);
    GstElement *audioPipe = gst_element_factory_make("identity", NULL);
    gst_bin_add(GST_BIN(audioSinkBin), audioPipe);

    //Create a sinkpad on the identity
    //(gst_element_get_pad is the deprecated 0.10 API; returns a new ref)
    GstPad *audiopad = gst_element_get_pad (audioPipe, "sink");
    gst_element_add_pad (audioSinkBin, gst_ghost_pad_new ("sink", audiopad));
    gst_object_unref (audiopad);

    //Create an "audio_src" source pad on the playbin
    GstPad *audioPlaypad = gst_element_get_pad (audioPipe, "src");
    gst_element_add_pad (playbin, gst_ghost_pad_new ("audio_src", audioPlaypad));
    gst_object_unref (audioPlaypad);

    //Done with our audio redirection
    g_object_set (G_OBJECT(playbin), "audio-sink", audioSinkBin, (const char*)NULL);

    // * * Redirect video to "video_src" pad :  * *

    //Create an identity element to redirect video
    GstElement *videoSinkBin =  gst_bin_new (NULL);
    GstElement *videoPipe = gst_element_factory_make("identity", NULL);
    gst_bin_add(GST_BIN(videoSinkBin), videoPipe);

    //Create a sinkpad on the identity
    GstPad *videopad = gst_element_get_pad (videoPipe, "sink");
    gst_element_add_pad (videoSinkBin, gst_ghost_pad_new ("sink", videopad));
    gst_object_unref (videopad);

    //Create a "video_src" source pad on the playbin
    //(original comment said "audio_src" — copy/paste slip)
    GstPad *videoPlaypad = gst_element_get_pad (videoPipe, "src");
    gst_element_add_pad (playbin, gst_ghost_pad_new ("video_src", videoPlaypad));
    gst_object_unref (videoPlaypad);

    //Done with our video redirection
    g_object_set (G_OBJECT(playbin), "video-sink", videoSinkBin, (const char*)NULL);
    return playbin;
}
/* GObject constructed override for RBVisualizerPage: initializes Clutter,
 * creates the texture the visualization renders into, and builds the sink
 * bin (ffmpegcolorspace -> capsfilter(RGB) -> cluttersink) with a ghost
 * "sink" pad.  On Clutter init failure the page is left without a sink and
 * simply not added to the page tree. */
static void
impl_constructed (GObject *object)
{
	RBVisualizerPage *page;
	ClutterInitError err;
	GstElement *colorspace;
	GstElement *realsink;
	GstElement *capsfilter;
	GstCaps *caps;
	GstPad *pad;

	RB_CHAIN_GOBJECT_METHOD (rb_visualizer_page_parent_class, constructed, object);
	page = RB_VISUALIZER_PAGE (object);

	err = gtk_clutter_init (NULL, NULL);
	if (err != CLUTTER_INIT_SUCCESS) {
		/* maybe do something more sensible here.  not sure if there are any user-recoverable
		 * conditions that would cause clutter init to fail, though, so it may not be worth it.
		 * as it is, we just won't add the page to the page tree.
		 */
		g_warning ("Unable to display visual effects due to Clutter init failure");
		return;
	}

	page->texture = clutter_texture_new ();
	clutter_texture_set_sync_size (CLUTTER_TEXTURE (page->texture), TRUE);
	clutter_texture_set_keep_aspect_ratio (CLUTTER_TEXTURE (page->texture), TRUE);

	/* extra ref so the sink survives pipeline teardown */
	page->sink = gst_bin_new (NULL);
	g_object_ref (page->sink);

	/* actual sink */
	realsink = gst_element_factory_make ("cluttersink", NULL);
	g_object_set (realsink, "texture", page->texture, NULL);

	colorspace = gst_element_factory_make ("ffmpegcolorspace", NULL);

	/* capsfilter to force rgb format (without this we end up using ayuv) */
	capsfilter = gst_element_factory_make ("capsfilter", NULL);
	caps = gst_caps_from_string ("video/x-raw-rgb,bpp=(int)24,depth=(int)24,"
				     "endianness=(int)4321,red_mask=(int)16711680,"
				     "green_mask=(int)65280,blue_mask=(int)255");
	g_object_set (capsfilter, "caps", caps, NULL);
	gst_caps_unref (caps);

	gst_bin_add_many (GST_BIN (page->sink), colorspace, capsfilter, realsink, NULL);
	gst_element_link (colorspace, capsfilter);
	gst_element_link (capsfilter, realsink);

	/* expose the colorspace's sink pad on the bin */
	pad = gst_element_get_static_pad (colorspace, "sink");
	gst_element_add_pad (page->sink, gst_ghost_pad_new ("sink", pad));
	gst_object_unref (pad);

	g_signal_connect_object (page->fullscreen_action,
				 "toggled",
				 G_CALLBACK (toggle_fullscreen_cb),
				 page, 0);
}
/* Creates the source element for a GES title clip: a bin containing
 * videotestsrc (solid background color) -> textoverlay (title text),
 * exposing the overlay's src pad as the bin's "src" ghost pad.  The two
 * child elements are kept (with extra refs) in priv so property changes can
 * be applied later, and their listed properties are registered as child
 * properties of the track element. */
static GstElement *
ges_title_source_create_source (GESTrackElement * object)
{
  GstElement *topbin, *background, *text;
  GstPad *src, *pad;

  GESTitleSource *self = GES_TITLE_SOURCE (object);
  GESTitleSourcePrivate *priv = self->priv;
  /* properties exposed as child properties on the track element */
  const gchar *bg_props[] = { "pattern", "foreground-color", NULL };
  const gchar *text_props[] = { "text", "font-desc", "valignment", "halignment",
    "color", "xpos", "ypos", "outline-color", "shaded-background", NULL
  };

  topbin = gst_bin_new ("titlesrc-bin");
  background = gst_element_factory_make ("videotestsrc", "titlesrc-bg");
  /* (sic) "titlsrc-text" element name kept as-is */
  text = gst_element_factory_make ("textoverlay", "titlsrc-text");

  /* push any already-configured state onto the fresh elements */
  if (priv->text) {
    g_object_set (text, "text", priv->text, NULL);
  }
  if (priv->font_desc) {
    g_object_set (text, "font-desc", priv->font_desc, NULL);
  }
  g_object_set (text, "valignment", (gint) priv->valign, "halignment",
      (gint) priv->halign, NULL);
  g_object_set (text, "color", (guint) self->priv->color, NULL);
  g_object_set (text, "xpos", (gdouble) self->priv->xpos, NULL);
  g_object_set (text, "ypos", (gdouble) self->priv->ypos, NULL);

  /* solid-color background in the configured color */
  g_object_set (background, "pattern", (gint) GES_VIDEO_TEST_PATTERN_SOLID,
      NULL);
  g_object_set (background, "foreground-color", (guint) self->priv->background,
      NULL);

  gst_bin_add_many (GST_BIN (topbin), background, text, NULL);
  /* caps are known-compatible here, so skip link checks for speed */
  gst_element_link_pads_full (background, "src", text, "video_sink",
      GST_PAD_LINK_CHECK_NOTHING);

  pad = gst_element_get_static_pad (text, "src");
  src = gst_ghost_pad_new ("src", pad);
  gst_object_unref (pad);
  gst_element_add_pad (topbin, src);

  /* keep refs for later property updates via priv->text_el/background_el */
  gst_object_ref (text);
  gst_object_ref (background);

  priv->text_el = text;
  priv->background_el = background;

  ges_track_element_add_children_props (object, text, NULL, NULL, text_props);
  ges_track_element_add_children_props (object, background, NULL, NULL,
      bg_props);

  return topbin;
}
void Pipeline::setupEffectBins() { GstCaps *caps; GstPad *pad; // internal bin and elements effectInternalBin = gst_bin_new(NULL); effectPreCS = gst_element_factory_make("ffmpegcolorspace", NULL); effectPostCS = gst_element_factory_make("ffmpegcolorspace", NULL); effect = gst_element_factory_make("identity", NULL); // capsfilter used to force rgb in the effect pipeline effectCapsFilter = gst_element_factory_make("capsfilter", NULL); caps = gst_caps_from_string("video/x-raw-rgb"); g_object_set(effectCapsFilter, "caps", caps, NULL); gst_bin_add_many(GST_BIN(effectInternalBin), effectPreCS, effectCapsFilter, effect, effectPostCS, NULL); gst_element_link_many(effectPreCS, effectCapsFilter, effect, effectPostCS, NULL); // ghost pads to the internal bin pad = gst_element_get_static_pad(effectPreCS, "sink"); gst_element_add_pad(effectInternalBin, gst_ghost_pad_new("sink", pad)); gst_object_unref(GST_OBJECT(pad)); pad = gst_element_get_static_pad(effectPostCS, "src"); gst_element_add_pad(effectInternalBin, gst_ghost_pad_new("src", pad)); gst_object_unref(GST_OBJECT(pad)); // main bin and valve effectBin = gst_bin_new(NULL); effectValve = gst_element_factory_make("valve", NULL); gst_bin_add_many(GST_BIN(effectBin), effectValve, effectInternalBin, NULL); gst_element_link(effectValve, effectInternalBin); // ghost pads to the main bin pad = gst_element_get_static_pad(effectValve, "sink"); gst_element_add_pad(effectBin, gst_ghost_pad_new("sink", pad)); gst_object_unref(GST_OBJECT(pad)); pad = gst_element_get_static_pad(effectInternalBin, "src"); gst_element_add_pad(effectBin, gst_ghost_pad_new("src", pad)); gst_object_unref(GST_OBJECT(pad)); g_object_set(camerabin, "video-source-filter", effectBin, NULL); }
// Configures the player for `audiofile`: filesrc -> decodebin feeding an
// "audiobin" (audioconvert -> audioresample -> volume -> audiosink) that is
// linked dynamically from cb_newpad when decodebin exposes its pad.
// Returns `this` for chaining.
KGSTAudioPlayer_* KGSTAudioPlayer_::Init(const char* audiofile, bool loop)
{
	setLoopPlay(loop);
	g_object_set(G_OBJECT(filesrc), "location", audiofile, NULL);

	/* get the decoder */
	decodebin = gst_element_factory_make("decodebin", "decodebin");
	if(!decodebin) {
		g_print("could not find plugin \"mad\"");
	}
	g_assert(decodebin);

	/* create audio output; cb_newpad links decodebin's pad to it later */
	audio = gst_bin_new("audiobin");
	g_assert(audio);
	g_signal_connect(decodebin, "new-decoded-pad", G_CALLBACK(cb_newpad), audio);

	gst_bin_add_many(GST_BIN(pipeline), filesrc, decodebin, NULL);
	gst_element_link(filesrc, decodebin);

	audioconvert = gst_element_factory_make("audioconvert", "audioconvert");
	if(!audioconvert) {
		g_print("could not create \"audioconvert\" element!");
	}
	g_assert(audioconvert);

	audioresample = gst_element_factory_make("audioresample", "audioresample");
	if(!audioresample) {
		g_print("could not create \"audioresample\" element!");
	}
	g_assert(audioresample);

	/* add ... into audio output */
	/* BUG FIX: the original created a *second* "audioconvert" element here,
	 * leaking the one created and checked above and producing two elements
	 * with the same name.  Reuse the element created earlier. */
	GstPad *audiopad = gst_element_get_static_pad(audioconvert, "sink");
	gst_bin_add_many(GST_BIN(audio), audioconvert, audioresample, volume, audiosink, NULL);
	gst_element_link_many(audioconvert, audioresample, volume, audiosink, NULL);
	gst_element_add_pad(audio, gst_ghost_pad_new("sink", audiopad));
	gst_object_unref(audiopad);
	gst_bin_add(GST_BIN(pipeline), audio);

	return this;
}
// Builds the playback pipeline for the ROS "audio" topic: an appsrc fed from
// onAudio(), routed either through decodebin -> audiobin (audioconvert ->
// autoaudiosink) when ~dst is "alsasink", or straight into a filesink whose
// location is the ~dst value.  A GLib main loop runs on a dedicated thread.
RosGstPlay()
{
  GstPad *audiopad;

  std::string dst_type;
  std::string device;

  // The destination of the audio
  ros::param::param<std::string>("~dst", dst_type, "alsasink");
  ros::param::param<std::string>("~device", device, std::string());

  _sub = _nh.subscribe("audio", 10, &RosGstPlay::onAudio, this);

  _loop = g_main_loop_new(NULL, false);

  _pipeline = gst_pipeline_new("app_pipeline");
  _source = gst_element_factory_make("appsrc", "app_source");
  // timestamp incoming buffers as they arrive
  g_object_set(G_OBJECT(_source), "do-timestamp", TRUE, NULL);
  gst_bin_add( GST_BIN(_pipeline), _source);

  //_playbin = gst_element_factory_make("playbin2", "uri_play");
  //g_object_set( G_OBJECT(_playbin), "uri", "file:///home/test/test.mp3", NULL);
  if (dst_type == "alsasink")
  {
    // decodebin's src pad appears only after type discovery; cb_newpad
    // links it to the audiobin's ghost "sink" pad.
    _decoder = gst_element_factory_make("decodebin", "decoder");
    g_signal_connect(_decoder, "pad-added", G_CALLBACK(cb_newpad),this);
    gst_bin_add( GST_BIN(_pipeline), _decoder);
    gst_element_link(_source, _decoder);

    _audio = gst_bin_new("audiobin");
    _convert = gst_element_factory_make("audioconvert", "convert");
    audiopad = gst_element_get_static_pad(_convert, "sink");
    _sink = gst_element_factory_make("autoaudiosink", "sink");
    if (!device.empty())
    {
      g_object_set(G_OBJECT(_sink), "device", device.c_str(), NULL);
    }
    gst_bin_add_many( GST_BIN(_audio), _convert, _sink, NULL);
    gst_element_link(_convert, _sink);
    gst_element_add_pad(_audio, gst_ghost_pad_new("sink", audiopad));
    gst_object_unref(audiopad);
    gst_bin_add(GST_BIN(_pipeline), _audio);
  }
  else
  {
    // any other ~dst value is treated as an output file path
    _sink = gst_element_factory_make("filesink", "sink");
    g_object_set( G_OBJECT(_sink), "location", dst_type.c_str(), NULL);
    gst_bin_add(GST_BIN(_pipeline), _sink);
    gst_element_link(_source, _sink);
  }

  gst_element_set_state(GST_ELEMENT(_pipeline), GST_STATE_PLAYING);
  //gst_element_set_state(GST_ELEMENT(_playbin), GST_STATE_PLAYING);

  // run the GLib loop without blocking the ROS callback thread
  _gst_thread = boost::thread( boost::bind(g_main_loop_run, _loop) );
}
/* Plays the audio of the file given on the command line:
 * filesrc -> decodebin, with decodebin's dynamic pad linked (in cb_newpad)
 * to an "audiobin" of audioconvert -> alsasink exposed via a ghost "sink"
 * pad.  `pipeline` and `audio` are file-level globals shared with the
 * callbacks.  Returns 0 on success, -1 on usage error. */
gint
main (gint   argc,
      gchar *argv[])
{
  GMainLoop *loop;
  GstElement *src, *dec, *conv, *sink;
  GstPad *audiopad;
  GstBus *bus;

  /* init GStreamer */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* make sure we have input */
  if (argc != 2) {
    g_print ("Usage: %s <filename>\n", argv[0]);
    return -1;
  }

  /* setup */
  pipeline = gst_pipeline_new ("pipeline");
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, my_bus_callback, loop);
  gst_object_unref (bus);
  src = gst_element_factory_make ("filesrc", "source");
  g_object_set (G_OBJECT (src), "location", argv[1], NULL);
  dec = gst_element_factory_make ("decodebin", "decoder");
  /* decodebin exposes its src pad only after typefinding */
  g_signal_connect (dec, "pad-added", G_CALLBACK (cb_newpad), NULL);
  gst_bin_add_many (GST_BIN (pipeline), src, dec, NULL);
  gst_element_link (src, dec);

  /* create audio output */
  audio = gst_bin_new ("audiobin");
  conv = gst_element_factory_make ("audioconvert", "aconv");
  audiopad = gst_element_get_static_pad (conv, "sink");
  sink = gst_element_factory_make ("alsasink", "sink");
  gst_bin_add_many (GST_BIN (audio), conv, sink, NULL);
  gst_element_link (conv, sink);
  gst_element_add_pad (audio,
      gst_ghost_pad_new ("sink", audiopad));
  gst_object_unref (audiopad);
  gst_bin_add (GST_BIN (pipeline), audio);

  /* run */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (loop);

  /* cleanup */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (pipeline));

  return 0;
}
void RefPointerTest::refTest1() { GstElement *element = gst_bin_new(NULL); GstObject *bin1 = GST_OBJECT(element); QCOMPARE(GST_OBJECT_REFCOUNT_VALUE(element), 1); GstObject *bin2 = GST_OBJECT(gst_object_ref_sink(bin1)); QCOMPARE(GST_OBJECT_REFCOUNT_VALUE(bin2), 1); QGst::ObjectPtr object = QGst::ObjectPtr::wrap(bin2, false); QCOMPARE(GST_OBJECT_REFCOUNT_VALUE(bin2), 1); }
/* GObject constructed override: builds the widget's video sink bin
 * (ffmpegcolorspace -> videoscale -> gconfvideosink) behind a "sink" ghost
 * pad stored in priv->sink_pad, registers it with the farsight element-added
 * notifier, and hooks bus sync messages for window-id handling. */
static void
empathy_video_widget_constructed (GObject *object)
{
  EmpathyVideoWidgetPriv *priv = GET_PRIV (object);
  GstElement *colorspace, *videoscale, *sink;
  GstPad *pad;

  priv->videosink = gst_bin_new (NULL);

  /* keep a non-floating reference of our own */
  gst_object_ref (priv->videosink);
  gst_object_sink (priv->videosink);

  /* BUG FIX: the original did
   *   priv->sink_pad = gst_element_get_static_pad (priv->videosink, "sink");
   * here — the freshly created bin has no pads yet, so this always yielded
   * NULL and was overwritten by the ghost pad below.  Removed. */

  sink = gst_element_factory_make ("gconfvideosink", NULL);
  g_assert (sink != NULL);

  videoscale = gst_element_factory_make ("videoscale", NULL);
  g_assert (videoscale != NULL);

  /* video quality is preferable to strict timing here */
  g_object_set (videoscale, "qos", FALSE, NULL);

  colorspace = gst_element_factory_make ("ffmpegcolorspace", NULL);
  g_assert (colorspace != NULL);

  g_object_set (colorspace, "qos", FALSE, NULL);

  gst_bin_add_many (GST_BIN (priv->videosink), colorspace, videoscale,
    sink, NULL);

  if (!gst_element_link (colorspace, videoscale))
    g_error ("Failed to link ffmpegcolorspace and videoscale");

  if (!gst_element_link (videoscale, sink))
    g_error ("Failed to link videoscale and gconfvideosink");

  /* expose the colorspace's sink pad as the bin's ghost "sink" pad */
  pad = gst_element_get_static_pad (colorspace, "sink");
  g_assert (pad != NULL);
  priv->sink_pad = gst_ghost_pad_new ("sink", pad);
  if (!gst_element_add_pad  (priv->videosink, priv->sink_pad))
    g_error ("Couldn't add sink ghostpad to the bin");

  gst_object_unref (pad);

  fs_element_added_notifier_add (priv->notifier, GST_BIN (priv->videosink));
  gst_bus_enable_sync_message_emission (priv->bus);

  g_signal_connect (priv->bus, "sync-message",
    G_CALLBACK (empathy_video_widget_sync_message_cb), object);

  gtk_widget_set_size_request (GTK_WIDGET (object), priv->min_width,
    priv->min_height);
}
GstElement *QGstreamerCaptureSession::buildVideoPreview() { GstElement *previewElement = 0; if (m_viewfinderInterface) { GstElement *bin = gst_bin_new("video-preview-bin"); GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-preview"); GstElement *capsFilter = gst_element_factory_make("capsfilter", "capsfilter-video-preview"); GstElement *preview = m_viewfinderInterface->videoSink(); gst_bin_add_many(GST_BIN(bin), colorspace, capsFilter, preview, NULL); gst_element_link(colorspace,capsFilter); gst_element_link(capsFilter,preview); QSize resolution; qreal frameRate = 0; if (m_captureMode & Video) { QVideoEncoderSettings videoSettings = m_videoEncodeControl->videoSettings(); resolution = videoSettings.resolution(); frameRate = videoSettings.frameRate(); } else if (m_captureMode & Image) { resolution = m_imageEncodeControl->imageSettings().resolution(); } if (!resolution.isEmpty() || frameRate > 0.001) { GstCaps *caps = gst_caps_new_empty(); QStringList structureTypes; structureTypes << "video/x-raw-yuv" << "video/x-raw-rgb"; foreach(const QString &structureType, structureTypes) { GstStructure *structure = gst_structure_new(structureType.toAscii().constData(), NULL); if (!resolution.isEmpty()) { gst_structure_set(structure, "width", G_TYPE_INT, resolution.width(), NULL); gst_structure_set(structure, "height", G_TYPE_INT, resolution.height(), NULL); } if (frameRate > 0.001) { QPair<int,int> rate = m_videoEncodeControl->rateAsRational(); //qDebug() << "frame rate:" << num << denum; gst_structure_set(structure, "framerate", GST_TYPE_FRACTION, rate.first, rate.second, NULL); } gst_caps_append_structure(caps,structure); } //qDebug() << "set video preview caps filter:" << gst_caps_to_string(caps); g_object_set(G_OBJECT(capsFilter), "caps", caps, NULL); }
/*
 * GESTrackElement::create_source vfunc: builds the source bin for a
 * multi-file (image sequence) clip: multifilesrc ! decodebin, with
 * decodebin's dynamic pads wired up via pad_added_cb.
 *
 * Caps are taken from the asset's discovered stream info when
 * available (empty caps otherwise) and forced to a 25/1 framerate.
 *
 * Returns: (transfer floating): the source bin.
 */
static GstElement *
ges_multi_file_source_create_source (GESTrackElement * track_element)
{
  GESMultiFileSource *self;
  GstElement *bin, *src, *decodebin;
  GstCaps *disc_caps;
  GstDiscovererStreamInfo *stream_info;
  GValue fps = G_VALUE_INIT;
  GstCaps *caps;
  GESUriSourceAsset *asset;
  GESMultiFileURI *uri_data;

  self = (GESMultiFileSource *) track_element;

  asset =
      GES_URI_SOURCE_ASSET (ges_extractable_get_asset (GES_EXTRACTABLE
          (self)));

  if (asset != NULL) {
    stream_info = ges_uri_source_asset_get_stream_info (asset);
    g_assert (stream_info);
    disc_caps = gst_discoverer_stream_info_get_caps (stream_info);
    caps = gst_caps_copy (disc_caps);
    /* GST_PTR_FORMAT serializes the caps without the string leak that
     * gst_caps_to_string () in the format args would cause. */
    GST_DEBUG ("Got some nice caps %" GST_PTR_FORMAT, disc_caps);
    gst_object_unref (stream_info);
    gst_caps_unref (disc_caps);
  } else {
    caps = gst_caps_new_empty ();
    GST_WARNING ("Could not extract asset.");
  }

  /* Image sequences carry no intrinsic rate; force 25 fps. */
  g_value_init (&fps, GST_TYPE_FRACTION);
  gst_value_set_fraction (&fps, 25, 1);
  gst_caps_set_value (caps, "framerate", &fps);
  g_value_unset (&fps);

  bin = GST_ELEMENT (gst_bin_new ("multi-image-bin"));
  src = gst_element_factory_make ("multifilesrc", NULL);

  uri_data = ges_multi_file_uri_new (self->uri);
  g_object_set (src, "start-index", uri_data->start, "stop-index",
      uri_data->end, "caps", caps, "location", uri_data->location, NULL);
  /* NOTE(review): shallow free — presumably uri_data->location is owned
   * elsewhere or static; verify against ges_multi_file_uri_new (). */
  g_free (uri_data);
  /* The "caps" property setter keeps its own reference; drop ours. */
  gst_caps_unref (caps);

  decodebin = gst_element_factory_make ("decodebin", NULL);

  gst_bin_add_many (GST_BIN (bin), src, decodebin, NULL);
  /* Static src/sink pads, caps fixed above: skip redundant link checks. */
  gst_element_link_pads_full (src, "src", decodebin, "sink",
      GST_PAD_LINK_CHECK_NOTHING);

  g_signal_connect (G_OBJECT (decodebin), "pad-added",
      G_CALLBACK (pad_added_cb), bin);

  return bin;
}
int main(int argc, char *argv[]) { GstElement *pipeline, *bin, *equalizer, *convert, *sink; GstPad *pad, *ghost_pad; GstBus *bus; GstMessage *msg; /* Initialize GStreamer */ gst_init (&argc, &argv); /* Build the pipeline */ pipeline = gst_parse_launch ("playbin2 uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL); /* Create the elements inside the sink bin */ equalizer = gst_element_factory_make ("equalizer-3bands", "equalizer"); convert = gst_element_factory_make ("audioconvert", "convert"); sink = gst_element_factory_make ("autoaudiosink", "audio_sink"); if (!equalizer || !convert || !sink) { g_printerr ("Not all elements could be created.\n"); return -1; } /* Create the sink bin, add the elements and link them */ bin = gst_bin_new ("audio_sink_bin"); gst_bin_add_many (GST_BIN (bin), equalizer, convert, sink, NULL); gst_element_link_many (equalizer, convert, sink, NULL); pad = gst_element_get_static_pad (equalizer, "sink"); ghost_pad = gst_ghost_pad_new ("sink", pad); gst_pad_set_active (ghost_pad, TRUE); gst_element_add_pad (bin, ghost_pad); gst_object_unref (pad); /* Configure the equalizer */ g_object_set (G_OBJECT (equalizer), "band1", (gdouble)-24.0, NULL); g_object_set (G_OBJECT (equalizer), "band2", (gdouble)-24.0, NULL); /* Set playbin2's audio sink to be our sink bin */ g_object_set (GST_OBJECT (pipeline), "audio-sink", bin, NULL); /* Start playing */ gst_element_set_state (pipeline, GST_STATE_PLAYING); /* Wait until error or EOS */ bus = gst_element_get_bus (pipeline); msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS); /* Free resources */ if (msg != NULL) gst_message_unref (msg); gst_object_unref (bus); gst_element_set_state (pipeline, GST_STATE_NULL); gst_object_unref (pipeline); return 0; }
void FarsightChannel::CreateAudioPlaybackElement() { audio_playback_bin_ = gst_bin_new("audio-output-bin"); if (audio_playback_bin_ == 0) throw Exception("Cannot create GStreamer bin for audio playback."); fake_audio_output_ = setUpElement("fakesink"); if (fake_audio_output_ == 0) throw Exception("Cannot create GStreamer fake audio output element."); else { g_signal_connect(fake_audio_output_, "handoff", G_CALLBACK(&FarsightChannel::OnFakeSinkHandoff), this); g_object_set(G_OBJECT(fake_audio_output_), "signal-handoffs", TRUE, NULL); } // audio modifications audio_resample_ = gst_element_factory_make("audioresample", NULL); if (audio_resample_ == 0) throw Exception("Cannot create GStreamer audio resample element."); audio_capsfilter_ = gst_element_factory_make("capsfilter", NULL); GstCaps *audio_caps = gst_caps_new_simple("audio/x-raw-int", "channels", G_TYPE_INT, 1, "width", G_TYPE_INT, 16, // "depth", G_TYPE_INT, 16, "rate", G_TYPE_INT, 16000, "signed", G_TYPE_BOOLEAN, true, // "endianess", G_TYPE_INT, 1234, NULL); g_object_set(G_OBJECT(audio_capsfilter_), "caps", audio_caps, NULL); //audio_convert_ = gst_element_factory_make("audioconvert", NULL); //if (audio_convert_ == 0) // throw Exception("Cannot create GStreamer audio convert element."); gst_bin_add_many(GST_BIN(audio_playback_bin_), audio_resample_, fake_audio_output_, NULL); gboolean ok = gst_element_link_many(audio_resample_, fake_audio_output_, NULL); if (!ok) { QString error_message = "Cannot link elements for audio playback bin."; LogError(error_message.toStdString()); throw Exception(error_message.toStdString().c_str()); } // add ghost pad to audio_bin_ GstPad *sink = gst_element_get_static_pad(audio_resample_, "sink"); audio_playback_bin_sink_pad_ = gst_ghost_pad_new("sink", sink); gst_element_add_pad(GST_ELEMENT(audio_playback_bin_), audio_playback_bin_sink_pad_); gst_object_unref(G_OBJECT(sink)); gst_object_ref(audio_playback_bin_); gst_object_sink(audio_playback_bin_); }
/*
 * Class method: new(name=nil)
 * name: a name for the bin.
 *
 * Constructs a new Gst::Bin object.
 *
 * When no name (or nil) is supplied, the bin gets a guaranteed unique
 * name made of the string "bin" followed by a number; otherwise the
 * given name is used as-is.
 *
 * Returns: a newly allocated Gst::Bin object.
 */
static VALUE
rb_gst_bin_initialize(int argc, VALUE *argv, VALUE self)
{
    VALUE rb_name = Qnil;
    GstElement *element;

    /* Accept zero or one argument: the optional bin name. */
    rb_scan_args(argc, argv, "01", &rb_name);

    element = gst_bin_new(RVAL2CSTR_ACCEPT_NIL(rb_name));
    if (element != NULL)
        RBGST_INITIALIZE(self, element);

    return Qnil;
}