/* Sets the gain of one equalizer band on the player's equalizer element.
 * Silently does nothing when the pipeline has no equalizer; warns and
 * returns when bandnum is out of range. */
P_INVOKE void
bp_equalizer_set_gain (BansheePlayer *player, guint bandnum, gdouble gain)
{
    GstChildProxy *proxy;
    GstObject *band;

    g_return_if_fail (IS_BANSHEE_PLAYER (player));

    if (player->equalizer == NULL) {
        return;
    }

    proxy = GST_CHILD_PROXY (player->equalizer);
    g_return_if_fail (bandnum < gst_child_proxy_get_children_count (proxy));

    band = gst_child_proxy_get_child_by_index (proxy, bandnum);
    g_object_set (band, "gain", gain, NULL);
    g_object_unref (band);
}
/* GstElement::request_new_pad implementation for GstGLMixerBin.
 *
 * Requests a matching pad from the wrapped mixer element, wraps it in an
 * input chain (ghost pad plus conversion elements), registers the chain,
 * and announces the ghost pad through the GstChildProxy interface.
 *
 * Returns: (transfer none): the new ghost pad, or NULL on failure.
 */
static GstPad *
gst_gl_mixer_bin_request_new_pad (GstElement * element, GstPadTemplate * templ,
    const gchar * req_name, const GstCaps * caps)
{
  GstGLMixerBin *self = GST_GL_MIXER_BIN (element);
  GstPadTemplate *mixer_templ;
  struct input_chain *chain;
  GstPad *mixer_pad;

  mixer_templ = _find_element_pad_template (self->mixer,
      GST_PAD_TEMPLATE_DIRECTION (templ), GST_PAD_TEMPLATE_PRESENCE (templ));
  g_return_val_if_fail (mixer_templ, NULL);

  mixer_pad =
      gst_element_request_pad (self->mixer, mixer_templ, req_name, NULL);
  g_return_val_if_fail (mixer_pad, NULL);

  /* Allocate the chain only after both failure-prone lookups above have
   * succeeded: the previous code allocated it first and leaked it whenever
   * one of the g_return_val_if_fail() checks fired. */
  chain = g_new0 (struct input_chain, 1);

  if (!_create_input_chain (self, chain, mixer_pad)) {
    gst_element_release_request_pad (self->mixer, mixer_pad);
    _free_input_chain (chain);
    return NULL;
  }

  /* The chain list is shared state; guard it with the element lock. */
  GST_OBJECT_LOCK (element);
  self->priv->input_chains = g_list_prepend (self->priv->input_chains, chain);
  GST_OBJECT_UNLOCK (element);

  gst_child_proxy_child_added (GST_CHILD_PROXY (self),
      G_OBJECT (chain->ghost_pad), GST_OBJECT_NAME (chain->ghost_pad));

  return GST_PAD (chain->ghost_pad);
}
/* GstElement::release_pad implementation: announce the removal through the
 * GstChildProxy interface, then chain up so the parent class performs the
 * actual pad release. */
static void
gst_gl_stereo_mix_release_pad (GstElement * element, GstPad * pad)
{
  GstChildProxy *proxy = GST_CHILD_PROXY (element);

  GST_DEBUG_OBJECT (element, "release pad %s:%s", GST_DEBUG_PAD_NAME (pad));

  gst_child_proxy_child_removed (proxy, G_OBJECT (pad),
      GST_OBJECT_NAME (pad));

  GST_ELEMENT_CLASS (parent_class)->release_pad (element, pad);
}
/**
 * gst_child_proxy_lookup:
 * @childproxy: child proxy object to lookup the property in
 * @name: name of the property to look up
 * @target: (out) (allow-none) (transfer full): pointer to a #GObject that
 * takes the real object to set property on
 * @pspec: (out) (allow-none) (transfer none): pointer to take the #GParamSpec
 * describing the property
 *
 * Looks up which object and #GParamSpec would be affected by the given @name.
 * @name may be a "::"-separated path such as "child::grandchild::prop"; each
 * segment before the last selects a child, the last segment names a property.
 *
 * MT safe.
 *
 * Returns: TRUE if @target and @pspec could be found. FALSE otherwise. In that
 * case the values for @pspec and @target are not modified. Unref @target after
 * usage. For plain GObjects @target is the same as @object.
 */
gboolean
gst_child_proxy_lookup (GstChildProxy * childproxy, const gchar * name,
    GObject ** target, GParamSpec ** pspec)
{
  GObject *object;
  gboolean res = FALSE;
  gchar **names, **current;

  g_return_val_if_fail (GST_IS_CHILD_PROXY (childproxy), FALSE);
  g_return_val_if_fail (name != NULL, FALSE);

  /* Hold a ref on whichever object we are currently inspecting; it is
   * swapped (unref old, keep new) as we descend the child hierarchy. */
  object = g_object_ref (childproxy);

  current = names = g_strsplit (name, "::", -1);
  /* find the owner of the property: walk every path segment except the
   * last one, descending into the named child at each step */
  while (current[1]) {
    GObject *next;

    if (!GST_IS_CHILD_PROXY (object)) {
      GST_INFO
          ("object %s is not a parent, so you cannot request a child by name %s",
          (GST_IS_OBJECT (object) ? GST_OBJECT_NAME (object) : ""), current[0]);
      break;
    }
    next = gst_child_proxy_get_child_by_name (GST_CHILD_PROXY (object),
        current[0]);
    if (!next) {
      GST_INFO ("no such object %s", current[0]);
      break;
    }
    g_object_unref (object);
    object = next;
    current++;
  }
  /* look for pspec on the owner we ended up at; if the walk above broke
   * early, current[1] is still non-NULL and we fall through with FALSE */
  if (current[1] == NULL) {
    GParamSpec *spec =
        g_object_class_find_property (G_OBJECT_GET_CLASS (object), current[0]);
    if (spec == NULL) {
      GST_INFO ("no param spec named %s", current[0]);
    } else {
      if (pspec)
        *pspec = spec;
      if (target) {
        /* extra ref for the caller: @target is (transfer full) */
        g_object_ref (object);
        *target = object;
      }
      res = TRUE;
    }
  }
  g_object_unref (object);
  g_strfreev (names);
  return res;
}
/* Fills @freq (pointing at an array of at least bp_equalizer_get_nbands()
 * doubles) with the center frequency of each equalizer band. Does nothing
 * when the pipeline has no equalizer. */
P_INVOKE void
bp_equalizer_get_frequencies (BansheePlayer *player, gdouble **freq)
{
    gint i, count;

    g_return_if_fail (IS_BANSHEE_PLAYER (player));
    /* Guard the out-array: the previous code dereferenced *freq with no
     * NULL check, crashing on a NULL argument from the binding layer. */
    g_return_if_fail (freq != NULL && *freq != NULL);

    if (player->equalizer == NULL) {
        return;
    }

    count = gst_child_proxy_get_children_count (GST_CHILD_PROXY (player->equalizer));

    for (i = 0; i < count; i++) {
        GstObject *band;

        band = gst_child_proxy_get_child_by_index (GST_CHILD_PROXY (player->equalizer), i);
        g_object_get (G_OBJECT (band), "freq", &(*freq)[i], NULL);
        g_object_unref (band);
    }
}
/* GstElement::release_pad: notify GstChildProxy listeners that the pad is
 * going away, then let the parent class do the actual release. */
static void
gst_audiomixer_release_pad (GstElement * element, GstPad * pad)
{
  GstAudioMixer *mixer = GST_AUDIO_MIXER (element);

  GST_DEBUG_OBJECT (mixer, "release pad %s:%s", GST_DEBUG_PAD_NAME (pad));

  gst_child_proxy_child_removed (GST_CHILD_PROXY (mixer), G_OBJECT (pad),
      GST_OBJECT_NAME (pad));

  GST_ELEMENT_CLASS (parent_class)->release_pad (element, pad);
}
/* Returns the number of bands exposed by the player's equalizer element,
 * or 0 when no equalizer is present. */
P_INVOKE guint
bp_equalizer_get_nbands (BansheePlayer *player)
{
    g_return_val_if_fail (IS_BANSHEE_PLAYER (player), 0);

    if (player->equalizer == NULL) {
        return 0;
    }

    return gst_child_proxy_get_children_count (GST_CHILD_PROXY (player->equalizer));
}
void GstEnginePipeline::UpdateEqualizer() { // Update band gains for (int i = 0; i < kEqBandCount; ++i) { float gain = eq_enabled_ ? eq_band_gains_[i] : 0.0; if (gain < 0) gain *= 0.24; else gain *= 0.12; GstObject* band = gst_child_proxy_get_child_by_index(GST_CHILD_PROXY(equalizer_), i); g_object_set(G_OBJECT(band), "gain", gain, nullptr); g_object_unref(G_OBJECT(band)); } // Update preamp float preamp = 1.0; if (eq_enabled_) preamp = float(eq_preamp_ + 100) * 0.01; // To scale from 0.0 to 2.0 g_object_set(G_OBJECT(equalizer_preamp_), "volume", preamp, nullptr); }
/* GstElement::request_new_pad: delegate pad creation to the parent class,
 * then announce the new pad through the GstChildProxy interface. Returns
 * NULL when the parent class could not create the pad. */
static GstPad *
gst_audiomixer_request_new_pad (GstElement * element, GstPadTemplate * templ,
    const gchar * req_name, const GstCaps * caps)
{
  GstAudioMixerPad *pad =
      (GstAudioMixerPad *) GST_ELEMENT_CLASS (parent_class)->request_new_pad
      (element, templ, req_name, caps);

  if (pad == NULL) {
    GST_DEBUG_OBJECT (element, "could not create/add pad");
    return NULL;
  }

  gst_child_proxy_child_added (GST_CHILD_PROXY (element), G_OBJECT (pad),
      GST_OBJECT_NAME (pad));

  return GST_PAD_CAST (pad);
}
/* Test Callbacks and vmethods*/

/* Builds the test pipeline:
 *   videotestsrc ! capsfilter ! subtitleoverlay ! videoconvert !
 *     capsfilter ! fakesink
 * with a uridecodebin feeding subtitles into the overlay, and a data probe
 * installed on the fakesink sink pad so frames can be inspected for
 * rendered subtitles. All elements are stored in file-scope globals; the
 * function runs entirely under SUBTITLES_TEST_LOCK. Returns the pipeline,
 * or NULL (via the failed/creation_failed cleanup paths) on error. */
static GstPipeline *
create_pipeline (InsanityGstPipelineTest * ptest, gpointer unused_data)
{
  GstCaps *caps;
  gulong probe_id;
  GError *err = NULL;
  GstIterator *it = NULL;
  gchar *uri = NULL, *sublocation = NULL;
  GstElement *capsfilter = NULL, *capsfilter1 = NULL, *colorspace = NULL,
      *colorspace1 = NULL, *fakesink = NULL;
  GstPad *fakesinksink = NULL, *tmppad = NULL;
  InsanityTest *test = INSANITY_TEST (ptest);

  SUBTITLES_TEST_LOCK ();
  glob_pipeline = GST_ELEMENT (gst_pipeline_new ("pipeline"));

  /* Create the source */
  insanity_test_get_boolean_argument (test, "push-mode",
      (gboolean *) & glob_push_mode);
  insanity_test_get_string_argument (test, "sublocation", &sublocation);
  if (sublocation == NULL || g_strcmp0 (sublocation, "") == 0) {
    ERROR (test, "Location name not set\n");
    goto creation_failed;
  }

  uri = gst_filename_to_uri (sublocation, &err);
  if (err != NULL) {
    ERROR (test, "Error creating uri %s", err->message);
    goto creation_failed;
  }

  /* In push mode the uri is prefixed with "push" so uridecodebin picks the
   * push-based source. NOTE(review): the pushfilesrc element created here
   * is never used — glob_uridecodebin is unconditionally overwritten with
   * the uridecodebin below, leaking this element. Looks like dead code;
   * confirm against the original test before removing. */
  if (glob_push_mode == TRUE) {
    gchar *tmpuri;

    glob_uridecodebin = gst_element_factory_make ("pushfilesrc", "src");
    tmpuri = g_strconcat ("push", uri, NULL);
    g_free (uri);

    uri = tmpuri;
  }

  glob_uridecodebin = gst_element_factory_make ("uridecodebin", "src");
  g_signal_connect (glob_uridecodebin, "pad-added",
      G_CALLBACK (pad_added_cb), test);
  g_object_set (glob_uridecodebin, "uri", uri, NULL);

  /* the subtitleoverlay */
  glob_suboverlay =
      gst_element_factory_make ("subtitleoverlay", "subtitleoverlay");
  if (glob_suboverlay == NULL)
    goto creation_failed;

  /* the fakesink */
  fakesink = gst_element_factory_make ("fakesink", "fakesink");
  if (fakesink == NULL)
    goto creation_failed;

  /* and the videotestsrc */
  glob_videotestsrc = gst_element_factory_make ("videotestsrc", "videotestsrc");
  if (glob_videotestsrc == NULL)
    goto creation_failed;
  g_object_set (glob_videotestsrc, "pattern", 2, "do-timestamp", TRUE, NULL);

  /* Make sure the video is big enough */
  capsfilter = gst_element_factory_make ("capsfilter", NULL);
  if (capsfilter == NULL)
    goto creation_failed;

  gst_video_info_init (&glob_video_info);
  gst_video_info_set_format (&glob_video_info, GST_VIDEO_FORMAT_RGB, 1920,
      1080);
  caps = gst_video_info_to_caps (&glob_video_info);
  g_object_set (capsfilter, "caps", caps, NULL);

  capsfilter1 = gst_element_factory_make ("capsfilter", NULL);
  if (capsfilter1 == NULL)
    goto creation_failed;

  /* We want the last frame that we will "parse" to check if it contains
   * subtitles to be in RGB to make simpler for us */
  g_object_set (capsfilter1, "caps", caps, NULL);

  colorspace = gst_element_factory_make ("videoconvert", NULL);
  if (colorspace == NULL)
    goto creation_failed;

  colorspace1 = gst_element_factory_make ("videoconvert", NULL);
  if (colorspace1 == NULL)
    goto creation_failed;

  /* Now add to the pipeline */
  gst_bin_add_many (GST_BIN (glob_pipeline), glob_uridecodebin,
      glob_videotestsrc, capsfilter, glob_suboverlay,
      capsfilter1, colorspace, colorspace1, fakesink, NULL);

  /* link video branch elements */
  gst_element_link_many (glob_videotestsrc, capsfilter,
      glob_suboverlay, colorspace, capsfilter1, fakesink, NULL);

  /* And install a probe to the subtitleoverlay src pad */
  fakesinksink = gst_element_get_static_pad (fakesink, "sink");
  if (fakesinksink == NULL)
    goto failed;

  if (insanity_gst_test_add_data_probe (INSANITY_GST_TEST (test),
          GST_BIN (glob_pipeline), GST_OBJECT_NAME (fakesink),
          GST_OBJECT_NAME (fakesinksink), &tmppad, &probe_id,
          &probe_cb, NULL, NULL) == TRUE) {
    /* Probe context kept alive for the duration of the test run */
    glob_suboverlay_src_probe = g_slice_new0 (ProbeContext);
    glob_suboverlay_src_probe->probe_id = probe_id;
    glob_suboverlay_src_probe->pad = fakesinksink;
    glob_suboverlay_src_probe->element = fakesink;
    glob_suboverlay_src_probe->test = test;
    glob_suboverlay_src_probe->waiting_first_segment = TRUE;

    insanity_test_validate_checklist_item (test, "install-probes", TRUE, NULL);
  } else {
    insanity_test_validate_checklist_item (test, "install-probes", FALSE,
        "Failed to attach probe to fakesink");
    insanity_test_done (test);
    goto failed;
  }

  /* Watch children added to the overlay via its GstChildProxy interface */
  g_signal_connect (GST_CHILD_PROXY (glob_suboverlay), "child-added",
      G_CALLBACK (suboverlay_child_added_cb), test);

done:
  /* Common exit: release the lock and the temporaries owned here */
  SUBTITLES_TEST_UNLOCK ();

  g_free (uri);
  g_free (sublocation);
  if (err != NULL)
    g_error_free (err);

  if (it != NULL)
    gst_iterator_free (it);

  return GST_PIPELINE (glob_pipeline);

failed:
  /* Pipeline exists but setup failed: drop it (it owns the added
   * elements) and clear all globals, then exit through done */
  if (glob_pipeline != NULL)
    gst_object_unref (glob_pipeline);
  glob_suboverlay = glob_pipeline = glob_videotestsrc =
      glob_uridecodebin = NULL;

  goto done;

creation_failed:
  /* Element creation failed before anything was added to the pipeline:
   * unref the not-yet-parented elements individually */
  if (glob_uridecodebin != NULL)
    gst_object_unref (glob_uridecodebin);
  if (glob_suboverlay != NULL)
    gst_object_unref (glob_suboverlay);
  if (glob_videotestsrc != NULL)
    gst_object_unref (glob_videotestsrc);
  if (fakesink != NULL)
    gst_object_unref (fakesink);

  goto failed;
}
bool GstEnginePipeline::Init() {
  // Here we create all the parts of the gstreamer pipeline - from the source
  // to the sink.  The parts of the pipeline are split up into bins:
  //   uri decode bin -> audio bin
  // The uri decode bin is a gstreamer builtin that automatically picks the
  // right type of source and decoder for the URI.

  // The audio bin gets created here and contains:
  //   queue ! audioconvert ! <caps32>
  //         ! ( rgvolume ! rglimiter ! audioconvert2 ) ! tee
  // rgvolume and rglimiter are only created when replaygain is enabled.

  // After the tee the pipeline splits.  One split is converted to 16-bit int
  // samples for the scope, the other is kept as float32 and sent to the
  // speaker.
  //   tee1 ! probe_queue ! probe_converter ! <caps16> ! probe_sink
  //   tee2 ! audio_queue ! equalizer_preamp ! equalizer ! volume ! audioscale
  //        ! convert ! audiosink

  // Audio bin
  audiobin_ = gst_bin_new("audiobin");
  gst_bin_add(GST_BIN(pipeline_), audiobin_);

  // Create the sink
  if (!(audiosink_ = engine_->CreateElement(sink_, audiobin_))) return false;

  // Forward the user-chosen output device to the sink, if it supports one.
  // The property's expected type varies by sink, hence the QVariant switch.
  if (g_object_class_find_property(G_OBJECT_GET_CLASS(audiosink_), "device") &&
      !device_.toString().isEmpty()) {
    switch (device_.type()) {
      case QVariant::Int:
        g_object_set(G_OBJECT(audiosink_), "device", device_.toInt(), nullptr);
        break;
      case QVariant::String:
        g_object_set(G_OBJECT(audiosink_), "device",
                     device_.toString().toUtf8().constData(), nullptr);
        break;

#ifdef Q_OS_WIN32
      case QVariant::ByteArray: {
        GUID guid = QUuid(device_.toByteArray());
        g_object_set(G_OBJECT(audiosink_), "device", &guid, nullptr);
        break;
      }
#endif  // Q_OS_WIN32

      default:
        qLog(Warning) << "Unknown device type" << device_;
        break;
    }
  }

  // Create all the other elements
  GstElement* tee, *probe_queue, *probe_converter, *probe_sink, *audio_queue,
      *convert;

  queue_ = engine_->CreateElement("queue2", audiobin_);
  audioconvert_ = engine_->CreateElement("audioconvert", audiobin_);
  tee = engine_->CreateElement("tee", audiobin_);

  probe_queue = engine_->CreateElement("queue", audiobin_);
  probe_converter = engine_->CreateElement("audioconvert", audiobin_);
  probe_sink = engine_->CreateElement("fakesink", audiobin_);

  audio_queue = engine_->CreateElement("queue", audiobin_);
  equalizer_preamp_ = engine_->CreateElement("volume", audiobin_);
  equalizer_ = engine_->CreateElement("equalizer-nbands", audiobin_);
  stereo_panorama_ = engine_->CreateElement("audiopanorama", audiobin_);
  volume_ = engine_->CreateElement("volume", audiobin_);
  audioscale_ = engine_->CreateElement("audioresample", audiobin_);
  convert = engine_->CreateElement("audioconvert", audiobin_);

  if (!queue_ || !audioconvert_ || !tee || !probe_queue || !probe_converter ||
      !probe_sink || !audio_queue || !equalizer_preamp_ || !equalizer_ ||
      !stereo_panorama_ || !volume_ || !audioscale_ || !convert) {
    return false;
  }

  // Create the replaygain elements if it's enabled.  event_probe is the
  // audioconvert element we attach the probe to, which will change depending
  // on whether replaygain is enabled.  convert_sink is the element after the
  // first audioconvert, which again will change.
  GstElement* event_probe = audioconvert_;
  GstElement* convert_sink = tee;

  if (rg_enabled_) {
    rgvolume_ = engine_->CreateElement("rgvolume", audiobin_);
    rglimiter_ = engine_->CreateElement("rglimiter", audiobin_);
    audioconvert2_ = engine_->CreateElement("audioconvert", audiobin_);
    event_probe = audioconvert2_;
    convert_sink = rgvolume_;

    if (!rgvolume_ || !rglimiter_ || !audioconvert2_) {
      return false;
    }

    // Set replaygain settings
    g_object_set(G_OBJECT(rgvolume_), "album-mode", rg_mode_, nullptr);
    g_object_set(G_OBJECT(rgvolume_), "pre-amp", double(rg_preamp_), nullptr);
    g_object_set(G_OBJECT(rglimiter_), "enabled", int(rg_compression_),
                 nullptr);
  }

  // Create a pad on the outside of the audiobin and connect it to the pad of
  // the first element.
  GstPad* pad = gst_element_get_static_pad(queue_, "sink");
  gst_element_add_pad(audiobin_, gst_ghost_pad_new("sink", pad));
  gst_object_unref(pad);

  // Add a data probe on the src pad of the audioconvert element for our scope.
  // We do it here because we want pre-equalized and pre-volume samples
  // so that our visualization are not be affected by them.
  pad = gst_element_get_static_pad(event_probe, "src");
  gst_pad_add_event_probe(pad, G_CALLBACK(EventHandoffCallback), this);
  gst_object_unref(pad);

  // Configure the fakesink properly
  g_object_set(G_OBJECT(probe_sink), "sync", TRUE, nullptr);

  // Set the equalizer bands
  g_object_set(G_OBJECT(equalizer_), "num-bands", 10, nullptr);

  // Each band's bandwidth spans from the previous band's frequency up to
  // its own; all gains start flat at 0.
  int last_band_frequency = 0;
  for (int i = 0; i < kEqBandCount; ++i) {
    GstObject* band =
        gst_child_proxy_get_child_by_index(GST_CHILD_PROXY(equalizer_), i);

    const float frequency = kEqBandFrequencies[i];
    const float bandwidth = frequency - last_band_frequency;
    last_band_frequency = frequency;

    g_object_set(G_OBJECT(band), "freq", frequency, "bandwidth", bandwidth,
                 "gain", 0.0f, nullptr);
    g_object_unref(G_OBJECT(band));
  }

  // Set the stereo balance.
  g_object_set(G_OBJECT(stereo_panorama_), "panorama", stereo_balance_,
               nullptr);

  // Set the buffer duration.  We set this on this queue instead of the
  // decode bin (in ReplaceDecodeBin()) because setting it on the decode bin
  // only affects network sources.
  // Disable the default buffer and byte limits, so we only buffer based on
  // time.
  g_object_set(G_OBJECT(queue_), "max-size-buffers", 0, nullptr);
  g_object_set(G_OBJECT(queue_), "max-size-bytes", 0, nullptr);
  g_object_set(G_OBJECT(queue_), "max-size-time", buffer_duration_nanosec_,
               nullptr);
  g_object_set(G_OBJECT(queue_), "low-percent", buffer_min_fill_, nullptr);

  if (buffer_duration_nanosec_ > 0) {
    g_object_set(G_OBJECT(queue_), "use-buffering", true, nullptr);
  }

  gst_element_link(queue_, audioconvert_);

  // Create the caps to put in each path in the tee.  The scope path gets
  // 16-bit ints and the audiosink path gets float32.
  GstCaps* caps16 = gst_caps_new_simple("audio/x-raw-int", "width", G_TYPE_INT,
                                        16, "signed", G_TYPE_BOOLEAN, true,
                                        nullptr);
  GstCaps* caps32 = gst_caps_new_simple("audio/x-raw-float", "width",
                                        G_TYPE_INT, 32, nullptr);
  if (mono_playback_) {
    gst_caps_set_simple(caps32, "channels", G_TYPE_INT, 1, nullptr);
  }

  // Link the elements with special caps
  gst_element_link_filtered(probe_converter, probe_sink, caps16);
  gst_element_link_filtered(audioconvert_, convert_sink, caps32);
  gst_caps_unref(caps16);
  gst_caps_unref(caps32);

  // Link the outputs of tee to the queues on each path.
  gst_pad_link(gst_element_get_request_pad(tee, "src%d"),
               gst_element_get_static_pad(probe_queue, "sink"));
  gst_pad_link(gst_element_get_request_pad(tee, "src%d"),
               gst_element_get_static_pad(audio_queue, "sink"));

  // Link replaygain elements if enabled.
  if (rg_enabled_) {
    gst_element_link_many(rgvolume_, rglimiter_, audioconvert2_, tee, nullptr);
  }

  // Link everything else.
  gst_element_link(probe_queue, probe_converter);
  gst_element_link_many(audio_queue, equalizer_preamp_, equalizer_,
                        stereo_panorama_, volume_, audioscale_, convert,
                        audiosink_, nullptr);

  // Add probes and handlers.
  gst_pad_add_buffer_probe(gst_element_get_static_pad(probe_converter, "src"),
                           G_CALLBACK(HandoffCallback), this);
  gst_bus_set_sync_handler(gst_pipeline_get_bus(GST_PIPELINE(pipeline_)),
                           BusCallbackSync, this);
  bus_cb_id_ = gst_bus_add_watch(gst_pipeline_get_bus(GST_PIPELINE(pipeline_)),
                                 BusCallback, this);

  MaybeLinkDecodeToAudio();

  return true;
}
/* Resizes the equalizer to @new_count bands and recomputes every band's
 * center frequency, bandwidth and filter type. Bands are spread
 * logarithmically between LOWEST_FREQ and HIGHEST_FREQ; the first band is
 * a low shelf, the last a high shelf, and the rest are peak filters.
 * Added/removed bands are announced through the GstChildProxy interface. */
void
gst_iir_equalizer_compute_frequencies (GstIirEqualizer * equ, guint new_count)
{
  guint old_count, i;
  gdouble freq0, freq1, step;
  gchar name[20];

  if (equ->freq_band_count == new_count)
    return;

  BANDS_LOCK (equ);
  /* re-check under the lock: another thread may have resized already */
  if (G_UNLIKELY (equ->freq_band_count == new_count)) {
    BANDS_UNLOCK (equ);
    return;
  }

  old_count = equ->freq_band_count;
  equ->freq_band_count = new_count;
  GST_DEBUG ("bands %u -> %u", old_count, new_count);

  if (old_count < new_count) {
    /* add new bands */
    equ->bands = g_realloc (equ->bands, sizeof (GstObject *) * new_count);
    for (i = old_count; i < new_count; i++) {
      /* otherwise they get names like 'iirequalizerband5' */
      sprintf (name, "band%u", i);

      equ->bands[i] = g_object_new (GST_TYPE_IIR_EQUALIZER_BAND,
          "name", name, NULL);
      GST_DEBUG ("adding band[%d]=%p", i, equ->bands[i]);

      /* parent the band so it participates in the object hierarchy, then
       * announce it to child-proxy listeners */
      gst_object_set_parent (GST_OBJECT (equ->bands[i]), GST_OBJECT (equ));
      gst_child_proxy_child_added (GST_CHILD_PROXY (equ),
          G_OBJECT (equ->bands[i]), name);
    }
  } else {
    /* free unused bands */
    for (i = new_count; i < old_count; i++) {
      GST_DEBUG ("removing band[%d]=%p", i, equ->bands[i]);
      gst_child_proxy_child_removed (GST_CHILD_PROXY (equ),
          G_OBJECT (equ->bands[i]), GST_OBJECT_NAME (equ->bands[i]));
      /* unparenting drops the parent's reference and destroys the band */
      gst_object_unparent (GST_OBJECT (equ->bands[i]));
      equ->bands[i] = NULL;
    }
  }

  alloc_history (equ, GST_AUDIO_FILTER_INFO (equ));

  /* set center frequencies and name band objects
   * FIXME: arg! we can't change the name of parented objects :(
   * application should read band->freq to get the name
   */
  /* logarithmic spacing: each band's upper edge is the previous edge
   * times a constant ratio */
  step = pow (HIGHEST_FREQ / LOWEST_FREQ, 1.0 / new_count);
  freq0 = LOWEST_FREQ;
  for (i = 0; i < new_count; i++) {
    freq1 = freq0 * step;

    if (i == 0)
      equ->bands[i]->type = BAND_TYPE_LOW_SHELF;
    else if (i == new_count - 1)
      equ->bands[i]->type = BAND_TYPE_HIGH_SHELF;
    else
      equ->bands[i]->type = BAND_TYPE_PEAK;

    equ->bands[i]->freq = freq0 + ((freq1 - freq0) / 2.0);
    equ->bands[i]->width = freq1 - freq0;
    GST_DEBUG ("band[%2d] = '%lf'", i, equ->bands[i]->freq);

    g_object_notify (G_OBJECT (equ->bands[i]), "bandwidth");
    g_object_notify (G_OBJECT (equ->bands[i]), "freq");
    g_object_notify (G_OBJECT (equ->bands[i]), "type");

    /* if(equ->bands[i]->freq<10000.0)
       sprintf (name,"%dHz",(gint)equ->bands[i]->freq);
       else
       sprintf (name,"%dkHz",(gint)(equ->bands[i]->freq/1000.0));
       gst_object_set_name( GST_OBJECT (equ->bands[i]), name);
       GST_DEBUG ("band[%2d] = '%s'",i,name); */
    freq0 = freq1;
  }

  /* filter coefficients are recomputed lazily on the next process call */
  equ->need_new_coefficients = TRUE;
  BANDS_UNLOCK (equ);
}
bool GstEnginePipeline::Init() {
  // Here we create all the parts of the gstreamer pipeline - from the source
  // to the sink.  The parts of the pipeline are split up into bins:
  //   uri decode bin -> audio bin
  // The uri decode bin is a gstreamer builtin that automatically picks the
  // right type of source and decoder for the URI.

  // The audio bin gets created here and contains:
  //   queue ! audioconvert ! <caps32>
  //         ! ( rgvolume ! rglimiter ! audioconvert2 ) ! tee
  // rgvolume and rglimiter are only created when replaygain is enabled.

  // After the tee the pipeline splits.  One split is converted to 16-bit int
  // samples for the scope, the other is kept as float32 and sent to the
  // speaker.
  //   tee1 ! probe_queue ! probe_converter ! <caps16> ! probe_sink
  //   tee2 ! audio_queue ! equalizer_preamp ! equalizer ! volume ! audioscale
  //        ! convert ! audiosink

  gst_segment_init(&last_decodebin_segment_, GST_FORMAT_TIME);

  // Audio bin
  audiobin_ = gst_bin_new("audiobin");
  gst_bin_add(GST_BIN(pipeline_), audiobin_);

  // Create the sink
  if (!(audiosink_ = engine_->CreateElement(sink_, audiobin_))) return false;

  // Forward the user-chosen output device to the sink, if it supports one.
  // The property's expected type varies by sink, hence the QVariant switch.
  if (g_object_class_find_property(G_OBJECT_GET_CLASS(audiosink_), "device") &&
      !device_.toString().isEmpty()) {
    switch (device_.type()) {
      case QVariant::Int:
        g_object_set(G_OBJECT(audiosink_), "device", device_.toInt(), nullptr);
        break;
      case QVariant::LongLong:
        g_object_set(G_OBJECT(audiosink_), "device", device_.toLongLong(),
                     nullptr);
        break;
      case QVariant::String:
        g_object_set(G_OBJECT(audiosink_), "device",
                     device_.toString().toUtf8().constData(), nullptr);
        break;
      case QVariant::ByteArray: {
        g_object_set(G_OBJECT(audiosink_), "device",
                     device_.toByteArray().constData(), nullptr);
        break;
      }
      default:
        qLog(Warning) << "Unknown device type" << device_;
        break;
    }
  }

  // Create all the other elements
  GstElement* tee, *probe_queue, *probe_converter, *probe_sink, *audio_queue,
      *convert;

  queue_ = engine_->CreateElement("queue2", audiobin_);
  audioconvert_ = engine_->CreateElement("audioconvert", audiobin_);
  tee = engine_->CreateElement("tee", audiobin_);

  probe_queue = engine_->CreateElement("queue2", audiobin_);
  probe_converter = engine_->CreateElement("audioconvert", audiobin_);
  probe_sink = engine_->CreateElement("fakesink", audiobin_);

  audio_queue = engine_->CreateElement("queue", audiobin_);
  equalizer_preamp_ = engine_->CreateElement("volume", audiobin_);
  equalizer_ = engine_->CreateElement("equalizer-nbands", audiobin_);
  stereo_panorama_ = engine_->CreateElement("audiopanorama", audiobin_);
  volume_ = engine_->CreateElement("volume", audiobin_);
  audioscale_ = engine_->CreateElement("audioresample", audiobin_);
  convert = engine_->CreateElement("audioconvert", audiobin_);

  if (!queue_ || !audioconvert_ || !tee || !probe_queue || !probe_converter ||
      !probe_sink || !audio_queue || !equalizer_preamp_ || !equalizer_ ||
      !stereo_panorama_ || !volume_ || !audioscale_ || !convert) {
    return false;
  }

  // Create the replaygain elements if it's enabled.  event_probe is the
  // audioconvert element we attach the probe to, which will change depending
  // on whether replaygain is enabled.  convert_sink is the element after the
  // first audioconvert, which again will change.
  GstElement* event_probe = audioconvert_;
  GstElement* convert_sink = tee;

  if (rg_enabled_) {
    rgvolume_ = engine_->CreateElement("rgvolume", audiobin_);
    rglimiter_ = engine_->CreateElement("rglimiter", audiobin_);
    audioconvert2_ = engine_->CreateElement("audioconvert", audiobin_);
    event_probe = audioconvert2_;
    convert_sink = rgvolume_;

    if (!rgvolume_ || !rglimiter_ || !audioconvert2_) {
      return false;
    }

    // Set replaygain settings
    g_object_set(G_OBJECT(rgvolume_), "album-mode", rg_mode_, nullptr);
    g_object_set(G_OBJECT(rgvolume_), "pre-amp", double(rg_preamp_), nullptr);
    g_object_set(G_OBJECT(rglimiter_), "enabled", int(rg_compression_),
                 nullptr);
  }

  // Create a pad on the outside of the audiobin and connect it to the pad of
  // the first element.
  GstPad* pad = gst_element_get_static_pad(queue_, "sink");
  gst_element_add_pad(audiobin_, gst_ghost_pad_new("sink", pad));
  gst_object_unref(pad);

  // Add a data probe on the src pad of the audioconvert element for our scope.
  // We do it here because we want pre-equalized and pre-volume samples
  // so that our visualization are not be affected by them.
  pad = gst_element_get_static_pad(event_probe, "src");
  gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_EVENT_UPSTREAM,
                    &EventHandoffCallback, this, NULL);
  gst_object_unref(pad);

  // Configure the fakesink properly
  g_object_set(G_OBJECT(probe_sink), "sync", TRUE, nullptr);

  // Setting the equalizer bands:
  //
  // GStreamer's GstIirEqualizerNBands sets up shelve filters for the first and
  // last bands as corner cases.  That was causing the "inverted slider" bug.
  // As a workaround, we create two dummy bands at both ends of the spectrum.
  // This causes the actual first and last adjustable bands to be
  // implemented using band-pass filters.
  g_object_set(G_OBJECT(equalizer_), "num-bands", 10 + 2, nullptr);

  // Dummy first band (bandwidth 0, cutting below 20Hz):
  GstObject* first_band = GST_OBJECT(
      gst_child_proxy_get_child_by_index(GST_CHILD_PROXY(equalizer_), 0));
  g_object_set(G_OBJECT(first_band), "freq", 20.0, "bandwidth", 0, "gain", 0.0f,
               nullptr);
  g_object_unref(G_OBJECT(first_band));

  // Dummy last band (bandwidth 0, cutting over 20KHz):
  GstObject* last_band = GST_OBJECT(gst_child_proxy_get_child_by_index(
      GST_CHILD_PROXY(equalizer_), kEqBandCount + 1));
  g_object_set(G_OBJECT(last_band), "freq", 20000.0, "bandwidth", 0, "gain",
               0.0f, nullptr);
  g_object_unref(G_OBJECT(last_band));

  // Configure the real bands; index_in_eq skips the dummy first band.
  int last_band_frequency = 0;
  for (int i = 0; i < kEqBandCount; ++i) {
    const int index_in_eq = i + 1;
    GstObject* band = GST_OBJECT(gst_child_proxy_get_child_by_index(
        GST_CHILD_PROXY(equalizer_), index_in_eq));

    const float frequency = kEqBandFrequencies[i];
    const float bandwidth = frequency - last_band_frequency;
    last_band_frequency = frequency;

    g_object_set(G_OBJECT(band), "freq", frequency, "bandwidth", bandwidth,
                 "gain", 0.0f, nullptr);
    g_object_unref(G_OBJECT(band));
  }

  // Set the stereo balance.
  g_object_set(G_OBJECT(stereo_panorama_), "panorama", stereo_balance_,
               nullptr);

  // Set the buffer duration.  We set this on this queue instead of the
  // decode bin (in ReplaceDecodeBin()) because setting it on the decode bin
  // only affects network sources.
  // Disable the default buffer and byte limits, so we only buffer based on
  // time.
  g_object_set(G_OBJECT(queue_), "max-size-buffers", 0, nullptr);
  g_object_set(G_OBJECT(queue_), "max-size-bytes", 0, nullptr);
  g_object_set(G_OBJECT(queue_), "max-size-time", buffer_duration_nanosec_,
               nullptr);
  g_object_set(G_OBJECT(queue_), "low-percent", buffer_min_fill_, nullptr);

  if (buffer_duration_nanosec_ > 0) {
    g_object_set(G_OBJECT(queue_), "use-buffering", true, nullptr);
  }

  gst_element_link_many(queue_, audioconvert_, convert_sink, nullptr);
  gst_element_link(probe_converter, probe_sink);

  // Link the outputs of tee to the queues on each path.
  gst_pad_link(gst_element_get_request_pad(tee, "src_%u"),
               gst_element_get_static_pad(probe_queue, "sink"));
  gst_pad_link(gst_element_get_request_pad(tee, "src_%u"),
               gst_element_get_static_pad(audio_queue, "sink"));

  // Link replaygain elements if enabled.
  if (rg_enabled_) {
    gst_element_link_many(rgvolume_, rglimiter_, audioconvert2_, tee, nullptr);
  }

  // Link the analyzer output of the tee and force 16 bit caps
  GstCaps* caps16 = gst_caps_new_simple("audio/x-raw", "format", G_TYPE_STRING,
                                        "S16LE", NULL);
  gst_element_link_filtered(probe_queue, probe_converter, caps16);
  gst_caps_unref(caps16);

  gst_element_link_many(audio_queue, equalizer_preamp_, equalizer_,
                        stereo_panorama_, volume_, audioscale_, convert,
                        nullptr);

  // We only limit the media type to raw audio.
  // Let the audio output of the tee autonegotiate the bit depth and format.
  GstCaps* caps = gst_caps_new_empty_simple("audio/x-raw");

  // Add caps for fixed sample rate and mono, but only if requested
  if (sample_rate_ != GstEngine::kAutoSampleRate && sample_rate_ > 0) {
    gst_caps_set_simple(caps, "rate", G_TYPE_INT, sample_rate_, nullptr);
  }
  if (mono_playback_) {
    gst_caps_set_simple(caps, "channels", G_TYPE_INT, 1, nullptr);
  }

  gst_element_link_filtered(convert, audiosink_, caps);
  gst_caps_unref(caps);

  // Add probes and handlers.
  gst_pad_add_probe(gst_element_get_static_pad(probe_converter, "src"),
                    GST_PAD_PROBE_TYPE_BUFFER, HandoffCallback, this, nullptr);
  gst_bus_set_sync_handler(gst_pipeline_get_bus(GST_PIPELINE(pipeline_)),
                           BusCallbackSync, this, nullptr);
  bus_cb_id_ = gst_bus_add_watch(gst_pipeline_get_bus(GST_PIPELINE(pipeline_)),
                                 BusCallback, this);

  MaybeLinkDecodeToAudio();

  return true;
}
int main (int argc, char *argv[]) { GstElement *bin; GstElement *src, *capsfilter, *equalizer, *spectrum, *audioconvert, *sink; GstCaps *caps; GstBus *bus; GtkWidget *appwindow, *vbox, *hbox, *widget; int i; gst_init (&argc, &argv); gtk_init (&argc, &argv); bin = gst_pipeline_new ("bin"); /* White noise */ src = gst_element_factory_make ("audiotestsrc", "src"); g_object_set (G_OBJECT (src), "wave", 5, "volume", 0.8, NULL); /* Force float32 samples */ capsfilter = gst_element_factory_make ("capsfilter", "capsfilter"); caps = gst_caps_new_simple ("audio/x-raw-float", "width", G_TYPE_INT, 32, NULL); g_object_set (capsfilter, "caps", caps, NULL); equalizer = gst_element_factory_make ("equalizer-nbands", "equalizer"); g_object_set (G_OBJECT (equalizer), "num-bands", NBANDS, NULL); spectrum = gst_element_factory_make ("spectrum", "spectrum"); g_object_set (G_OBJECT (spectrum), "bands", spect_bands, "threshold", -80, "message", TRUE, "interval", 500 * GST_MSECOND, NULL); audioconvert = gst_element_factory_make ("audioconvert", "audioconvert"); sink = gst_element_factory_make ("autoaudiosink", "sink"); gst_bin_add_many (GST_BIN (bin), src, capsfilter, equalizer, spectrum, audioconvert, sink, NULL); if (!gst_element_link_many (src, capsfilter, equalizer, spectrum, audioconvert, sink, NULL)) { fprintf (stderr, "can't link elements\n"); exit (1); } bus = gst_element_get_bus (bin); gst_bus_add_watch (bus, message_handler, NULL); gst_object_unref (bus); appwindow = gtk_window_new (GTK_WINDOW_TOPLEVEL); g_signal_connect (G_OBJECT (appwindow), "destroy", G_CALLBACK (on_window_destroy), NULL); vbox = gtk_vbox_new (FALSE, 6); drawingarea = gtk_drawing_area_new (); gtk_widget_set_size_request (drawingarea, spect_bands, spect_height); g_signal_connect (G_OBJECT (drawingarea), "configure-event", G_CALLBACK (on_configure_event), (gpointer) spectrum); gtk_box_pack_start (GTK_BOX (vbox), drawingarea, TRUE, TRUE, 0); hbox = gtk_hbox_new (FALSE, 20); for (i = 0; i < NBANDS; i++) { 
GstObject *band; gdouble freq; gdouble bw; gdouble gain; gchar *label; GtkWidget *frame, *scales_hbox; band = gst_child_proxy_get_child_by_index (GST_CHILD_PROXY (equalizer), i); g_assert (band != NULL); g_object_get (G_OBJECT (band), "freq", &freq, NULL); g_object_get (G_OBJECT (band), "bandwidth", &bw, NULL); g_object_get (G_OBJECT (band), "gain", &gain, NULL); label = g_strdup_printf ("%d Hz", (int) (freq + 0.5)); frame = gtk_frame_new (label); g_free (label); scales_hbox = gtk_hbox_new (FALSE, 6); widget = gtk_vscale_new_with_range (-24.0, 12.0, 0.5); gtk_scale_set_draw_value (GTK_SCALE (widget), TRUE); gtk_scale_set_value_pos (GTK_SCALE (widget), GTK_POS_TOP); gtk_range_set_value (GTK_RANGE (widget), gain); gtk_widget_set_size_request (widget, 25, 150); g_signal_connect (G_OBJECT (widget), "value-changed", G_CALLBACK (on_gain_changed), (gpointer) band); gtk_box_pack_start (GTK_BOX (scales_hbox), widget, FALSE, FALSE, 0); widget = gtk_vscale_new_with_range (0.0, 20000.0, 5.0); gtk_scale_set_draw_value (GTK_SCALE (widget), TRUE); gtk_scale_set_value_pos (GTK_SCALE (widget), GTK_POS_TOP); gtk_range_set_value (GTK_RANGE (widget), bw); gtk_widget_set_size_request (widget, 25, 150); g_signal_connect (G_OBJECT (widget), "value-changed", G_CALLBACK (on_bandwidth_changed), (gpointer) band); gtk_box_pack_start (GTK_BOX (scales_hbox), widget, TRUE, TRUE, 0); widget = gtk_vscale_new_with_range (20.0, 20000.0, 5.0); gtk_scale_set_draw_value (GTK_SCALE (widget), TRUE); gtk_scale_set_value_pos (GTK_SCALE (widget), GTK_POS_TOP); gtk_range_set_value (GTK_RANGE (widget), freq); gtk_widget_set_size_request (widget, 25, 150); g_signal_connect (G_OBJECT (widget), "value-changed", G_CALLBACK (on_freq_changed), (gpointer) band); gtk_box_pack_start (GTK_BOX (scales_hbox), widget, TRUE, TRUE, 0); gtk_container_add (GTK_CONTAINER (frame), scales_hbox); gtk_box_pack_start (GTK_BOX (hbox), frame, TRUE, TRUE, 0); } gtk_box_pack_start (GTK_BOX (vbox), hbox, TRUE, TRUE, 0); 
gtk_container_add (GTK_CONTAINER (appwindow), vbox); gtk_widget_show_all (appwindow); gst_element_set_state (bin, GST_STATE_PLAYING); gtk_main (); gst_element_set_state (bin, GST_STATE_NULL); gst_object_unref (bin); return 0; }
/* Stereoscopic (3D) video playback demo: builds a playbin with a custom
 * GL sink bin (glupload ! glcolorconvert ! glviewconvert ! glimagesink)
 * and a GTK UI for selecting input/output multiview modes and flags.
 *
 * Relies on file-scope helpers defined elsewhere in this file:
 * detect_mode_from_uri, end_stream_cb, create_window, the button state
 * callbacks, set_mview_input_mode/set_mview_output_mode,
 * input_flags_changed/output_flags_changed, downmix_method_changed,
 * destroy_cb, draw_cb, resize_cb, enum_value_to_nick and
 * gst_mview_widget_new.
 *
 * Returns 0 on clean exit, 1 on usage error or element-creation failure. */
gint
main (gint argc, gchar * argv[])
{
  LocalState state;
  GtkWidget *area, *combo, *w;
  const gchar *uri;

  XInitThreads ();

  gst_init (&argc, &argv);
  gtk_init (&argc, &argv);

  if (argc < 2) {
    g_print ("Usage: 3dvideo <uri-to-play>\n");
    return 1;
  }

  uri = argv[1];

  GstElement *pipeline = gst_element_factory_make ("playbin", NULL);
  GstBin *sinkbin = (GstBin *)
      gst_parse_bin_from_description
      ("glupload ! glcolorconvert ! glviewconvert name=viewconvert ! glimagesink name=sink",
      TRUE, NULL);
  /* FIX: validate element creation *before* the objects are dereferenced
   * below.  The original placed these checks after g_object_get /
   * gst_child_proxy_get had already used the pointers, which defeats the
   * purpose of the precondition. */
  g_return_val_if_fail (pipeline != NULL, 1);
  g_return_val_if_fail (sinkbin != NULL, 1);
#if USE_GLCONVERT_FOR_INPUT
  GstElement *glconvert = gst_bin_get_by_name (sinkbin, "viewconvert");
  g_return_val_if_fail (glconvert != NULL, 1);
#endif
  GstElement *videosink = gst_bin_get_by_name (sinkbin, "sink");
  g_return_val_if_fail (videosink != NULL, 1);

  /* Get defaults */
  g_object_get (pipeline, "video-multiview-mode", &state.in_mode,
      "video-multiview-flags", &state.in_flags, NULL);
  gst_child_proxy_get (GST_CHILD_PROXY (videosink),
      "sink::output-multiview-mode", &state.out_mode,
      "sink::output-multiview-flags", &state.out_flags, NULL);

  detect_mode_from_uri (&state, uri);

  g_object_set (G_OBJECT (pipeline), "video-sink", sinkbin, NULL);
  g_object_set (G_OBJECT (pipeline), "uri", uri, NULL);
#if USE_GLCONVERT_FOR_INPUT
  g_object_set (G_OBJECT (glconvert), "input-mode-override", state.in_mode,
      NULL);
  g_object_set (G_OBJECT (glconvert), "input-flags-override", state.in_flags,
      NULL);
#else
  g_object_set (G_OBJECT (pipeline), "video-multiview-mode", state.in_mode,
      NULL);
  g_object_set (G_OBJECT (pipeline), "video-multiview-flags", state.in_flags,
      NULL);
#endif

  /* Connect to bus for signal handling */
  GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message::error", G_CALLBACK (end_stream_cb),
      pipeline);
  g_signal_connect (bus, "message::warning", G_CALLBACK (end_stream_cb),
      pipeline);
  g_signal_connect (bus, "message::eos", G_CALLBACK (end_stream_cb), pipeline);

  gst_element_set_state (pipeline, GST_STATE_READY);

  area = gtk_drawing_area_new ();
  /* The sync handler embeds the video output into 'area' as soon as the
   * sink asks for a window handle. */
  gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, area,
      NULL);
  gst_object_unref (bus);

  /* Toplevel window */
  GtkWidget *window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
  gtk_window_set_default_size (GTK_WINDOW (window), 800, 600);
  gtk_window_set_title (GTK_WINDOW (window), "Stereoscopic video demo");

  /* Allow the window (and thus the video area) to shrink to 1x1. */
  GdkGeometry geometry;
  geometry.min_width = 1;
  geometry.min_height = 1;
  geometry.max_width = -1;
  geometry.max_height = -1;
  gtk_window_set_geometry_hints (GTK_WINDOW (window), window, &geometry,
      GDK_HINT_MIN_SIZE);

  GtkWidget *vbox = gtk_box_new (GTK_ORIENTATION_VERTICAL, 2);
  gtk_container_add (GTK_CONTAINER (window), vbox);

  /* area where the video is drawn */
  gtk_box_pack_start (GTK_BOX (vbox), area, TRUE, TRUE, 0);

  /* Buttons to control the pipeline state */
  GtkWidget *table = gtk_grid_new ();
  gtk_container_add (GTK_CONTAINER (vbox), table);

  GtkWidget *button_state_ready = gtk_button_new_with_label ("Stop");
  g_signal_connect (G_OBJECT (button_state_ready), "clicked",
      G_CALLBACK (button_state_ready_cb), pipeline);
  gtk_grid_attach (GTK_GRID (table), button_state_ready, 1, 0, 1, 1);
  gtk_widget_show (button_state_ready);

  /* control state paused */
  GtkWidget *button_state_paused = gtk_button_new_with_label ("Pause");
  g_signal_connect (G_OBJECT (button_state_paused), "clicked",
      G_CALLBACK (button_state_paused_cb), pipeline);
  gtk_grid_attach (GTK_GRID (table), button_state_paused, 2, 0, 1, 1);
  gtk_widget_show (button_state_paused);

  /* control state playing */
  GtkWidget *button_state_playing = gtk_button_new_with_label ("Play");
  g_signal_connect (G_OBJECT (button_state_playing), "clicked",
      G_CALLBACK (button_state_playing_cb), pipeline);
  gtk_grid_attach (GTK_GRID (table), button_state_playing, 3, 0, 1, 1);
  /* gtk_widget_show (button_state_playing); */

  /* Input multiview-mode selector */
  w = gst_mview_widget_new (FALSE);
  combo = GST_MVIEW_WIDGET (w)->mode_selector;
  gtk_combo_box_set_active_id (GTK_COMBO_BOX (combo),
      enum_value_to_nick (GST_TYPE_VIDEO_MULTIVIEW_FRAME_PACKING,
          state.in_mode));
#if USE_GLCONVERT_FOR_INPUT
  g_signal_connect (G_OBJECT (combo), "changed",
      G_CALLBACK (set_mview_input_mode), glconvert);
#else
  g_signal_connect (G_OBJECT (combo), "changed",
      G_CALLBACK (set_mview_input_mode), pipeline);
#endif
  g_object_set (G_OBJECT (w), "flags", state.in_flags, NULL);
#if USE_GLCONVERT_FOR_INPUT
  g_signal_connect (G_OBJECT (w), "notify::flags",
      G_CALLBACK (input_flags_changed), glconvert);
#else
  g_signal_connect (G_OBJECT (w), "notify::flags",
      G_CALLBACK (input_flags_changed), pipeline);
#endif
  gtk_container_add (GTK_CONTAINER (vbox), w);

  /* Output multiview-mode selector */
  w = gst_mview_widget_new (TRUE);
  combo = GST_MVIEW_WIDGET (w)->mode_selector;
  gtk_combo_box_set_active_id (GTK_COMBO_BOX (combo),
      enum_value_to_nick (GST_TYPE_VIDEO_MULTIVIEW_MODE, state.out_mode));
  g_signal_connect (G_OBJECT (combo), "changed",
      G_CALLBACK (set_mview_output_mode), videosink);
  g_object_set (G_OBJECT (w), "flags", state.out_flags, NULL);
  g_signal_connect (G_OBJECT (w), "notify::flags",
      G_CALLBACK (output_flags_changed), videosink);
  g_signal_connect (G_OBJECT (w), "notify::downmix-mode",
      G_CALLBACK (downmix_method_changed), videosink);
  gtk_container_add (GTK_CONTAINER (vbox), w);

  /* configure the pipeline */
  g_signal_connect (G_OBJECT (window), "delete-event", G_CALLBACK (destroy_cb),
      pipeline);

  gtk_widget_realize (area);

  /* Redraw needed when paused or stopped (PAUSED or READY) */
  g_signal_connect (area, "draw", G_CALLBACK (draw_cb), videosink);
  g_signal_connect (area, "configure-event", G_CALLBACK (resize_cb),
      videosink);

  gtk_widget_show_all (window);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  gtk_main ();

  return 0;
}
int main (int argc, char *argv[]) { GstElement *bin; GstElement *decodebin, *decconvert; GstElement *capsfilter, *equalizer, *spectrum, *sinkconvert, *sink; GstCaps *caps; GstBus *bus; GtkWidget *appwindow, *vbox, *hbox, *scale; int i, num_bands = NBANDS; GOptionEntry options[] = { {"bands", 'b', 0, G_OPTION_ARG_INT, &num_bands, "Number of bands", NULL}, {NULL} }; GOptionContext *ctx; GError *err = NULL; ctx = g_option_context_new ("- demo of audio equalizer"); g_option_context_add_main_entries (ctx, options, NULL); g_option_context_add_group (ctx, gst_init_get_option_group ()); g_option_context_add_group (ctx, gtk_get_option_group (TRUE)); if (!g_option_context_parse (ctx, &argc, &argv, &err)) { g_print ("Error initializing: %s\n", err->message); exit (1); } if (argc < 2) { g_print ("Usage: %s <uri to play>\n", argv[0]); g_print (" For optional arguments: --help\n"); exit (-1); } gst_init (&argc, &argv); gtk_init (&argc, &argv); bin = gst_pipeline_new ("bin"); /* Uri decoding */ decodebin = gst_element_factory_make ("uridecodebin", "decoder"); g_object_set (G_OBJECT (decodebin), "uri", argv[1], NULL); /* Force float32 samples */ decconvert = gst_element_factory_make ("audioconvert", "decconvert"); capsfilter = gst_element_factory_make ("capsfilter", "capsfilter"); caps = gst_caps_new_simple ("audio/x-raw", "format", G_TYPE_STRING, "F32LE", NULL); g_object_set (capsfilter, "caps", caps, NULL); equalizer = gst_element_factory_make ("equalizer-nbands", "equalizer"); g_object_set (G_OBJECT (equalizer), "num-bands", num_bands, NULL); spectrum = gst_element_factory_make ("spectrum", "spectrum"); g_object_set (G_OBJECT (spectrum), "bands", spect_bands, "threshold", -80, "post-messages", TRUE, "interval", 500 * GST_MSECOND, NULL); sinkconvert = gst_element_factory_make ("audioconvert", "sinkconvert"); sink = gst_element_factory_make ("autoaudiosink", "sink"); gst_bin_add_many (GST_BIN (bin), decodebin, decconvert, capsfilter, equalizer, spectrum, sinkconvert, sink, NULL); 
if (!gst_element_link_many (decconvert, capsfilter, equalizer, spectrum, sinkconvert, sink, NULL)) { fprintf (stderr, "can't link elements\n"); exit (1); } /* Handle dynamic pads */ g_signal_connect (G_OBJECT (decodebin), "pad-added", G_CALLBACK (dynamic_link), gst_element_get_static_pad (decconvert, "sink")); bus = gst_element_get_bus (bin); gst_bus_add_watch (bus, message_handler, NULL); gst_object_unref (bus); appwindow = gtk_window_new (GTK_WINDOW_TOPLEVEL); gtk_window_set_title (GTK_WINDOW (appwindow), "Equalizer Demo"); g_signal_connect (G_OBJECT (appwindow), "destroy", G_CALLBACK (on_window_destroy), NULL); vbox = gtk_box_new (GTK_ORIENTATION_VERTICAL, 6); drawingarea = gtk_drawing_area_new (); gtk_widget_set_size_request (drawingarea, spect_bands, spect_height); g_signal_connect (G_OBJECT (drawingarea), "configure-event", G_CALLBACK (on_configure_event), (gpointer) spectrum); gtk_box_pack_start (GTK_BOX (vbox), drawingarea, TRUE, TRUE, 0); hbox = gtk_box_new (GTK_ORIENTATION_HORIZONTAL, 20); for (i = 0; i < num_bands; i++) { GObject *band; gdouble freq; gdouble bw; gdouble gain; gchar *label; GtkWidget *frame, *scales_hbox; band = gst_child_proxy_get_child_by_index (GST_CHILD_PROXY (equalizer), i); g_assert (band != NULL); g_object_get (band, "freq", &freq, NULL); g_object_get (band, "bandwidth", &bw, NULL); g_object_get (band, "gain", &gain, NULL); label = g_strdup_printf ("%d Hz", (int) (freq + 0.5)); frame = gtk_frame_new (label); g_free (label); scales_hbox = gtk_box_new (GTK_ORIENTATION_HORIZONTAL, 6); /* Create gain scale */ scale = gtk_scale_new_with_range (GTK_ORIENTATION_VERTICAL, -24.0, 12.0, 0.5); gtk_scale_set_draw_value (GTK_SCALE (scale), TRUE); gtk_scale_set_value_pos (GTK_SCALE (scale), GTK_POS_TOP); gtk_range_set_value (GTK_RANGE (scale), gain); gtk_widget_set_size_request (scale, 35, 150); g_signal_connect (G_OBJECT (scale), "value-changed", G_CALLBACK (on_gain_changed), (gpointer) band); gtk_box_pack_start (GTK_BOX (scales_hbox), scale, 
FALSE, FALSE, 0); /* Create bandwidth scale */ scale = gtk_scale_new_with_range (GTK_ORIENTATION_VERTICAL, 0.0, 20000.0, 5.0); gtk_scale_set_draw_value (GTK_SCALE (scale), TRUE); gtk_scale_set_value_pos (GTK_SCALE (scale), GTK_POS_TOP); gtk_range_set_value (GTK_RANGE (scale), bw); gtk_widget_set_size_request (scale, 45, 150); g_signal_connect (G_OBJECT (scale), "value-changed", G_CALLBACK (on_bandwidth_changed), (gpointer) band); gtk_box_pack_start (GTK_BOX (scales_hbox), scale, TRUE, TRUE, 0); /* Create frequency scale */ scale = gtk_scale_new_with_range (GTK_ORIENTATION_VERTICAL, 20.0, 20000.0, 5.0); gtk_scale_set_draw_value (GTK_SCALE (scale), TRUE); gtk_scale_set_value_pos (GTK_SCALE (scale), GTK_POS_TOP); gtk_range_set_value (GTK_RANGE (scale), freq); gtk_widget_set_size_request (scale, 45, 150); g_signal_connect (G_OBJECT (scale), "value-changed", G_CALLBACK (on_freq_changed), (gpointer) band); gtk_box_pack_start (GTK_BOX (scales_hbox), scale, TRUE, TRUE, 0); gtk_container_add (GTK_CONTAINER (frame), scales_hbox); gtk_box_pack_start (GTK_BOX (hbox), frame, TRUE, TRUE, 0); } gtk_box_pack_start (GTK_BOX (vbox), hbox, TRUE, TRUE, 0); gtk_container_add (GTK_CONTAINER (appwindow), vbox); gtk_widget_show_all (appwindow); gst_element_set_state (bin, GST_STATE_PLAYING); gtk_main (); gst_element_set_state (bin, GST_STATE_NULL); gst_object_unref (bin); return 0; }