/* Callback for the FsStream "src-pad-added" signal: builds an audio sink
 * bin (from the AUDIOSINK environment variable, or DEFAULT_AUDIOSINK),
 * adds it to the pipeline, links the newly-added source pad to it and
 * syncs it to PLAYING.
 * NOTE(review): failures are handled with g_assert — the process aborts
 * instead of recovering; presumably acceptable for a test program. */
static void src_pad_added_cb (FsStream *stream, GstPad *pad, FsCodec *codec, gpointer user_data)
{
  GstElement *pipeline = GST_ELEMENT_CAST (user_data);
  GstElement *sink = NULL;
  GError *error = NULL;
  GstPad *pad2;

  g_print ("Adding receive pipeline\n");

  /* Allow overriding the sink description via the environment. */
  if (g_getenv ("AUDIOSINK"))
    sink = gst_parse_bin_from_description (g_getenv ("AUDIOSINK"), TRUE, &error);
  else
    sink = gst_parse_bin_from_description (DEFAULT_AUDIOSINK, TRUE, &error);
  print_error (error);
  g_assert (sink);

  g_assert (gst_bin_add (GST_BIN (pipeline), sink));

  pad2 = gst_element_get_static_pad (sink, "sink");
  g_assert (pad2);
  g_assert (GST_PAD_LINK_SUCCESSFUL (gst_pad_link (pad, pad2)));

  /* The sink is added while the pipeline is already running, so it must
   * be brought to PLAYING explicitly. */
  g_assert (gst_element_set_state (sink, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE);

  gst_object_unref (pad2);
}
int main (int argc, char *argv[]) { GMainLoop *loop; GError *err = NULL; GstElement *pipeline, *srcbin, *sinkbin; GstBus *bus; guint bus_watch_id; if(config_from_xml("./sample.xml") < 0){ return -1; } gst_init (&argc, &argv); loop = g_main_loop_new (NULL, FALSE); pipeline = gst_pipeline_new ("audio-player"); srcbin = gst_parse_bin_from_description(cmd_source, TRUE, &err); sinkbin = gst_parse_bin_from_description(cmd_others, TRUE, &err); if (!pipeline || !srcbin || !sinkbin) { g_printerr ("One element could not be created. Exiting.\n"); return -1; } bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); bus_watch_id = gst_bus_add_watch (bus, bus_call, loop); gst_object_unref (bus); gst_bin_add_many (GST_BIN (pipeline), srcbin, sinkbin, NULL); gst_element_link (srcbin, sinkbin); gst_element_set_state (pipeline, GST_STATE_PLAYING); /* Iterate */ g_print ("Running...\n"); g_main_loop_run (loop); /* Out of the main loop, clean up nicely */ g_print ("Returned, stopping playback\n"); gst_element_set_state (pipeline, GST_STATE_NULL); g_print ("Deleting pipeline\n"); gst_object_unref (GST_OBJECT (pipeline)); g_source_remove (bus_watch_id); g_main_loop_unref (loop); return 0; }
// Swap the pipeline's current video effect for the one named by `value`.
// The valve is closed while relinking so no buffers flow through a
// half-connected effect chain, then reopened at the end.
void Pipeline::setVideoEffect(const QString &value)
{
    Effect *newEffect = EffectManager::instance()->getEffect(value);

    // close valves
    g_object_set(effectValve, "drop", TRUE, NULL);

    // unlink current effect, remove and destroy it
    gst_element_unlink_many(effectCapsFilter, effect, effectPostCS, NULL);
    g_object_ref(effect);
    gst_bin_remove(GST_BIN(effectInternalBin), effect);
    gst_element_set_state(effect, GST_STATE_NULL);
    g_object_unref(GST_OBJECT(effect));

    // NOTE(review): no NULL/error check — if the description fails to
    // parse, `effect` becomes NULL and the calls below operate on it.
    // Presumably effect descriptions are pre-validated; confirm.
    effect = gst_parse_bin_from_description(newEffect->desc().toUtf8(), TRUE, NULL);

    // add new effect to the bin and link it
    gst_bin_add(GST_BIN(effectInternalBin), effect);
    gst_element_link_many(effectCapsFilter, effect, effectPostCS, NULL);
    gst_element_set_state(effectInternalBin, GST_STATE_READY);
    gst_element_set_state(effectInternalBin, GST_STATE_PAUSED);

    //open valve
    g_object_set(effectValve, "drop", FALSE, NULL);
}
/* Build the webcam source bin ("<src> ! capsfilter ! identity") for the
 * configured device and caps, then look up the named elements inside it.
 * Returns: TRUE on success, FALSE if the description failed to parse.
 * FIX: previously the GError was leaked, a NULL bin was passed on to
 * gst_bin_get_by_name (crash), and the function returned TRUE anyway. */
static gboolean
acam_webcam_setup_create_webcam_source_bin (acam_webcam_device_s *acam_webcam_device)
{
  GError *err = NULL;
  char *webcam_input;

  webcam_input = g_strdup_printf (
      "%s name=video_source device=%s ! capsfilter name=capsfilter caps=video/x-raw-rgb,width=%d,height=%d,framerate=%d/%d;video/x-raw-yuv,width=%d,height=%d,framerate=%d/%d ! identity",
      acam_webcam_device->gstreamer_src,
      acam_webcam_device->device_name,
      acam_webcam_device->width,
      acam_webcam_device->height,
      acam_webcam_device->framerate_num,
      acam_webcam_device->framerate_den,
      acam_webcam_device->width,
      acam_webcam_device->height,
      acam_webcam_device->framerate_num,
      acam_webcam_device->framerate_den);

  acam_webcam_device->webcam_source_bin =
      gst_parse_bin_from_description (webcam_input, TRUE, &err);
  g_free (webcam_input);

  if (acam_webcam_device->webcam_source_bin == NULL) {
    g_print ("Error: %s\n\n", err != NULL ? err->message : "unknown");
    g_clear_error (&err);
    return FALSE;
  }

  acam_webcam_device->video_source =
      gst_bin_get_by_name (GST_BIN (acam_webcam_device->webcam_source_bin),
      "video_source");
  acam_webcam_device->capsfilter =
      gst_bin_get_by_name (GST_BIN (acam_webcam_device->webcam_source_bin),
      "capsfilter");

  return TRUE;
}
/* Create the encoder element for the extractor's current encoding
 * profile. Returns a new element, or NULL when encodebin is missing.
 * The "mp3" profile is special-cased because encodebin does not insert
 * xingmux, so that chain is parsed by hand. */
static GstElement*
build_encoder (SjExtractor *extractor)
{
  static const char * mp3_pipeline = "lamemp3enc ! xingmux ! id3v2mux";
  SjExtractorPrivate *priv;
  GstElement *encodebin;
  const char *profile_name;

  g_return_val_if_fail (SJ_IS_EXTRACTOR (extractor), NULL);
  priv = (SjExtractorPrivate*)extractor->priv;
  g_return_val_if_fail (priv->profile != NULL, NULL);

  profile_name = gst_encoding_profile_get_name (priv->profile);

  /* encodebin does not use xingmux so do mp3 pipeline ourselves */
  if (strcmp (profile_name, "mp3") == 0)
    return gst_parse_bin_from_description (mp3_pipeline, TRUE, NULL);

  encodebin = gst_element_factory_make ("encodebin", NULL);
  if (encodebin == NULL)
    return NULL;

  g_object_set (encodebin, "profile", priv->profile, NULL);
  /* Nice big buffers... */
  g_object_set (encodebin, "queue-time-max", 120 * GST_SECOND, NULL);

  return encodebin;
}
/* Verify that all three parse entry points (gst_parse_launch,
 * gst_parse_bin_from_description, gst_parse_launchv) report
 * GST_CORE_ERROR_DISABLED and return NULL when the library was built
 * without the launch parser. */
void test_parse_launch_errors()
{
  GstElement *pipe;
  GError *err;
  const gchar *arr[] = { "fakesrc", "fakesink", NULL };

  std_log(LOG_FILENAME_LINE, "Test started test_parse_launch_errors");

  err = NULL;
  pipe = gst_parse_launch ("fakesrc ! fakesink", &err);
  fail_unless (err != NULL, "expected an error, but did not get one");
  fail_unless (pipe == NULL, "got pipeline, but expected NULL");
  fail_unless (err->domain == GST_CORE_ERROR);
  fail_unless (err->code == GST_CORE_ERROR_DISABLED);
  g_error_free (err);

  err = NULL;
  pipe = gst_parse_bin_from_description ("fakesrc ! fakesink", TRUE, &err);
  fail_unless (err != NULL, "expected an error, but did not get one");
  fail_unless (pipe == NULL, "got pipeline, but expected NULL");
  fail_unless (err->domain == GST_CORE_ERROR);
  fail_unless (err->code == GST_CORE_ERROR_DISABLED);
  g_error_free (err);

  err = NULL;
  pipe = gst_parse_launchv (arr, &err);
  fail_unless (err != NULL, "expected an error, but did not get one");
  fail_unless (pipe == NULL, "got pipeline, but expected NULL");
  fail_unless (err->domain == GST_CORE_ERROR);
  fail_unless (err->code == GST_CORE_ERROR_DISABLED);
  g_error_free (err);

  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
}
// Build the streaming pipeline for `song`:
//   uridecodebin (song URL) -> convert_bin_ (kPipeline description) -> appsink
// The appsink emits "new-buffer" signals handled by NewBufferCallback.
// NOTE(review): the GError from gst_parse_bin_from_description() is never
// inspected or freed, and each gst_pipeline_get_bus() call below returns
// a new reference that is not unreffed here — confirm intended ownership.
void StreamPipeline::Init(const Song& song) {
  pipeline_ = gst_pipeline_new("pipeline");
  GstElement* uridecodebin = CreateElement("uridecodebin", pipeline_);
  qLog(Debug) << "Streaming:" << song.url();
  g_object_set(
      G_OBJECT(uridecodebin),
      "uri", song.url().toString().toUtf8().constData(),
      NULL);
  g_signal_connect(
      G_OBJECT(uridecodebin), "pad-added", G_CALLBACK(NewPadCallback), this);

  GError* error = NULL;
  convert_bin_ = gst_parse_bin_from_description(kPipeline, TRUE, &error);
  gst_bin_add(GST_BIN(pipeline_), convert_bin_);
  gst_element_set_state(uridecodebin, GST_STATE_PLAYING);

  app_sink_ = CreateElement("appsink", pipeline_);
  g_object_set(G_OBJECT(app_sink_), "emit-signals", TRUE, NULL);
  g_signal_connect(
      G_OBJECT(app_sink_), "new-buffer", G_CALLBACK(NewBufferCallback), this);
  qLog(Debug) << "Linking appsink:" << gst_element_link(convert_bin_, app_sink_);

  gst_bus_set_sync_handler(
      gst_pipeline_get_bus(GST_PIPELINE(pipeline_)), BusCallbackSync, this);
  bus_cb_id_ = gst_bus_add_watch(
      gst_pipeline_get_bus(GST_PIPELINE(pipeline_)), BusCallback, this);
}
/* React to a change of the GSettings video-sink key: parse the new
 * description into a bin and install it as the switch-sink's child.
 * Returns: TRUE on success or when the key did not actually change,
 * FALSE when the new child could not be created or installed. */
static gboolean
gst_gsettings_video_sink_change_child (GstGSettingsVideoSink * sink)
{
  gchar *new_string;
  GError *err = NULL;
  GstElement *new_kid;

  /* Lock while comparing against the cached string; an empty new value
   * is also treated as "no change". */
  GST_OBJECT_LOCK (sink);
  new_string =
      g_settings_get_string (sink->settings, GST_GSETTINGS_KEY_VIDEOSINK);

  if (new_string != NULL && sink->gsettings_str != NULL &&
      (strlen (new_string) == 0 ||
          strcmp (sink->gsettings_str, new_string) == 0)) {
    g_free (new_string);
    GST_DEBUG_OBJECT (sink,
        "GSettings key was updated, but it didn't change. Ignoring");
    GST_OBJECT_UNLOCK (sink);
    return TRUE;
  }
  GST_OBJECT_UNLOCK (sink);

  GST_DEBUG_OBJECT (sink, "GSettings key changed from '%s' to '%s'",
      GST_STR_NULL (sink->gsettings_str), GST_STR_NULL (new_string));

  if (new_string) {
    new_kid = gst_parse_bin_from_description (new_string, TRUE, &err);
    if (err) {
      GST_ERROR_OBJECT (sink, "error creating bin '%s': %s", new_string,
          err->message);
      g_error_free (err);
    }
  } else {
    new_kid = NULL;
  }

  if (new_kid == NULL) {
    GST_ELEMENT_ERROR (sink, LIBRARY, SETTINGS, (NULL),
        ("Failed to render video sink from GSettings"));
    goto fail;
  }

  if (!gst_switch_sink_set_child (GST_SWITCH_SINK (sink), new_kid)) {
    GST_WARNING_OBJECT (sink, "Failed to update child element");
    goto fail;
  }

  /* Remember the active description for the next comparison. */
  g_free (sink->gsettings_str);
  sink->gsettings_str = new_string;
  return TRUE;

fail:
  g_free (new_string);
  return FALSE;
}
/* Probe whether `decoder` is a launch description GStreamer can parse;
 * on success remember the extension -> decoder mapping in the decoders
 * map. (Vala-generated code: the _tmpN_ temporaries and goto-based
 * try/catch structure come from the generator.)
 * Returns: TRUE if the description parsed, FALSE otherwise. */
static gboolean
player_gstreamer_trySetupDecoder (PlayerGstreamer* self, const gchar* extension, const gchar* decoder)
{
  gboolean result = FALSE;
  GError * _inner_error_ = NULL;
  g_return_val_if_fail (self != NULL, FALSE);
  g_return_val_if_fail (extension != NULL, FALSE);
  g_return_val_if_fail (decoder != NULL, FALSE);
  {
    const gchar* _tmp0_;
    GstElement* _tmp1_ = NULL;
    GstElement* _tmp2_;
    GeeHashMap* _tmp3_;
    const gchar* _tmp4_;
    const gchar* _tmp5_;
    _tmp0_ = decoder;
    /* Parse and immediately discard the element; this is only a
     * validity check for the description. */
    _tmp1_ = gst_parse_bin_from_description (_tmp0_, FALSE, &_inner_error_);
    gst_object_ref_sink (_tmp1_);
    _tmp2_ = _tmp1_;
    _gst_object_unref0 (_tmp2_);
    if (_inner_error_ != NULL) {
      goto __catch0_g_error;
    }
    _tmp3_ = self->priv->decoders;
    _tmp4_ = extension;
    _tmp5_ = decoder;
    gee_abstract_map_set ((GeeAbstractMap*) _tmp3_, _tmp4_, _tmp5_);
    result = TRUE;
    return result;
  }
  goto __finally0;
  __catch0_g_error:
  {
    /* Parse failed: log a warning and report FALSE. */
    GError* e = NULL;
    FsoFrameworkLogger* _tmp6_;
    const gchar* _tmp7_;
    const gchar* _tmp8_ = NULL;
    gchar* _tmp9_ = NULL;
    gchar* _tmp10_;
    e = _inner_error_;
    _inner_error_ = NULL;
    _tmp6_ = fso_framework_theLogger;
    _tmp7_ = decoder;
    _tmp8_ = string_to_string (_tmp7_);
    _tmp9_ = g_strconcat ("Gstreamer does not understand ", _tmp8_, "; not adding to map", NULL);
    _tmp10_ = _tmp9_;
    fso_framework_logger_warning (_tmp6_, _tmp10_);
    _g_free0 (_tmp10_);
    result = FALSE;
    _g_error_free0 (e);
    return result;
  }
  __finally0:
  /* Unreachable unless an error escapes both paths above. */
  g_critical ("file %s: line %d: uncaught error: %s (%s, %d)", __FILE__, __LINE__, _inner_error_->message, g_quark_to_string (_inner_error_->domain), _inner_error_->code);
  g_clear_error (&_inner_error_);
  return FALSE;
}
static gchar * extractable_check_id (GType type, const gchar * id, GError ** error) { GstElement *effect = gst_parse_bin_from_description (id, TRUE, error); if (effect == NULL) return NULL; gst_object_unref (effect); return g_strdup (id); }
/**
 * gst_hal_render_bin_from_udi:
 * @udi: a #gchar string corresponding to a Hal UDI.
 * @type: the #GstHalDeviceType to look up on the device.
 *
 * Render bin from Hal UDI @udi.
 *
 * Returns: a #GstElement containing the rendered bin, or %NULL if the
 * UDI has no launch string for @type or parsing failed.
 */
GstElement *
gst_hal_render_bin_from_udi (const gchar * udi, GstHalDeviceType type)
{
  gchar *value = gst_hal_get_string (udi, type);
  GstElement *bin = NULL;

  if (value != NULL)
    bin = gst_parse_bin_from_description (value, TRUE, NULL);

  g_free (value);
  return bin;
}
/* Create the GstElement for a GESEffect, wrapping the user-supplied
 * bin description with converters appropriate for the track type so the
 * effect always receives/produces a format it can negotiate.
 * Returns: the new element, or NULL if the object is not in a track,
 * the track type is unsupported, or parsing fails. */
static GstElement *
ges_effect_create_element (GESTrackElement * object)
{
  GstElement *effect;
  gchar *bin_desc;
  GError *error = NULL;
  GESEffect *self = GES_EFFECT (object);
  GESTrack *track = ges_track_element_get_track (object);
  const gchar *wanted_categories[] = { "Effect", NULL };

  if (!track) {
    GST_WARNING
        ("The object %p should be in a Track for the element to be created",
        object);
    return NULL;
  }

  /* Sandwich the description between converters for the track type. */
  if (track->type == GES_TRACK_TYPE_VIDEO) {
    bin_desc = g_strconcat ("videoconvert name=pre_video_convert ! ",
        self->priv->bin_description, " ! videoconvert name=post_video_convert",
        NULL);
  } else if (track->type == GES_TRACK_TYPE_AUDIO) {
    bin_desc =
        g_strconcat ("audioconvert ! audioresample !",
        self->priv->bin_description, NULL);
  } else {
    GST_DEBUG ("Track type not supported");
    return NULL;
  }

  effect = gst_parse_bin_from_description (bin_desc, TRUE, &error);
  g_free (bin_desc);
  if (error != NULL) {
    GST_ERROR ("An error occured while creating the GstElement: %s",
        error->message);
    g_error_free (error);
    return NULL;
  }

  GST_DEBUG ("Created effect %p", effect);

  /* Expose the children's "Effect"-category properties on the element. */
  ges_track_element_add_children_props (object, effect, wanted_categories,
      NULL, NULL);

  return effect;
}
static GstElement * gst_transcoder_build_encoder(const gchar *encoder_pipeline) { GstElement *encoder = NULL; gchar *pipeline; GError *error = NULL; pipeline = g_strdup_printf("%s", encoder_pipeline); encoder = gst_parse_bin_from_description(pipeline, TRUE, &error); g_free(pipeline); if(error != NULL) { return NULL; } return encoder; }
/* Create the GstElement for a GESTrackParseLaunchEffect, wrapping the
 * user-supplied bin description with colorspace/audio converters for
 * the track type (GStreamer 0.10-era: ffmpegcolorspace).
 * Returns: the new element, or NULL if the object is not in a track,
 * the track type is unsupported, or parsing fails. */
static GstElement *
ges_track_parse_launch_effect_create_element (GESTrackObject * object)
{
  GstElement *effect;
  gchar *bin_desc;
  GError *error = NULL;
  GESTrackParseLaunchEffect *self = GES_TRACK_PARSE_LAUNCH_EFFECT (object);
  GESTrack *track = ges_track_object_get_track (object);

  if (!track) {
    GST_WARNING
        ("The object %p should be in a Track for the element to be created",
        object);
    return NULL;
  }

  /* Sandwich the description between converters for the track type. */
  if (track->type == GES_TRACK_TYPE_VIDEO) {
    bin_desc = g_strconcat ("ffmpegcolorspace name=beforecolorspace ! ",
        self->priv->bin_description, " ! ffmpegcolorspace name=aftercolorspace",
        NULL);
  } else if (track->type == GES_TRACK_TYPE_AUDIO) {
    bin_desc =
        g_strconcat ("audioconvert ! audioresample !",
        self->priv->bin_description, NULL);
  } else {
    GST_DEBUG ("Track type not supported");
    return NULL;
  }

  effect = gst_parse_bin_from_description (bin_desc, TRUE, &error);
  g_free (bin_desc);
  if (error != NULL) {
    GST_ERROR ("An error occured while creating the GstElement: %s",
        error->message);
    g_error_free (error);
    return NULL;
  }

  GST_DEBUG ("Created effect %p", effect);

  return effect;
}
// Parse a gst-launch style description into a ghost-padded GstBin.
// On failure, logs the message, forwards it to listeners via the Error
// signal, and returns nullptr.
// NOTE(review): when an error is set, gst_parse_bin_from_description may
// still have returned a partially-built bin; it is not unreffed here —
// possible leak, confirm against the GStreamer version in use.
GstElement* GstEnginePipeline::CreateDecodeBinFromString(const char* pipeline) {
  GError* error = nullptr;
  GstElement* bin = gst_parse_bin_from_description(pipeline, TRUE, &error);

  if (error) {
    // Copy out the fields before freeing the GError.
    QString message = QString::fromLocal8Bit(error->message);
    int domain = error->domain;
    int code = error->code;
    g_error_free(error);

    qLog(Warning) << message;
    emit Error(id(), message, domain, code);

    return nullptr;
  } else {
    return bin;
  }
}
/* Build the encoder bin for the current audio profile, prefixed with
 * audioconvert/audioresample so any raw input can be negotiated.
 * Returns: the new bin, or NULL when parsing the profile fails. */
static GstElement*
build_encoder (NscGStreamer *gstreamer)
{
  NscGStreamerPrivate *priv;
  GstElement           *bin;
  gchar                *desc;

  g_return_val_if_fail (NSC_IS_GSTREAMER (gstreamer), NULL);
  priv = NSC_GSTREAMER_GET_PRIVATE (gstreamer);
  g_return_val_if_fail (priv->profile != NULL, NULL);

  desc = g_strdup_printf ("audioconvert ! audioresample ! %s",
                          gm_audio_profile_get_pipeline (priv->profile));
  bin = gst_parse_bin_from_description (desc, TRUE, NULL);
  g_free (desc);

  return bin;
}
/* Create the audio sink: either a bin parsed from the EMPATHY_AUDIO_SINK
 * environment variable, or a pulsesink configured for low latency.
 * Returns: the sink element, or NULL on failure. */
static GstElement *
create_sink (EmpathyGstAudioSink *self)
{
  const gchar *description = g_getenv ("EMPATHY_AUDIO_SINK");
  GstElement *sink;

  if (description != NULL)
    {
      GError *error = NULL;

      sink = gst_parse_bin_from_description (description, TRUE, &error);
      if (sink == NULL)
        {
          DEBUG ("Failed to create bin %s: %s", description, error->message);
          g_error_free (error);
        }

      /* No fallback when an explicit description was requested. */
      return sink;
    }

  /* Use pulsesink as default */
  sink = gst_element_factory_make ("pulsesink", NULL);
  if (sink == NULL)
    return NULL;

  empathy_call_set_stream_properties (sink, self->priv->echo_cancel);

  /* Set latency (buffering on the PulseAudio side) of 40ms and transfer data
   * in 10ms chunks */
  g_object_set (sink,
      "buffer-time", (gint64) 40000,
      "latency-time", (gint64) 10000,
      NULL);

  return sink;
}
GstElement * parse_bin_from_description_all_linked (const gchar *bin_description, guint *src_pad_count, guint *sink_pad_count, GError **error) { GstElement *bin = gst_parse_bin_from_description (bin_description, FALSE, error); if (!bin) return NULL; if (!link_unlinked_pads (bin, GST_PAD_SRC, "src", src_pad_count, error)) goto error; if (!link_unlinked_pads (bin, GST_PAD_SINK, "sink", sink_pad_count, error)) goto error; return bin; error: gst_object_unref (bin); return NULL; }
static GstElement * create_src (void) { GstElement *src; const gchar *description; description = g_getenv ("EMPATHY_AUDIO_SRC"); if (description != NULL) { GError *error = NULL; src = gst_parse_bin_from_description (description, TRUE, &error); if (src == NULL) { DEBUG ("Failed to create bin %s: %s", description, error->message); g_error_free (error); } return src; } /* Use pulsesrc as default */ src = gst_element_factory_make ("pulsesrc", NULL); if (src == NULL) { g_warning ("Missing 'pulsesrc' element"); return NULL; } empathy_audio_set_stream_properties (src, TRUE); /* Set latency (buffering on the PulseAudio side) of 20ms */ g_object_set (src, "buffer-time", (gint64) 20000, NULL); return src; }
/* Allocate and initialize a GstPlay player over the given URI list.
 * Audio/video sink strings containing a space are treated as launch
 * descriptions, otherwise as element factory names. Takes ownership of
 * @uris. @initial_volume is relative to 1.0 (see the final call).
 * Returns: a newly-allocated GstPlay; free with the matching teardown. */
static GstPlay *
play_new (gchar ** uris, const gchar * audio_sink, const gchar * video_sink,
    gboolean gapless, gdouble initial_volume)
{
  GstElement *sink;
  GstPlay *play;

  play = g_new0 (GstPlay, 1);

  play->uris = uris;
  play->num_uris = g_strv_length (uris);
  play->cur_idx = -1;

  play->playbin = gst_element_factory_make ("playbin", "playbin");

  if (audio_sink != NULL) {
    /* A space means "this is a pipeline description, not a factory". */
    if (strchr (audio_sink, ' ') != NULL)
      sink = gst_parse_bin_from_description (audio_sink, TRUE, NULL);
    else
      sink = gst_element_factory_make (audio_sink, NULL);

    if (sink != NULL)
      g_object_set (play->playbin, "audio-sink", sink, NULL);
    else
      g_warning ("Couldn't create specified audio sink '%s'", audio_sink);
  }
  if (video_sink != NULL) {
    if (strchr (video_sink, ' ') != NULL)
      sink = gst_parse_bin_from_description (video_sink, TRUE, NULL);
    else
      sink = gst_element_factory_make (video_sink, NULL);

    if (sink != NULL)
      g_object_set (play->playbin, "video-sink", sink, NULL);
    else
      g_warning ("Couldn't create specified video sink '%s'", video_sink);
  }

  play->loop = g_main_loop_new (NULL, FALSE);

  play->bus_watch = gst_bus_add_watch (GST_ELEMENT_BUS (play->playbin),
      play_bus_msg, play);

  /* FIXME: make configurable incl. 0 for disable */
  play->timeout = g_timeout_add (100, play_timeout, play);

  play->missing = NULL;
  play->buffering = FALSE;
  play->is_live = FALSE;

  play->desired_state = GST_STATE_PLAYING;

  play->gapless = gapless;
  if (gapless) {
    g_signal_connect (play->playbin, "about-to-finish",
        G_CALLBACK (play_about_to_finish), play);
  }

  play_set_relative_volume (play, initial_volume - 1.0);

  return play;
}
static TestSession* add_audio_session (GstElement *pipeline, FsConference *conf, guint id, FsParticipant *part, gchar *send_socket, gchar *recv_socket) { TestSession *ses = g_slice_new0 (TestSession); GError *error = NULL; GstPad *pad = NULL, *pad2 = NULL; GstElement *src = NULL; GList *cands = NULL; GParameter param = {0}; gboolean res; FsCandidate *cand; GList *codecs = NULL; ses->send_socket = send_socket; ses->recv_socket = recv_socket; ses->session = fs_conference_new_session (conf, FS_MEDIA_TYPE_AUDIO, &error); print_error (error); g_assert (ses->session); g_object_get (ses->session, "sink-pad", &pad, NULL); if (g_getenv ("AUDIOSRC")) src = gst_parse_bin_from_description (g_getenv ("AUDIOSRC"), TRUE, &error); else src = gst_parse_bin_from_description (DEFAULT_AUDIOSRC, TRUE, &error); print_error (error); g_assert (src); g_assert (gst_bin_add (GST_BIN (pipeline), src)); pad2 = gst_element_get_static_pad (src, "src"); g_assert (pad2); g_assert (GST_PAD_LINK_SUCCESSFUL (gst_pad_link (pad2, pad))); gst_object_unref (pad2); gst_object_unref (pad); ses->stream = fs_session_new_stream (ses->session, part, FS_DIRECTION_BOTH, &error); print_error (error); g_assert (ses->stream); cand = fs_candidate_new ("", FS_COMPONENT_RTP, FS_CANDIDATE_TYPE_HOST, FS_NETWORK_PROTOCOL_UDP, send_socket, 0); cands = g_list_prepend (NULL, cand); param.name = "preferred-local-candidates"; g_value_init (¶m.value, FS_TYPE_CANDIDATE_LIST); g_value_take_boxed (¶m.value, cands); res = fs_stream_set_transmitter (ses->stream, "shm", ¶m, 1, &error); print_error (error); g_value_unset (¶m.value); g_signal_connect (ses->stream, "src-pad-added", G_CALLBACK (src_pad_added_cb), pipeline); codecs = g_list_prepend (NULL, fs_codec_new (FS_CODEC_ID_ANY, "PCMA", FS_MEDIA_TYPE_AUDIO, 0)); codecs = g_list_prepend (codecs, fs_codec_new (FS_CODEC_ID_ANY, "PCMU", FS_MEDIA_TYPE_AUDIO, 0)); res = fs_session_set_codec_preferences (ses->session, codecs, &error); print_error (error); fs_codec_list_destroy (codecs); 
g_object_get (ses->session, "codecs-without-config", &codecs, NULL); res = fs_stream_set_remote_codecs (ses->stream, codecs, &error); print_error (error); g_assert (res); return ses; }
/* TfChannel "content-added" handler: create the outgoing source element
 * (mic bin for audio, video source for video), add it to the pipeline,
 * link it to the content's sink pad and start it. On failure the
 * Telepathy channel is closed.
 * FIX: srcpad (and sinkpad) were leaked on the link-failure and
 * state-change-failure early returns. */
static void
mex_telepathy_channel_on_content_added (TfChannel *channel,
    TfContent *content, gpointer user_data)
{
  MexTelepathyChannel *self = MEX_TELEPATHY_CHANNEL (user_data);
  MexTelepathyChannelPrivate *priv = self->priv;
  GstPad *srcpad, *sinkpad;
  FsMediaType mtype;
  GstElement *element;
  GstStateChangeReturn ret;

  MEX_DEBUG ("Content added");

  g_object_get (content,
      "sink-pad", &sinkpad,
      "media-type", &mtype,
      NULL);

  switch (mtype) {
    case FS_MEDIA_TYPE_AUDIO:
      MEX_DEBUG ("Audio content added");
      element = gst_parse_bin_from_description (
          "autoaudiosrc ! audioresample ! audioconvert ! volume name=micvolume",
          TRUE, NULL);
      priv->outgoing_mic = element;
      priv->mic_volume = gst_bin_get_by_name (GST_BIN (priv->outgoing_mic),
          "micvolume");
      break;
    case FS_MEDIA_TYPE_VIDEO:
      MEX_DEBUG ("Video content added");
      element = mex_telepathy_channel_setup_video_source (self, content);
      break;
    default:
      MEX_WARNING ("Unknown media type");
      g_object_unref (sinkpad);
      return;
  }

  g_signal_connect (content, "src-pad-added",
      G_CALLBACK (mex_telepathy_channel_on_src_pad_added), self);

  gst_bin_add (GST_BIN (priv->pipeline), element);
  srcpad = gst_element_get_pad (element, "src");

  if (GST_PAD_LINK_FAILED (gst_pad_link (srcpad, sinkpad))) {
    tp_channel_close_async (TP_CHANNEL (priv->channel), NULL, NULL);
    MEX_WARNING ("Couldn't link source pipeline !?");
    g_object_unref (srcpad);
    g_object_unref (sinkpad);
    return;
  }

  ret = gst_element_set_state (element, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    tp_channel_close_async (TP_CHANNEL (priv->channel), NULL, NULL);
    MEX_WARNING ("source pipeline failed to start!?");
    g_object_unref (srcpad);
    g_object_unref (sinkpad);
    return;
  }

  g_object_unref (srcpad);
  g_object_unref (sinkpad);
}
static void mex_telepathy_channel_on_src_pad_added (TfContent *content, TpHandle handle, FsStream *stream, GstPad *pad, FsCodec *codec, gpointer user_data) { MexTelepathyChannel *self = MEX_TELEPATHY_CHANNEL (user_data); MexTelepathyChannelPrivate *priv = self->priv; gchar *cstr = fs_codec_to_string (codec); FsMediaType mtype; GstPad *sinkpad; GstElement *element; GstStateChangeReturn ret; /* Upon pad added, clear the "in progress" box+padding */ clutter_actor_hide (CLUTTER_ACTOR (priv->busy_box)); clutter_actor_show (CLUTTER_ACTOR (priv->full_frame) ); MEX_DEBUG ("New src pad: %s", cstr); g_object_get (content, "media-type", &mtype, NULL); switch (mtype) { case FS_MEDIA_TYPE_AUDIO: element = gst_parse_bin_from_description ( "audioconvert ! audioresample ! audioconvert ! autoaudiosink", TRUE, NULL); break; case FS_MEDIA_TYPE_VIDEO: element = priv->incoming_sink; break; default: MEX_WARNING ("Unknown media type"); return; } if (!gst_bin_add (GST_BIN (priv->pipeline), element)) { MEX_WARNING ("Failed to add sink element to pipeline"); } sinkpad = gst_element_get_pad (element, "sink"); ret = gst_element_set_state (element, GST_STATE_PLAYING); if (ret == GST_STATE_CHANGE_FAILURE) { tp_channel_close_async (TP_CHANNEL (priv->channel), NULL, NULL); MEX_WARNING ("Failed to start tee sink pipeline !?"); return; } if (GST_PAD_LINK_FAILED (gst_pad_link (pad, sinkpad))) { tp_channel_close_async (TP_CHANNEL (priv->channel), NULL, NULL); MEX_WARNING ("Couldn't link sink pipeline !?"); return; } g_object_unref (sinkpad); /* Start in FULL mode */ mex_telepathy_channel_set_tool_mode (self, TOOL_MODE_FULL, 100); }
void test_parse_bin_from_description() { struct { const gchar *bin_desc; const gchar *pad_names; } bin_tests[] = { { "identity", "identity0/sink,identity0/src"}, { "identity ! identity ! identity", "identity1/sink,identity3/src"}, { "identity ! fakesink", "identity4/sink"}, { "fakesrc ! identity", "identity5/src"}, { "fakesrc ! fakesink", ""} }; gint i; xmlfile = "gstutils_test_parse_bin_from_description"; std_log(LOG_FILENAME_LINE, "Test Started gstutils_test_parse_bin_from_description"); for (i = 0; i < G_N_ELEMENTS (bin_tests); ++i) { GstElement *bin, *parent; GString *s; GstPad *ghost_pad, *target_pad; GError *err = NULL; bin = gst_parse_bin_from_description (bin_tests[i].bin_desc, TRUE, &err); if (err) { g_error ("ERROR in gst_parse_bin_from_description (%s): %s", bin_tests[i].bin_desc, err->message); } g_assert (bin != NULL); s = g_string_new (""); if ((ghost_pad = gst_element_get_pad (bin, "sink"))) { g_assert (GST_IS_GHOST_PAD (ghost_pad)); target_pad = gst_ghost_pad_get_target (GST_GHOST_PAD (ghost_pad)); g_assert (target_pad != NULL); g_assert (GST_IS_PAD (target_pad)); parent = gst_pad_get_parent_element (target_pad); g_assert (parent != NULL); g_string_append_printf (s, "%s/sink", GST_ELEMENT_NAME (parent)); gst_object_unref (parent); gst_object_unref (target_pad); gst_object_unref (ghost_pad); } if ((ghost_pad = gst_element_get_pad (bin, "src"))) { g_assert (GST_IS_GHOST_PAD (ghost_pad)); target_pad = gst_ghost_pad_get_target (GST_GHOST_PAD (ghost_pad)); g_assert (target_pad != NULL); g_assert (GST_IS_PAD (target_pad)); parent = gst_pad_get_parent_element (target_pad); g_assert (parent != NULL); if (s->len > 0) { g_string_append (s, ","); } g_string_append_printf (s, "%s/src", GST_ELEMENT_NAME (parent)); gst_object_unref (parent); gst_object_unref (target_pad); gst_object_unref (ghost_pad); } if (strcmp (s->str, bin_tests[i].pad_names) != 0) { g_error ("FAILED: expted '%s', got '%s' for bin '%s'", bin_tests[i].pad_names, s->str, bin_tests[i].bin_desc); 
} g_string_free (s, TRUE); gst_object_unref (bin); } std_log(LOG_FILENAME_LINE, "Test Successful"); create_xml(0); }
/* GTK demo for the gleffects element: builds
 *   <source bin> ! gleffects ! glimagesink
 * with a combo box to switch effects and buttons to drive the pipeline
 * state. The source bin description can be given with -s/--source-bin.
 * FIX: the g_strdup() of source_desc passed to
 * gst_parse_bin_from_description() was leaked (the function does not
 * take ownership); also free the GError on parse failure and fix the
 * "Inizialization" typo in the option-parsing error message. */
gint
main (gint argc, gchar * argv[])
{
  GstStateChangeReturn ret;
  GstElement *pipeline;
  GstElement *filter, *sink;
  GstElement *sourcebin;
  GError *error = NULL;
  GtkWidget *window;
  GtkWidget *screen;
  GtkWidget *vbox, *combo;
  GtkWidget *hbox;
  GtkWidget *play, *pause, *null, *ready;
  gchar **source_desc_array = NULL;
  gchar *source_desc = NULL;
  GOptionContext *context;
  GOptionEntry options[] = {
    {"source-bin", 's', 0, G_OPTION_ARG_STRING_ARRAY, &source_desc_array,
        "Use a custom source bin description (gst-launch style)", NULL}
    ,
    {NULL}
  };

  context = g_option_context_new (NULL);
  g_option_context_add_main_entries (context, options, NULL);
  g_option_context_add_group (context, gst_init_get_option_group ());
  g_option_context_add_group (context, gtk_get_option_group (TRUE));
  if (!g_option_context_parse (context, &argc, &argv, &error)) {
    g_print ("Initialization error: %s\n", GST_STR_NULL (error->message));
    return -1;
  }
  g_option_context_free (context);

  if (source_desc_array != NULL) {
    source_desc = g_strjoinv (" ", source_desc_array);
    g_strfreev (source_desc_array);
  }
  if (source_desc == NULL) {
    source_desc =
        g_strdup ("videotestsrc ! video/x-raw, width=352, height=288 ! identity");
  }

  sourcebin = gst_parse_bin_from_description (source_desc, TRUE, &error);
  g_free (source_desc);
  if (error) {
    g_print ("Error while parsing source bin description: %s\n",
        GST_STR_NULL (error->message));
    g_clear_error (&error);
    return -1;
  }

  g_set_application_name ("gst-gl-effects test app");

  window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
  gtk_container_set_border_width (GTK_CONTAINER (window), 3);

  pipeline = gst_pipeline_new ("pipeline");

  filter = gst_element_factory_make ("gleffects", "flt");
  sink = gst_element_factory_make ("glimagesink", "glsink");

  gst_bin_add_many (GST_BIN (pipeline), sourcebin, filter, sink, NULL);

  if (!gst_element_link_many (sourcebin, filter, sink, NULL)) {
    g_print ("Failed to link one or more elements!\n");
    return -1;
  }

  g_signal_connect (G_OBJECT (window), "delete-event",
      G_CALLBACK (destroy_cb), pipeline);
  g_signal_connect (G_OBJECT (window), "destroy-event",
      G_CALLBACK (destroy_cb), pipeline);

  screen = gtk_drawing_area_new ();

  gtk_widget_set_size_request (screen, 640, 480);       // 500 x 376

  vbox = gtk_box_new (GTK_ORIENTATION_VERTICAL, 2);

  gtk_box_pack_start (GTK_BOX (vbox), screen, TRUE, TRUE, 0);

  combo = gtk_combo_box_text_new ();

  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "identity");
  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "mirror");
  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "squeeze");
  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "stretch");
  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "fisheye");
  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "twirl");
  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "bulge");
  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "tunnel");
  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "square");
  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "heat");
  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "xpro");
  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "lumaxpro");
  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "sepia");
  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "xray");
  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "sin");
  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "glow");

  g_signal_connect (G_OBJECT (combo), "changed",
      G_CALLBACK (apply_fx), filter);

  gtk_box_pack_start (GTK_BOX (vbox), combo, FALSE, FALSE, 0);

  hbox = gtk_box_new (GTK_ORIENTATION_HORIZONTAL, 0);

  play = gtk_button_new_with_label ("PLAY");

  g_signal_connect (G_OBJECT (play), "clicked", G_CALLBACK (play_cb), pipeline);

  pause = gtk_button_new_with_label ("PAUSE");

  g_signal_connect (G_OBJECT (pause), "clicked",
      G_CALLBACK (pause_cb), pipeline);

  null = gtk_button_new_with_label ("NULL");

  g_signal_connect (G_OBJECT (null), "clicked", G_CALLBACK (null_cb), pipeline);

  ready = gtk_button_new_with_label ("READY");

  g_signal_connect (G_OBJECT (ready), "clicked",
      G_CALLBACK (ready_cb), pipeline);

  gtk_box_pack_start (GTK_BOX (hbox), null, TRUE, TRUE, 0);
  gtk_box_pack_start (GTK_BOX (hbox), ready, TRUE, TRUE, 0);
  gtk_box_pack_start (GTK_BOX (hbox), play, TRUE, TRUE, 0);
  gtk_box_pack_start (GTK_BOX (hbox), pause, TRUE, TRUE, 0);

  gtk_box_pack_start (GTK_BOX (vbox), hbox, FALSE, FALSE, 0);

  gtk_container_add (GTK_CONTAINER (window), vbox);

  g_signal_connect (screen, "realize", G_CALLBACK (expose_cb), pipeline);

  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_print ("Failed to start up pipeline!\n");
    return -1;
  }

  gtk_widget_show_all (GTK_WIDGET (window));

  gtk_main ();

  return 0;
}
/* React to a change of the profile-specific GSettings audio-sink key:
 * parse the new description into a bin and install it as the
 * switch-sink's child.
 * NOTE(review): when sink->profile matches none of the cases, `key`
 * stays NULL and is passed to g_settings_get_string() — confirm the
 * profile enum cannot take other values.
 * Returns: TRUE on success or when the key did not actually change,
 * FALSE when the new child could not be created or installed. */
static gboolean
gst_gsettings_audio_sink_change_child (GstGSettingsAudioSink * sink)
{
  const gchar *key = NULL;
  gchar *new_string;
  GError *err = NULL;
  GstElement *new_kid;

  /* Lock while selecting the key and comparing against the cached
   * string; an empty new value is also treated as "no change". */
  GST_OBJECT_LOCK (sink);
  switch (sink->profile) {
    case GST_GSETTINGS_AUDIOSINK_PROFILE_SOUNDS:
      key = GST_GSETTINGS_KEY_SOUNDS_AUDIOSINK;
      break;
    case GST_GSETTINGS_AUDIOSINK_PROFILE_MUSIC:
      key = GST_GSETTINGS_KEY_MUSIC_AUDIOSINK;
      break;
    case GST_GSETTINGS_AUDIOSINK_PROFILE_CHAT:
      key = GST_GSETTINGS_KEY_CHAT_AUDIOSINK;
      break;
    default:
      break;
  }

  new_string = g_settings_get_string (sink->settings, key);

  if (new_string != NULL && sink->gsettings_str != NULL &&
      (strlen (new_string) == 0 ||
          strcmp (sink->gsettings_str, new_string) == 0)) {
    g_free (new_string);
    GST_DEBUG_OBJECT (sink,
        "GSettings key was updated, but it didn't change. Ignoring");
    GST_OBJECT_UNLOCK (sink);
    return TRUE;
  }
  GST_OBJECT_UNLOCK (sink);

  GST_DEBUG_OBJECT (sink, "GSettings key changed from '%s' to '%s'",
      GST_STR_NULL (sink->gsettings_str), GST_STR_NULL (new_string));

  if (new_string) {
    new_kid = gst_parse_bin_from_description (new_string, TRUE, &err);
    if (err) {
      GST_ERROR_OBJECT (sink, "error creating bin '%s': %s", new_string,
          err->message);
      g_error_free (err);
    }
  } else {
    new_kid = NULL;
  }

  if (new_kid == NULL) {
    GST_ELEMENT_ERROR (sink, LIBRARY, SETTINGS, (NULL),
        ("Failed to render audio sink from GSettings"));
    goto fail;
  }

  if (!gst_switch_sink_set_child (GST_SWITCH_SINK (sink), new_kid)) {
    GST_WARNING_OBJECT (sink, "Failed to update child element");
    goto fail;
  }

  /* Remember the active description for the next comparison. */
  g_free (sink->gsettings_str);
  sink->gsettings_str = new_string;
  return TRUE;

fail:
  g_free (new_string);
  return FALSE;
}
bool RTMP::create_gst_pipeline() { shmaudio_sub_.reset(); shmvideo_sub_.reset(); gst_pipeline_ = std::make_unique<GstPipeliner>(nullptr, nullptr); std::string dest = "rtmpsink"; if (audio_shmpath_.empty() || video_shmpath_.empty()) { warning("Could not send stream because no video or audio is connected (rtmp)."); dest = "fakesink"; } if (stream_app_url_.empty() || stream_key_.empty()) { warning("Could not send stream because stream application URL or key is empty (rtmp)."); dest = "fakesink"; } std::string description = "flvmux streamable=true name=mux ! queue ! " + dest + " name=rtmpsink sync=false "; description += "shmdatasrc socket-path=/tmp/fake name=shmvideo copy-buffers=true do-timestamp=true ! "; description += "h264parse ! queue ! mux. "; description += "shmdatasrc socket-path=/tmp/fake name=shmaudio copy-buffers=true do-timestamp=true ! "; description += "audioconvert ! audioresample ! queue ! voaacenc bitrate=256000 ! queue ! mux."; GError* error = nullptr; auto bin = gst_parse_bin_from_description(description.c_str(), FALSE, &error); if (error) { warning("Failed to create GstBin from pipeline description (rtmp): %", std::string(error->message)); return false; } g_object_set(G_OBJECT(gst_pipeline_->get_pipeline()), "async-handling", TRUE, nullptr); g_object_set(G_OBJECT(bin), "async-handling", TRUE, nullptr); if (!video_shmpath_.empty()) { auto shmdatavideo = gst_bin_get_by_name(GST_BIN(bin), "shmvideo"); g_object_set(G_OBJECT(shmdatavideo), "socket-path", video_shmpath_.c_str(), nullptr); shmvideo_sub_ = std::make_unique<GstShmTreeUpdater>( this, shmdatavideo, video_shmpath_, GstShmTreeUpdater::Direction::reader); } if (!audio_shmpath_.empty()) { auto shmdataaudio = gst_bin_get_by_name(GST_BIN(bin), "shmaudio"); g_object_set(G_OBJECT(shmdataaudio), "socket-path", audio_shmpath_.c_str(), nullptr); shmaudio_sub_ = std::make_unique<GstShmTreeUpdater>( this, shmdataaudio, audio_shmpath_, GstShmTreeUpdater::Direction::reader); } if (!audio_shmpath_.empty() 
&& !video_shmpath_.empty() && !stream_app_url_.empty() && !stream_key_.empty()) { auto rtmpsink = gst_bin_get_by_name(GST_BIN(bin), "rtmpsink"); g_object_set( G_OBJECT(rtmpsink), "location", (stream_app_url_ + "/" + stream_key_).c_str(), nullptr); } gst_bin_add(GST_BIN(gst_pipeline_->get_pipeline()), bin); gst_pipeline_->play(true); return true; }
int main (int argc, char *argv[]) { GstPipeline *pipeline; GstBus *bus; GstElement *srcbin; GstElement *tee; GstElement *queue[N_ACTORS], *sink[N_ACTORS]; GstElement *upload[N_ACTORS]; /* GstElement *effect[N_ACTORS]; */ ClutterActor *stage; GstGLClutterActor *actor[N_ACTORS]; Display *disp; Window stage_win; const gchar *desc; gint i; gint ok = FALSE; ClutterInitError clutter_err = CLUTTER_INIT_ERROR_UNKNOWN; clutter_err = clutter_init (&argc, &argv); if (clutter_err != CLUTTER_INIT_SUCCESS) g_warning ("Failed to initalize clutter: %d\n", clutter_err); gst_init (&argc, &argv); disp = clutter_x11_get_default_display (); if (!clutter_x11_has_composite_extension ()) { g_error ("XComposite extension missing"); } stage = clutter_stage_get_default (); clutter_actor_set_size (CLUTTER_ACTOR (stage), W * COLS + (COLS - 1), H * ROWS + (ROWS - 1)); stage_win = clutter_x11_get_stage_window (CLUTTER_STAGE (stage)); XCompositeRedirectSubwindows (disp, stage_win, CompositeRedirectManual); for (i = 0; i < N_ACTORS; i++) { actor[i] = g_new0 (GstGLClutterActor, 1); actor[i]->stage = stage; actor[i]->win = XCreateSimpleWindow (disp, stage_win, 0, 0, W, H, 0, 0, 0); XMapRaised (disp, actor[i]->win); XSync (disp, FALSE); } /* desc = g_strdup_printf ("v4l2src ! " "video/x-raw, width=640, height=480, framerate=30/1 ! " "videoscale !" "video/x-raw, width=%d, height=%d ! " "identity", W, H); */ desc = g_strdup_printf ("videotestsrc ! " "video/x-raw, format=RGB, width=%d, height=%d !" 
"identity", W, H); pipeline = GST_PIPELINE (gst_pipeline_new (NULL)); srcbin = gst_parse_bin_from_description (desc, TRUE, NULL); if (!srcbin) g_error ("Source bin creation failed"); tee = gst_element_factory_make ("tee", NULL); gst_bin_add_many (GST_BIN (pipeline), srcbin, tee, NULL); for (i = 0; i < N_ACTORS; i++) { queue[i] = gst_element_factory_make ("queue", NULL); upload[i] = gst_element_factory_make ("glupload", NULL); /* effect[i] = gst_element_factory_make ("gleffects", NULL); */ sink[i] = gst_element_factory_make ("glimagesink", NULL); /* gst_bin_add_many (GST_BIN (pipeline), queue[i], upload[i], effect[i], sink[i], NULL); */ gst_bin_add_many (GST_BIN (pipeline), queue[i], upload[i], sink[i], NULL); } gst_element_link_many (srcbin, tee, NULL); for (i = 0; i < N_ACTORS; i++) { ok |= // gst_element_link_many (tee, queue[i], upload[i], effect[i], sink[i], gst_element_link_many (tee, queue[i], upload[i], sink[i], NULL); } if (!ok) g_error ("Failed to link one or more elements"); /* for (i = 0; i < N_ACTORS; i++) { g_message ("setting effect %d on %s", i + 1, gst_element_get_name (effect[i])); g_object_set (G_OBJECT (effect[i]), "effect", i + 1, NULL); } */ bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, actor, NULL); gst_object_unref (bus); gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING); clutter_actor_show_all (stage); clutter_main (); gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL); gst_object_unref (pipeline); return 0; }
static GstPlay * play_new (gchar ** uris, const gchar * audio_sink, const gchar * video_sink, gboolean gapless, gdouble initial_volume, gboolean verbose, const gchar * flags_string) { GstElement *sink, *playbin; GstPlay *play; playbin = gst_element_factory_make ("playbin", "playbin"); if (playbin == NULL) return NULL; play = g_new0 (GstPlay, 1); play->uris = uris; play->num_uris = g_strv_length (uris); play->cur_idx = -1; play->playbin = playbin; if (audio_sink != NULL) { if (strchr (audio_sink, ' ') != NULL) sink = gst_parse_bin_from_description (audio_sink, TRUE, NULL); else sink = gst_element_factory_make (audio_sink, NULL); if (sink != NULL) g_object_set (play->playbin, "audio-sink", sink, NULL); else g_warning ("Couldn't create specified audio sink '%s'", audio_sink); } if (video_sink != NULL) { if (strchr (video_sink, ' ') != NULL) sink = gst_parse_bin_from_description (video_sink, TRUE, NULL); else sink = gst_element_factory_make (video_sink, NULL); if (sink != NULL) g_object_set (play->playbin, "video-sink", sink, NULL); else g_warning ("Couldn't create specified video sink '%s'", video_sink); } if (flags_string != NULL) { GParamSpec *pspec; GValue val = { 0, }; pspec = g_object_class_find_property (G_OBJECT_GET_CLASS (playbin), "flags"); g_value_init (&val, pspec->value_type); if (gst_value_deserialize (&val, flags_string)) g_object_set_property (G_OBJECT (play->playbin), "flags", &val); else g_printerr ("Couldn't convert '%s' to playbin flags!\n", flags_string); g_value_unset (&val); } if (verbose) { play->deep_notify_id = g_signal_connect (play->playbin, "deep-notify", G_CALLBACK (gst_object_default_deep_notify), NULL); } play->loop = g_main_loop_new (NULL, FALSE); play->bus_watch = gst_bus_add_watch (GST_ELEMENT_BUS (play->playbin), play_bus_msg, play); /* FIXME: make configurable incl. 
0 for disable */ play->timeout = g_timeout_add (100, play_timeout, play); play->missing = NULL; play->buffering = FALSE; play->is_live = FALSE; play->desired_state = GST_STATE_PLAYING; play->gapless = gapless; if (gapless) { g_signal_connect (play->playbin, "about-to-finish", G_CALLBACK (play_about_to_finish), play); } if (initial_volume != -1) play_set_relative_volume (play, initial_volume - 1.0); play->rate = 1.0; play->trick_mode = GST_PLAY_TRICK_MODE_NONE; return play; }
/* Test fixture: renders @num_samples of generated test data through the
 * encoder pipeline described by @launch_string and writes the result to
 * file @fn.  Aborts via g_error()/g_assert() on any failure. */
static void
generate_test_sound (const gchar * fn, const gchar * launch_string,
    guint num_samples)
{
  GstElement *pipeline, *src, *parse, *enc_bin, *sink;
  GstFlowReturn flow;
  GstMessage *msg;
  GstBuffer *buf;
  GstCaps *caps;

  pipeline = gst_pipeline_new (NULL);

  src = gst_element_factory_make ("appsrc", NULL);
  caps = gst_caps_new_simple ("audio/x-raw",
      "format", G_TYPE_STRING, GST_AUDIO_NE (S16),
      "rate", G_TYPE_INT, SAMPLE_FREQ,
      "channels", G_TYPE_INT, 2,
      "layout", G_TYPE_STRING, "interleaved",
      "channel-mask", GST_TYPE_BITMASK, (guint64) 3, NULL);
  g_object_set (src, "caps", caps, "format", GST_FORMAT_TIME, NULL);
  gst_base_src_set_format (GST_BASE_SRC (src), GST_FORMAT_TIME);
  gst_caps_unref (caps);

  /* audioparse to put proper timestamps on buffers for us, without which
   * vorbisenc in particular is unhappy (or oggmux, rather) */
  parse = gst_element_factory_make ("audioparse", NULL);
  if (parse != NULL) {
    g_object_set (parse, "use-sink-caps", TRUE, NULL);
  } else {
    parse = gst_element_factory_make ("identity", NULL);
    g_warning ("audioparse element not available, vorbis/ogg might not work\n");
  }

  enc_bin = gst_parse_bin_from_description (launch_string, TRUE, NULL);
  /* BUGFIX: a NULL result previously crashed later inside gst_bin_add_many()
   * with no hint about the cause; fail loudly with the description instead. */
  if (enc_bin == NULL)
    g_error ("Failed to create encoder bin from '%s'", launch_string);

  sink = gst_element_factory_make ("filesink", NULL);
  g_object_set (sink, "location", fn, NULL);

  gst_bin_add_many (GST_BIN (pipeline), src, parse, enc_bin, sink, NULL);
  /* BUGFIX: the link result was ignored; an un-linked pipeline would just
   * hang waiting for EOS below. */
  if (!gst_element_link_many (src, parse, enc_bin, sink, NULL))
    g_error ("Failed to link appsrc ! parse ! '%s' ! filesink", launch_string);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  buf = generate_test_data (num_samples);
  flow = gst_app_src_push_buffer (GST_APP_SRC (src), buf);
  g_assert (flow == GST_FLOW_OK);

  gst_app_src_end_of_stream (GST_APP_SRC (src));

  /*g_print ("generating test sound %s, waiting for EOS..\n", fn); */

  msg = gst_bus_timed_pop_filtered (GST_ELEMENT_BUS (pipeline),
      GST_CLOCK_TIME_NONE, GST_MESSAGE_EOS | GST_MESSAGE_ERROR);
  g_assert (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_EOS);
  gst_message_unref (msg);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  /* g_print ("Done %s\n", fn); */
}