void GstPipelineWrapper::EnsureGstreamerInitialization() { if (!gst_is_initialized()) { // int argc = 0; // char** argv = NULL; int argc = 0; char **argv = new char *[2]; argv[argc++] = "--gst-version"; argv[argc++] = "--gst-debug-level=1"; gst_init(&argc, &argv); delete argv; // get and display GST version { guint major = 0; guint minor = 0; guint micro = 0; guint nano = 0; gst_version(&major,&minor,µ,&nano); std::cout << "GStreamer V" << major << "." << minor << "." << micro << "." << nano << std::endl; } } }
bool initializeGStreamer() { if (gst_is_initialized()) return true; #if ENABLE(SECCOMP_FILTERS) // The gst-plugin-scanner helper binary will receive SIGSYS and dump core // when it attempts to open a file. Disable it so that plugin scanning // occurs in-process. The disadvantage is that a plugin that crashes while // loading will now crash the web process. gst_registry_fork_set_enabled(FALSE); #endif GUniqueOutPtr<GError> error; // FIXME: We should probably pass the arguments from the command line. bool gstInitialized = gst_init_check(0, 0, &error.outPtr()); ASSERT_WITH_MESSAGE(gstInitialized, "GStreamer initialization failed: %s", error ? error->message : "unknown error occurred"); #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS) if (gstInitialized) gst_mpegts_initialize(); #endif return gstInitialized; }
// Construct the audio backend: ensures GStreamer is initialised, builds the
// appsrc -> audioconvert pipeline, installs a bus watch, configures the raw
// audio caps, and registers the appsrc data callbacks.
Player::Backend::Backend(Player * player):
    pipeline_(), appsrc_(), conv_(), audiosink_(), loop_(), push_id_(),
    bus_watch_id_(), player_(player)
{
    if (!gst_is_initialized()) {
        // err must start as nullptr: gst_init_check() only sets it on failure,
        // and the original read an uninitialized pointer in that case.
        GError* err = nullptr;
        if (!gst_init_check(nullptr, nullptr, &err)) {
            int code = err ? err->code : EXIT_FAILURE;
            if (err)
                g_error_free(err);  // don't leak the error before exiting
            std::exit(code);
        }
    }

    pipeline_ = gst_pipeline_new("pipeline");
    if (pipeline_ == nullptr) {
        std::exit(EXIT_FAILURE);
    }

    build_gst_element(appsrc_, "appsrc", "source");
    build_gst_element(conv_, "audioconvert", "conv");

    GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline_));
    bus_watch_id_ = gst_bus_add_watch(bus, wrap_bus_callback, this);
    gst_object_unref(bus);

    GstCaps* caps = gst_caps_new_simple(
        "audio/x-raw",
        "format", G_TYPE_STRING, format_,
        "rate", G_TYPE_INT, Config::sample_rate,
        "channels", G_TYPE_INT, Config::channels,
        "signed", G_TYPE_BOOLEAN, TRUE,
        "layout", G_TYPE_STRING, "interleaved",
        nullptr);
    g_object_set(G_OBJECT(appsrc_), "caps", caps, nullptr);
    // The element takes its own reference to the caps; drop ours to avoid a leak.
    gst_caps_unref(caps);

    g_object_set(G_OBJECT(appsrc_), "is-live", TRUE, nullptr);
    // "min-latency" is a gint64 property: the value must be 64 bits wide or
    // g_object_set() reads garbage from the varargs on LP64/LLP64 ABIs.
    g_object_set(G_OBJECT(appsrc_), "min-latency", (gint64) 0, nullptr);
    g_object_set(G_OBJECT(appsrc_), "emit-signals", FALSE, nullptr);
    g_object_set(G_OBJECT(appsrc_), "format", GST_FORMAT_TIME, nullptr);

    // The GStreamer main loop is the main event loop for audio generation.
    loop_ = g_main_loop_new(nullptr, FALSE);

    gst_bin_add_many(GST_BIN(pipeline_), appsrc_, conv_, nullptr);
    gst_element_link(appsrc_, conv_);

    GstAppSrcCallbacks callbacks = { wrap_need_data, wrap_enough_data, wrap_seek_data };
    gst_app_src_set_callbacks(GST_APP_SRC(appsrc_), &callbacks, this, nullptr);
}
bool initializeGStreamer() { #if GST_CHECK_VERSION(0, 10, 31) if (gst_is_initialized()) return true; #endif GOwnPtr<GError> error; // FIXME: We should probably pass the arguments from the command line. bool gstInitialized = gst_init_check(0, 0, &error.outPtr()); ASSERT_WITH_MESSAGE(gstInitialized, "GStreamer initialization failed: %s", error ? error->message : "unknown error occurred"); return gstInitialized; }
ITSMFDecoder* freerdp_tsmf_client_subsystem_entry(void) { TSMFGstreamerDecoder *decoder; if (!gst_is_initialized()) { gst_init(NULL, NULL); } decoder = calloc(1, sizeof(TSMFGstreamerDecoder)); if (!decoder) return NULL; decoder->iface.SetFormat = tsmf_gstreamer_set_format; decoder->iface.Decode = NULL; decoder->iface.GetDecodedData = NULL; decoder->iface.GetDecodedFormat = NULL; decoder->iface.GetDecodedDimension = NULL; decoder->iface.GetRunningTime = tsmf_gstreamer_get_running_time; decoder->iface.UpdateRenderingArea = tsmf_gstreamer_update_rendering_area; decoder->iface.Free = tsmf_gstreamer_free; decoder->iface.Control = tsmf_gstreamer_control; decoder->iface.DecodeEx = tsmf_gstreamer_decodeEx; decoder->iface.ChangeVolume = tsmf_gstreamer_change_volume; decoder->iface.BufferLevel = tsmf_gstreamer_buffer_level; decoder->iface.SetAckFunc = tsmf_gstreamer_ack; decoder->iface.SetSyncFunc = tsmf_gstreamer_sync; decoder->paused = FALSE; decoder->gstVolume = 0.5; decoder->gstMuted = FALSE; decoder->state = GST_STATE_VOID_PENDING; /* No real state yet */ decoder->last_sample_start_time = 0; decoder->last_sample_end_time = 0; decoder->seek_offset = 0; decoder->seeking = FALSE; if (tsmf_platform_create(decoder) < 0) { free(decoder); return NULL; } return (ITSMFDecoder*) decoder; }
/** * gst_element_factory_make: * @factoryname: a named factory to instantiate * @name: (allow-none): name of new element, or %NULL to automatically create * a unique name * * Create a new element of the type defined by the given element factory. * If name is %NULL, then the element will receive a guaranteed unique name, * consisting of the element factory name and a number. * If name is given, it will be given the name supplied. * * Returns: (transfer floating) (nullable): new #GstElement or %NULL * if unable to create element */ GstElement * gst_element_factory_make (const gchar * factoryname, const gchar * name) { GstElementFactory *factory; GstElement *element; g_return_val_if_fail (factoryname != NULL, NULL); g_return_val_if_fail (gst_is_initialized (), NULL); GST_LOG ("gstelementfactory: make \"%s\" \"%s\"", factoryname, GST_STR_NULL (name)); factory = gst_element_factory_find (factoryname); if (factory == NULL) goto no_factory; GST_LOG_OBJECT (factory, "found factory %p", factory); element = gst_element_factory_create (factory, name); if (element == NULL) goto create_failed; gst_object_unref (factory); return element; /* ERRORS */ no_factory: { GST_WARNING ("no such element factory \"%s\"!", factoryname); return NULL; } create_failed: { GST_INFO_OBJECT (factory, "couldn't create instance!"); gst_object_unref (factory); return NULL; } }
/* GObject class initialiser: wires up the standard vfuncs, declares the
 * property specs, registers the interface property overrides and makes sure
 * GStreamer itself is initialised before any instance exists. */
static void gt_player_backend_gstreamer_cairo_class_init(GtPlayerBackendGstreamerCairoClass* klass)
{
    GObjectClass* obj_class = G_OBJECT_CLASS(klass);

    /* Standard GObject vfuncs. */
    obj_class->finalize = finalise;
    obj_class->get_property = get_property;
    obj_class->set_property = set_property;

    /* NOTE(review): these pspecs are stored in props[] but never installed
     * with g_object_class_install_property() here; only the interface
     * overrides below are registered. Confirm against the rest of the file
     * whether props[] is used elsewhere or is dead. */
    props[PROP_VOLUME] = g_param_spec_double("volume", "Volume", "Volume of player", 0.0, 1.0, 0.3, G_PARAM_READWRITE | G_PARAM_CONSTRUCT);
    /* NOTE(review): G_PARAM_CONSTRUCT combined with a read-only
     * (G_PARAM_READABLE) property looks invalid — GObject requires construct
     * properties to be writable. Verify intent. */
    props[PROP_PLAYING] = g_param_spec_boolean("playing", "Playing", "Whether playing", FALSE, G_PARAM_READABLE | G_PARAM_CONSTRUCT);
    props[PROP_URI] = g_param_spec_string("uri", "Uri", "Current uri", "", G_PARAM_READWRITE);
    props[PROP_BUFFER_FILL] = g_param_spec_double("buffer-fill", "Buffer Fill", "Current buffer fill", 0, 1.0, 0, G_PARAM_READWRITE | G_PARAM_CONSTRUCT);

    /* Re-use the property definitions from the GtPlayerBackend interface. */
    g_object_class_override_property(obj_class, PROP_VOLUME, "volume");
    g_object_class_override_property(obj_class, PROP_PLAYING, "playing");
    g_object_class_override_property(obj_class, PROP_URI, "uri");
    g_object_class_override_property(obj_class, PROP_BUFFER_FILL, "buffer-fill");

    /* Lazily initialise GStreamer the first time this class is set up. */
    if (!gst_is_initialized())
        gst_init(NULL, NULL);
}
shmdata_any_reader_t * shmdata_any_reader_init (const char *socketName) { shmdata_any_reader_t *reader = (shmdata_any_reader_t *) g_malloc0 (sizeof (shmdata_any_reader_t)); reader->debug_ = SHMDATA_DISABLE_DEBUG; g_log_set_default_handler (shmdata_any_reader_log_handler, reader); reader->on_data_ = NULL; reader->on_data_user_data_ = NULL; reader->type_ = NULL; reader->data_caps_ = NULL; reader->do_absolute_ = FALSE; reader->run_gmainloop_ = TRUE; if (!gst_is_initialized()) gst_init (NULL, NULL); reader->loop_ = g_main_loop_new (NULL, FALSE); reader->pipeline_ = gst_pipeline_new (NULL); if (reader->pipeline_ == NULL) g_critical ("cannot create gstreamer pipeline"); gst_element_set_state (reader->pipeline_, GST_STATE_PLAYING); return reader; }
/** * gst_device_provider_factory_get_by_name: * @factoryname: a named factory to instantiate * * Returns the device provider of the type defined by the given device * provider factory. * * Returns: (transfer full) (nullable): a #GstDeviceProvider or %NULL * if unable to create device provider * * Since: 1.4 */ GstDeviceProvider * gst_device_provider_factory_get_by_name (const gchar * factoryname) { GstDeviceProviderFactory *factory; GstDeviceProvider *device_provider; g_return_val_if_fail (factoryname != NULL, NULL); g_return_val_if_fail (gst_is_initialized (), NULL); GST_LOG ("gstdeviceproviderfactory: get_by_name \"%s\"", factoryname); factory = gst_device_provider_factory_find (factoryname); if (factory == NULL) goto no_factory; GST_LOG_OBJECT (factory, "found factory %p", factory); device_provider = gst_device_provider_factory_get (factory); if (device_provider == NULL) goto create_failed; gst_object_unref (factory); return device_provider; /* ERRORS */ no_factory: { GST_INFO ("no such device provider factory \"%s\"!", factoryname); return NULL; } create_failed: { GST_INFO_OBJECT (factory, "couldn't create instance!"); gst_object_unref (factory); return NULL; } }
// Minimal smoke test: query GStreamer's init state and exit successfully.
// The result is intentionally discarded.
int main(int, char**)
{
    (void) gst_is_initialized();
    return 0;
}
void MediaPlayer::threadLoadMedia() { m_duration = -1; m_errorsDetected = false; // Initialize gstreamer if not initialized yet if ( !gst_is_initialized() ) { //qputenv( "GST_DEBUG", "*:4" ); #ifdef WIN32 QString env = QString("GST_PLUGIN_PATH=%1\\gstreamer\\") .arg( QApplication::applicationDirPath() ); env.replace( "/", "\\" ); _putenv( qPrintable(env) ); Logger::debug( "GstMediaPlayer: setting %s", qPrintable( env ) ); #endif gst_init(0, 0); } // Create the empty pipeline (this must be done first) m_gst_pipeline = gst_pipeline_new ("karaokepipeline"); if ( !m_gst_pipeline ) { reportError( "Pipeline could not be created." ); return; } // Create the pipeline bus m_gst_bus = gst_element_get_bus( m_gst_pipeline ); if ( !m_gst_bus ) { reportError( "Pipeline bus could not be created." ); return; } // Set the handler for the bus gst_bus_set_sync_handler( m_gst_bus, cb_busMessageDispatcher, this, 0 ); // Create our media source, which could be either QIODevice/appsrc or a file // this also creates a decoder setupSource(); // Those are mandatory if ( !m_gst_pipeline || !m_gst_source || !m_gst_decoder ) { reportError( "Not all elements could be created." ); return; } // Link and set up source and decoder if they are not the same object. if ( m_gst_source != m_gst_decoder ) { if ( !gst_element_link( m_gst_source, m_gst_decoder ) ) { reportError( "Cannot link source and decoder." ); return; } } // If we do not have raw data, connect to the pad-added signal g_signal_connect( m_gst_decoder, "pad-added", G_CALLBACK (cb_pad_added), this ); // Pre-create video elements if we need them if ( (m_loadOptions & MediaPlayer::LoadVideoStream) != 0 ) { m_gst_video_colorconv = createElement( "videoconvert", "videoconvert" ); m_gst_video_sink = createVideoSink(); if ( !m_gst_video_colorconv || !m_gst_video_sink ) { reportError( "Not all elements could be created." 
); return; } // Link the color converter and video sink if ( !gst_element_link( m_gst_video_colorconv, m_gst_video_sink ) ) { reportError( "Cannor link video elements" ); return; } } // Pre-create audio elements if we need them if ( (m_loadOptions & MediaPlayer::LoadAudioStream) != 0 ) { // Load the pitch plugin if it is available m_pitchPlugin = pPluginManager->loadPitchChanger(); // Create the audio elements, and add them to the bin m_gst_audioconverter = createElement ("audioconvert", "convert"); m_gst_audio_volume = createElement("volume", "volume"); m_gst_audiosink = createElement ("autoaudiosink", "sink"); // Those are mandatory if ( !m_gst_audioconverter || !m_gst_audiosink || !m_gst_audio_volume ) { reportError( "Not all elements could be created." ); return; } // This one is optional, although it seems to be present everywhere m_gst_audio_tempo = createElement( "scaletempo", "tempo", false ); // If we have the pitch changer if ( m_pitchPlugin && m_pitchPlugin->init() ) m_gst_audio_pitchadjust = createElement( m_pitchPlugin->elementName(), "audiopitchchanger", false ); else m_gst_audio_pitchadjust = 0; // Start linking bool linksucceed = true; GstElement * last = m_gst_audioconverter; if ( m_gst_audio_pitchadjust ) { m_gst_audioconverter2 = createElement ("audioconvert", "convert2"); linksucceed = gst_element_link_many( m_gst_audioconverter, m_gst_audio_pitchadjust, m_gst_audioconverter2, NULL ); last = m_gst_audioconverter2; } // Now link in volume linksucceed = gst_element_link( last, m_gst_audio_volume ); last = m_gst_audio_volume; // Now link in tempo if it is available if ( linksucceed && m_gst_audio_tempo ) { linksucceed = gst_element_link( last, m_gst_audio_tempo ); last = m_gst_audio_tempo; } // And finally the audio sink if ( linksucceed ) linksucceed = gst_element_link( last, m_gst_audiosink ); if ( !linksucceed ) { reportError( "Audio elements could not be linked." ); return; } } setPipelineState( GST_STATE_PAUSED ); }
bool GSCam::init_stream() { if(!gst_is_initialized()) { // Initialize gstreamer pipeline ROS_DEBUG_STREAM( "Initializing gstreamer..." ); gst_init(0,0); } ROS_DEBUG_STREAM( "Gstreamer Version: " << gst_version_string() ); GError *error = 0; // Assignment to zero is a gst requirement pipeline_ = gst_parse_launch(gsconfig_.c_str(), &error); if (pipeline_ == NULL) { ROS_FATAL_STREAM( error->message ); return false; } // Create RGB sink sink_ = gst_element_factory_make("appsink",NULL); GstCaps * caps = image_encoding_ == sensor_msgs::image_encodings::RGB8 ? gst_caps_new_simple("video/x-raw-rgb", NULL) : gst_caps_new_simple("video/x-raw-gray", NULL); gst_app_sink_set_caps(GST_APP_SINK(sink_), caps); gst_caps_unref(caps); // Set whether the sink should sync // Sometimes setting this to true can cause a large number of frames to be // dropped gst_base_sink_set_sync( GST_BASE_SINK(sink_), (sync_sink_) ? TRUE : FALSE); if(GST_IS_PIPELINE(pipeline_)) { GstPad *outpad = gst_bin_find_unlinked_pad(GST_BIN(pipeline_), GST_PAD_SRC); g_assert(outpad); GstElement *outelement = gst_pad_get_parent_element(outpad); g_assert(outelement); gst_object_unref(outpad); if(!gst_bin_add(GST_BIN(pipeline_), sink_)) { ROS_FATAL("gst_bin_add() failed"); gst_object_unref(outelement); gst_object_unref(pipeline_); return false; } if(!gst_element_link(outelement, sink_)) { ROS_FATAL("GStreamer: cannot link outelement(\"%s\") -> sink\n", gst_element_get_name(outelement)); gst_object_unref(outelement); gst_object_unref(pipeline_); return false; } gst_object_unref(outelement); } else { GstElement* launchpipe = pipeline_; pipeline_ = gst_pipeline_new(NULL); g_assert(pipeline_); gst_object_unparent(GST_OBJECT(launchpipe)); gst_bin_add_many(GST_BIN(pipeline_), launchpipe, sink_, NULL); if(!gst_element_link(launchpipe, sink_)) { ROS_FATAL("GStreamer: cannot link launchpipe -> sink"); gst_object_unref(pipeline_); return false; } } gst_element_set_state(pipeline_, GST_STATE_PAUSED); if 
(gst_element_get_state(pipeline_, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE) { ROS_FATAL("Failed to PAUSE stream, check your gstreamer configuration."); return false; } else { ROS_DEBUG_STREAM("Stream is PAUSED."); } // Create ROS camera interface camera_pub_ = image_transport_.advertiseCamera("camera/image_raw", 1); return true; }
/* Class finaliser: tear GStreamer back down when the class is finalised,
 * but only if it was actually initialised. */
static void
gt_player_backend_gstreamer_cairo_class_finalize(GtPlayerBackendGstreamerCairoClass* klass)
{
    if (!gst_is_initialized())
        return;

    gst_deinit();
}