void AudioContext::constructCommon() { // According to spec AudioContext must die only after page navigate. // Lets mark it as ActiveDOMObject with pending activity and unmark it in clear method. setPendingActivity(this); #if USE(GSTREAMER) initializeGStreamer(); #endif FFTFrame::initialize(); m_listener = AudioListener::create(); #if PLATFORM(IOS) if (!document()->settings() || document()->settings()->mediaPlaybackRequiresUserGesture()) addBehaviorRestriction(RequireUserGestureForAudioStartRestriction); else m_restrictions = NoRestrictions; #endif #if PLATFORM(MAC) addBehaviorRestriction(RequirePageConsentForAudioStartRestriction); #endif }
// Builds the video sink for the pipeline: the WebKit video sink, optionally
// wrapped by an fpsdisplaysink when that element is available. Returns the
// element that should be plugged into the pipeline, or null if GStreamer
// could not be initialized.
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink()
{
    // Do NOT fold this call into ASSERT(): ASSERT compiles away in release
    // builds, which would skip GStreamer initialization entirely. Initialize
    // unconditionally, assert in debug, and bail out on failure (matching the
    // pipeline-taking variant of this function).
    bool gstInitialized = initializeGStreamer();
    ASSERT(gstInitialized);
    if (!gstInitialized)
        return nullptr;

    GstElement* videoSink = nullptr;
    m_webkitVideoSink = webkitVideoSinkNew();
    m_repaintHandler = g_signal_connect(m_webkitVideoSink.get(), "repaint-requested", G_CALLBACK(mediaPlayerPrivateRepaintCallback), this);

    m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
    if (m_fpsSink) {
        // Keep fpsdisplaysink from printing statistics to stdout.
        g_object_set(m_fpsSink.get(), "silent", TRUE , nullptr);

        // Turn off text overlay unless logging is enabled.
#if LOG_DISABLED
        g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);
#else
        if (!isLogChannelEnabled("Media"))
            g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);
#endif // LOG_DISABLED

        // Only usable if this fpsdisplaysink build lets us slot our own sink
        // in via the "video-sink" property; otherwise discard it.
        if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink.get()), "video-sink")) {
            g_object_set(m_fpsSink.get(), "video-sink", m_webkitVideoSink.get(), nullptr);
            videoSink = m_fpsSink.get();
        } else
            m_fpsSink = nullptr;
    }

    // Fall back to the bare WebKit sink when fpsdisplaysink is missing or unusable.
    if (!m_fpsSink)
        videoSink = m_webkitVideoSink.get();

    ASSERT(videoSink);

    return videoSink;
}
// Shared one-time setup invoked from the AudioContext constructors.
void AudioContext::constructCommon()
{
    // According to spec AudioContext must die only after page navigation, so
    // mark it as an ActiveDOMObject with pending activity; the matching
    // unmark happens in the clear method.
    setPendingActivity(this);

#if USE(GSTREAMER)
    // Bring up the GStreamer backend before any audio machinery runs.
    initializeGStreamer();
#endif

    // Prepare the FFT implementation used by the audio engine. Kept after
    // GStreamer initialization — presumably the FFT backend may depend on it;
    // confirm before reordering.
    FFTFrame::initialize();

    m_listener = AudioListener::create();
}
// This function creates and initializes some internal variables, and returns a
// pointer to the element that should receive the data flow first.
//
// With NATIVE_FULLSCREEN_VIDEO the returned element is a bin (tee -> queue ->
// actual sink) exposed through a ghost pad; otherwise it is the sink element
// itself. Returns 0 if GStreamer could not be initialized.
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink(GstElement* pipeline)
{
    if (!initializeGStreamer())
        return 0;

#if USE(NATIVE_FULLSCREEN_VIDEO)
    // The fullscreen path needs a GWorld bound to the pipeline and a sink
    // that knows about it.
    m_gstGWorld = GStreamerGWorld::createGWorld(pipeline);
    m_webkitVideoSink = webkitVideoSinkNew(m_gstGWorld.get());
#else
    UNUSED_PARAM(pipeline);
    m_webkitVideoSink = webkitVideoSinkNew();
#endif

    // The sink signals us whenever a new frame should be painted.
    m_repaintHandler = g_signal_connect(m_webkitVideoSink.get(), "repaint-requested", G_CALLBACK(mediaPlayerPrivateRepaintCallback), this);

#if USE(NATIVE_FULLSCREEN_VIDEO)
    // Build a new video sink consisting of a bin containing a tee
    // (meant to distribute data to multiple video sinks) and our
    // internal video sink. For fullscreen we create an autovideosink
    // and initially block the data flow towards it and configure it.
    m_videoSinkBin = gst_bin_new("video-sink");
    GstElement* videoTee = gst_element_factory_make("tee", "videoTee");
    GstElement* queue = gst_element_factory_make("queue", 0);

#ifdef GST_API_VERSION_1
    // Let allocation queries pass through the tee so downstream sinks can
    // negotiate buffer pools (GStreamer 1.x only).
    GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(videoTee, "sink"));
    GST_OBJECT_FLAG_SET(GST_OBJECT(sinkPad.get()), GST_PAD_FLAG_PROXY_ALLOCATION);
#endif

    gst_bin_add_many(GST_BIN(m_videoSinkBin.get()), videoTee, queue, NULL);

    // Link a new src pad from tee to queue1.
    gst_element_link_pads_full(videoTee, 0, queue, "sink", GST_PAD_LINK_CHECK_NOTHING);
#endif

    GstElement* actualVideoSink = 0;
    m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
    if (m_fpsSink) {
        // The verbose property has been added in -bad 0.10.22. Making
        // this whole code depend on it because we don't want
        // fpsdisplaysink to spit data on stdout.
        GstElementFactory* factory = GST_ELEMENT_FACTORY(GST_ELEMENT_GET_CLASS(m_fpsSink)->elementfactory);
        if (gst_plugin_feature_check_version(GST_PLUGIN_FEATURE(factory), 0, 10, 22)) {
            g_object_set(m_fpsSink, "silent", TRUE , NULL);

            // Turn off text overlay unless logging is enabled.
#if LOG_DISABLED
            g_object_set(m_fpsSink, "text-overlay", FALSE , NULL);
#else
            WTFLogChannel* channel = getChannelFromName("Media");
            if (channel->state != WTFLogChannelOn)
                g_object_set(m_fpsSink, "text-overlay", FALSE , NULL);
#endif // LOG_DISABLED

            // Only usable if we can slot our own sink in via "video-sink";
            // otherwise discard fpsdisplaysink entirely.
            if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink), "video-sink")) {
                g_object_set(m_fpsSink, "video-sink", m_webkitVideoSink.get(), NULL);
#if USE(NATIVE_FULLSCREEN_VIDEO)
                gst_bin_add(GST_BIN(m_videoSinkBin.get()), m_fpsSink);
#endif
                actualVideoSink = m_fpsSink;
            } else
                m_fpsSink = 0;
        } else
            m_fpsSink = 0;
    }

    // Fall back to the bare WebKit sink when fpsdisplaysink is missing,
    // too old, or lacks the "video-sink" property.
    if (!m_fpsSink) {
#if USE(NATIVE_FULLSCREEN_VIDEO)
        gst_bin_add(GST_BIN(m_videoSinkBin.get()), m_webkitVideoSink.get());
#endif
        actualVideoSink = m_webkitVideoSink.get();
    }

    ASSERT(actualVideoSink);

#if USE(NATIVE_FULLSCREEN_VIDEO)
    // Faster elements linking.
    gst_element_link_pads_full(queue, "src", actualVideoSink, "sink", GST_PAD_LINK_CHECK_NOTHING);

    // Add a ghostpad to the bin so it can proxy to tee.
    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(videoTee, "sink"));
    gst_element_add_pad(m_videoSinkBin.get(), gst_ghost_pad_new("sink", pad.get()));

    // Set the bin as video sink of playbin.
    return m_videoSinkBin.get();
#else
    return actualVideoSink;
#endif
}