bool decode(ArgumentDecoder* decoder, GRefPtr<GtkPageSetup>& pageSetup)
{
    GOwnPtr<GKeyFile> keyFile;
    if (!decodeGKeyFile(decoder, keyFile))
        return false;

    pageSetup = adoptGRef(gtk_page_setup_new());
    if (!keyFile)
        return true;

    if (!gtk_page_setup_load_key_file(pageSetup.get(), keyFile.get(), "Page Setup", 0))
        pageSetup = 0;
    return pageSetup;
}

gboolean AudioFileReader::handleMessage(GstMessage* message)
{
    GOwnPtr<GError> error;
    GOwnPtr<gchar> debug;

    switch (GST_MESSAGE_TYPE(message)) {
    case GST_MESSAGE_EOS:
        g_main_loop_quit(m_loop.get());
        break;
    case GST_MESSAGE_WARNING:
        gst_message_parse_warning(message, &error.outPtr(), &debug.outPtr());
        g_warning("Warning: %d, %s. Debug output: %s", error->code, error->message, debug.get());
        break;
    case GST_MESSAGE_ERROR:
        gst_message_parse_error(message, &error.outPtr(), &debug.outPtr());
        g_warning("Error: %d, %s. Debug output: %s", error->code, error->message, debug.get());
        m_errorOccurred = true;
        g_main_loop_quit(m_loop.get());
        break;
    default:
        break;
    }
    return TRUE;
}

bool decode(ArgumentDecoder* decoder, GRefPtr<GtkPrintSettings>& printSettings)
{
    GOwnPtr<GKeyFile> keyFile;
    if (!decodeGKeyFile(decoder, keyFile))
        return false;

    printSettings = adoptGRef(gtk_print_settings_new());
    if (!keyFile)
        return true;

    if (!gtk_print_settings_load_key_file(printSettings.get(), keyFile.get(), "Print Settings", 0))
        printSettings = 0;
    return printSettings;
}

bool RenderThemeGtk::paintCapsLockIndicator(RenderObject* renderObject, const PaintInfo& paintInfo, const IntRect& rect)
{
    // The other paint methods don't need to check whether painting is disabled because RenderTheme already checks it
    // before calling them, but paintCapsLockIndicator() is called by RenderTextControlSingleLine which doesn't check it.
    if (paintInfo.context->paintingDisabled())
        return true;

    int iconSize = std::min(rect.width(), rect.height());
    GRefPtr<GdkPixbuf> icon = getStockIcon(GTK_TYPE_ENTRY, GTK_STOCK_CAPS_LOCK_WARNING,
        gtkTextDirection(renderObject->style()->direction()), 0, getIconSizeForPixelSize(iconSize));

    // Only re-scale the icon when it's smaller than the minimum icon size.
    if (iconSize >= gtkIconSizeMenu)
        iconSize = gdk_pixbuf_get_height(icon.get());

    // GTK+ locates the icon right aligned in the entry. The given rectangle is already
    // centered vertically by RenderTextControlSingleLine.
    IntRect iconRect(rect.x() + rect.width() - iconSize,
        rect.y() + (rect.height() - iconSize) / 2,
        iconSize, iconSize);
    paintGdkPixbuf(paintInfo.context, icon.get(), iconRect);
    return true;
}

static void testWebKitWebViewProcessCrashed(WebViewTest* test, gconstpointer)
{
    test->loadHtml("<html></html>", 0);
    test->waitUntilLoadFinished();

    g_signal_connect_after(test->m_webView, "web-process-crashed",
        G_CALLBACK(webProcessCrashedCallback), test);
    test->m_expectedWebProcessCrash = true;

    GUniquePtr<char> extensionBusName(g_strdup_printf("org.webkit.gtk.WebExtensionTest%u", Test::s_webExtensionID));
    GRefPtr<GDBusProxy> proxy = adoptGRef(bus->createProxy(extensionBusName.get(),
        "/org/webkit/gtk/WebExtensionTest", "org.webkit.gtk.WebExtensionTest", test->m_mainLoop));
    GRefPtr<GVariant> result = adoptGRef(g_dbus_proxy_call_sync(
        proxy.get(),
        "AbortProcess",
        0,
        G_DBUS_CALL_FLAGS_NONE,
        -1, 0, 0));
    g_assert(!result);
    g_main_loop_run(test->m_mainLoop);
    test->m_expectedWebProcessCrash = false;
}

static void didSendRequestForResource(WKPageRef, WKFrameRef, uint64_t resourceIdentifier, WKURLRequestRef wkRequest, WKURLResponseRef wkRedirectResponse, const void* clientInfo)
{
    GRefPtr<WebKitWebResource> resource = webkitWebViewGetLoadingWebResource(WEBKIT_WEB_VIEW(clientInfo), resourceIdentifier);
    if (!resource)
        return;

    GRefPtr<WebKitURIRequest> request = adoptGRef(webkitURIRequestCreateForResourceRequest(toImpl(wkRequest)->resourceRequest()));
    GRefPtr<WebKitURIResponse> redirectResponse = wkRedirectResponse ? adoptGRef(webkitURIResponseCreateForResourceResponse(toImpl(wkRedirectResponse)->resourceResponse())) : 0;
    webkitWebResourceSentRequest(resource.get(), request.get(), redirectResponse.get());
}

bool WebInspectorServer::platformResourceForPath(const String& path, Vector<char>& data, String& contentType)
{
    // The page list contains an unformatted list of pages that can be inspected with a link to open a session.
    if (path == "/pagelist.json") {
        buildPageList(data, contentType);
        return true;
    }

    // Point the default path to a formatted page that queries the page list and displays it.
    CString localPath = WebCore::fileSystemRepresentation(inspectorServerFilesPath() + ((path == "/") ? "/inspectorPageIndex.html" : path));
    if (localPath.isNull())
        return false;

    GRefPtr<GFile> file = adoptGRef(g_file_new_for_path(localPath.data()));
    GOwnPtr<GError> error;
    GRefPtr<GFileInfo> fileInfo = adoptGRef(g_file_query_info(file.get(),
        G_FILE_ATTRIBUTE_STANDARD_SIZE "," G_FILE_ATTRIBUTE_STANDARD_FAST_CONTENT_TYPE,
        G_FILE_QUERY_INFO_NONE, 0, &error.outPtr()));
    if (!fileInfo) {
        StringBuilder builder;
        builder.appendLiteral("<!DOCTYPE html><html><head></head><body>Error: ");
        builder.appendNumber(error->code);
        builder.appendLiteral(", ");
        builder.append(error->message);
        builder.appendLiteral(" occurred while fetching webinspector resource files.<br>Make sure you ran make install or have set WEBKIT_INSPECTOR_SERVER_PATH in your environment to point to the webinspector folder.</body></html>");
        CString cstr = builder.toString().utf8();
        data.append(cstr.data(), cstr.length());
        contentType = "text/html; charset=utf-8";
        g_warning("Error fetching webinspector resource files: %d, %s", error->code, error->message);
        return true;
    }

    GRefPtr<GFileInputStream> inputStream = adoptGRef(g_file_read(file.get(), 0, 0));
    if (!inputStream)
        return false;

    data.grow(g_file_info_get_size(fileInfo.get()));
    if (!g_input_stream_read_all(G_INPUT_STREAM(inputStream.get()), data.data(), data.size(), 0, 0, 0))
        return false;

    contentType = GOwnPtr<gchar>(g_file_info_get_attribute_as_string(fileInfo.get(), G_FILE_ATTRIBUTE_STANDARD_FAST_CONTENT_TYPE)).get();
    return true;
}

void AudioFileReader::decodeAudioForBusCreation()
{
    // Build the pipeline: (giostreamsrc | filesrc) ! decodebin
    // A deinterleave element is added once a src pad becomes available in decodebin.
    m_pipeline = gst_pipeline_new(0);

    GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline)));
    ASSERT(bus);
    gst_bus_add_signal_watch(bus.get());
    g_signal_connect(bus.get(), "message", G_CALLBACK(messageCallback), this);

    GstElement* source;
    if (m_data) {
        ASSERT(m_dataSize);
        source = gst_element_factory_make("giostreamsrc", 0);
        GRefPtr<GInputStream> memoryStream = adoptGRef(g_memory_input_stream_new_from_data(m_data, m_dataSize, 0));
        g_object_set(source, "stream", memoryStream.get(), NULL);
    } else {
        source = gst_element_factory_make("filesrc", 0);
        g_object_set(source, "location", m_filePath, NULL);
    }

    m_decodebin = gst_element_factory_make("decodebin", "decodebin");
    g_signal_connect(m_decodebin.get(), "pad-added", G_CALLBACK(onGStreamerDecodebinPadAddedCallback), this);

    gst_bin_add_many(GST_BIN(m_pipeline), source, m_decodebin.get(), NULL);
    gst_element_link_pads_full(source, "src", m_decodebin.get(), "sink", GST_PAD_LINK_CHECK_NOTHING);

    // Catch errors here immediately, there might not be an error message if
    // we're unlucky.
    if (gst_element_set_state(m_pipeline, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE) {
        g_warning("Error: Failed to set pipeline to PAUSED");
        m_errorOccurred = true;
        g_main_loop_quit(m_loop.get());
    }
}

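// A minimal sketch of what a decodebin "pad-added" handler such as
// onGStreamerDecodebinPadAddedCallback() typically does: link the newly
// exposed decoded pad into the downstream element. This is illustrative only,
// not the actual WebKit implementation; "deinterleave" here stands for
// whatever element the decoded audio should feed.
static void examplePadAddedHandler(GstElement* /*decodebin*/, GstPad* newPad, gpointer userData)
{
    GstElement* deinterleave = GST_ELEMENT(userData);
    GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(deinterleave, "sink"));
    // Only link once; decodebin can announce several pads (e.g. audio and video).
    if (sinkPad && !gst_pad_is_linked(sinkPad.get()))
        gst_pad_link(newPad, sinkPad.get());
}
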
void WebProcess::platformInitializeWebProcess(WebProcessCreationParameters&& parameters)
{
#if ENABLE(SECCOMP_FILTERS)
    {
#if PLATFORM(EFL)
        SeccompFiltersWebProcessEfl seccompFilters(parameters);
#elif PLATFORM(GTK)
        SeccompFiltersWebProcessGtk seccompFilters(parameters);
#endif
        seccompFilters.initialize();
    }
#endif

    if (usesNetworkProcess())
        return;

    ASSERT(!parameters.diskCacheDirectory.isEmpty());

    // We used to use the given cache directory for the soup cache, but now we use a subdirectory to avoid
    // conflicts with other cache files in the same directory. Remove the old cache files if they still exist.
    WebCore::SoupNetworkSession::defaultSession().clearCache(WebCore::directoryName(parameters.diskCacheDirectory));

#if ENABLE(NETWORK_CACHE)
    // When network cache is enabled, the disk cache directory is the network process one.
    CString diskCachePath = WebCore::pathByAppendingComponent(WebCore::directoryName(parameters.diskCacheDirectory), "webkit").utf8();
#else
    CString diskCachePath = parameters.diskCacheDirectory.utf8();
#endif

    GRefPtr<SoupCache> soupCache = adoptGRef(soup_cache_new(diskCachePath.data(), SOUP_CACHE_SINGLE_USER));
    WebCore::SoupNetworkSession::defaultSession().setCache(soupCache.get());
    // Set an initial huge max_size for the SoupCache so the call to soup_cache_load() won't evict any cached
    // resource. The final size of the cache will be set by NetworkProcess::platformSetCacheModel().
    unsigned initialMaxSize = soup_cache_get_max_size(soupCache.get());
    soup_cache_set_max_size(soupCache.get(), G_MAXUINT);
    soup_cache_load(soupCache.get());
    soup_cache_set_max_size(soupCache.get(), initialMaxSize);

    if (!parameters.cookiePersistentStoragePath.isEmpty()) {
        supplement<WebCookieManager>()->setCookiePersistentStorage(parameters.cookiePersistentStoragePath,
            parameters.cookiePersistentStorageType);
    }
    supplement<WebCookieManager>()->setHTTPCookieAcceptPolicy(parameters.cookieAcceptPolicy);

    if (!parameters.languages.isEmpty())
        setSoupSessionAcceptLanguage(parameters.languages);

    setIgnoreTLSErrors(parameters.ignoreTLSErrors);

    WebCore::addLanguageChangeObserver(this, languageChanged);
}

AudioDestinationGStreamer::AudioDestinationGStreamer(AudioIOCallback& callback, float sampleRate)
    : m_callback(callback)
    , m_renderBus(AudioBus::create(2, framesToPull, false))
    , m_sampleRate(sampleRate)
    , m_isPlaying(false)
{
    m_pipeline = gst_pipeline_new("play");
    GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline)));
    ASSERT(bus);
    gst_bus_add_signal_watch(bus.get());
    g_signal_connect(bus.get(), "message", G_CALLBACK(messageCallback), this);

    GstElement* webkitAudioSrc = reinterpret_cast<GstElement*>(g_object_new(WEBKIT_TYPE_WEB_AUDIO_SRC,
        "rate", sampleRate,
        "bus", m_renderBus.get(),
        "provider", &m_callback,
        "frames", framesToPull, NULL));

    GRefPtr<GstElement> audioSink = gst_element_factory_make("autoaudiosink", 0);
    m_audioSinkAvailable = audioSink;
    if (!audioSink) {
        LOG_ERROR("Failed to create GStreamer autoaudiosink element");
        return;
    }

    // Autoaudiosink does the real sink detection in the GST_STATE_NULL->READY transition
    // so it's best to roll it to READY as soon as possible to ensure the underlying platform
    // audiosink was loaded correctly.
    GstStateChangeReturn stateChangeReturn = gst_element_set_state(audioSink.get(), GST_STATE_READY);
    if (stateChangeReturn == GST_STATE_CHANGE_FAILURE) {
        LOG_ERROR("Failed to change autoaudiosink element state");
        gst_element_set_state(audioSink.get(), GST_STATE_NULL);
        m_audioSinkAvailable = false;
        return;
    }

    GstElement* audioConvert = gst_element_factory_make("audioconvert", 0);
    GstElement* audioResample = gst_element_factory_make("audioresample", 0);
    gst_bin_add_many(GST_BIN(m_pipeline), webkitAudioSrc, audioConvert, audioResample, audioSink.get(), NULL);

    // Link src pads from webkitAudioSrc to audioConvert ! audioResample ! autoaudiosink.
    gst_element_link_pads_full(webkitAudioSrc, "src", audioConvert, "sink", GST_PAD_LINK_CHECK_NOTHING);
    gst_element_link_pads_full(audioConvert, "src", audioResample, "sink", GST_PAD_LINK_CHECK_NOTHING);
    gst_element_link_pads_full(audioResample, "src", audioSink.get(), "sink", GST_PAD_LINK_CHECK_NOTHING);
}

PassOwnPtr<AudioBus> AudioFileReader::createBus(float sampleRate, bool mixToMono)
{
    m_sampleRate = sampleRate;

    m_frontLeftBuffers = gst_buffer_list_new();
    m_frontRightBuffers = gst_buffer_list_new();

#ifndef GST_API_VERSION_1
    m_frontLeftBuffersIterator = gst_buffer_list_iterate(m_frontLeftBuffers);
    gst_buffer_list_iterator_add_group(m_frontLeftBuffersIterator);

    m_frontRightBuffersIterator = gst_buffer_list_iterate(m_frontRightBuffers);
    gst_buffer_list_iterator_add_group(m_frontRightBuffersIterator);
#endif

    GRefPtr<GMainContext> context = adoptGRef(g_main_context_new());
    g_main_context_push_thread_default(context.get());
    m_loop = adoptGRef(g_main_loop_new(context.get(), FALSE));

    // Start the pipeline processing just after the loop is started.
    GRefPtr<GSource> timeoutSource = adoptGRef(g_timeout_source_new(0));
    g_source_attach(timeoutSource.get(), context.get());
    g_source_set_callback(timeoutSource.get(), reinterpret_cast<GSourceFunc>(enteredMainLoopCallback), this, 0);

    g_main_loop_run(m_loop.get());
    g_main_context_pop_thread_default(context.get());

    if (m_errorOccurred)
        return nullptr;

    unsigned channels = mixToMono ? 1 : 2;
    OwnPtr<AudioBus> audioBus = adoptPtr(new AudioBus(channels, m_channelSize, true));
    audioBus->setSampleRate(m_sampleRate);

    copyGstreamerBuffersToAudioChannel(m_frontLeftBuffers, audioBus->channel(0));
    if (!mixToMono)
        copyGstreamerBuffersToAudioChannel(m_frontRightBuffers, audioBus->channel(1));

    return audioBus.release();
}

void GSourceWrap::OneShot::construct(const char* name, std::function<void ()>&& function, std::chrono::microseconds delay, int priority, GMainContext* context)
{
    GRefPtr<GSource> source = adoptGRef(g_source_new(&sourceFunctions, sizeof(GSource)));

    g_source_set_name(source.get(), name);
    g_source_set_priority(source.get(), priority);

    g_source_set_callback(source.get(), static_cast<GSourceFunc>(staticOneShotCallback),
        new CallbackContext { WTF::move(function), nullptr },
        static_cast<GDestroyNotify>(destroyCallbackContext<CallbackContext>));

    g_source_set_ready_time(source.get(), targetTimeForDelay(delay));

    if (!context)
        context = g_main_context_get_thread_default();
    g_source_attach(source.get(), context);
}

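// A usage sketch for the one-shot wrapper above (hypothetical call site; the
// source name, lambda and delay are made up for illustration). Passing a null
// context dispatches on the calling thread's thread-default GMainContext.
static void scheduleExampleOneShot()
{
    GSourceWrap::OneShot::construct("[example] one-shot",
        [] { g_print("one-shot fired\n"); },
        std::chrono::milliseconds(250), G_PRIORITY_DEFAULT, nullptr);
}
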
static void webKitWebAudioSrcConstructed(GObject* object)
{
    WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(object);
    WebKitWebAudioSourcePrivate* priv = src->priv;

    ASSERT(priv->bus);
    ASSERT(priv->provider);
    ASSERT(priv->sampleRate);

    priv->interleave = gst_element_factory_make("interleave", nullptr);
    if (!priv->interleave) {
        GST_ERROR_OBJECT(src, "Failed to create interleave");
        return;
    }

    gst_bin_add(GST_BIN(src), priv->interleave.get());

    // For each channel of the bus create a new upstream branch for interleave, like:
    // appsrc ! . which is plugged to a new interleave request sinkpad.
    for (unsigned channelIndex = 0; channelIndex < priv->bus->numberOfChannels(); channelIndex++) {
        GUniquePtr<gchar> appsrcName(g_strdup_printf("webaudioSrc%u", channelIndex));
        GstElement* appsrc = gst_element_factory_make("appsrc", appsrcName.get());
        GRefPtr<GstCaps> monoCaps = adoptGRef(getGStreamerMonoAudioCaps(priv->sampleRate));

        GstAudioInfo info;
        gst_audio_info_from_caps(&info, monoCaps.get());
        GST_AUDIO_INFO_POSITION(&info, 0) = webKitWebAudioGStreamerChannelPosition(channelIndex);
        GRefPtr<GstCaps> caps = adoptGRef(gst_audio_info_to_caps(&info));

        // Configure the appsrc for minimal latency.
        g_object_set(appsrc, "max-bytes", 2 * priv->bufferSize, "block", TRUE,
            "format", GST_FORMAT_TIME, "caps", caps.get(), nullptr);

        priv->sources = g_slist_prepend(priv->sources, gst_object_ref(appsrc));

        gst_bin_add(GST_BIN(src), appsrc);
        gst_element_link_pads_full(appsrc, "src", priv->interleave.get(), "sink_%u", GST_PAD_LINK_CHECK_NOTHING);
    }
    priv->sources = g_slist_reverse(priv->sources);

    // interleave's src pad is the only visible pad of our element.
    GRefPtr<GstPad> targetPad = adoptGRef(gst_element_get_static_pad(priv->interleave.get(), "src"));
    gst_ghost_pad_set_target(GST_GHOST_PAD(priv->sourcePad), targetPad.get());
}

void didReceiveResponse(ResourceHandle*, const ResourceResponse& response)
{
    m_response = adoptGRef(response.toSoupMessage());
    m_download->didReceiveResponse(response);

    if (response.httpStatusCode() >= 400) {
        downloadFailed(platformDownloadNetworkError(response.httpStatusCode(), response.url().string(), response.httpStatusText()));
        return;
    }

    String suggestedFilename = response.suggestedFilename();
    if (suggestedFilename.isEmpty()) {
        URL url = response.url();
        url.setQuery(String());
        url.removeFragmentIdentifier();
        suggestedFilename = decodeURLEscapeSequences(url.lastPathComponent());
    }

    bool overwrite;
    String destinationURI = m_download->decideDestinationWithSuggestedFilename(suggestedFilename, overwrite);
    if (destinationURI.isEmpty()) {
#if PLATFORM(GTK)
        GOwnPtr<char> buffer(g_strdup_printf(_("Cannot determine destination URI for download with suggested filename %s"), suggestedFilename.utf8().data()));
        String errorMessage = String::fromUTF8(buffer.get());
#else
        String errorMessage = makeString("Cannot determine destination URI for download with suggested filename ", suggestedFilename);
#endif
        downloadFailed(platformDownloadDestinationError(response, errorMessage));
        return;
    }

    GRefPtr<GFile> file = adoptGRef(g_file_new_for_uri(destinationURI.utf8().data()));
    GOwnPtr<GError> error;
    m_outputStream = adoptGRef(g_file_replace(file.get(), 0, TRUE, G_FILE_CREATE_NONE, 0, &error.outPtr()));
    if (!m_outputStream) {
        downloadFailed(platformDownloadDestinationError(response, error->message));
        return;
    }

    GRefPtr<GFileInfo> info = adoptGRef(g_file_info_new());
    g_file_info_set_attribute_string(info.get(), "metadata::download-uri", response.url().string().utf8().data());
    g_file_set_attributes_async(file.get(), info.get(), G_FILE_QUERY_INFO_NONE, G_PRIORITY_DEFAULT, 0, 0, 0);

    m_download->didCreateDestination(destinationURI);
}

void didFinishLoading(ResourceHandle*, double)
{
    m_outputStream = 0;

    ASSERT(m_intermediateFile);
    GRefPtr<GFile> destinationFile = adoptGRef(g_file_new_for_uri(m_destinationURI.utf8().data()));
    GOwnPtr<GError> error;
    if (!g_file_move(m_intermediateFile.get(), destinationFile.get(), G_FILE_COPY_NONE, nullptr, nullptr, nullptr, &error.outPtr())) {
        downloadFailed(platformDownloadDestinationError(m_response, error->message));
        return;
    }

    GRefPtr<GFileInfo> info = adoptGRef(g_file_info_new());
    CString uri = m_response.url().string().utf8();
    g_file_info_set_attribute_string(info.get(), "metadata::download-uri", uri.data());
    g_file_info_set_attribute_string(info.get(), "xattr::xdg.origin.url", uri.data());
    g_file_set_attributes_async(destinationFile.get(), info.get(), G_FILE_QUERY_INFO_NONE, G_PRIORITY_DEFAULT, nullptr, nullptr, nullptr);

    m_download->didFinish();
}

static gboolean webkit_download_open_stream_for_uri(WebKitDownload* download, const gchar* uri, gboolean append = FALSE)
{
    g_return_val_if_fail(uri, FALSE);

    WebKitDownloadPrivate* priv = download->priv;
    GRefPtr<GFile> file = adoptGRef(g_file_new_for_uri(uri));
    GOwnPtr<GError> error;

    if (append)
        priv->outputStream = g_file_append_to(file.get(), G_FILE_CREATE_NONE, NULL, &error.outPtr());
    else
        priv->outputStream = g_file_replace(file.get(), NULL, TRUE, G_FILE_CREATE_NONE, NULL, &error.outPtr());

    if (error) {
        webkitDownloadEmitError(download, downloadDestinationError(core(priv->networkResponse), error->message));
        return FALSE;
    }

    GRefPtr<GFileInfo> info = adoptGRef(g_file_info_new());
    g_file_info_set_attribute_string(info.get(), "metadata::download-uri", webkit_download_get_uri(download));
    g_file_set_attributes_async(file.get(), info.get(), G_FILE_QUERY_INFO_NONE, G_PRIORITY_DEFAULT, 0, 0, 0);

    return TRUE;
}

std::unique_ptr<SoupNetworkSession> SoupNetworkSession::createTestingSession()
{
    GRefPtr<SoupCookieJar> cookieJar = adoptGRef(createPrivateBrowsingCookieJar());
    return std::unique_ptr<SoupNetworkSession>(new SoupNetworkSession(cookieJar.get()));
}

PassRefPtr<BitmapTexture> MediaPlayerPrivateGStreamerBase::updateTexture(TextureMapper* textureMapper)
{
    g_mutex_lock(m_bufferMutex);
    if (!m_buffer) {
        g_mutex_unlock(m_bufferMutex);
        return 0;
    }

    const void* srcData = 0;
#ifdef GST_API_VERSION_1
    GRefPtr<GstCaps> caps = currentVideoSinkCaps();
#else
    GRefPtr<GstCaps> caps = GST_BUFFER_CAPS(m_buffer);
#endif
    if (!caps) {
        g_mutex_unlock(m_bufferMutex);
        return 0;
    }

    IntSize size;
    GstVideoFormat format;
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    if (!getVideoSizeAndFormatFromCaps(caps.get(), size, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride)) {
        g_mutex_unlock(m_bufferMutex);
        return 0;
    }

    RefPtr<BitmapTexture> texture = textureMapper->acquireTextureFromPool(size);

#if GST_CHECK_VERSION(1, 1, 0)
    GstVideoGLTextureUploadMeta* meta;
    if ((meta = gst_buffer_get_video_gl_texture_upload_meta(m_buffer))) {
        if (meta->n_textures == 1) { // BGRx & BGRA formats use only one texture.
            const BitmapTextureGL* textureGL = static_cast<const BitmapTextureGL*>(texture.get());
            guint ids[4] = { textureGL->id(), 0, 0, 0 };

            if (gst_video_gl_texture_upload_meta_upload(meta, ids)) {
                g_mutex_unlock(m_bufferMutex);
                return texture;
            }
        }
    }
#endif

#ifdef GST_API_VERSION_1
    GstMapInfo srcInfo;
    gst_buffer_map(m_buffer, &srcInfo, GST_MAP_READ);
    srcData = srcInfo.data;
#else
    srcData = GST_BUFFER_DATA(m_buffer);
#endif

    texture->updateContents(srcData, WebCore::IntRect(WebCore::IntPoint(0, 0), size), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData);

#ifdef GST_API_VERSION_1
    gst_buffer_unmap(m_buffer, &srcInfo);
#endif

    g_mutex_unlock(m_bufferMutex);
    return texture;
}

static GRefPtr<GdkCursor> createCustomCursor(Image* image, const IntPoint& hotSpot)
{
    IntPoint effectiveHotSpot = determineHotSpot(image, hotSpot);
    GRefPtr<GdkPixbuf> pixbuf = adoptGRef(image->getGdkPixbuf());
    return adoptGRef(gdk_cursor_new_from_pixbuf(gdk_display_get_default(), pixbuf.get(), effectiveHotSpot.x(), effectiveHotSpot.y()));
}

static GstFlowReturn webkitVideoSinkRender(GstBaseSink* baseSink, GstBuffer* buffer)
{
    WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
    WebKitVideoSinkPrivate* priv = sink->priv;

    g_mutex_lock(priv->bufferMutex);

    if (priv->unlocked) {
        g_mutex_unlock(priv->bufferMutex);
        return GST_FLOW_OK;
    }

#if USE(NATIVE_FULLSCREEN_VIDEO)
    // Ignore buffers if the video is already in fullscreen using
    // another sink.
    if (priv->gstGWorld->isFullscreen()) {
        g_mutex_unlock(priv->bufferMutex);
        return GST_FLOW_OK;
    }
#endif

    priv->buffer = gst_buffer_ref(buffer);

#ifndef GST_API_VERSION_1
    // For the unlikely case where the buffer has no caps, the caps
    // are implicitly the caps of the pad. This shouldn't happen.
    if (UNLIKELY(!GST_BUFFER_CAPS(buffer))) {
        buffer = priv->buffer = gst_buffer_make_metadata_writable(priv->buffer);
        gst_buffer_set_caps(priv->buffer, GST_PAD_CAPS(GST_BASE_SINK_PAD(baseSink)));
    }

    GRefPtr<GstCaps> caps = GST_BUFFER_CAPS(buffer);
#else
    GRefPtr<GstCaps> caps;
    // The video info structure is valid only if the sink handled an allocation query.
    if (GST_VIDEO_INFO_FORMAT(&priv->info) != GST_VIDEO_FORMAT_UNKNOWN)
        caps = adoptGRef(gst_video_info_to_caps(&priv->info));
    else
        caps = priv->currentCaps;
#endif

    GstVideoFormat format;
    WebCore::IntSize size;
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    if (!getVideoSizeAndFormatFromCaps(caps.get(), size, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride)) {
        gst_buffer_unref(buffer);
        g_mutex_unlock(priv->bufferMutex);
        return GST_FLOW_ERROR;
    }

    // Cairo's ARGB has pre-multiplied alpha while GStreamer's doesn't.
    // Here we convert to Cairo's ARGB.
    if (format == GST_VIDEO_FORMAT_ARGB || format == GST_VIDEO_FORMAT_BGRA) {
        // Because GstBaseSink::render() only owns the buffer reference in the
        // method scope we can't use gst_buffer_make_writable() here. Also,
        // the buffer content should not be changed here because the same buffer
        // could be passed multiple times to this method (in theory).
        GstBuffer* newBuffer = createGstBuffer(buffer);

        // Check if allocation failed.
        if (UNLIKELY(!newBuffer)) {
            g_mutex_unlock(priv->bufferMutex);
            return GST_FLOW_ERROR;
        }

        // We don't use Color::premultipliedARGBFromColor() here because
        // one function call per video pixel is just too expensive:
        // For 720p/PAL for example this means 1280*720*25=23040000
        // function calls per second!
#ifndef GST_API_VERSION_1
        const guint8* source = GST_BUFFER_DATA(buffer);
        guint8* destination = GST_BUFFER_DATA(newBuffer);
#else
        GstMapInfo sourceInfo;
        GstMapInfo destinationInfo;
        gst_buffer_map(buffer, &sourceInfo, GST_MAP_READ);
        const guint8* source = const_cast<guint8*>(sourceInfo.data);
        gst_buffer_map(newBuffer, &destinationInfo, GST_MAP_WRITE);
        guint8* destination = static_cast<guint8*>(destinationInfo.data);
#endif

        for (int x = 0; x < size.height(); x++) {
            for (int y = 0; y < size.width(); y++) {
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
                unsigned short alpha = source[3];
                destination[0] = (source[0] * alpha + 128) / 255;
                destination[1] = (source[1] * alpha + 128) / 255;
                destination[2] = (source[2] * alpha + 128) / 255;
                destination[3] = alpha;
#else
                unsigned short alpha = source[0];
                destination[0] = alpha;
                destination[1] = (source[1] * alpha + 128) / 255;
                destination[2] = (source[2] * alpha + 128) / 255;
                destination[3] = (source[3] * alpha + 128) / 255;
#endif
                source += 4;
                destination += 4;
            }
        }

#ifdef GST_API_VERSION_1
        gst_buffer_unmap(buffer, &sourceInfo);
        gst_buffer_unmap(newBuffer, &destinationInfo);
#endif
        gst_buffer_unref(buffer);
        buffer = priv->buffer = newBuffer;
    }

    // This should likely use a lower priority, but glib currently starves
    // lower priority sources.
    // See: https://bugzilla.gnome.org/show_bug.cgi?id=610830.
    priv->timeoutId = g_timeout_add_full(G_PRIORITY_DEFAULT, 0, webkitVideoSinkTimeoutCallback,
        gst_object_ref(sink), reinterpret_cast<GDestroyNotify>(gst_object_unref));

    g_cond_wait(priv->dataCondition, priv->bufferMutex);
    g_mutex_unlock(priv->bufferMutex);
    return GST_FLOW_OK;
}

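// Worked example of the premultiplication loop above (little-endian case,
// pixel values are illustrative): for a source pixel {B=200, G=100, R=50, A=128}
// the loop produces
//   destination[0] = (200 * 128 + 128) / 255 = 100
//   destination[1] = (100 * 128 + 128) / 255 = 50
//   destination[2] = (50 * 128 + 128) / 255 = 25
//   destination[3] = 128
// i.e. each color channel is scaled by alpha/255, with the +128 term acting
// as a rounding bias before the integer division.
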
static void webkitCookieManagerGetAcceptPolicyCallback(WKHTTPCookieAcceptPolicy policy, WKErrorRef, void* context)
{
    GRefPtr<GTask> task = adoptGRef(G_TASK(context));
    g_task_return_int(task.get(), policy);
}

// This function creates and initializes some internal variables, and returns a
// pointer to the element that should receive the data flow first.
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink(GstElement* pipeline)
{
    if (!initializeGStreamer())
        return 0;

#if USE(NATIVE_FULLSCREEN_VIDEO)
    m_gstGWorld = GStreamerGWorld::createGWorld(pipeline);
    m_webkitVideoSink = webkitVideoSinkNew(m_gstGWorld.get());
#else
    UNUSED_PARAM(pipeline);
    m_webkitVideoSink = webkitVideoSinkNew();
#endif
    m_repaintHandler = g_signal_connect(m_webkitVideoSink.get(), "repaint-requested",
        G_CALLBACK(mediaPlayerPrivateRepaintCallback), this);

#if USE(NATIVE_FULLSCREEN_VIDEO)
    // Build a new video sink consisting of a bin containing a tee
    // (meant to distribute data to multiple video sinks) and our
    // internal video sink. For fullscreen we create an autovideosink
    // and initially block the data flow towards it and configure it.
    m_videoSinkBin = gst_bin_new("video-sink");

    GstElement* videoTee = gst_element_factory_make("tee", "videoTee");
    GstElement* queue = gst_element_factory_make("queue", 0);

#ifdef GST_API_VERSION_1
    GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(videoTee, "sink"));
    GST_OBJECT_FLAG_SET(GST_OBJECT(sinkPad.get()), GST_PAD_FLAG_PROXY_ALLOCATION);
#endif

    gst_bin_add_many(GST_BIN(m_videoSinkBin.get()), videoTee, queue, NULL);

    // Link a new src pad from the tee to the queue.
    gst_element_link_pads_full(videoTee, 0, queue, "sink", GST_PAD_LINK_CHECK_NOTHING);
#endif

    GstElement* actualVideoSink = 0;
    m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
    if (m_fpsSink) {
        // The verbose property has been added in -bad 0.10.22. Making
        // this whole code depend on it because we don't want
        // fpsdisplaysink to spit data on stdout.
        GstElementFactory* factory = GST_ELEMENT_FACTORY(GST_ELEMENT_GET_CLASS(m_fpsSink)->elementfactory);
        if (gst_plugin_feature_check_version(GST_PLUGIN_FEATURE(factory), 0, 10, 22)) {
            g_object_set(m_fpsSink, "silent", TRUE, NULL);

            // Turn off text overlay unless logging is enabled.
#if LOG_DISABLED
            g_object_set(m_fpsSink, "text-overlay", FALSE, NULL);
#else
            WTFLogChannel* channel = getChannelFromName("Media");
            if (channel->state != WTFLogChannelOn)
                g_object_set(m_fpsSink, "text-overlay", FALSE, NULL);
#endif // LOG_DISABLED

            if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink), "video-sink")) {
                g_object_set(m_fpsSink, "video-sink", m_webkitVideoSink.get(), NULL);
#if USE(NATIVE_FULLSCREEN_VIDEO)
                gst_bin_add(GST_BIN(m_videoSinkBin.get()), m_fpsSink);
#endif
                actualVideoSink = m_fpsSink;
            } else
                m_fpsSink = 0;
        } else
            m_fpsSink = 0;
    }

    if (!m_fpsSink) {
#if USE(NATIVE_FULLSCREEN_VIDEO)
        gst_bin_add(GST_BIN(m_videoSinkBin.get()), m_webkitVideoSink.get());
#endif
        actualVideoSink = m_webkitVideoSink.get();
    }

    ASSERT(actualVideoSink);

#if USE(NATIVE_FULLSCREEN_VIDEO)
    // Faster elements linking.
    gst_element_link_pads_full(queue, "src", actualVideoSink, "sink", GST_PAD_LINK_CHECK_NOTHING);

    // Add a ghostpad to the bin so it can proxy to the tee.
    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(videoTee, "sink"));
    gst_element_add_pad(m_videoSinkBin.get(), gst_ghost_pad_new("sink", pad.get()));

    // Set the bin as video sink of playbin.
    return m_videoSinkBin.get();
#else
    return actualVideoSink;
#endif
}

// Returns the size of the video.
IntSize MediaPlayerPrivateGStreamerBase::naturalSize() const
{
    if (!hasVideo())
        return IntSize();

    if (!m_videoSize.isEmpty())
        return m_videoSize;

#ifdef GST_API_VERSION_1
    GRefPtr<GstCaps> caps = currentVideoSinkCaps();
#else
    g_mutex_lock(m_bufferMutex);
    GRefPtr<GstCaps> caps = m_buffer ? GST_BUFFER_CAPS(m_buffer) : 0;
    g_mutex_unlock(m_bufferMutex);
#endif
    if (!caps)
        return IntSize();

    // TODO: handle possible clean aperture data. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596571
    // TODO: handle possible transformation matrix. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596326

    // Get the video PAR and original size; if this fails the
    // video-sink has likely not yet negotiated its caps.
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    IntSize originalSize;
    GstVideoFormat format;
    if (!getVideoSizeAndFormatFromCaps(caps.get(), originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
        return IntSize();

    LOG_MEDIA_MESSAGE("Original video size: %dx%d", originalSize.width(), originalSize.height());
    LOG_MEDIA_MESSAGE("Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);

    // Calculate DAR based on PAR and video size.
    int displayWidth = originalSize.width() * pixelAspectRatioNumerator;
    int displayHeight = originalSize.height() * pixelAspectRatioDenominator;

    // Divide display width and height by their GCD to avoid possible overflows.
    int displayAspectRatioGCD = greatestCommonDivisor(displayWidth, displayHeight);
    displayWidth /= displayAspectRatioGCD;
    displayHeight /= displayAspectRatioGCD;

    // Apply DAR to original video size. This is the same behavior as in xvimagesink's setcaps function.
    guint64 width = 0, height = 0;
    if (!(originalSize.height() % displayHeight)) {
        LOG_MEDIA_MESSAGE("Keeping video original height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    } else if (!(originalSize.width() % displayWidth)) {
        LOG_MEDIA_MESSAGE("Keeping video original width");
        height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth);
        width = static_cast<guint64>(originalSize.width());
    } else {
        LOG_MEDIA_MESSAGE("Approximating while keeping original video height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    }

    LOG_MEDIA_MESSAGE("Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
    m_videoSize = IntSize(static_cast<int>(width), static_cast<int>(height));
    return m_videoSize;
}

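// Worked example for the DAR computation above (values are illustrative):
// a 720x576 stream with PAR 16/15 gives displayWidth = 720 * 16 = 11520 and
// displayHeight = 576 * 15 = 8640; dividing both by their GCD (2880) yields
// a 4:3 DAR. Since 576 % 3 == 0, the original height is kept and
// width = gst_util_uint64_scale_int(576, 4, 3) = 768, so naturalSize()
// reports 768x576.
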
virtual bool runOpenPanel(WebPageProxy*, WebFrameProxy*, WebOpenPanelParameters* parameters, WebOpenPanelResultListenerProxy* listener) override
{
    GRefPtr<WebKitFileChooserRequest> request = adoptGRef(webkitFileChooserRequestCreate(parameters, listener));
    webkitWebViewRunFileChooserRequest(m_webView, request.get());
    return true;
}

void AudioSourceProviderGStreamer::handleRemovedDeinterleavePad(GstPad* pad)
{
    m_deinterleaveSourcePads--;

    // Remove the queue ! appsink chain downstream of deinterleave.
    GQuark quark = g_quark_from_static_string("peer");
    GstPad* sinkPad = reinterpret_cast<GstPad*>(g_object_get_qdata(G_OBJECT(pad), quark));
    GRefPtr<GstElement> queue = adoptGRef(gst_pad_get_parent_element(sinkPad));
    GRefPtr<GstPad> queueSrcPad = adoptGRef(gst_element_get_static_pad(queue.get(), "src"));
    GRefPtr<GstPad> appsinkSinkPad = adoptGRef(gst_pad_get_peer(queueSrcPad.get()));
    GRefPtr<GstElement> sink = adoptGRef(gst_pad_get_parent_element(appsinkSinkPad.get()));
    gst_element_set_state(sink.get(), GST_STATE_NULL);
    gst_element_set_state(queue.get(), GST_STATE_NULL);
    gst_element_unlink(queue.get(), sink.get());
    gst_bin_remove_many(GST_BIN(m_audioSinkBin.get()), queue.get(), sink.get(), nullptr);
}

virtual bool decidePolicyForGeolocationPermissionRequest(WebPageProxy*, WebFrameProxy*, WebSecurityOrigin*, GeolocationPermissionRequestProxy* permissionRequest) override
{
    GRefPtr<WebKitGeolocationPermissionRequest> geolocationPermissionRequest = adoptGRef(webkitGeolocationPermissionRequestCreate(permissionRequest));
    webkitWebViewMakePermissionRequest(m_webView, WEBKIT_PERMISSION_REQUEST(geolocationPermissionRequest.get()));
    return true;
}

static void testWebKitSettingsUserAgent(WebViewTest* test, gconstpointer)
{
    GRefPtr<WebKitSettings> settings = adoptGRef(webkit_settings_new());
    CString defaultUserAgent = webkit_settings_get_user_agent(settings.get());
    webkit_web_view_set_settings(test->m_webView, settings.get());

    g_assert(g_strstr_len(defaultUserAgent.data(), -1, "AppleWebKit"));
    g_assert(g_strstr_len(defaultUserAgent.data(), -1, "Safari"));

    webkit_settings_set_user_agent(settings.get(), 0);
    g_assert_cmpstr(defaultUserAgent.data(), ==, webkit_settings_get_user_agent(settings.get()));
    assertThatUserAgentIsSentInHeaders(test, defaultUserAgent.data());

    webkit_settings_set_user_agent(settings.get(), "");
    g_assert_cmpstr(defaultUserAgent.data(), ==, webkit_settings_get_user_agent(settings.get()));

    const char* funkyUserAgent = "Funky!";
    webkit_settings_set_user_agent(settings.get(), funkyUserAgent);
    g_assert_cmpstr(funkyUserAgent, ==, webkit_settings_get_user_agent(settings.get()));
    assertThatUserAgentIsSentInHeaders(test, funkyUserAgent);

    webkit_settings_set_user_agent_with_application_details(settings.get(), "WebKitGTK+", 0);
    const char* userAgentWithNullVersion = webkit_settings_get_user_agent(settings.get());
    g_assert_cmpstr(g_strstr_len(userAgentWithNullVersion, -1, defaultUserAgent.data()), ==, userAgentWithNullVersion);
    g_assert(g_strstr_len(userAgentWithNullVersion, -1, "WebKitGTK+"));

    webkit_settings_set_user_agent_with_application_details(settings.get(), "WebKitGTK+", "");
    g_assert_cmpstr(webkit_settings_get_user_agent(settings.get()), ==, userAgentWithNullVersion);

    webkit_settings_set_user_agent_with_application_details(settings.get(), "WebCatGTK+", "3.4.5");
    const char* newUserAgent = webkit_settings_get_user_agent(settings.get());
    g_assert(g_strstr_len(newUserAgent, -1, "3.4.5"));
    g_assert(g_strstr_len(newUserAgent, -1, "WebCatGTK+"));

    GUniquePtr<char> applicationUserAgent(g_strdup_printf("%s %s", defaultUserAgent.data(), "WebCatGTK+/3.4.5"));
    g_assert_cmpstr(applicationUserAgent.get(), ==, webkit_settings_get_user_agent(settings.get()));
}

virtual PassRefPtr<WebPageProxy> createNewPage(WebPageProxy*, WebFrameProxy*, const WebCore::ResourceRequest& resourceRequest, const WebCore::WindowFeatures& windowFeatures, const NavigationActionData& navigationActionData) override
{
    GRefPtr<WebKitURIRequest> request = adoptGRef(webkitURIRequestCreateForResourceRequest(resourceRequest));
    WebKitNavigationAction navigationAction(request.get(), navigationActionData);
    return webkitWebViewCreateNewPage(m_webView, windowFeatures, &navigationAction);
}

static void didReceiveURIRequest(WKSoupRequestManagerRef soupRequestManagerRef, WKURLRef urlRef, WKPageRef initiatingPageRef, uint64_t requestID, const void* clientInfo)
{
    WebKitWebContext* webContext = WEBKIT_WEB_CONTEXT(clientInfo);
    GRefPtr<WebKitURISchemeRequest> request = adoptGRef(webkitURISchemeRequestCreate(webContext,
        toImpl(soupRequestManagerRef), toImpl(urlRef), toImpl(initiatingPageRef), requestID));
    webkitWebContextReceivedURIRequest(webContext, request.get());
}

static void didInitiateLoadForResource(WKPageRef, WKFrameRef wkFrame, uint64_t resourceIdentifier, WKURLRequestRef wkRequest, bool pageIsProvisionallyLoading, const void* clientInfo)
{
    GRefPtr<WebKitURIRequest> request = adoptGRef(webkitURIRequestCreateForResourceRequest(toImpl(wkRequest)->resourceRequest()));
    webkitWebViewResourceLoadStarted(WEBKIT_WEB_VIEW(clientInfo), toImpl(wkFrame), resourceIdentifier, request.get());
}