// Enumerates the requested capture device kinds and wraps each device in a
// RealtimeMediaSourceQt5WebRTC keyed by a freshly generated UUID.
// Note: ids are regenerated on every call — they are not stable across
// enumerations.
RealtimeMediaSourceQt5WebRTCMap RealtimeMediaSourceCenterQt5WebRTC::enumerateSources(bool needsAudio, bool needsVideo)
{
    RealtimeMediaSourceQt5WebRTCMap sourceMap;
    if (needsAudio) {
        std::vector<std::string> audioDevices;
        WRTCInt::enumerateDevices(WRTCInt::AUDIO, audioDevices);
        for (auto& device : audioDevices) {
            String name(device.c_str());
            String id(createCanonicalUUIDString());
            printf("audio device id='%s', name='%s'\n", id.utf8().data(), name.utf8().data());
            RefPtr<RealtimeMediaSourceQt5WebRTC> audioSource = adoptRef(new RealtimeAudioSourceQt5WebRTC(id, name));
            sourceMap.add(id, audioSource.release());
        }
    }
    if (needsVideo) {
        std::vector<std::string> videoDevices;
        WRTCInt::enumerateDevices(WRTCInt::VIDEO, videoDevices);
        for (auto& device : videoDevices) {
            String name(device.c_str());
            String id(createCanonicalUUIDString());
            printf("video device id='%s', name='%s'\n", id.utf8().data(), name.utf8().data());
            RefPtr<RealtimeMediaSourceQt5WebRTC> videoSource = adoptRef(new RealtimeVideoSourceQt5WebRTC(id, name));
            sourceMap.add(id, videoSource.release());
        }
    }
    // Return the local by value: NRVO/implicit move applies here. Wrapping it
    // in WTFMove() (as the old code did) inhibits copy elision and is flagged
    // by the WebKit style checker.
    return sourceMap;
}
// Builds a MediaStream from raw source vectors: creates a descriptor with a
// freshly generated stream id, announces it to the MediaStreamCenter, then
// wraps it in a MediaStream bound to the given execution context.
static PassRefPtr<MediaStream> createFromSourceVectors(ScriptExecutionContext* context, const MediaStreamSourceVector& audioSources, const MediaStreamSourceVector& videoSources)
{
    RefPtr<MediaStreamDescriptor> streamDescriptor = MediaStreamDescriptor::create(createCanonicalUUIDString(), audioSources, videoSources);
    MediaStreamCenter::instance().didCreateMediaStream(streamDescriptor.get());
    return MediaStream::create(context, streamDescriptor.release());
}
// Shows a notification through the platform presenter. Returns false when the
// notification is already showing or the context lacks permission.
bool NotificationPresenterImpl::show(Notification* notification)
{
    ASSERT(notification);
    ASSERT(notification->scriptExecutionContext());

    RefPtr<Notification> protectedNotification = notification;
    if (m_notifications.contains(protectedNotification))
        return false;
    if (checkPermission(notification->scriptExecutionContext()) != NotificationPresenter::PermissionAllowed)
        return false;

    // Track the live notification under a freshly generated UUID.
    String uuid = createCanonicalUUIDString();
    m_notifications.add(protectedNotification, uuid);

    String message;
    if (protectedNotification->isHTML()) {
        // FIXME: Load and display HTML content.
        message = protectedNotification->url().string();
    } else {
        // FIXME: Strip the content into one line.
        message = protectedNotification->contents().title() + ": " + protectedNotification->contents().body();
    }

    m_platformPresenter->show(std::string(uuid.utf8().data()), std::string(message.utf8().data()));
    return true;
}
// A DOMFileSystem rooted at the directory containing the given file. The
// filesystem name is a freshly generated UUID, so every instance is distinct;
// work is serialized on a dedicated WorkQueue.
DOMFileSystem::DOMFileSystem(Ref<File>&& file)
    : m_name(createCanonicalUUIDString())
    , m_file(WTFMove(file))
    // Root path is the directory portion of the backing file's path.
    , m_rootPath(FileSystem::directoryName(m_file->path()))
    , m_workQueue(WorkQueue::create("DOMFileSystem work queue"))
{
    // directoryName() is expected to yield a path with no trailing slash.
    ASSERT(!m_rootPath.endsWith('/'));
}
// Forwards a notification-permission request to the client, keyed by a fresh
// UUID so the eventual response can be routed back to the origin and callback.
void NotificationManager::requestPermission(ScriptExecutionContext* context, PassRefPtr<NotificationPermissionCallback> callback)
{
    SecurityOrigin* origin = context->securityOrigin();
    String permissionRequestID = createCanonicalUUIDString();

    // Record the request in both directions plus the completion callback.
    m_originToIDMap.set(origin, permissionRequestID);
    m_idToOriginMap.set(permissionRequestID, origin);
    m_idToCallbackMap.set(permissionRequestID, callback);

    m_webPagePrivate->client()->requestNotificationPermission(permissionRequestID, origin->toString());
}
// Cloning constructor: shares the underlying source with 'other' but takes a
// brand-new track id, copying the enabled/ended state at clone time.
MediaStreamTrackPrivate::MediaStreamTrackPrivate(const MediaStreamTrackPrivate& other)
    : RefCounted()
    , m_source(other.source())
    , m_id(createCanonicalUUIDString())
    , m_isEnabled(other.enabled())
    , m_isEnded(other.ended())
{
    // Observe the shared source so this clone also hears its state changes.
    m_source->addObserver(this);
}
// Produces a unique on-disk database file name of the form "<uuid>.db".
static String generateDatabaseFileName()
{
    StringBuilder fileName;
    fileName.append(createCanonicalUUIDString());
    fileName.appendLiteral(".db");
    return fileName.toString();
}
// Creates "<prefix><uuid>" in the system temp directory and opens it for
// read/write. On success returns the full path and sets 'handle'; on failure
// returns the null String (handle is left invalid).
String openTemporaryFile(const String& prefix, PlatformFileHandle& handle)
{
    GUniquePtr<gchar> baseName(g_strdup_printf("%s%s", prefix.utf8().data(), createCanonicalUUIDString().utf8().data()));
    GUniquePtr<gchar> fullPath(g_build_filename(g_get_tmp_dir(), baseName.get(), NULL));
    GRefPtr<GFile> tempFile = adoptGRef(g_file_new_for_path(fullPath.get()));

    // No cancellable and no GError out-param: failure is signalled solely via
    // an invalid handle.
    handle = g_file_create_readwrite(tempFile.get(), G_FILE_CREATE_NONE, 0, 0);
    return isHandleValid(handle) ? String::fromUTF8(fullPath.get()) : String();
}
// Base constructor for realtime media sources. Callers may pass an empty id,
// in which case a unique one is generated; the persistent id mirrors the
// (possibly generated) id.
RealtimeMediaSource::RealtimeMediaSource(const String& id, Type type, const String& name)
    : m_id(id)
    , m_type(type)
    , m_name(name)
{
    // FIXME(147205): Need to implement fitness score for constraints
    if (m_id.isEmpty())
        m_id = createCanonicalUUIDString();
    m_persistentID = m_id;
}
// Registers the notification under a fresh UUID (in both lookup directions),
// remembers it against its script execution context, and asks the client to
// display it. Always reports success.
bool NotificationManager::show(Notification* notification)
{
    String id = createCanonicalUUIDString();
    m_notificationMap.set(notification, id);
    m_notificationIDMap.set(id, notification);

    // Append the id to this context's list, creating the list on first use.
    auto contextEntry = m_notificationContextMap.add(notification->scriptExecutionContext(), Vector<String>()).iterator;
    contextEntry->value.append(id);

    m_webPagePrivate->client()->showNotification(id, notification->title(), notification->body(), notification->iconURL().string(), notification->tag(), notification->scriptExecutionContext()->securityOrigin()->toString());
    return true;
}
// Mints a blob URL of the form "blob:<escaped origin>/<uuid>". An opaque
// ("null") origin cannot mint blob URLs, so it yields an empty KURL.
KURL BlobURL::createBlobURL(const String& originString)
{
    ASSERT(!originString.isEmpty());
    if (originString == "null")
        return KURL();

    String blobURLString = kBlobProtocol;
    blobURLString += ":";
    // Escape the origin so reserved characters cannot break the URL structure.
    blobURLString += encodeWithURLEscapeSequences(originString);
    blobURLString += "/";
    blobURLString += createCanonicalUUIDString();
    return KURL(ParsedURLString, blobURLString);
}
// Base constructor for media stream sources; starts in the New ready state,
// enabled, unmuted, writable and local. An empty id is replaced by a freshly
// generated UUID.
MediaStreamSource::MediaStreamSource(const String& id, Type type, const String& name)
    : m_id(id)
    , m_type(type)
    , m_name(name)
    , m_readyState(New)
    , m_enabled(true)
    , m_muted(false)
    , m_readonly(false)
    , m_remote(false)
{
    if (!m_id.isEmpty())
        return;
    m_id = createCanonicalUUIDString();
}
// Wires a WebAudio destination node up as a live audio MediaStreamSource and
// wraps it in a single-audio-track MediaStream owned by this handler.
MediaStreamAudioDestinationHandler::MediaStreamAudioDestinationHandler(AudioNode& node, size_t numberOfChannels)
    : AudioBasicInspectorHandler(NodeTypeMediaStreamAudioDestination, node, node.context()->sampleRate(), numberOfChannels)
    , m_mixBus(AudioBus::create(numberOfChannels, ProcessingSizeInFrames))
{
    // The id is prefixed with "WebAudio-" so these sources are distinguishable
    // from capture devices.
    m_source = MediaStreamSource::create("WebAudio-" + createCanonicalUUIDString(), MediaStreamSource::TypeAudio, "MediaStreamAudioDestinationNode", false, true, MediaStreamSource::ReadyStateLive, true);
    MediaStreamSourceVector audioSources;
    audioSources.append(m_source.get());
    // No video: the stream carries exactly one (audio) track.
    MediaStreamSourceVector videoSources;
    m_stream = MediaStream::create(node.context()->executionContext(), MediaStreamDescriptor::create(audioSources, videoSources));
    // Notify the center after the stream and its tracks exist.
    MediaStreamCenter::instance().didCreateMediaStreamAndTracks(m_stream->descriptor());
    m_source->setAudioFormat(numberOfChannels, node.context()->sampleRate());
    initialize();
}
// Returns the track id, generating and caching one on first access.
// NOTE(review): this const accessor assigns m_id, so m_id is presumably
// declared mutable — confirm in the class definition.
const String& MediaStreamTrack::id() const
{
    if (!m_id.isEmpty())
        return m_id;
    // The spec says:
    // Unless a MediaStreamTrack object is created as a part a of special purpose algorithm that
    // specifies how the track id must be initialized, the user agent must generate a globally
    // unique identifier string and initialize the object's id attribute to that string.
    if (m_source && m_source->useIDForTrackID())
        return m_source->id();
    m_id = createCanonicalUUIDString();
    return m_id;
}
// Base constructor for realtime media sources; starts running, unmuted,
// writable and local, with a zero fitness score. An empty id is replaced by a
// freshly generated UUID; the persistent id mirrors the final id.
RealtimeMediaSource::RealtimeMediaSource(const String& id, Type type, const String& name)
    : m_id(id)
    , m_type(type)
    , m_name(name)
    , m_stopped(false)
    , m_muted(false)
    , m_readonly(false)
    , m_remote(false)
    , m_fitnessScore(0)
{
    // FIXME(147205): Need to implement fitness score for constraints
    if (m_id.isEmpty())
        m_id = createCanonicalUUIDString();
    m_persistentId = m_id;
}
// Shows a notification via the platform presenter. Fails when the context
// lacks permission or the notification is already being shown.
bool NotificationPresenterImpl::show(Notification* n)
{
    // Permission is checked before anything else.
    if (NotificationPresenter::PermissionAllowed != checkPermission(n->scriptExecutionContext()))
        return false;

    RefPtr<Notification> protectedNotification = n;
    if (m_notifications.contains(protectedNotification))
        return false;

    // Track the notification under a freshly generated UUID.
    String uuid = createCanonicalUUIDString();
    m_notifications.add(protectedNotification, uuid);

    String message;
    if (protectedNotification->isHTML()) {
        // FIXME: load and display HTML content
        message = protectedNotification->url().string();
    } else {
        // FIXME: strip to one line
        message = protectedNotification->contents().title() + ": " + protectedNotification->contents().body();
    }

    m_platformPresenter->show(std::string(uuid.utf8().data()), std::string(message.utf8().data()));
    return true;
}
// Clone constructor, following the spec's clone() steps.
MediaStreamTrack::MediaStreamTrack(MediaStreamTrack* other)
    : ActiveDOMObject(other->scriptExecutionContext())
{
    suspendIfNeeded();

    // When the clone() method is invoked, the user agent must run the following steps:
    // 1. Let trackClone be a newly constructed MediaStreamTrack object.
    // 2. Initialize trackClone's id attribute to a newly generated value.
    m_id = createCanonicalUUIDString();

    // 3. Let trackClone inherit this track's underlying source, kind, label
    //    and enabled attributes.
    setSource(other->source());
    m_readyState = m_source ? m_source->readyState() : MediaStreamSource::New;
    m_enabled = other->enabled();

    // The "clone" steps don't say anything about 'muted', but 4.3.1 says a
    // newly created track is always enabled unless stated otherwise (for
    // example when cloned) and its muted state reflects the state of the
    // source at the time the track is created.
    m_muted = other->muted();
    m_eventDispatchScheduled = false;
    m_stopped = other->stopped();
}
// Creates a track private that owns the given source, with a freshly
// generated UUID as its track id.
// NOTE(review): 'WTF::move' is the older spelling; newer WebKit trees use the
// WTFMove() macro — confirm which one this tree provides before changing.
RefPtr<MediaStreamTrackPrivate> MediaStreamTrackPrivate::create(RefPtr<RealtimeMediaSource>&& source)
{
    return adoptRef(new MediaStreamTrackPrivate(WTF::move(source), createCanonicalUUIDString()));
}
// Convenience factory: builds a descriptor with a freshly generated stream id.
// Returns a raw pointer; ownership is presumably managed elsewhere (e.g. by a
// garbage-collected heap) — TODO confirm against this tree's memory model.
MediaStreamDescriptor* MediaStreamDescriptor::create(const MediaStreamSourceVector& audioSources, const MediaStreamSourceVector& videoSources)
{
    return new MediaStreamDescriptor(createCanonicalUUIDString(), audioSources, videoSources);
}
// Creates a component for the given stream/source pair with a freshly
// generated UUID as its id.
PassRefPtr<MediaStreamComponent> MediaStreamComponent::create(MediaStreamDescriptor* stream, PassRefPtr<MediaStreamSource> source)
{
    return adoptRef(new MediaStreamComponent(createCanonicalUUIDString(), stream, source));
}
// Mints a blob URL of the form "blob:<escaped origin>/<uuid>"; the origin is
// escaped so reserved characters cannot break the URL structure.
KURL BlobURL::createBlobURL(const String& originString)
{
    ASSERT(!originString.isEmpty());
    String blobURLString = "blob:" + encodeWithURLEscapeSequences(originString) + '/' + createCanonicalUUIDString();
    return KURL::createIsolated(ParsedURLString, blobURLString);
}
// Convenience overload: initializes the stream with a freshly generated UUID
// as its label/id.
void WebMediaStream::initialize(const WebVector<WebMediaStreamTrack>& audioTracks, const WebVector<WebMediaStreamTrack>& videoTracks)
{
    initialize(createCanonicalUUIDString(), audioTracks, videoTracks);
}
// Component-based factory: builds a descriptor with a freshly generated
// stream id. Returns a raw pointer; ownership is presumably managed elsewhere
// — TODO confirm against this tree's memory model.
MediaStreamDescriptor* MediaStreamDescriptor::create(const MediaStreamComponentVector& audioComponents, const MediaStreamComponentVector& videoComponents)
{
    return new MediaStreamDescriptor(createCanonicalUUIDString(), audioComponents, videoComponents);
}
// Mock audio capture source for testing: uses a freshly generated UUID as the
// device id and the shared mock audio source name.
MockRealtimeAudioSource::MockRealtimeAudioSource()
    : MockRealtimeMediaSource(createCanonicalUUIDString(), RealtimeMediaSource::Audio, mockAudioSourceName())
{
}
// Creates a stream private from separate audio/video track vectors, with a
// freshly generated UUID as the stream id.
PassRefPtr<MediaStreamPrivate> MediaStreamPrivate::create(const Vector<RefPtr<MediaStreamTrackPrivate>>& audioPrivateTracks, const Vector<RefPtr<MediaStreamTrackPrivate>>& videoPrivateTracks)
{
    return adoptRef(new MediaStreamPrivate(createCanonicalUUIDString(), audioPrivateTracks, videoPrivateTracks));
}
// Creates a stream-less component (descriptor pointer 0) for the given source,
// with a freshly generated UUID as its id.
PassRefPtr<MediaStreamComponent> MediaStreamComponent::create(PassRefPtr<MediaStreamSource> source)
{
    return adoptRef(new MediaStreamComponent(createCanonicalUUIDString(), 0, source));
}
// Ref-counted factory: builds a descriptor with a freshly generated stream id
// from the given source vectors.
PassRefPtr<MediaStreamDescriptor> MediaStreamDescriptor::create(const MediaStreamSourceVector& audioSources, const MediaStreamSourceVector& videoSources)
{
    return adoptRef(new MediaStreamDescriptor(createCanonicalUUIDString(), audioSources, videoSources));
}
// Creates a stream private from a combined track vector, with a freshly
// generated UUID as the stream id.
RefPtr<MediaStreamPrivate> MediaStreamPrivate::create(const MediaStreamTrackPrivateVector& tracks)
{
    return adoptRef(new MediaStreamPrivate(createCanonicalUUIDString(), tracks));
}
// Mints a blob URL of the form "blob:<escaped origin>/<uuid>".
// Fix: the origin must be escaped before concatenation — the sibling
// implementations of this function do so via encodeWithURLEscapeSequences();
// concatenating the raw origin lets reserved URL characters corrupt the
// blob URL structure.
KURL BlobURL::createBlobURL(const String& originString)
{
    ASSERT(!originString.isEmpty());
    String urlString = "blob:" + encodeWithURLEscapeSequences(originString) + '/' + createCanonicalUUIDString();
    return KURL::createIsolated(ParsedURLString, urlString);
}
// WebAudio-backed audio source. The id is prefixed with "WebAudio-" so these
// sources are distinguishable from capture devices.
MediaStreamAudioSource::MediaStreamAudioSource()
    : RealtimeMediaSource(ASCIILiteral("WebAudio-") + createCanonicalUUIDString(), RealtimeMediaSource::Audio, "MediaStreamAudioDestinationNode")
{
}