// ResourceHandleClient adapter: hands a freshly received data buffer to the
// shared didReceiveBuffer() overload, tagging the payload as raw bytes.
void ResourceLoader::didReceiveBuffer(ResourceHandle*, Ref<SharedBuffer>&& buffer, int encodedDataLength)
{
    // The ResourceHandle* is deliberately unused; only the data matters here.
    didReceiveBuffer(WTFMove(buffer), encodedDataLength, DataPayloadBytes);
}
// Decodes the platform-specific (libsoup port) fields of a ResourceRequest
// from an IPC stream. The decode order must exactly mirror what the encoder
// wrote; any failed decode aborts immediately and returns false, leaving
// resourceRequest only partially populated.
bool ArgumentCoder<ResourceRequest>::decodePlatformData(ArgumentDecoder& decoder, ResourceRequest& resourceRequest)
{
    String url;
    if (!decoder.decode(url))
        return false;
    // URL(URL(), url) parses the string as an absolute URL (empty base).
    resourceRequest.setURL(URL(URL(), url));

    String httpMethod;
    if (!decoder.decode(httpMethod))
        return false;
    resourceRequest.setHTTPMethod(httpMethod);

    HTTPHeaderMap headers;
    if (!decoder.decode(headers))
        return false;
    resourceRequest.setHTTPHeaderFields(WTFMove(headers));

    double timeoutInterval;
    if (!decoder.decode(timeoutInterval))
        return false;
    resourceRequest.setTimeoutInterval(timeoutInterval);

    // The HTTP body is optional: a boolean flag precedes the body string.
    bool hasHTTPBody;
    if (!decoder.decode(hasHTTPBody))
        return false;
    if (hasHTTPBody) {
        String httpBody;
        if (!decoder.decode(httpBody))
            return false;
        // Body crosses the wire as a string; rebuilt as UTF-8 form data.
        resourceRequest.setHTTPBody(FormData::create(httpBody.utf8()));
    }

    String firstPartyForCookies;
    if (!decoder.decode(firstPartyForCookies))
        return false;
    resourceRequest.setFirstPartyForCookies(URL(URL(), firstPartyForCookies));

    bool allowCookies;
    if (!decoder.decode(allowCookies))
        return false;
    resourceRequest.setAllowCookies(allowCookies);

    ResourceLoadPriority priority;
    if (!decoder.decodeEnum(priority))
        return false;
    resourceRequest.setPriority(priority);

    ResourceRequestCachePolicy cachePolicy;
    if (!decoder.decodeEnum(cachePolicy))
        return false;
    resourceRequest.setCachePolicy(cachePolicy);

    ResourceRequest::Requester requester;
    if (!decoder.decodeEnum(requester))
        return false;
    resourceRequest.setRequester(requester);

    // Soup flags travel as a raw uint32_t and are cast back to the enum type.
    uint32_t soupMessageFlags;
    if (!decoder.decode(soupMessageFlags))
        return false;
    resourceRequest.setSoupMessageFlags(static_cast<SoupMessageFlags>(soupMessageFlags));

    uint64_t initiatingPageID;
    if (!decoder.decode(initiatingPageID))
        return false;
    resourceRequest.setInitiatingPageID(initiatingPageID);

    return true;
}
// API-layer wrapper constructor: builds the underlying WebKit::WebsiteDataStore
// from the given configuration (taken by value and moved into the factory).
WebsiteDataStore::WebsiteDataStore(WebKit::WebsiteDataStore::Configuration configuration)
    : m_websiteDataStore(WebKit::WebsiteDataStore::create(WTFMove(configuration)))
{
}
// Prepares filtering for `renderer`: builds the FilterData (boundaries,
// transforms, primitive graph, source image buffer) and redirects `context`
// to an offscreen buffer so the element paints into the filter's source image.
// Returns true only when painting should proceed into the redirected context;
// false means "draw nothing now" (failure, cycle, or empty drawing region —
// in the latter cases the FilterData is still cached for postApplyResource).
bool RenderSVGResourceFilter::applyResource(RenderElement& renderer, const RenderStyle&, GraphicsContext*& context, unsigned short resourceMode)
{
    ASSERT(context);
    ASSERT_UNUSED(resourceMode, resourceMode == ApplyToDefaultMode);

    if (m_filter.contains(&renderer)) {
        FilterData* filterData = m_filter.get(&renderer);
        // Re-entering while painting/applying means a reference cycle
        // (e.g. an feImage referencing the filtered element itself).
        if (filterData->state == FilterData::PaintingSource || filterData->state == FilterData::Applying)
            filterData->state = FilterData::CycleDetected;
        return false; // Already built, or we're in a cycle, or we're marked for removal. Regardless, just do nothing more now.
    }

    auto filterData = std::make_unique<FilterData>();
    FloatRect targetBoundingBox = renderer.objectBoundingBox();

    // Resolve the filter region against the target's bounding box per filterUnits.
    filterData->boundaries = SVGLengthContext::resolveRectangle<SVGFilterElement>(&filterElement(), filterElement().filterUnits(), targetBoundingBox);
    if (filterData->boundaries.isEmpty())
        return false;

    // Determine absolute transformation matrix for filter.
    AffineTransform absoluteTransform = SVGRenderingContext::calculateTransformationToOutermostCoordinateSystem(renderer);
    if (!absoluteTransform.isInvertible())
        return false;

    // Eliminate shear of the absolute transformation matrix, to be able to
    // produce unsheared tile images for feTile.
    filterData->shearFreeAbsoluteTransform = AffineTransform(absoluteTransform.xScale(), 0, 0, absoluteTransform.yScale(), 0, 0);

    // Determine absolute boundaries of the filter and the drawing region.
    FloatRect absoluteFilterBoundaries = filterData->shearFreeAbsoluteTransform.mapRect(filterData->boundaries);
    filterData->drawingRegion = renderer.strokeBoundingBox();
    filterData->drawingRegion.intersect(filterData->boundaries);
    FloatRect absoluteDrawingRegion = filterData->shearFreeAbsoluteTransform.mapRect(filterData->drawingRegion);

    // Create the SVGFilter object.
    bool primitiveBoundingBoxMode = filterElement().primitiveUnits() == SVGUnitTypes::SVG_UNIT_TYPE_OBJECTBOUNDINGBOX;
    filterData->filter = SVGFilter::create(filterData->shearFreeAbsoluteTransform, absoluteDrawingRegion, targetBoundingBox, filterData->boundaries, primitiveBoundingBoxMode);

    // Create all relevant filter primitives.
    filterData->builder = buildPrimitives(*filterData->filter);
    if (!filterData->builder)
        return false;

    // Calculate the scale factor for the use of filterRes.
    // Also see http://www.w3.org/TR/SVG/filters.html#FilterEffectsRegion
    FloatSize scale(1, 1);
    if (filterElement().hasAttribute(SVGNames::filterResAttr)) {
        scale.setWidth(filterElement().filterResX() / absoluteFilterBoundaries.width());
        scale.setHeight(filterElement().filterResY() / absoluteFilterBoundaries.height());
    }
    if (scale.isEmpty())
        return false;

    // Determine scale factor for filter. The size of intermediate ImageBuffers
    // shouldn't be bigger than kMaxFilterSize.
    FloatRect tempSourceRect = absoluteDrawingRegion;
    ImageBuffer::sizeNeedsClamping(tempSourceRect.size(), scale);
    tempSourceRect.scale(scale.width(), scale.height());

    // Set the scale level in SVGFilter.
    filterData->filter->setFilterResolution(scale);

    // Guard against pathological primitive graphs (too many effect inputs).
    static const unsigned maxTotalOfEffectInputs = 100;
    FilterEffect* lastEffect = filterData->builder->lastEffect();
    if (!lastEffect || lastEffect->totalNumberOfEffectInputs() > maxTotalOfEffectInputs)
        return false;

    RenderSVGResourceFilterPrimitive::determineFilterPrimitiveSubregion(*lastEffect);
    FloatRect subRegion = lastEffect->maxEffectRect();
    // At least one FilterEffect has a too big image size,
    // recalculate the effect sizes with new scale factors.
    if (ImageBuffer::sizeNeedsClamping(subRegion.size(), scale)) {
        filterData->filter->setFilterResolution(scale);
        RenderSVGResourceFilterPrimitive::determineFilterPrimitiveSubregion(*lastEffect);
    }

    // If the drawingRegion is empty, we have something like <g filter=".."/>.
    // Even if the target objectBoundingBox() is empty, we still have to draw
    // the last effect result image in postApplyResource.
    if (filterData->drawingRegion.isEmpty()) {
        ASSERT(!m_filter.contains(&renderer));
        filterData->savedContext = context;
        m_filter.set(&renderer, WTFMove(filterData));
        return false;
    }

    // Change the coordinate transformation applied to the filtered element to
    // reflect the resolution of the filter.
    AffineTransform effectiveTransform;
    effectiveTransform.scale(scale.width(), scale.height());
    effectiveTransform.multiply(filterData->shearFreeAbsoluteTransform);

    RenderingMode renderingMode = renderer.frame().settings().acceleratedFiltersEnabled() ? Accelerated : Unaccelerated;
    auto sourceGraphic = SVGRenderingContext::createImageBuffer(filterData->drawingRegion, effectiveTransform, ColorSpaceLinearRGB, renderingMode);
    if (!sourceGraphic) {
        // Buffer allocation failed; cache the state so postApplyResource can
        // still restore the context and clean up.
        ASSERT(!m_filter.contains(&renderer));
        filterData->savedContext = context;
        m_filter.set(&renderer, WTFMove(filterData));
        return false;
    }

    // Set the rendering mode from the page's settings.
    filterData->filter->setRenderingMode(renderingMode);

    // Redirect painting into the offscreen source-graphic buffer; the original
    // context is saved and restored in postApplyResource().
    GraphicsContext& sourceGraphicContext = sourceGraphic->context();
    filterData->sourceGraphicBuffer = WTFMove(sourceGraphic);
    filterData->savedContext = context;
    context = &sourceGraphicContext;

    ASSERT(!m_filter.contains(&renderer));
    m_filter.set(&renderer, WTFMove(filterData));

    return true;
}
// Delivers the loaded icon to the client, if one is still attached.
void FileIconLoader::iconLoaded(RefPtr<Icon>&& icon)
{
    // The client may have been detached while the icon was loading.
    if (!m_client)
        return;
    m_client->iconLoaded(WTFMove(icon));
}
// When a version-change transaction aborts, puts the previously removed
// object store back into this backing store's registry.
void MemoryIDBBackingStore::restoreObjectStoreForVersionChangeAbort(std::unique_ptr<MemoryObjectStore>&& objectStore)
{
    // Re-registering transfers ownership of the store back to the backing store.
    registerObjectStore(WTFMove(objectStore));
}
void RenderSVGResourceFilter::postApplyResource(RenderElement& renderer, GraphicsContext*& context, unsigned short resourceMode, const Path*, const RenderSVGShape*) { ASSERT(context); ASSERT_UNUSED(resourceMode, resourceMode == ApplyToDefaultMode); FilterData* filterData = m_filter.get(&renderer); if (!filterData) return; switch (filterData->state) { case FilterData::MarkedForRemoval: m_filter.remove(&renderer); return; case FilterData::CycleDetected: case FilterData::Applying: // We have a cycle if we are already applying the data. // This can occur due to FeImage referencing a source that makes use of the FEImage itself. // This is the first place we've hit the cycle, so set the state back to PaintingSource so the return stack // will continue correctly. filterData->state = FilterData::PaintingSource; return; case FilterData::PaintingSource: if (!filterData->savedContext) { removeClientFromCache(renderer); return; } context = filterData->savedContext; filterData->savedContext = 0; break; case FilterData::Built: { } // Empty } FilterEffect* lastEffect = filterData->builder->lastEffect(); if (lastEffect && !filterData->boundaries.isEmpty() && !lastEffect->filterPrimitiveSubregion().isEmpty()) { // This is the real filtering of the object. It just needs to be called on the // initial filtering process. We just take the stored filter result on a // second drawing. if (filterData->state != FilterData::Built) filterData->filter->setSourceImage(WTFMove(filterData->sourceGraphicBuffer)); // Always true if filterData is just built (filterData->state == FilterData::Built). 
if (!lastEffect->hasResult()) { filterData->state = FilterData::Applying; lastEffect->applyAll(); lastEffect->correctFilterResultIfNeeded(); lastEffect->transformResultColorSpace(ColorSpaceDeviceRGB); } filterData->state = FilterData::Built; ImageBuffer* resultImage = lastEffect->asImageBuffer(); if (resultImage) { context->concatCTM(filterData->shearFreeAbsoluteTransform.inverse().valueOr(AffineTransform())); context->scale(FloatSize(1 / filterData->filter->filterResolution().width(), 1 / filterData->filter->filterResolution().height())); context->drawImageBuffer(*resultImage, lastEffect->absolutePaintRect()); context->scale(filterData->filter->filterResolution()); context->concatCTM(filterData->shearFreeAbsoluteTransform); } } filterData->sourceGraphicBuffer.reset(); }
// Creates the renderer backing a <pattern> element.
RenderPtr<RenderElement> SVGPatternElement::createElementRenderer(Ref<RenderStyle>&& style, const RenderTreePosition&)
{
    // The render-tree position is irrelevant for resource containers.
    return createRenderer<RenderSVGResourcePattern>(*this, WTFMove(style));
}
// Caches a CORS preflight result, keyed by (origin, url). Setting an existing
// key replaces the stale entry. Main-thread only.
void CrossOriginPreflightResultCache::appendEntry(const String& origin, const URL& url, std::unique_ptr<CrossOriginPreflightResultCacheItem> preflightResult)
{
    ASSERT(isMainThread());
    m_preflightHashMap.set(std::make_pair(origin, url), WTFMove(preflightResult));
}
// Creates the renderer backing a <mask> element.
RenderPtr<RenderElement> SVGMaskElement::createElementRenderer(RenderStyle&& style, const RenderTreePosition&)
{
    // The render-tree position is irrelevant for resource containers.
    return createRenderer<RenderSVGResourceMasker>(*this, WTFMove(style));
}
// Wraps a newly created Document in a JS wrapper for the given global object.
JSValue toJSNewlyCreated(ExecState* state, JSDOMGlobalObject* globalObject, Ref<Document>&& document)
{
    // Ownership of the document moves into the wrapper.
    return createNewDocumentWrapper(*state, *globalObject, WTFMove(document));
}
// Builds the SVG-specific root inline box used for line layout of this
// text renderer.
std::unique_ptr<RootInlineBox> RenderSVGText::createRootInlineBox()
{
    auto rootBox = std::make_unique<SVGRootInlineBox>(*this);
    rootBox->setHasVirtualLogicalHeight();
    // WTFMove is needed for the unique_ptr<SVGRootInlineBox> ->
    // unique_ptr<RootInlineBox> conversion on return.
    return WTFMove(rootBox);
}
// Registers a legacy CDM (content decryption module) factory: the creation
// function plus its two capability predicates, bundled into a CDMFactory and
// appended to the global registry.
// NOTE(review): raw `new` with no visible matching delete — presumably the
// registry entries live for the process lifetime; confirm that
// installedCDMFactories() is the intended owner.
void LegacyCDM::registerCDMFactory(CreateCDM&& constructor, CDMSupportsKeySystem supportsKeySystem, CDMSupportsKeySystemAndMimeType supportsKeySystemAndMimeType)
{
    installedCDMFactories().append(new CDMFactory(WTFMove(constructor), supportsKeySystem, supportsKeySystemAndMimeType));
}
// Bundles a CDM creation function with its two capability predicates.
// The parameters intentionally shadow the members of the same names; each
// member is initialized from its argument (the constructor is moved in).
CDMFactory(CreateCDM&& constructor, CDMSupportsKeySystem supportsKeySystem, CDMSupportsKeySystemAndMimeType supportsKeySystemAndMimeType)
    : constructor(WTFMove(constructor))
    , supportsKeySystem(supportsKeySystem)
    , supportsKeySystemAndMimeType(supportsKeySystemAndMimeType)
{
}
// Holds the delayed IPC reply for a synchronous load so it can be answered
// when the load completes. The reply must be non-null (asserted).
SynchronousLoadData(RefPtr<Messages::NetworkConnectionToWebProcess::PerformSynchronousLoad::DelayedReply>&& reply)
    : delayedReply(WTFMove(reply))
{
    ASSERT(delayedReply);
}
// Renderer for <rect>. Starts out assuming the rect can be painted directly;
// m_usePathFallback is flipped later when a full path is required
// (presumably for rounded corners etc. — confirm in the shape-update code).
RenderSVGRect::RenderSVGRect(SVGRectElement& element, Ref<RenderStyle>&& style)
    : RenderSVGShape(element, WTFMove(style))
    , m_usePathFallback(false)
{
}
// Constructs a SharedBuffer backed by a memory-mapped file. The mapping is
// owned by m_fileData; m_buffer is still given a heap DataBuffer (presumably
// left empty since the mapped data is the content — confirm in accessors).
SharedBuffer::SharedBuffer(MappedFileData&& fileData)
    : m_buffer(adoptRef(new DataBuffer))
    , m_fileData(WTFMove(fileData))
{
}
// Schedules a resource load. Loads that can be satisfied locally (archive
// resources, app-cache hits, data: URLs, QuickLook, GResources) are handled
// in the web process; everything else is sent to the NetworkProcess via IPC.
// On IPC send failure the load is scheduled to fail asynchronously.
void WebLoaderStrategy::scheduleLoad(ResourceLoader* resourceLoader, CachedResource* resource, bool shouldClearReferrerOnHTTPSToHTTPRedirect)
{
    ASSERT(resourceLoader);
    ResourceLoadIdentifier identifier = resourceLoader->identifier();
    ASSERT(identifier);

#if ENABLE(WEB_ARCHIVE) || ENABLE(MHTML)
    // If the DocumentLoader schedules this as an archive resource load,
    // then we should remember the ResourceLoader in our records but not schedule it in the NetworkProcess.
    if (resourceLoader->documentLoader()->scheduleArchiveLoad(resourceLoader, resourceLoader->request())) {
        LOG(NetworkScheduling, "(WebProcess) WebLoaderStrategy::scheduleLoad, url '%s' will be handled as an archive resource.", resourceLoader->url().string().utf8().data());
        m_webResourceLoaders.set(identifier, WebResourceLoader::create(resourceLoader));
        return;
    }
#endif

    // Application cache may satisfy the request without network access.
    if (resourceLoader->documentLoader()->applicationCacheHost()->maybeLoadResource(resourceLoader, resourceLoader->request(), resourceLoader->request().url())) {
        LOG(NetworkScheduling, "(WebProcess) WebLoaderStrategy::scheduleLoad, url '%s' will be loaded from application cache.", resourceLoader->url().string().utf8().data());
        m_webResourceLoaders.set(identifier, WebResourceLoader::create(resourceLoader));
        return;
    }

    // data: URLs are decoded in-process; no network round trip needed.
    if (resourceLoader->request().url().protocolIsData()) {
        LOG(NetworkScheduling, "(WebProcess) WebLoaderStrategy::scheduleLoad, url '%s' will be loaded as data.", resourceLoader->url().string().utf8().data());
        startLocalLoad(*resourceLoader);
        return;
    }

#if USE(QUICK_LOOK)
    if (resourceLoader->request().url().protocolIs(QLPreviewProtocol())) {
        LOG(NetworkScheduling, "(WebProcess) WebLoaderStrategy::scheduleLoad, url '%s' will be handled as a QuickLook resource.", resourceLoader->url().string().utf8().data());
        startLocalLoad(*resourceLoader);
        return;
    }
#endif

#if USE(SOUP)
    // For apps that call g_resource_load in a web extension.
    // https://blogs.gnome.org/alexl/2012/01/26/resources-in-glib/
    if (resourceLoader->request().url().protocolIs("resource")) {
        LOG(NetworkScheduling, "(WebProcess) WebLoaderStrategy::scheduleLoad, url '%s' will be handled as a GResource.", resourceLoader->url().string().utf8().data());
        startLocalLoad(*resourceLoader);
        return;
    }
#endif

    // NOTE(review): this LOG uses latin1() while the others use utf8() —
    // likely an oversight; harmless for logging but inconsistent.
    LOG(NetworkScheduling, "(WebProcess) WebLoaderStrategy::scheduleLoad, url '%s' will be scheduled with the NetworkProcess with priority %d", resourceLoader->url().string().latin1().data(), static_cast<int>(resourceLoader->request().priority()));

    ContentSniffingPolicy contentSniffingPolicy = resourceLoader->shouldSniffContent() ? SniffContent : DoNotSniffContent;
    StoredCredentials allowStoredCredentials = resourceLoader->shouldUseCredentialStorage() ? AllowStoredCredentials : DoNotAllowStoredCredentials;

    // FIXME: Some entities in WebCore use WebCore's "EmptyFrameLoaderClient" instead of having a proper WebFrameLoaderClient.
    // EmptyFrameLoaderClient shouldn't exist and everything should be using a WebFrameLoaderClient,
    // but in the meantime we have to make sure not to mis-cast.
    WebFrameLoaderClient* webFrameLoaderClient = toWebFrameLoaderClient(resourceLoader->frameLoader()->client());
    WebFrame* webFrame = webFrameLoaderClient ? webFrameLoaderClient->webFrame() : 0;
    WebPage* webPage = webFrame ? webFrame->page() : 0;

    NetworkResourceLoadParameters loadParameters;
    loadParameters.identifier = identifier;
    loadParameters.webPageID = webPage ? webPage->pageID() : 0;
    loadParameters.webFrameID = webFrame ? webFrame->frameID() : 0;
    loadParameters.sessionID = webPage ? webPage->sessionID() : SessionID::defaultSessionID();
    loadParameters.request = resourceLoader->request();
    loadParameters.contentSniffingPolicy = contentSniffingPolicy;
    loadParameters.allowStoredCredentials = allowStoredCredentials;
    // If there is no WebFrame then this resource cannot be authenticated with the client.
    loadParameters.clientCredentialPolicy = (webFrame && webPage && resourceLoader->isAllowedToAskUserForCredentials()) ? AskClientForAllCredentials : DoNotAskClientForAnyCredentials;
    loadParameters.shouldClearReferrerOnHTTPSToHTTPRedirect = shouldClearReferrerOnHTTPSToHTTPRedirect;
    loadParameters.defersLoading = resourceLoader->defersLoading();
    loadParameters.needsCertificateInfo = resourceLoader->shouldIncludeCertificateInfo();
    loadParameters.maximumBufferingTime = maximumBufferingTime(resource);

    ASSERT((loadParameters.webPageID && loadParameters.webFrameID) || loadParameters.clientCredentialPolicy == DoNotAskClientForAnyCredentials);

    if (!WebProcess::singleton().networkConnection()->connection()->send(Messages::NetworkConnectionToWebProcess::ScheduleResourceLoad(loadParameters), 0)) {
        WEBLOADERSTRATEGY_LOG_ALWAYS_ERROR("WebLoaderStrategy::scheduleLoad: Unable to schedule resource with the NetworkProcess with priority = %d, pageID = %llu, frameID = %llu", static_cast<int>(resourceLoader->request().priority()), static_cast<unsigned long long>(loadParameters.webPageID), static_cast<unsigned long long>(loadParameters.webFrameID));
        // We probably failed to schedule this load with the NetworkProcess because it had crashed.
        // This load will never succeed so we will schedule it to fail asynchronously.
        scheduleInternallyFailedLoad(resourceLoader);
        return;
    }

    auto webResourceLoader = WebResourceLoader::create(resourceLoader);
    WEBLOADERSTRATEGY_LOG_ALWAYS("WebLoaderStrategy::scheduleLoad: Resource will be scheduled with the NetworkProcess with priority = %d, pageID = %llu, frameID = %llu, WebResourceLoader = %p", static_cast<int>(resourceLoader->request().priority()), static_cast<unsigned long long>(loadParameters.webPageID), static_cast<unsigned long long>(loadParameters.webFrameID), webResourceLoader.ptr());
    m_webResourceLoaders.set(identifier, WTFMove(webResourceLoader));
}
// Event carrying a media track (TrackBase); the event takes ownership of the
// track reference.
TrackEvent::TrackEvent(const AtomicString& type, bool canBubble, bool cancelable, Ref<TrackBase>&& track)
    : Event(type, canBubble, cancelable)
    , m_track(WTFMove(track))
{
}
bool GraphicsContext3D::ImageExtractor::extractImage(bool premultiplyAlpha, bool ignoreGammaAndColorProfile) { if (!m_image) return false; // We need this to stay in scope because the native image is just a shallow copy of the data. AlphaOption alphaOption = premultiplyAlpha ? AlphaOption::Premultiplied : AlphaOption::NotPremultiplied; GammaAndColorProfileOption gammaAndColorProfileOption = ignoreGammaAndColorProfile ? GammaAndColorProfileOption::Ignored : GammaAndColorProfileOption::Applied; m_decoder = new ImageSource(nullptr, alphaOption, gammaAndColorProfileOption); if (!m_decoder) return false; ImageSource& decoder = *m_decoder; m_alphaOp = AlphaDoNothing; if (m_image->data()) { decoder.setData(m_image->data(), true); if (!decoder.frameCount()) return false; m_imageSurface = decoder.createFrameImageAtIndex(0); } else { m_imageSurface = m_image->nativeImageForCurrentFrame(); // 1. For texImage2D with HTMLVideoElment input, assume no PremultiplyAlpha had been applied and the alpha value is 0xFF for each pixel, // which is true at present and may be changed in the future and needs adjustment accordingly. // 2. For texImage2D with HTMLCanvasElement input in which Alpha is already Premultiplied in this port, // do AlphaDoUnmultiply if UNPACK_PREMULTIPLY_ALPHA_WEBGL is set to false. 
if (!premultiplyAlpha && m_imageHtmlDomSource != HtmlDomVideo) m_alphaOp = AlphaDoUnmultiply; // if m_imageSurface is not an image, extract a copy of the surface if (m_imageSurface && cairo_surface_get_type(m_imageSurface.get()) != CAIRO_SURFACE_TYPE_IMAGE) { IntSize surfaceSize = cairoSurfaceSize(m_imageSurface.get()); auto tmpSurface = adoptRef(cairo_image_surface_create(CAIRO_FORMAT_ARGB32, surfaceSize.width(), surfaceSize.height())); copyRectFromOneSurfaceToAnother(m_imageSurface.get(), tmpSurface.get(), IntSize(), IntRect(IntPoint(), surfaceSize), IntSize(), CAIRO_OPERATOR_SOURCE); m_imageSurface = WTFMove(tmpSurface); } } if (!m_imageSurface) return false; ASSERT(cairo_surface_get_type(m_imageSurface.get()) == CAIRO_SURFACE_TYPE_IMAGE); IntSize imageSize = cairoSurfaceSize(m_imageSurface.get()); m_imageWidth = imageSize.width(); m_imageHeight = imageSize.height(); if (!m_imageWidth || !m_imageHeight) return false; if (cairo_image_surface_get_format(m_imageSurface.get()) != CAIRO_FORMAT_ARGB32) return false; unsigned int srcUnpackAlignment = 1; size_t bytesPerRow = cairo_image_surface_get_stride(m_imageSurface.get()); size_t bitsPerPixel = 32; unsigned padding = bytesPerRow - bitsPerPixel / 8 * m_imageWidth; if (padding) { srcUnpackAlignment = padding + 1; while (bytesPerRow % srcUnpackAlignment) ++srcUnpackAlignment; } m_imagePixelData = cairo_image_surface_get_data(m_imageSurface.get()); m_imageSourceFormat = DataFormatBGRA8; m_imageSourceUnpackAlignment = srcUnpackAlignment; return true; }
// Renderer for <filter> elements; per-client filter state is kept in m_filter
// and managed by applyResource()/postApplyResource().
RenderSVGResourceFilter::RenderSVGResourceFilter(SVGFilterElement& element, RenderStyle&& style)
    : RenderSVGResourceContainer(element, WTFMove(style))
{
}
// Produces a copy of this access case with its transient state reset.
std::unique_ptr<AccessCase> IntrinsicGetterAccessCase::clone() const
{
    // `new` rather than make_unique: the copy constructor is not publicly
    // reachable from std::make_unique here.
    std::unique_ptr<IntrinsicGetterAccessCase> copy(new IntrinsicGetterAccessCase(*this));
    copy->resetState();
    // WTFMove is needed for the derived-to-base unique_ptr conversion.
    return WTFMove(copy);
}
// Begins timing the processing of one recorded IPC message: the record is
// taken over and its start time stamped at token construction.
MessageRecorder::MessageProcessingToken::MessageProcessingToken(WebKitMessageRecord record)
    : m_record(WTFMove(record))
{
    m_record.startTime = monotonicallyIncreasingTime();
}
// Parses one IPC message out of m_readBuffer (and its file descriptors out of
// m_fileDescriptors), builds a MessageDecoder and dispatches it, then compacts
// both buffers. Returns false when a complete message is not yet buffered so
// the caller can wait for more data.
bool Connection::processMessage()
{
    // Need at least a full header before anything can be parsed.
    if (m_readBufferSize < sizeof(MessageInfo))
        return false;

    uint8_t* messageData = m_readBuffer.data();
    MessageInfo messageInfo;
    memcpy(&messageInfo, messageData, sizeof(messageInfo));
    messageData += sizeof(messageInfo);

    // Total on-wire length: header + attachment table + inline body (the body
    // is absent from the stream when it travels out-of-line in shared memory).
    size_t messageLength = sizeof(MessageInfo) + messageInfo.attachmentCount() * sizeof(AttachmentInfo) + (messageInfo.isMessageBodyIsOutOfLine() ? 0 : messageInfo.bodySize());
    if (m_readBufferSize < messageLength)
        return false;

    size_t attachmentFileDescriptorCount = 0;
    size_t attachmentCount = messageInfo.attachmentCount();
    std::unique_ptr<AttachmentInfo[]> attachmentInfo;

    if (attachmentCount) {
        attachmentInfo = std::make_unique<AttachmentInfo[]>(attachmentCount);
        memcpy(attachmentInfo.get(), messageData, sizeof(AttachmentInfo) * attachmentCount);
        messageData += sizeof(AttachmentInfo) * attachmentCount;

        // Count how many attachments actually carry a file descriptor.
        for (size_t i = 0; i < attachmentCount; ++i) {
            switch (attachmentInfo[i].getType()) {
            case Attachment::MappedMemoryType:
            case Attachment::SocketType:
                if (!attachmentInfo[i].isNull())
                    attachmentFileDescriptorCount++;
                break;
            case Attachment::Uninitialized:
            default:
                break;
            }
        }

        // When the body is out-of-line, the last table entry describes the
        // body's shared-memory attachment, not a regular attachment.
        if (messageInfo.isMessageBodyIsOutOfLine())
            attachmentCount--;
    }

    Vector<Attachment> attachments(attachmentCount);
    RefPtr<WebKit::SharedMemory> oolMessageBody;

    size_t fdIndex = 0;
    for (size_t i = 0; i < attachmentCount; ++i) {
        int fd = -1;
        switch (attachmentInfo[i].getType()) {
        case Attachment::MappedMemoryType:
            if (!attachmentInfo[i].isNull())
                fd = m_fileDescriptors[fdIndex++];
            // Attachments are stored in reverse table order — presumably to
            // match the encoder's ordering; confirm against the send path.
            attachments[attachmentCount - i - 1] = Attachment(fd, attachmentInfo[i].getSize());
            break;
        case Attachment::SocketType:
            if (!attachmentInfo[i].isNull())
                fd = m_fileDescriptors[fdIndex++];
            attachments[attachmentCount - i - 1] = Attachment(fd);
            break;
        case Attachment::Uninitialized:
            attachments[attachmentCount - i - 1] = Attachment();
            // Falls through to default, which only breaks — harmless.
        default:
            break;
        }
    }

    if (messageInfo.isMessageBodyIsOutOfLine()) {
        ASSERT(messageInfo.bodySize());

        // attachmentInfo[attachmentCount] is the body's shared-memory entry
        // (attachmentCount was decremented above).
        if (attachmentInfo[attachmentCount].isNull()) {
            ASSERT_NOT_REACHED();
            return false;
        }

        WebKit::SharedMemory::Handle handle;
        handle.adoptAttachment(IPC::Attachment(m_fileDescriptors[attachmentFileDescriptorCount - 1], attachmentInfo[attachmentCount].getSize()));

        oolMessageBody = WebKit::SharedMemory::map(handle, WebKit::SharedMemory::Protection::ReadOnly);
        if (!oolMessageBody) {
            ASSERT_NOT_REACHED();
            return false;
        }
    }

    ASSERT(attachments.size() == (messageInfo.isMessageBodyIsOutOfLine() ? messageInfo.attachmentCount() - 1 : messageInfo.attachmentCount()));

    uint8_t* messageBody = messageData;
    if (messageInfo.isMessageBodyIsOutOfLine())
        messageBody = reinterpret_cast<uint8_t*>(oolMessageBody->data());

    auto decoder = std::make_unique<MessageDecoder>(DataReference(messageBody, messageInfo.bodySize()), WTFMove(attachments));

    processIncomingMessage(WTFMove(decoder));

    // Compact the read buffer: drop the consumed message, keep any tail.
    if (m_readBufferSize > messageLength) {
        memmove(m_readBuffer.data(), m_readBuffer.data() + messageLength, m_readBufferSize - messageLength);
        m_readBufferSize -= messageLength;
    } else
        m_readBufferSize = 0;

    // Compact the file-descriptor buffer likewise.
    // NOTE(review): fileDescriptorsLength is a byte count, but it is used both
    // as an element offset into the int* returned by data() and subtracted
    // from m_fileDescriptorsSize (compared above against an fd *count*). This
    // mixing of bytes and elements looks wrong — verify against how
    // m_fileDescriptors/m_fileDescriptorsSize are maintained by the reader.
    if (attachmentFileDescriptorCount) {
        if (m_fileDescriptorsSize > attachmentFileDescriptorCount) {
            size_t fileDescriptorsLength = attachmentFileDescriptorCount * sizeof(int);
            memmove(m_fileDescriptors.data(), m_fileDescriptors.data() + fileDescriptorsLength, m_fileDescriptorsSize - fileDescriptorsLength);
            m_fileDescriptorsSize -= fileDescriptorsLength;
        } else
            m_fileDescriptorsSize = 0;
    }

    return true;
}
// Factory: builds a ref-counted AsyncStackTrace from a non-empty call stack,
// optionally chained to a parent trace.
RefPtr<AsyncStackTrace> AsyncStackTrace::create(Ref<ScriptCallStack>&& callStack, bool singleShot, RefPtr<AsyncStackTrace> parent)
{
    ASSERT(callStack->size());
    auto* trace = new AsyncStackTrace(WTFMove(callStack), singleShot, WTFMove(parent));
    return adoptRef(*trace);
}
// Factory: heap-allocates an RTCCertificate and adopts it into a Ref,
// forwarding every component into the constructor.
Ref<RTCCertificate> RTCCertificate::create(Ref<SecurityOrigin>&& origin, double expires, Vector<DtlsFingerprint>&& fingerprints, String&& pemCertificate, String&& pemPrivateKey)
{
    auto* certificate = new RTCCertificate(WTFMove(origin), expires, WTFMove(fingerprints), WTFMove(pemCertificate), WTFMove(pemPrivateKey));
    return adoptRef(*certificate);
}
// Factory: heap-allocates a WebsiteDataStore for the given configuration and
// adopts the initial reference.
Ref<WebsiteDataStore> WebsiteDataStore::create(WebKit::WebsiteDataStore::Configuration configuration)
{
    auto* dataStore = new WebsiteDataStore(WTFMove(configuration));
    return adoptRef(*dataStore);
}
// Hands the in-flight network load over to the download manager, turning it
// into a download. std::exchange clears m_networkLoad so this loader no
// longer owns the load; m_fileReferences moves along with it.
void NetworkResourceLoader::convertToDownload(DownloadID downloadID, const ResourceRequest& request, const ResourceResponse& response)
{
    ASSERT(m_networkLoad);
    NetworkProcess::singleton().downloadManager().convertNetworkLoadToDownload(downloadID, std::exchange(m_networkLoad, nullptr), WTFMove(m_fileReferences), request, response);
}
// Factory: builds an "icecandidate" event carrying the (possibly null)
// candidate and the URL of the server that produced it.
Ref<RTCPeerConnectionIceEvent> RTCPeerConnectionIceEvent::create(CanBubble canBubble, IsCancelable cancelable, RefPtr<RTCIceCandidate>&& candidate, String&& serverURL)
{
    auto* event = new RTCPeerConnectionIceEvent(eventNames().icecandidateEvent, canBubble, cancelable, WTFMove(candidate), WTFMove(serverURL));
    return adoptRef(*event);
}
// Central buffer-receipt path: forwards to didReceiveDataOrBuffer() with a
// null raw-byte pointer, since the SharedBuffer carries the data.
void ResourceLoader::didReceiveBuffer(Ref<SharedBuffer>&& buffer, long long encodedDataLength, DataPayloadType dataPayloadType)
{
    didReceiveDataOrBuffer(nullptr, 0, WTFMove(buffer), encodedDataLength, dataPayloadType);
}