CSSParserToken CSSTokenizer::nextToken()
{
    // Unlike the HTMLTokenizer, the CSS Syntax spec is written as a stateless,
    // (fixed-size) look-ahead tokenizer. We could move to the stateful model and
    // instead create states for all the "next 3 codepoints are X" cases; state-machine
    // tokenizers are easier to write to handle incremental tokenization of partial
    // sources. However, for now we follow the spec exactly.
    UChar cc = consume();

    // Non-ASCII code points are always handled as name-start characters;
    // ASCII code points dispatch through the per-character handler table.
    CodePoint codePointFunc = &CSSTokenizer::nameStart;
    if (isASCII(cc)) {
        ASSERT_WITH_SECURITY_IMPLICATION(cc < codePointsNumber);
        codePointFunc = codePoints[cc];
    }

    // A null table entry means the character has no dedicated handler and is
    // emitted as a plain delimiter token.
    if (!codePointFunc)
        return CSSParserToken(DelimiterToken, cc);
    return ((this)->*(codePointFunc))(cc);
}
// Populates m_imagesInSet from this value's item list, which the parser builds
// as alternating (image URL, scale factor) pairs.
void CSSImageSetValue::fillImageSet()
{
    size_t length = this->length();
    size_t i = 0;
    while (i < length) {
        CSSValue* imageValue = item(i);
        String imageURL = downcast<CSSImageValue>(*imageValue).url();

        ++i;
        // The parser guarantees pairs, so a URL at i implies a scale factor at
        // the next index; an odd-length list here would be an upstream parser bug.
        ASSERT_WITH_SECURITY_IMPLICATION(i < length);
        CSSValue* scaleFactorValue = item(i);
        float scaleFactor = downcast<CSSPrimitiveValue>(*scaleFactorValue).getFloatValue();

        ImageWithScale image;
        image.imageURL = imageURL;
        image.scaleFactor = scaleFactor;
        m_imagesInSet.append(image);
        ++i;
    }

    // Sort the images so that they are stored in order from lowest resolution to highest.
    std::sort(m_imagesInSet.begin(), m_imagesInSet.end(), CSSImageSetValue::compareByScaleFactor);
}
void CSSImageGeneratorValue::removeClient(const LayoutObject* layoutObject) { ASSERT(layoutObject); LayoutObjectSizeCountMap::iterator it = m_clients.find(layoutObject); ASSERT_WITH_SECURITY_IMPLICATION(it != m_clients.end()); IntSize removedImageSize; SizeAndCount& sizeCount = it->value; IntSize size = sizeCount.size; if (!size.isEmpty()) { m_sizes.remove(size); if (!m_sizes.contains(size)) m_images.remove(size); } if (!--sizeCount.count) m_clients.remove(layoutObject); if (m_clients.isEmpty()) { ASSERT(m_keepAlive); m_keepAlive.clear(); } }
// Interpolates between |from| and this operation's amount at |progress|.
// A null |from| means blending from the filter type's identity amount.
RawPtr<FilterOperation> BasicComponentTransferFilterOperation::blend(const FilterOperation* from, double progress) const
{
    double fromAmount;
    if (from) {
        // Blending across filter types is a caller error; the unchecked
        // downcast below would otherwise read a mismatched subclass.
        ASSERT_WITH_SECURITY_IMPLICATION(from->isSameType(*this));
        fromAmount = toBasicComponentTransferFilterOperation(from)->amount();
    } else {
        // No source operation: start from the pass-through value for this type.
        switch (m_type) {
        case OPACITY:
        case CONTRAST:
        case BRIGHTNESS:
            fromAmount = 1;
            break;
        case INVERT:
            fromAmount = 0;
            break;
        default:
            fromAmount = 0;
            ASSERT_NOT_REACHED();
        }
    }

    double result = blink::blend(fromAmount, m_amount, progress);
    // Clamp the interpolated amount to the valid range for each filter type.
    switch (m_type) {
    case BRIGHTNESS:
    case CONTRAST:
        result = clampTo<double>(result, 0);
        break;
    case INVERT:
    case OPACITY:
        result = clampTo<double>(result, 0, 1);
        break;
    default:
        ASSERT_NOT_REACHED();
    }
    return BasicComponentTransferFilterOperation::create(result, m_type);
}
// Takes ownership of the selectors in |selectorVector|, flattening every
// selector chain (tag history) into one contiguous CSSSelector array owned by
// this list. The vector is emptied on return.
void CSSSelectorList::adoptSelectorVector(Vector<OwnPtr<CSSParserSelector>>& selectorVector)
{
    ASSERT_WITH_SECURITY_IMPLICATION(!selectorVector.isEmpty());

    deleteSelectors();
    // First pass: count every selector in every chain so one allocation suffices.
    size_t flattenedSize = 0;
    for (size_t i = 0; i < selectorVector.size(); ++i) {
        for (CSSParserSelector* selector = selectorVector[i].get(); selector; selector = selector->tagHistory())
            ++flattenedSize;
    }
    ASSERT(flattenedSize);
    m_selectorArray = reinterpret_cast<CSSSelector*>(fastMalloc(sizeof(CSSSelector) * flattenedSize));
    size_t arrayIndex = 0;
    for (size_t i = 0; i < selectorVector.size(); ++i) {
        CSSParserSelector* current = selectorVector[i].get();
        while (current) {
            {
                // Move item from the parser selector vector into m_selectorArray without invoking destructor (Ugh.)
                CSSSelector* currentSelector = current->releaseSelector().leakPtr();
                memcpy(&m_selectorArray[arrayIndex], currentSelector, sizeof(CSSSelector));

                // Free the underlying memory without invoking the destructor.
                operator delete (currentSelector);
            }
            current = current->tagHistory();
            ASSERT(!m_selectorArray[arrayIndex].isLastInSelectorList());
            // Mark every selector except the last of each chain so the in-array
            // tag history can be walked later.
            if (current)
                m_selectorArray[arrayIndex].setNotLastInTagHistory();
            ++arrayIndex;
        }
        ASSERT(m_selectorArray[arrayIndex - 1].isLastInTagHistory());
    }
    ASSERT(flattenedSize == arrayIndex);
    m_selectorArray[arrayIndex - 1].setLastInSelectorList();
    selectorVector.clear();
}
// Notifies the autofill client that the suggestion at |listIndex| was accepted.
// NOTE(review): |fireEvents| is unused in this implementation — confirm whether
// the base interface requires the parameter.
void AutofillPopupMenuClient::valueChanged(unsigned listIndex, bool fireEvents)
{
    WebViewImpl* webView = getWebView();
    if (!webView)
        return;

    // |listIndex| comes from popup UI code; m_names, m_labels and m_itemIDs are
    // maintained at the same length, so one bound check covers all lookups below.
    ASSERT_WITH_SECURITY_IMPLICATION(listIndex < m_names.size());

    if (m_useLegacyBehavior) {
        // Legacy clients do not count the separator row, so indices past it are
        // shifted down by one before being reported.
        for (size_t i = 0; i < m_itemIDs.size(); ++i) {
            if (m_itemIDs[i] == WebAutofillClient::MenuItemIDSeparator) {
                if (listIndex > i)
                    listIndex--;
                break;
            }
        }
    }

    webView->autofillClient()->didAcceptAutofillSuggestion(WebNode(getTextField()), m_names[listIndex], m_labels[listIndex], m_itemIDs[listIndex], listIndex);
}
// Begins masked painting for |object|: emits (or replays) a begin-compositing
// display item that a matching finish step will close. Returns false when there
// is nothing to mask.
bool SVGMaskPainter::prepareEffect(const LayoutObject& object, GraphicsContext* context)
{
    ASSERT(context);
    ASSERT(m_mask.style());
    // Painting a mask that still needs layout would read stale geometry.
    ASSERT_WITH_SECURITY_IMPLICATION(!m_mask.needsLayout());

    m_mask.clearInvalidationMask();

    FloatRect paintInvalidationRect = object.paintInvalidationRectInLocalCoordinates();
    if (paintInvalidationRect.isEmpty() || !m_mask.element()->hasChildren())
        return false;

    if (RuntimeEnabledFeatures::slimmingPaintEnabled()) {
        ASSERT(context->displayItemList());
        if (context->displayItemList()->displayItemConstructionIsDisabled())
            return true;
        context->displayItemList()->createAndAppend<BeginCompositingDisplayItem>(object, SkXfermode::kSrcOver_Mode, 1, &paintInvalidationRect);
    } else {
        // Legacy (non-slimming-paint) path: replay the item into the context directly.
        BeginCompositingDisplayItem beginCompositingContent(object, SkXfermode::kSrcOver_Mode, 1, &paintInvalidationRect);
        beginCompositingContent.replay(*context);
    }

    return true;
}
// Returns two point ranges (<left, width> pairs) at row 'canvasY', that belong to 'src' but not 'dst'. // A point range is empty if the corresponding width is 0. inline void findBlendRangeAtRow(const blink::IntRect& src, const blink::IntRect& dst, int canvasY, int& left1, int& width1, int& left2, int& width2) { ASSERT_WITH_SECURITY_IMPLICATION(canvasY >= src.y() && canvasY < src.maxY()); left1 = -1; width1 = 0; left2 = -1; width2 = 0; if (canvasY < dst.y() || canvasY >= dst.maxY() || src.x() >= dst.maxX() || src.maxX() <= dst.x()) { left1 = src.x(); width1 = src.width(); return; } if (src.x() < dst.x()) { left1 = src.x(); width1 = dst.x() - src.x(); } if (src.maxX() > dst.maxX()) { left2 = dst.maxX(); width2 = src.maxX() - dst.maxX(); } }
// Populates m_imagesInSet from this value's item list, which the parser builds
// as alternating (image, scale factor) pairs. Also snapshots a referrer for
// each image URL for use when the image is later requested.
void CSSImageSetValue::fillImageSet()
{
    size_t length = this->length();
    size_t i = 0;
    while (i < length) {
        CSSImageValue* imageValue = toCSSImageValue(item(i));
        String imageURL = imageValue->url();

        ++i;
        // The parser guarantees pairs, so an image at i implies a scale factor
        // at the next index; an odd-length list would be an upstream parser bug.
        ASSERT_WITH_SECURITY_IMPLICATION(i < length);
        CSSValue* scaleFactorValue = item(i);
        float scaleFactor = toCSSPrimitiveValue(scaleFactorValue)->getFloatValue();

        ImageWithScale image;
        image.imageURL = imageURL;
        image.referrer = SecurityPolicy::generateReferrer(imageValue->referrer().referrerPolicy, KURL(ParsedURLString, imageURL), imageValue->referrer().referrer);
        image.scaleFactor = scaleFactor;
        m_imagesInSet.append(image);
        ++i;
    }

    // Sort the images so that they are stored in order from lowest resolution to highest.
    std::sort(m_imagesInSet.begin(), m_imagesInSet.end(), CSSImageSetValue::compareByScaleFactor);
}
// Decodes the icon directory entry at |index| using the appropriate embedded
// decoder (BMP or PNG), lazily creating the sub-decoder on first use.
// Returns false when decoding failed or more data is needed.
bool ICOImageDecoder::decodeAtIndex(size_t index)
{
    ASSERT_WITH_SECURITY_IMPLICATION(index < m_dirEntries.size());
    const IconDirectoryEntry& dirEntry = m_dirEntries[index];
    const ImageType imageType = imageTypeAtIndex(index);
    if (imageType == Unknown)
        return false; // Not enough data to determine image type yet.

    if (imageType == BMP) {
        if (!m_bmpReaders[index]) {
            // We need to have already sized m_frameBufferCache before this, and
            // we must not resize it again later (see caution in frameCount()).
            ASSERT(m_frameBufferCache.size() == m_dirEntries.size());
            m_bmpReaders[index] = adoptPtr(new BMPImageReader(this, dirEntry.m_imageOffset, 0, true));
            m_bmpReaders[index]->setData(m_data.get());
            m_bmpReaders[index]->setBuffer(&m_frameBufferCache[index]);
        }
        // The BMP reader consults m_frameSize while decoding; set it for the
        // duration of the call and reset afterwards so other paths are unaffected.
        m_frameSize = dirEntry.m_size;
        bool result = m_bmpReaders[index]->decodeBMP(false);
        m_frameSize = IntSize();
        return result;
    }

    if (!m_pngDecoders[index]) {
        m_pngDecoders[index] = adoptPtr(new PNGImageDecoder(m_premultiplyAlpha ? ImageSource::AlphaPremultiplied : ImageSource::AlphaNotPremultiplied, m_ignoreGammaAndColorProfile ? ImageSource::GammaAndColorProfileIgnored : ImageSource::GammaAndColorProfileApplied));
        setDataForPNGDecoderAtIndex(index);
    }
    // Fail if the size the PNGImageDecoder calculated does not match the size
    // in the directory.
    if (m_pngDecoders[index]->isSizeAvailable() && (m_pngDecoders[index]->size() != dirEntry.m_size))
        return setFailed();
    m_frameBufferCache[index] = *m_pngDecoders[index]->frameBufferAtIndex(0);
    return !m_pngDecoders[index]->failed() || setFailed();
}
// Sets up clipping for |target|: the fast path emits a clip-path display item
// when the clip resolves to a single Path; otherwise it falls back to a
// mask-based approach using two composited layers. |clipperState| records which
// path was taken so the matching finishEffect can undo it. Returns false when
// clipping could not be applied (empty rect, reference cycle, nested failure).
bool SVGClipPainter::prepareEffect(const LayoutObject& target, const FloatRect& targetBoundingBox, const FloatRect& paintInvalidationRect, GraphicsContext* context, ClipperState& clipperState)
{
    ASSERT(context);
    ASSERT(clipperState == ClipperNotApplied);
    // A clip resource that still needs layout would expose stale geometry.
    ASSERT_WITH_SECURITY_IMPLICATION(!m_clip.needsLayout());

    m_clip.clearInvalidationMask();

    if (paintInvalidationRect.isEmpty() || m_clip.hasCycle())
        return false;

    // Guards against re-entering this clip while expanding nested clip paths.
    SVGClipExpansionCycleHelper inClipExpansionChange(m_clip);

    AffineTransform animatedLocalTransform = toSVGClipPathElement(m_clip.element())->calculateAnimatedLocalTransform();
    // When drawing a clip for non-SVG elements, the CTM does not include the zoom factor.
    // In this case, we need to apply the zoom scale explicitly - but only for clips with
    // userSpaceOnUse units (the zoom is accounted for objectBoundingBox-resolved lengths).
    if (!target.isSVG() && m_clip.clipPathUnits() == SVGUnitTypes::SVG_UNIT_TYPE_USERSPACEONUSE) {
        ASSERT(m_clip.style());
        animatedLocalTransform.scale(m_clip.style()->effectiveZoom());
    }

    // First, try to apply the clip as a clipPath.
    Path clipPath;
    if (m_clip.asPath(animatedLocalTransform, targetBoundingBox, clipPath)) {
        clipperState = ClipperAppliedPath;
        ASSERT(context->displayItemList());
        context->displayItemList()->createAndAppend<BeginClipPathDisplayItem>(target, clipPath);
        return true;
    }

    // Fall back to masking.
    clipperState = ClipperAppliedMask;

    // Begin compositing the clip mask.
    CompositingRecorder::beginCompositing(*context, target, SkXfermode::kSrcOver_Mode, 1, &paintInvalidationRect);
    {
        TransformRecorder recorder(*context, target, animatedLocalTransform);

        // clipPath can also be clipped by another clipPath.
        SVGResources* resources = SVGResourcesCache::cachedResourcesForLayoutObject(&m_clip);
        LayoutSVGResourceClipper* clipPathClipper = resources ? resources->clipper() : 0;
        ClipperState clipPathClipperState = ClipperNotApplied;
        if (clipPathClipper && !SVGClipPainter(*clipPathClipper).prepareEffect(m_clip, targetBoundingBox, paintInvalidationRect, context, clipPathClipperState)) {
            // End the clip mask's compositor.
            CompositingRecorder::endCompositing(*context, target);
            return false;
        }

        drawClipMaskContent(context, target, targetBoundingBox, paintInvalidationRect);

        if (clipPathClipper)
            SVGClipPainter(*clipPathClipper).finishEffect(m_clip, context, clipPathClipperState);
    }

    // Masked content layer start.
    CompositingRecorder::beginCompositing(*context, target, SkXfermode::kSrcIn_Mode, 1, &paintInvalidationRect);

    return true;
}
// Decodes |bytes| as UTF-8. Output starts in an 8-bit (Latin-1) buffer for
// speed and switches to a 16-bit buffer (via upConvertTo16Bit) the first time a
// character above 0xff — including the replacement character — is produced.
// Incomplete trailing sequences are stashed in m_partialSequence for the next
// call unless |flush| is set. |sawError| is set on malformed input; |stopOnError|
// stops at the first error instead of emitting replacement characters.
String TextCodecUTF8::decode(const char* bytes, size_t length, bool flush, bool stopOnError, bool& sawError)
{
    // Each input byte might turn into a character.
    // That includes all bytes in the partial-sequence buffer because
    // each byte in an invalid sequence will turn into a replacement character.
    StringBuffer<LChar> buffer(m_partialSequenceSize + length);

    const uint8_t* source = reinterpret_cast<const uint8_t*>(bytes);
    const uint8_t* end = source + length;
    const uint8_t* alignedEnd = alignToMachineWord(end);
    LChar* destination = buffer.characters();

    do {
        if (m_partialSequenceSize) {
            // Explicitly copy destination and source pointers to avoid taking pointers to the
            // local variables, which may harm code generation by disabling some optimizations
            // in some compilers.
            LChar* destinationForHandlePartialSequence = destination;
            const uint8_t* sourceForHandlePartialSequence = source;
            if (handlePartialSequence(destinationForHandlePartialSequence, sourceForHandlePartialSequence, end, flush, stopOnError, sawError)) {
                // The partial sequence produced a >0xff character: switch to 16-bit output.
                source = sourceForHandlePartialSequence;
                goto upConvertTo16Bit;
            }
            destination = destinationForHandlePartialSequence;
            source = sourceForHandlePartialSequence;
            if (m_partialSequenceSize)
                break;
        }

        while (source < end) {
            if (isASCII(*source)) {
                // Fast path for ASCII. Most UTF-8 text will be ASCII.
                if (isAlignedToMachineWord(source)) {
                    // Copy a machine word at a time while all bytes stay ASCII.
                    while (source < alignedEnd) {
                        MachineWord chunk = *reinterpret_cast_ptr<const MachineWord*>(source);
                        if (!isAllASCII<LChar>(chunk))
                            break;
                        copyASCIIMachineWord(destination, source);
                        source += sizeof(MachineWord);
                        destination += sizeof(MachineWord);
                    }
                    if (source == end)
                        break;
                    if (!isASCII(*source))
                        continue;
                }
                *destination++ = *source++;
                continue;
            }
            int count = nonASCIISequenceLength(*source);
            int character;
            if (!count)
                character = nonCharacter;
            else {
                if (count > end - source) {
                    // Incomplete trailing sequence: buffer it for the next decode() call.
                    ASSERT_WITH_SECURITY_IMPLICATION(end - source < static_cast<ptrdiff_t>(sizeof(m_partialSequence)));
                    ASSERT(!m_partialSequenceSize);
                    m_partialSequenceSize = end - source;
                    memcpy(m_partialSequence, source, m_partialSequenceSize);
                    source = end;
                    break;
                }
                character = decodeNonASCIISequence(source, count);
            }
            if (character == nonCharacter) {
                sawError = true;
                if (stopOnError)
                    break;
                // The replacement character is above 0xff, so continue in the 16-bit path.
                goto upConvertTo16Bit;
            }
            if (character > 0xff)
                goto upConvertTo16Bit;

            source += count;
            *destination++ = character;
        }
    } while (flush && m_partialSequenceSize);

    buffer.shrink(destination - buffer.characters());

    return String::adopt(buffer);

upConvertTo16Bit:
    // 16-bit phase: same decode loop, but the destination can hold any BMP
    // character and errors emit replacement characters inline.
    StringBuffer<UChar> buffer16(m_partialSequenceSize + length);

    UChar* destination16 = buffer16.characters();

    // Copy the already converted characters
    for (LChar* converted8 = buffer.characters(); converted8 < destination;)
        *destination16++ = *converted8++;

    do {
        if (m_partialSequenceSize) {
            // Explicitly copy destination and source pointers to avoid taking pointers to the
            // local variables, which may harm code generation by disabling some optimizations
            // in some compilers.
            UChar* destinationForHandlePartialSequence = destination16;
            const uint8_t* sourceForHandlePartialSequence = source;
            handlePartialSequence(destinationForHandlePartialSequence, sourceForHandlePartialSequence, end, flush, stopOnError, sawError);
            destination16 = destinationForHandlePartialSequence;
            source = sourceForHandlePartialSequence;
            if (m_partialSequenceSize)
                break;
        }

        while (source < end) {
            if (isASCII(*source)) {
                // Fast path for ASCII. Most UTF-8 text will be ASCII.
                if (isAlignedToMachineWord(source)) {
                    while (source < alignedEnd) {
                        MachineWord chunk = *reinterpret_cast_ptr<const MachineWord*>(source);
                        if (!isAllASCII<LChar>(chunk))
                            break;
                        copyASCIIMachineWord(destination16, source);
                        source += sizeof(MachineWord);
                        destination16 += sizeof(MachineWord);
                    }
                    if (source == end)
                        break;
                    if (!isASCII(*source))
                        continue;
                }
                *destination16++ = *source++;
                continue;
            }
            int count = nonASCIISequenceLength(*source);
            int character;
            if (!count)
                character = nonCharacter;
            else {
                if (count > end - source) {
                    // Incomplete trailing sequence: buffer it for the next decode() call.
                    ASSERT_WITH_SECURITY_IMPLICATION(end - source < static_cast<ptrdiff_t>(sizeof(m_partialSequence)));
                    ASSERT(!m_partialSequenceSize);
                    m_partialSequenceSize = end - source;
                    memcpy(m_partialSequence, source, m_partialSequenceSize);
                    source = end;
                    break;
                }
                character = decodeNonASCIISequence(source, count);
            }
            if (character == nonCharacter) {
                sawError = true;
                if (stopOnError)
                    break;
                // Each error generates a replacement character and consumes one byte.
                *destination16++ = replacementCharacter;
                ++source;
                continue;
            }
            source += count;
            destination16 = appendCharacter(destination16, character);
        }
    } while (flush && m_partialSequenceSize);

    buffer16.shrink(destination16 - buffer16.characters());

    return String::adopt(buffer16);
}
// Resolves every SVG resource referenced by |style| for |renderer|'s element —
// clipper, reference filter, masker, markers, fill/stroke paint servers and
// chainable linked resources — caching renderers that exist and registering
// pending resources for ids not yet present in the document.
// Returns true if at least one resource was resolved.
bool SVGResources::buildCachedResources(const RenderElement& renderer, const RenderStyle& style)
{
    ASSERT(renderer.element());
    ASSERT_WITH_SECURITY_IMPLICATION(renderer.element()->isSVGElement());

    // Defensive release-build check mirroring the assertion above.
    if (!renderer.element())
        return false;

    auto& element = downcast<SVGElement>(*renderer.element());

    Document& document = element.document();

    SVGDocumentExtensions& extensions = document.accessSVGExtensions();

    const AtomicString& tagName = element.localName();
    if (tagName.isNull())
        return false;

    const SVGRenderStyle& svgStyle = style.svgStyle();

    bool foundResources = false;
    if (clipperFilterMaskerTags().contains(tagName)) {
        if (svgStyle.hasClipper()) {
            AtomicString id(svgStyle.clipperResource());
            if (setClipper(getRenderSVGResourceById<RenderSVGResourceClipper>(document, id)))
                foundResources = true;
            else
                registerPendingResource(extensions, id, element);
        }

        if (style.hasFilter()) {
            // Only a single url(#id) reference filter maps to an SVG filter resource.
            const FilterOperations& filterOperations = style.filter();
            if (filterOperations.size() == 1) {
                const FilterOperation& filterOperation = *filterOperations.at(0);
                if (filterOperation.type() == FilterOperation::REFERENCE) {
                    const auto& referenceFilterOperation = downcast<ReferenceFilterOperation>(filterOperation);
                    AtomicString id = SVGURIReference::fragmentIdentifierFromIRIString(referenceFilterOperation.url(), element.document());
                    if (setFilter(getRenderSVGResourceById<RenderSVGResourceFilter>(document, id)))
                        foundResources = true;
                    else
                        registerPendingResource(extensions, id, element);
                }
            }
        }

        if (svgStyle.hasMasker()) {
            AtomicString id(svgStyle.maskerResource());
            if (setMasker(getRenderSVGResourceById<RenderSVGResourceMasker>(document, id)))
                foundResources = true;
            else
                registerPendingResource(extensions, id, element);
        }
    }

    if (markerTags().contains(tagName) && svgStyle.hasMarkers()) {
        // Start, mid and end markers resolve independently.
        AtomicString markerStartId(svgStyle.markerStartResource());
        if (setMarkerStart(getRenderSVGResourceById<RenderSVGResourceMarker>(document, markerStartId)))
            foundResources = true;
        else
            registerPendingResource(extensions, markerStartId, element);

        AtomicString markerMidId(svgStyle.markerMidResource());
        if (setMarkerMid(getRenderSVGResourceById<RenderSVGResourceMarker>(document, markerMidId)))
            foundResources = true;
        else
            registerPendingResource(extensions, markerMidId, element);

        AtomicString markerEndId(svgStyle.markerEndResource());
        if (setMarkerEnd(getRenderSVGResourceById<RenderSVGResourceMarker>(document, markerEndId)))
            foundResources = true;
        else
            registerPendingResource(extensions, markerEndId, element);
    }

    if (fillAndStrokeTags().contains(tagName)) {
        if (svgStyle.hasFill()) {
            bool hasPendingResource = false;
            AtomicString id;
            if (setFill(paintingResourceFromSVGPaint(document, svgStyle.fillPaintType(), svgStyle.fillPaintUri(), id, hasPendingResource)))
                foundResources = true;
            else if (hasPendingResource)
                registerPendingResource(extensions, id, element);
        }

        if (svgStyle.hasStroke()) {
            bool hasPendingResource = false;
            AtomicString id;
            if (setStroke(paintingResourceFromSVGPaint(document, svgStyle.strokePaintType(), svgStyle.strokePaintUri(), id, hasPendingResource)))
                foundResources = true;
            else if (hasPendingResource)
                registerPendingResource(extensions, id, element);
        }
    }

    if (chainableResourceTags().contains(tagName)) {
        AtomicString id(targetReferenceFromResource(element));
        if (setLinkedResource(getRenderSVGResourceContainerById(document, id)))
            foundResources = true;
        else
            registerPendingResource(extensions, id, element);
    }

    return foundResources;
}
// Wraps |byteArray| as the RGBA backing store for an image of |size|.
ImageData::ImageData(const IntSize& size, PassRefPtr<DOMUint8ClampedArray> byteArray)
    : m_size(size)
    , m_data(byteArray)
{
    // The backing store must hold at least 4 bytes (RGBA) per pixel.
    // NOTE(review): width * height * 4 is computed in signed int before the
    // cast, so callers are expected to have validated the area against integer
    // overflow — confirm at the creation sites.
    ASSERT_WITH_SECURITY_IMPLICATION(static_cast<unsigned>(size.width() * size.height() * 4) <= m_data->length());
}
IDBCursorWithValue* IDBAny::idbCursorWithValue() const
{
    // Only legal when this IDBAny actually wraps a cursor-with-value; the
    // security-flavored assertion guards the unchecked downcast below.
    ASSERT(m_type == IDBCursorWithValueType);
    ASSERT_WITH_SECURITY_IMPLICATION(m_idbCursor->isCursorWithValue());
    IDBCursor* cursor = m_idbCursor.get();
    return toIDBCursorWithValue(cursor);
}
// Computes the from/to value pair and the effective interpolation percentage
// within that segment for a values-list animation at overall progress |percent|.
void SVGAnimationElement::currentValuesForValuesAnimation(float percent, float& effectivePercent, String& from, String& to)
{
    unsigned valuesCount = m_values.size();
    ASSERT(m_animationValid);
    ASSERT(valuesCount >= 1);

    // At the end of the animation (or with a single value), both endpoints are
    // the last value.
    if (percent == 1 || valuesCount == 1) {
        from = m_values[valuesCount - 1];
        to = m_values[valuesCount - 1];
        effectivePercent = 1;
        return;
    }

    CalcMode calcMode = this->calcMode();
    if (hasTagName(SVGNames::animateTag) || hasTagName(SVGNames::animateColorTag)) {
        AnimatedPropertyType attributeType = toSVGAnimateElement(this)->determineAnimatedPropertyType(targetElement());
        // Fall back to discrete animations for Strings.
        if (attributeType == AnimatedBoolean || attributeType == AnimatedEnumeration || attributeType == AnimatedPreserveAspectRatio || attributeType == AnimatedString)
            calcMode = CalcModeDiscrete;
    }
    if (!m_keyPoints.isEmpty() && calcMode != CalcModePaced)
        return currentValuesFromKeyPoints(percent, effectivePercent, from, to);

    unsigned keyTimesCount = m_keyTimes.size();
    ASSERT(!keyTimesCount || valuesCount == keyTimesCount);
    ASSERT(!keyTimesCount || (keyTimesCount > 1 && !m_keyTimes[0]));

    unsigned index = calculateKeyTimesIndex(percent);
    if (calcMode == CalcModeDiscrete) {
        // Discrete mode jumps between values with no interpolation.
        if (!keyTimesCount)
            index = static_cast<unsigned>(percent * valuesCount);
        from = m_values[index];
        to = m_values[index];
        effectivePercent = 0;
        return;
    }

    float fromPercent;
    float toPercent;
    if (keyTimesCount) {
        fromPercent = m_keyTimes[index];
        toPercent = m_keyTimes[index + 1];
    } else {
        // Without explicit keyTimes, the values are spaced evenly over [0, 1].
        index = static_cast<unsigned>(floorf(percent * (valuesCount - 1)));
        fromPercent = static_cast<float>(index) / (valuesCount - 1);
        toPercent = static_cast<float>(index + 1) / (valuesCount - 1);
    }

    if (index == valuesCount - 1)
        --index;
    from = m_values[index];
    to = m_values[index + 1];
    // A zero-length segment would divide by zero below; keyTimes must be
    // strictly increasing here.
    ASSERT_WITH_SECURITY_IMPLICATION(toPercent > fromPercent);
    effectivePercent = (percent - fromPercent) / (toPercent - fromPercent);

    if (calcMode == CalcModeSpline) {
        ASSERT(m_keySplines.size() == m_values.size() - 1);
        effectivePercent = calculatePercentForSpline(effectivePercent, index);
    }
}
DataCue* toDataCue(TextTrackCue* cue)
{
    // Unchecked downcast helper: callers must only pass cues whose type is
    // Data, which the security-flavored assertion enforces in debug builds.
    ASSERT_WITH_SECURITY_IMPLICATION(cue->cueType() == TextTrackCue::Data);
    return static_cast<DataCue*>(cue);
}
// Parses a window.open() feature string into this WindowFeatures object,
// deliberately mimicking Win IE's parsing behavior (see comment below).
WindowFeatures::WindowFeatures(const String& features)
    : xSet(false)
    , ySet(false)
    , widthSet(false)
    , heightSet(false)
    , fullscreen(false)
    , dialog(false)
{
    /*
     The IE rule is: all features except for channelmode and fullscreen default to YES, but if the
     user specifies a feature string, all features default to NO. (There is no public standard that
     applies to this method.)

     <http://msdn.microsoft.com/workshop/author/dhtml/reference/methods/open_0.asp>
     We always allow a window to be resized, which is consistent with Firefox.
     */

    // An empty feature string means "all bars visible".
    if (features.length() == 0) {
        menuBarVisible = true;
        statusBarVisible = true;
        toolBarVisible = true;
        locationBarVisible = true;
        scrollbarsVisible = true;
        resizable = true;
        return;
    }

    // A non-empty feature string flips the defaults to "off" (except resizable).
    menuBarVisible = false;
    statusBarVisible = false;
    toolBarVisible = false;
    locationBarVisible = false;
    scrollbarsVisible = false;
    resizable = true;

    // Tread lightly in this code -- it was specifically designed to mimic Win IE's parsing behavior.
    int keyBegin, keyEnd;
    int valueBegin, valueEnd;

    int i = 0;
    int length = features.length();
    String buffer = features.lower();
    while (i < length) {
        // skip to first non-separator, but don't skip past the end of the string
        while (i < length && isWindowFeaturesSeparator(buffer[i]))
            i++;
        keyBegin = i;

        // skip to first separator
        while (i < length && !isWindowFeaturesSeparator(buffer[i]))
            i++;
        keyEnd = i;

        ASSERT_WITH_SECURITY_IMPLICATION(i <= length);

        // skip to first '=', but don't skip past a ',' or the end of the string
        while (i < length && buffer[i] != '=') {
            if (buffer[i] == ',')
                break;
            i++;
        }

        ASSERT_WITH_SECURITY_IMPLICATION(i <= length);

        // skip to first non-separator, but don't skip past a ',' or the end of the string
        while (i < length && isWindowFeaturesSeparator(buffer[i])) {
            if (buffer[i] == ',')
                break;
            i++;
        }
        valueBegin = i;

        ASSERT_WITH_SECURITY_IMPLICATION(i <= length);

        // skip to first separator
        while (i < length && !isWindowFeaturesSeparator(buffer[i]))
            i++;
        valueEnd = i;

        ASSERT_WITH_SECURITY_IMPLICATION(i <= length);

        String keyString(buffer.substring(keyBegin, keyEnd - keyBegin));
        String valueString(buffer.substring(valueBegin, valueEnd - valueBegin));
        setWindowFeature(keyString, valueString);
    }
}
void IDBDatabase::indexCreated(int64_t objectStoreId, const IDBIndexMetadata& metadata)
{
    // Record newly-created index metadata on its owning object store, which
    // must already be present in this connection's cached database metadata.
    auto it = m_metadata.objectStores.find(objectStoreId);
    ASSERT_WITH_SECURITY_IMPLICATION(it != m_metadata.objectStores.end());
    it->value.indexes.set(metadata.id, metadata);
}
// Post-processes the rows libwebp has decoded so far for frame |frameIndex|:
// applies the optional color transform to newly decoded rows, then alpha-blends
// them against the previous frame where the animation blend mode requires it.
// Only rows in [m_decodedHeight, decodedHeight) are touched per call.
void WEBPImageDecoder::applyPostProcessing(size_t frameIndex)
{
    ImageFrame& buffer = m_frameBufferCache[frameIndex];
    int width;
    int decodedHeight;
    if (!WebPIDecGetRGB(m_decoder, &decodedHeight, &width, 0, 0))
        return; // See also https://bugs.webkit.org/show_bug.cgi?id=74062
    if (decodedHeight <= 0)
        return;

    const IntRect& frameRect = buffer.originalFrameRect();
    // libwebp-reported dimensions must agree with the frame rect we allocated.
    ASSERT_WITH_SECURITY_IMPLICATION(width == frameRect.width());
    ASSERT_WITH_SECURITY_IMPLICATION(decodedHeight <= frameRect.height());
    const int left = frameRect.x();
    const int top = frameRect.y();

#if USE(QCMSLIB)
    if (qcms_transform* transform = colorTransform()) {
        // Color-correct only the rows decoded since the previous call.
        for (int y = m_decodedHeight; y < decodedHeight; ++y) {
            const int canvasY = top + y;
            uint8_t* row = reinterpret_cast<uint8_t*>(buffer.getAddr(left, canvasY));
            qcms_transform_data_type(transform, row, row, width, QCMS_OUTPUT_RGBX);
            uint8_t* pixel = row;
            for (int x = 0; x < width; ++x, pixel += 4) {
                const int canvasX = left + x;
                buffer.setRGBA(canvasX, canvasY, pixel[0], pixel[1], pixel[2], pixel[3]);
            }
        }
    }
#endif // USE(QCMSLIB)

    // During the decoding of current frame, we may have set some pixels to be transparent (i.e. alpha < 255).
    // However, the value of each of these pixels should have been determined by blending it against the value
    // of that pixel in the previous frame if alpha blend source was 'BlendAtopPreviousFrame'. So, we correct these
    // pixels based on disposal method of the previous frame and the previous frame buffer.
    // FIXME: This could be avoided if libwebp decoder had an API that used the previous required frame
    // to do the alpha-blending by itself.
    if ((m_formatFlags & ANIMATION_FLAG) && frameIndex && buffer.alphaBlendSource() == ImageFrame::BlendAtopPreviousFrame && buffer.requiredPreviousFrameIndex() != kNotFound) {
        ImageFrame& prevBuffer = m_frameBufferCache[frameIndex - 1];
        ASSERT(prevBuffer.status() == ImageFrame::FrameComplete);
        ImageFrame::DisposalMethod prevDisposalMethod = prevBuffer.disposalMethod();
        if (prevDisposalMethod == ImageFrame::DisposeKeep) {
            // Blend transparent pixels with pixels in previous canvas.
            for (int y = m_decodedHeight; y < decodedHeight; ++y) {
                m_blendFunction(buffer, prevBuffer, top + y, left, width);
            }
        } else if (prevDisposalMethod == ImageFrame::DisposeOverwriteBgcolor) {
            const IntRect& prevRect = prevBuffer.originalFrameRect();
            // We need to blend a transparent pixel with its value just after initFrame() call. That is:
            // * Blend with fully transparent pixel if it belongs to prevRect <-- This is a no-op.
            // * Blend with the pixel in the previous canvas otherwise <-- Needs alpha-blending.
            for (int y = m_decodedHeight; y < decodedHeight; ++y) {
                int canvasY = top + y;
                int left1, width1, left2, width2;
                findBlendRangeAtRow(frameRect, prevRect, canvasY, left1, width1, left2, width2);
                if (width1 > 0)
                    m_blendFunction(buffer, prevBuffer, canvasY, left1, width1);
                if (width2 > 0)
                    m_blendFunction(buffer, prevBuffer, canvasY, left2, width2);
            }
        }
    }

    m_decodedHeight = decodedHeight;
    buffer.setPixelsChanged(true);
}
bool DataTransfer::hasStringOfType(const String& type)
{
    // Reading type information is only legal when the transfer is readable.
    ASSERT_WITH_SECURITY_IMPLICATION(canReadTypes());

    // A null type can never match.
    if (type.isNull())
        return false;
    return types().contains(type);
}
Entry& entryFor(ContextFeatures::FeatureType type)
{
    // The enum value indexes directly into m_entries; any value outside the
    // declared feature range would read out of bounds.
    const size_t index = static_cast<size_t>(type);
    ASSERT_WITH_SECURITY_IMPLICATION(index < ContextFeatures::FeatureTypeSize);
    return m_entries[index];
}
PassRefPtr<InspectorValue> InspectorArrayBase::get(size_t index)
{
    // Bounds are the caller's responsibility; we assert rather than range-check
    // because an out-of-bounds index here indicates a protocol-handling bug.
    ASSERT_WITH_SECURITY_IMPLICATION(index < m_data.size());
    return m_data[index];
}
void ScriptExecutionContext::willDestroyActiveDOMObject(ActiveDOMObject& activeDOMObject)
{
    // Removal must not happen while the active DOM object set is being iterated
    // (m_activeDOMObjectRemovalForbidden flags those iteration windows).
    ASSERT_WITH_SECURITY_IMPLICATION(!m_activeDOMObjectRemovalForbidden);
    m_activeDOMObjects.remove(&activeDOMObject);
}
Document* XMLHttpRequest::document() const
{
    // For document-based XHR the context is known to be a Document; the
    // security-flavored assertion guards the unchecked static_cast.
    ScriptExecutionContext* context = scriptExecutionContext();
    ASSERT_WITH_SECURITY_IMPLICATION(context->isDocument());
    return static_cast<Document*>(context);
}
IDBCursor* IDBAny::idbCursor() const
{
    // Only legal when this IDBAny wraps a plain (key) cursor; a value cursor
    // must be retrieved through idbCursorWithValue() instead.
    ASSERT(m_type == IDBCursorType);
    ASSERT_WITH_SECURITY_IMPLICATION(m_idbCursor->isKeyCursor());
    IDBCursor* cursor = m_idbCursor.get();
    return cursor;
}
void RTCStatsResponse::addStatistic(size_t report, String name, String value)
{
    // |report| must refer to a report previously added to this response;
    // indexing past m_result would be out of bounds.
    ASSERT_WITH_SECURITY_IMPLICATION(report < m_result.size());
    m_result[report]->addStatistic(name, value);
}
void IDBDatabase::indexDeleted(int64_t objectStoreId, int64_t indexId)
{
    // Drop the index from the cached metadata of its owning object store,
    // which must already be known to this connection.
    auto it = m_metadata.objectStores.find(objectStoreId);
    ASSERT_WITH_SECURITY_IMPLICATION(it != m_metadata.objectStores.end());
    it->value.indexes.remove(indexId);
}
FilterEffect* FilterEffect::inputEffect(unsigned number) const
{
    // Callers must pass a valid input index; an out-of-range value would read
    // past the input effect list, hence the security-flavored assertion.
    ASSERT_WITH_SECURITY_IMPLICATION(number < m_inputEffects.size());
    const auto& effect = m_inputEffects.at(number);
    return effect.get();
}
// Maps a JS-exposed CSS property name (e.g. "webkitTransform", "cssFloat",
// "pixelLeft") to its CSSPropertyID, caching results per name. Also reports
// whether the name used a "pixel"/"pos" prefix, which affects how values are
// interpreted by the caller.
static CSSPropertyInfo cssPropertyIDForJSCSSPropertyName(PropertyName propertyName)
{
    CSSPropertyInfo propertyInfo = {CSSPropertyInvalid, false};
    bool hadPixelOrPosPrefix = false;

    StringImpl* propertyNameString = propertyName.publicName();
    if (!propertyNameString)
        return propertyInfo;
    unsigned length = propertyNameString->length();
    if (!length)
        return propertyInfo;

    // Check the per-name cache before doing any conversion work.
    String stringForCache = String(propertyNameString);
    typedef HashMap<String, CSSPropertyInfo> CSSPropertyInfoMap;
    static NeverDestroyed<CSSPropertyInfoMap> propertyInfoCache;
    propertyInfo = propertyInfoCache.get().get(stringForCache);
    if (propertyInfo.propertyID)
        return propertyInfo;

    // Fixed-size scratch buffer for the hyphenated property name; names that
    // would not fit are rejected below before any write.
    const size_t bufferSize = maxCSSPropertyNameLength + 1;
    char buffer[bufferSize];
    char* bufferPtr = buffer;
    const char* name = bufferPtr;

    unsigned i = 0;
    // Prefixes CSS, Pixel, Pos are ignored.
    // Prefixes Apple, KHTML and Webkit are transposed to "-webkit-".
    // The prefix "Epub" becomes "-epub-".
    switch (getCSSPropertyNamePrefix(*propertyNameString)) {
    case PropertyNamePrefixNone:
        if (isASCIIUpper((*propertyNameString)[0]))
            return propertyInfo;
        break;
    case PropertyNamePrefixCSS:
        i += 3;
        break;
    case PropertyNamePrefixPixel:
        i += 5;
        hadPixelOrPosPrefix = true;
        break;
    case PropertyNamePrefixPos:
        i += 3;
        hadPixelOrPosPrefix = true;
        break;
#if ENABLE(LEGACY_CSS_VENDOR_PREFIXES)
    case PropertyNamePrefixApple:
    case PropertyNamePrefixKHTML:
        ASSERT(RuntimeEnabledFeatures::sharedFeatures().legacyCSSVendorPrefixesEnabled());
        writeWebKitPrefix(bufferPtr);
        i += 5;
        break;
#endif
    case PropertyNamePrefixEpub:
        writeEpubPrefix(bufferPtr);
        i += 4;
        break;
    case PropertyNamePrefixWebKit:
        writeWebKitPrefix(bufferPtr);
        i += 6;
        break;
    }

    // The first character after the prefix is always lowercased.
    *bufferPtr++ = toASCIILower((*propertyNameString)[i++]);

    char* bufferEnd = buffer + bufferSize;
    char* stringEnd = bufferEnd - 1;
    size_t bufferSizeLeft = stringEnd - bufferPtr;
    size_t propertySizeLeft = length - i;
    if (propertySizeLeft > bufferSizeLeft)
        return propertyInfo;

    // Convert camelCase to hyphenated form, rejecting names with non-ASCII
    // characters or that would overflow the remaining buffer space.
    for (; i < length; ++i) {
        UChar c = (*propertyNameString)[i];
        if (!c || c >= 0x7F)
            return propertyInfo; // illegal character
        if (isASCIIUpper(c)) {
            size_t bufferSizeLeft = stringEnd - bufferPtr;
            size_t propertySizeLeft = length - i + 1;
            if (propertySizeLeft > bufferSizeLeft)
                return propertyInfo;
            *bufferPtr++ = '-';
            *bufferPtr++ = toASCIILower(c);
        } else
            *bufferPtr++ = c;
        ASSERT_WITH_SECURITY_IMPLICATION(bufferPtr < bufferEnd);
    }
    ASSERT_WITH_SECURITY_IMPLICATION(bufferPtr < bufferEnd);
    *bufferPtr = '\0';

    unsigned outputLength = bufferPtr - buffer;
#if PLATFORM(IOS)
    cssPropertyNameIOSAliasing(buffer, name, outputLength);
#endif

    const Property* hashTableEntry = findProperty(name, outputLength);
    int propertyID = hashTableEntry ? hashTableEntry->id : 0;
    if (propertyID) {
        // Only successful lookups are cached; misses retry on the next call.
        propertyInfo.hadPixelOrPosPrefix = hadPixelOrPosPrefix;
        propertyInfo.propertyID = static_cast<CSSPropertyID>(propertyID);
        propertyInfoCache.get().add(stringForCache, propertyInfo);
    }
    return propertyInfo;
}