AADistanceFieldPathBatch(GrColor color, const GrShape& shape, bool antiAlias,
                         const SkMatrix& viewMatrix, GrBatchAtlas* atlas,
                         ShapeCache* shapeCache, ShapeDataList* shapeList,
                         bool gammaCorrect)
        : INHERITED(ClassID()) {
    // Distance-field path batching is keyed on the shape; only shapes with a
    // style-independent key may reach this constructor.
    SkASSERT(shape.hasUnstyledKey());

    // Stash the atlas/cache plumbing shared by every geometry in this batch.
    fAtlas = atlas;
    fShapeCache = shapeCache;
    fShapeList = shapeList;
    fGammaCorrect = gammaCorrect;

    // Record the batch-wide view matrix and the initial geometry.
    fBatch.fViewMatrix = viewMatrix;
    fGeoData.emplace_back(Geometry{color, shape, antiAlias});

    // Device-space bounds for the draw, padded for AA bloat.
    this->setTransformedBounds(shape.bounds(), viewMatrix, HasAABloat::kYes, IsZeroArea::kNo);
}
// Looks up (or creates) the cache entry for 'shape', moves it to the head of the LRU list, and
// returns an on-flush reference to it. Returns an empty OnFlushEntryRef when the shape cannot be
// cached: it has no unstyled key, or its serialized key would exceed kMaxKeyDataCountU32.
//
// NOTE(review): 'maskShift' appears to be an out-param populated by MaskTransform's constructor
// and is read below when translating 'clippedDrawBounds' into mask space — confirm against
// MaskTransform's definition.
GrCCPathCache::OnFlushEntryRef GrCCPathCache::find(
        GrOnFlushResourceProvider* onFlushRP, const GrShape& shape,
        const SkIRect& clippedDrawBounds, const SkMatrix& viewMatrix, SkIVector* maskShift) {
    // Only shapes with a stable, style-independent key can participate in the cache.
    if (!shape.hasUnstyledKey()) {
        return OnFlushEntryRef();
    }

    // Serialize the shape's key into the reusable scratch key; bail if it would be too large.
    WriteKeyHelper writeKeyHelper(shape);
    if (writeKeyHelper.allocCountU32() > kMaxKeyDataCountU32) {
        return OnFlushEntryRef();
    }

    SkASSERT(fScratchKey->unique());
    fScratchKey->resetDataCountU32(writeKeyHelper.allocCountU32());
    writeKeyHelper.write(shape, fScratchKey->data());

    MaskTransform m(viewMatrix, maskShift);
    GrCCPathCacheEntry* entry = nullptr;
    if (HashNode* node = fHashTable.find(*fScratchKey)) {
        // Cache hit — but the cached mask is only reusable if the transform still matches.
        entry = node->entry();
        SkASSERT(fLRU.isInList(entry));
        if (!fuzzy_equals(m, entry->fMaskTransform)) {
            // The path was reused with an incompatible matrix.
            if (entry->unique()) {
                // This entry is unique: recycle it instead of deleting and malloc-ing a new one.
                SkASSERT(0 == entry->fOnFlushRefCnt);  // Because we are unique.
                entry->fMaskTransform = m;
                entry->fHitCount = 0;
                entry->fHitRect = SkIRect::MakeEmpty();
                entry->releaseCachedAtlas(this);
            } else {
                // Someone else still holds a reference; evict the stale entry and fall through
                // to create a fresh one below.
                this->evict(*fScratchKey);
                entry = nullptr;
            }
        }
    }

    if (!entry) {
        // Cache miss (or the stale entry above was evicted). Make room if we are at capacity by
        // evicting the least-recently-used entry, then insert a fresh one.
        if (fHashTable.count() >= kMaxCacheCount) {
            SkDEBUGCODE(HashNode* node = fHashTable.find(*fLRU.tail()->fCacheKey));
            SkASSERT(node && node->entry() == fLRU.tail());
            this->evict(*fLRU.tail()->fCacheKey);  // We've exceeded our limit.
        }

        // Create a new entry in the cache. The permanent key copies the scratch key's data.
        sk_sp<Key> permanentKey = Key::Make(fInvalidatedKeysInbox.uniqueID(),
                                            writeKeyHelper.allocCountU32(), fScratchKey->data());
        SkASSERT(*permanentKey == *fScratchKey);
        SkASSERT(!fHashTable.find(*permanentKey));
        entry = fHashTable.set(HashNode(this, std::move(permanentKey), m, shape))->entry();
        SkASSERT(fHashTable.count() <= kMaxCacheCount);
    } else {
        fLRU.remove(entry);  // Will be re-added at head.
    }

    // Whether found or freshly created, the entry is now the most recently used.
    SkDEBUGCODE(HashNode* node = fHashTable.find(*fScratchKey));
    SkASSERT(node && node->entry() == entry);
    fLRU.addToHead(entry);

    if (0 == entry->fOnFlushRefCnt) {
        // Only update the time stamp and hit count if we haven't seen this entry yet during the
        // current flush.
        entry->fTimestamp = this->quickPerFlushTimestamp();
        ++entry->fHitCount;

        if (entry->fCachedAtlas) {
            SkASSERT(SkToBool(entry->fCachedAtlas->peekOnFlushRefCnt()) ==
                     SkToBool(entry->fCachedAtlas->getOnFlushProxy()));
            if (!entry->fCachedAtlas->getOnFlushProxy()) {
                // First use of this entry during the flush: try to re-establish a proxy for the
                // cached atlas texture by its unique key.
                if (sk_sp<GrTextureProxy> onFlushProxy = onFlushRP->findOrCreateProxyByUniqueKey(
                        entry->fCachedAtlas->textureKey(), GrCCAtlas::kTextureOrigin)) {
                    onFlushProxy->priv().setIgnoredByResourceAllocator();
                    entry->fCachedAtlas->setOnFlushProxy(std::move(onFlushProxy));
                }
            }
            if (!entry->fCachedAtlas->getOnFlushProxy()) {
                // Our atlas's backing texture got purged from the GrResourceCache. Release the
                // cached atlas.
                entry->releaseCachedAtlas(this);
            }
        }
    }

    // Grow the entry's cumulative hit rect by this draw's clipped bounds, translated by the
    // mask shift computed above.
    entry->fHitRect.join(clippedDrawBounds.makeOffset(-maskShift->x(), -maskShift->y()));
    SkASSERT(!entry->fCachedAtlas || entry->fCachedAtlas->getOnFlushProxy());
    return OnFlushEntryRef::OnFlushRef(entry);
}