void ImageDecodingStore::removeFromCacheInternal(const CacheEntry* cacheEntry, Vector<OwnPtr<CacheEntry> >* deletionList) { if (cacheEntry->type() == CacheEntry::TypeImage) { removeFromCacheInternal(static_cast<const ImageCacheEntry*>(cacheEntry), &m_imageCacheMap, &m_imageCacheKeyMap, deletionList); } else if (cacheEntry->type() == CacheEntry::TypeDecoder) { removeFromCacheInternal(static_cast<const DecoderCacheEntry*>(cacheEntry), &m_decoderCacheMap, &m_decoderCacheKeyMap, deletionList); } else { ASSERT(false); } }
void ImageDecodingStore::removeCacheIndexedByGenerator(const ImageFrameGenerator* generator) { Vector<OwnPtr<CacheEntry> > cacheEntriesToDelete; { MutexLocker lock(m_mutex); CachedSizeMap::iterator iter = m_cachedSizeMap.find(generator); if (iter == m_cachedSizeMap.end()) return; // Get all cached sizes indexed by generator. Vector<SkISize> cachedSizeList; copyToVector(iter->value, cachedSizeList); // For each cached size find the corresponding CacheEntry and remove it from // m_cacheMap. for (size_t i = 0; i < cachedSizeList.size(); ++i) { CacheIdentifier key = std::make_pair(generator, cachedSizeList[i]); ASSERT(m_cacheMap.contains(key)); const CacheEntry* cacheEntry = m_cacheMap.get(key); ASSERT(!cacheEntry->useCount()); removeFromCacheInternal(cacheEntry, &cacheEntriesToDelete); } // Remove from cache list as well. removeFromCacheListInternal(cacheEntriesToDelete); } }
void ImageDecodingStore::prune() { TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("blink.image_decoding"), "ImageDecodingStore::prune"); Vector<std::unique_ptr<CacheEntry>> cacheEntriesToDelete; { MutexLocker lock(m_mutex); // Head of the list is the least recently used entry. const CacheEntry* cacheEntry = m_orderedCacheList.head(); // Walk the list of cache entries starting from the least recently used // and then keep them for deletion later. while (cacheEntry) { const bool isPruneNeeded = m_heapMemoryUsageInBytes > m_heapLimitInBytes || !m_heapLimitInBytes; if (!isPruneNeeded) break; // Cache is not used; Remove it. if (!cacheEntry->useCount()) removeFromCacheInternal(cacheEntry, &cacheEntriesToDelete); cacheEntry = cacheEntry->next(); } // Remove from cache list as well. removeFromCacheListInternal(cacheEntriesToDelete); } }
void ImageDecodingStore::prune() { TRACE_EVENT0("webkit", "ImageDecodingStore::prune"); Vector<OwnPtr<CacheEntry> > cacheEntriesToDelete; { MutexLocker lock(m_mutex); // Head of the list is the least recently used entry. const CacheEntry* cacheEntry = m_orderedCacheList.head(); // Walk the list of cache entries starting from the least recently used // and then keep them for deletion later. while (cacheEntry) { const bool isPruneNeeded = m_heapMemoryUsageInBytes > m_heapLimitInBytes || !m_heapLimitInBytes || m_discardableMemoryUsageInBytes > maxTotalSizeOfDiscardableEntries; if (!isPruneNeeded) break; // Cache is not used; Remove it. if (!cacheEntry->useCount()) removeFromCacheInternal(cacheEntry, &cacheEntriesToDelete); cacheEntry = cacheEntry->next(); } // Remove from cache list as well. removeFromCacheListInternal(cacheEntriesToDelete); } }
void ImageDecodingStore::unlockCache(const ImageFrameGenerator* generator, const ScaledImageFragment* cachedImage) { Vector<OwnPtr<CacheEntry> > cacheEntriesToDelete; { MutexLocker lock(m_mutex); cachedImage->bitmap().unlockPixels(); ImageCacheMap::iterator iter = m_imageCacheMap.find(ImageCacheEntry::makeCacheKey(generator, cachedImage->scaledSize(), cachedImage->index(), cachedImage->generation())); ASSERT_WITH_SECURITY_IMPLICATION(iter != m_imageCacheMap.end()); CacheEntry* cacheEntry = iter->value.get(); cacheEntry->decrementUseCount(); // Put the entry to the end of list. m_orderedCacheList.remove(cacheEntry); m_orderedCacheList.append(cacheEntry); // FIXME: This code is temporary such that in the new Skia // discardable memory path we do not cache images. // Once the transition is complete the logic to handle // image caching should be removed entirely. if (!s_imageCachingEnabled && !cacheEntry->useCount()) { removeFromCacheInternal(cacheEntry, &cacheEntriesToDelete); removeFromCacheListInternal(cacheEntriesToDelete); } } }
// Removes every cache entry associated with |generator| from |cacheMap| and
// from its per-generator identifier index |identifierMap|. Removed entries
// are handed to |deletionList| so they can be destroyed outside the store's
// lock (the pattern used throughout this file).
// NOTE(review): the template header (presumably template <class U, class V>)
// is not visible in this chunk — U/V are the cache map and identifier map
// types; confirm against the declaration.
void ImageDecodingStore::removeCacheIndexedByGeneratorInternal(U* cacheMap, V* identifierMap, const ImageFrameGenerator* generator, Vector<OwnPtr<CacheEntry> >* deletionList)
{
    typename V::iterator iter = identifierMap->find(generator);
    // Nothing cached for this generator.
    if (iter == identifierMap->end())
        return;

    // Get all cache identifiers associated with generator. Copy them out so
    // the maps can be mutated while iterating.
    Vector<typename U::KeyType> cacheIdentifierList;
    copyToVector(iter->value, cacheIdentifierList);

    // For each cache identifier find the corresponding CacheEntry and remove it.
    for (size_t i = 0; i < cacheIdentifierList.size(); ++i) {
        ASSERT(cacheMap->contains(cacheIdentifierList[i]));
        const typename U::MappedType::PtrType cacheEntry = cacheMap->get(cacheIdentifierList[i]);
        // Callers must ensure no entry is still in use before removal.
        ASSERT(!cacheEntry->useCount());
        removeFromCacheInternal(cacheEntry, cacheMap, identifierMap, deletionList);
    }
}
// Pins the pixels of |cacheEntry|'s image and hands it out via |cachedImage|.
// Returns false — and evicts the entry — when the backing pixels have been
// discarded; true otherwise, with the entry's use count incremented.
bool ImageDecodingStore::lockCacheEntryInternal(ImageCacheEntry* cacheEntry, const ScaledImageFragment** cachedImage, Vector<OwnPtr<CacheEntry> >* deletionList)
{
    ScaledImageFragment* fragment = cacheEntry->cachedImage();

    fragment->bitmap().lockPixels();
    // The backing memory may have been discarded since the image was cached;
    // if so the entry is useless and must be dropped from the cache.
    if (!fragment->bitmap().getPixels()) {
        fragment->bitmap().unlockPixels();
        removeFromCacheInternal(cacheEntry, &m_imageCacheMap, &m_imageCacheKeyMap, deletionList);
        removeFromCacheListInternal(*deletionList);
        return false;
    }

    cacheEntry->incrementUseCount();
    *cachedImage = fragment;
    return true;
}
void ImageDecodingStore::removeDecoder(const ImageFrameGenerator* generator, const ImageDecoder* decoder) { Vector<OwnPtr<CacheEntry> > cacheEntriesToDelete; { MutexLocker lock(m_mutex); DecoderCacheMap::iterator iter = m_decoderCacheMap.find(DecoderCacheEntry::makeCacheKey(generator, decoder)); ASSERT_WITH_SECURITY_IMPLICATION(iter != m_decoderCacheMap.end()); CacheEntry* cacheEntry = iter->value.get(); ASSERT(cacheEntry->useCount()); cacheEntry->decrementUseCount(); // Delete only one decoder cache entry. Ownership of the cache entry // is transfered to cacheEntriesToDelete such that object can be deleted // outside of the lock. removeFromCacheInternal(cacheEntry, &cacheEntriesToDelete); // Remove from LRU list. removeFromCacheListInternal(cacheEntriesToDelete); } }
// Looks up the cached image for (generator, scaledSize), pins its pixels and
// increments its use count. Returns false when there is no entry, when
// |condition| requires a complete image but the cached one is partial, or
// when the backing pixels have been discarded (in which case the entry is
// evicted). On success |*cachedImage| (and optionally |*decoder|) point at
// the locked entry's data.
bool ImageDecodingStore::lockCache(const ImageFrameGenerator* generator, const SkISize& scaledSize, CacheCondition condition, const ScaledImageFragment** cachedImage, ImageDecoder** decoder)
{
    ASSERT(cachedImage);

    CacheEntry* cacheEntry = 0;
    // Entries evicted under the lock are destroyed here, after release.
    Vector<OwnPtr<CacheEntry> > cacheEntriesToDelete;
    {
        MutexLocker lock(m_mutex);
        CacheMap::iterator iter = m_cacheMap.find(std::make_pair(generator, scaledSize));
        if (iter == m_cacheMap.end())
            return false;
        cacheEntry = iter->value.get();

        ScaledImageFragment* image = cacheEntry->cachedImage();
        // Caller demanded a fully decoded image but this one is partial.
        if (condition == CacheMustBeComplete && !image->isComplete())
            return false;

        // Incomplete cache entry cannot be used more than once.
        ASSERT(image->isComplete() || !cacheEntry->useCount());

        // Pin the pixels; they may have been discarded since caching.
        image->bitmap().lockPixels();
        if (image->bitmap().getPixels()) {
            // Increment use count such that it doesn't get evicted.
            cacheEntry->incrementUseCount();

            // Complete cache entry doesn't have a decoder.
            ASSERT(!image->isComplete() || !cacheEntry->cachedDecoder());

            if (decoder)
                *decoder = cacheEntry->cachedDecoder();
            *cachedImage = image;
        } else {
            // Pixels were discarded: undo the lock and evict the dead entry.
            image->bitmap().unlockPixels();
            removeFromCacheInternal(cacheEntry, &cacheEntriesToDelete);
            removeFromCacheListInternal(cacheEntriesToDelete);
            return false;
        }
    }
    return true;
}