bool SkBitmapProcState::lockBaseBitmap() { SkPixelRef* pr = fOrigBitmap.pixelRef(); if (pr->isLocked() || !pr->implementsDecodeInto()) { // fast-case, no need to look in our cache fScaledBitmap = fOrigBitmap; fScaledBitmap.lockPixels(); if (NULL == fScaledBitmap.getPixels()) { return false; } } else { if (!SkBitmapCache::Find(fOrigBitmap, 1, 1, &fScaledBitmap)) { if (!get_locked_pixels(fOrigBitmap, 0, &fScaledBitmap)) { return false; } // TODO: if fScaled comes back at a different width/height than fOrig, // we need to update the matrix we are using to sample from this guy. SkBitmapCache::Add(fOrigBitmap, 1, 1, fScaledBitmap); } } fBitmap = &fScaledBitmap; return true; }
// Point fBitmap at a locked-pixels bitmap for sampling: either the original
// bitmap locked in place, or a decoded copy held (and locked) in the
// SkScaledImageCache under fScaledCacheID. Returns false on any failure.
// The RAII unlocker releases the cache entry on every early-return path;
// unlocker.release() at the bottom keeps the lock alive on success.
bool SkBitmapProcState::lockBaseBitmap() {
    AutoScaledCacheUnlocker unlocker(&fScaledCacheID);

    SkPixelRef* pr = fOrigBitmap.pixelRef();

    // We must not already be holding a cache lock when we (re)acquire one.
    SkASSERT(NULL == fScaledCacheID);

    if (pr->isLocked() || !pr->implementsDecodeInto()) {
        // fast-case, no need to look in our cache
        fScaledBitmap = fOrigBitmap;
        fScaledBitmap.lockPixels();
        if (NULL == fScaledBitmap.getPixels()) {
            return false;
        }
    } else {
        // Look for an unscaled (1x1 scale factor) cached decode of the original.
        fScaledCacheID = SkScaledImageCache::FindAndLock(fOrigBitmap,
                                                         SK_Scalar1, SK_Scalar1,
                                                         &fScaledBitmap);
        if (fScaledCacheID) {
            fScaledBitmap.lockPixels();
            if (!fScaledBitmap.getPixels()) {
                // Cache hit but the backing pixels are gone — balance the
                // lockPixels() above, then drop the cache lock.
                fScaledBitmap.unlockPixels();
                // found a purged entry (discardablememory?), release it
                SkScaledImageCache::Unlock(fScaledCacheID);
                fScaledCacheID = NULL;
                // fall through to rebuild
            }
        }

        if (NULL == fScaledCacheID) {
            // Miss (or purged hit): decode a fresh copy with locked pixels.
            if (!get_locked_pixels(fOrigBitmap, 0, &fScaledBitmap)) {
                return false;
            }

            // TODO: if fScaled comes back at a different width/height than fOrig,
            // we need to update the matrix we are using to sample from this guy.

            // Publish the decoded copy; AddAndLock leaves us holding the lock.
            fScaledCacheID = SkScaledImageCache::AddAndLock(fOrigBitmap,
                                                            SK_Scalar1, SK_Scalar1,
                                                            fScaledBitmap);
            if (!fScaledCacheID) {
                // Could not insert into the cache — discard the decode and fail.
                fScaledBitmap.reset();
                return false;
            }
        }
    }

    fBitmap = &fScaledBitmap;
    // Success: keep fScaledCacheID locked past this scope (caller unlocks later).
    unlocker.release();
    return true;
}