void assert_empty(const HeapBitmap *hb) { assert(hb->bits != NULL); assert(hb->bitsLen >= HB_OFFSET_TO_INDEX(HEAP_SIZE)); assert(hb->base == (uintptr_t)HEAP_BASE); assert(hb->max < hb->base); assert(is_zeroed(hb)); assert(!dvmHeapBitmapMayContainObject(hb, HEAP_BASE)); assert(!dvmHeapBitmapMayContainObject(hb, HEAP_BASE + HB_OBJECT_ALIGNMENT)); assert(!dvmHeapBitmapMayContainObject(hb, HEAP_BASE + HEAP_SIZE - HB_OBJECT_ALIGNMENT)); assert(!dvmHeapBitmapMayContainObject(hb, HEAP_BASE + HEAP_SIZE)); assert(!dvmHeapBitmapIsObjectBitSet(hb, HEAP_BASE)); assert(!dvmHeapBitmapIsObjectBitSet(hb, HEAP_BASE + HB_OBJECT_ALIGNMENT)); assert(!dvmHeapBitmapIsObjectBitSet(hb, HEAP_BASE + HEAP_SIZE - HB_OBJECT_ALIGNMENT)); }
/*
 * Return true iff <obj> is within the range of pointers that this
 * bitmap could potentially cover, even if a bit has not been set
 * for it.
 */
bool dvmHeapBitmapCoversAddress(const HeapBitmap *hb, const void *obj)
{
    assert(hb != NULL);

    if (obj == NULL) {
        /* NULL is never covered. */
        return false;
    }

    /* Covered iff the word index for this address falls inside the
     * allocated bit storage.
     */
    const uintptr_t offset = (uintptr_t)obj - hb->base;
    const size_t wordCount = hb->bitsLen / sizeof(*hb->bits);
    return HB_OFFSET_TO_INDEX(offset) < wordCount;
}
void test_init() { HeapBitmap hb; bool ok; memset(&hb, 0x55, sizeof(hb)); ok = dvmHeapBitmapInit(&hb, HEAP_BASE, HEAP_SIZE, "test"); assert(ok); assert(hb.bits != NULL); assert(hb.bitsLen >= HB_OFFSET_TO_INDEX(HEAP_SIZE)); assert(hb.base == (uintptr_t)HEAP_BASE); assert(hb.max < hb.base); /* Make sure hb.bits is mapped. */ *hb.bits = 0x55; assert(*hb.bits = 0x55); *hb.bits = 0; #define TEST_UNMAP 0 #if TEST_UNMAP /* Hold onto this to make sure it's unmapped later. */ unsigned long int *bits = hb.bits; #endif dvmHeapBitmapDelete(&hb); assert(hb.bits == NULL); assert(hb.bitsLen == 0); assert(hb.base == 0); assert(hb.max == 0); #if TEST_UNMAP /* This pointer shouldn't be mapped anymore. */ *bits = 0x55; assert(!"Should have segfaulted"); #endif }
/*
 * Initialize a HeapBitmap so that it points to a bitmap large
 * enough to cover a heap at <base> of <maxSize> bytes, where
 * objects are guaranteed to be HB_OBJECT_ALIGNMENT-aligned.
 *
 * Returns true on success; on failure the bitmap is left untouched
 * and an error is logged.
 */
bool dvmHeapBitmapInit(HeapBitmap *hb, const void *base, size_t maxSize,
                       const char *name)
{
    void *bits;
    size_t bitsLen;
    size_t allocLen;
    int fd;
    char nameBuf[ASHMEM_NAME_LEN] = HB_ASHMEM_NAME;

    assert(hb != NULL);

    /* One bit per HB_OBJECT_ALIGNMENT bytes of heap. */
    bitsLen = HB_OFFSET_TO_INDEX(maxSize) * sizeof(*hb->bits);
    allocLen = ALIGN_UP_TO_PAGE_SIZE(bitsLen);   // required by ashmem
    if (name != NULL) {
        snprintf(nameBuf, sizeof(nameBuf), HB_ASHMEM_NAME "/%s", name);
    }
    fd = ashmem_create_region(nameBuf, allocLen);
    if (fd < 0) {
        LOGE("Could not create %zu-byte ashmem region \"%s\" to cover "
             "%zu-byte heap (%d)\n",
             allocLen, nameBuf, maxSize, fd);
        return false;
    }

    bits = mmap(NULL, bitsLen, PROT_READ | PROT_WRITE, MAP_PRIVATE, fd, 0);
    /* The mapping keeps the region alive; the fd is no longer needed. */
    close(fd);
    if (bits == MAP_FAILED) {
        /* BUG FIX: bitsLen is size_t; the original "%d" specifier is a
         * type mismatch (undefined behavior, wrong output on LP64). */
        LOGE("Could not mmap %zu-byte ashmem region \"%s\"\n",
             bitsLen, nameBuf);
        return false;
    }

    memset(hb, 0, sizeof(*hb));
    hb->bits = bits;
    hb->bitsLen = bitsLen;
    hb->base = (uintptr_t)base;
    /* max < base encodes "no bits set yet". */
    hb->max = hb->base - 1;

    return true;
}
/*
 * Visits set bits in address order. The callback is not permitted to
 * change the bitmap bits or max during the traversal.
 */
void dvmHeapBitmapWalk(const HeapBitmap *bitmap, BitmapCallback *callback,
                       void *arg)
{
    assert(bitmap != NULL);
    assert(bitmap->bits != NULL);
    assert(callback != NULL);

    /* BUG FIX: when no bit has ever been set, max < base and the
     * subtraction below would underflow, walking far past the bitmap. */
    if (bitmap->max < bitmap->base) {
        return;
    }

    uintptr_t end = HB_OFFSET_TO_INDEX(bitmap->max - bitmap->base);
    for (uintptr_t i = 0; i <= end; ++i) {
        unsigned long word = bitmap->bits[i];
        if (UNLIKELY(word != 0)) {
            /* BUG FIX: the original shifted a plain int ('1 <<'), which
             * is undefined behavior for the sign bit and for word sizes
             * wider than int; shift an unsigned long instead. */
            unsigned long highBit = 1UL << (HB_BITS_PER_WORD - 1);
            uintptr_t ptrBase = HB_INDEX_TO_OFFSET(i) + bitmap->base;
            /* Peel off set bits from most- to least-significant. */
            while (word != 0) {
                const int shift = CLZ(word);
                Object* obj = (Object *)(ptrBase + shift * HB_OBJECT_ALIGNMENT);
                (*callback)(obj, arg);
                word &= ~(highBit >> shift);
            }
        }
    }
    /* BUG FIX: the original text was missing this closing brace. */
}
/*
 * Initialize a HeapBitmap so that it points to a bitmap large
 * enough to cover a heap at <base> of <maxSize> bytes, where
 * objects are guaranteed to be HB_OBJECT_ALIGNMENT-aligned.
 *
 * Returns true on success, false (with an error logged) if the
 * backing region could not be mapped.
 */
bool dvmHeapBitmapInit(HeapBitmap *hb, const void *base, size_t maxSize,
                       const char *name)
{
    assert(hb != NULL);
    assert(name != NULL);

    /* One bitmap word per HB_BITS_PER_WORD alignment units of heap. */
    const size_t bitsLen = HB_OFFSET_TO_INDEX(maxSize) * sizeof(*hb->bits);
    void *bits = dvmAllocRegion(bitsLen, PROT_READ | PROT_WRITE, name);
    if (bits == NULL) {
        ALOGE("Could not mmap %zd-byte ashmem region '%s'", bitsLen, name);
        return false;
    }

    hb->bits = (unsigned long *)bits;
    hb->bitsLen = hb->allocLen = bitsLen;
    hb->base = (uintptr_t)base;
    /* max < base marks the bitmap as empty. */
    hb->max = hb->base - 1;
    return true;
}
/*
 * Marks every object in heaps that lie entirely below <immuneLimit>
 * by copying their live-bit words into the mark bitmap, so the
 * collector treats them as already marked.
 *
 * immuneLimit is either NULL (nothing is immune) or heaps[0].base;
 * heap[0] itself is never immune. The live and mark bitmaps must
 * share the same base and length so word indices line up.
 */
void dvmMarkImmuneObjects(const char *immuneLimit)
{
    /*
     * Copy the contents of the live bit vector for immune object
     * range into the mark bit vector.
     */
    /* The only values generated by dvmHeapSourceGetImmuneLimit() */
    assert(immuneLimit == gHs->heaps[0].base ||
           immuneLimit == NULL);

    /* Both bitmaps must cover the same address range with the same
     * storage layout for the word-for-word copy below to be valid. */
    assert(gHs->liveBits.base == gHs->markBits.base);
    assert(gHs->liveBits.bitsLen == gHs->markBits.bitsLen);

    for (size_t i = 1; i < gHs->numHeaps; ++i) {
        if (gHs->heaps[i].base < immuneLimit) {
            assert(gHs->heaps[i].limit <= immuneLimit);
            /* Compute the number of words to copy in the bitmap. */
            size_t index = HB_OFFSET_TO_INDEX(
                (uintptr_t)gHs->heaps[i].base - gHs->liveBits.base);
            /* Compute the starting offset in the live and mark bits. */
            char *src = (char *)(gHs->liveBits.bits + index);
            char *dst = (char *)(gHs->markBits.bits + index);
            /* Compute the number of bytes of the live bitmap to copy. */
            size_t length = HB_OFFSET_TO_BYTE_INDEX(
                gHs->heaps[i].limit - gHs->heaps[i].base);
            /* Do the copy. */
            memcpy(dst, src, length);
            /* Make sure max points to the address of the highest set bit. */
            if (gHs->markBits.max < (uintptr_t)gHs->heaps[i].limit) {
                gHs->markBits.max = (uintptr_t)gHs->heaps[i].limit;
            }
        }
    }
}
/*
 * Walk through the bitmaps in increasing address order, and find the
 * object pointers that correspond to places where the bitmaps differ.
 * Call <callback> zero or more times with lists of these object pointers.
 *
 * The <finger> argument to the callback indicates the next-highest
 * address that hasn't been visited yet; setting bits for objects whose
 * addresses are less than <finger> are not guaranteed to be seen by
 * the current XorWalk. <finger> will be set to ULONG_MAX when the
 * end of the bitmap is reached.
 */
bool dvmHeapBitmapXorWalk(const HeapBitmap *hb1, const HeapBitmap *hb2,
        bool (*callback)(size_t numPtrs, void **ptrs,
                         const void *finger, void *arg),
        void *callbackArg)
{
    /* Pointers are accumulated here and handed to the callback in
     * batches; 128 slots leaves room for a full word of set bits. */
    static const size_t kPointerBufSize = 128;
    void *pointerBuf[kPointerBufSize];
    void **pb = pointerBuf;
    size_t index;
    size_t i;

/* Hand the accumulated pointers to the callback (with finger_ as the
 * resume point) and reset the buffer; bails out on callback failure. */
#define FLUSH_POINTERBUF(finger_) \
    do { \
        if (!callback(pb - pointerBuf, (void **)pointerBuf, \
                (void *)(finger_), callbackArg)) \
        { \
            LOGW("dvmHeapBitmapXorWalk: callback failed\n"); \
            return false; \
        } \
        pb = pointerBuf; \
    } while (false)

/* Convert the set bits of one bitmap word (bits_) at word index i into
 * object pointers, flushing the buffer when it may not fit another
 * full word.  update_index_ re-reads hb_->max after a flush, since the
 * callback may have set new bits above the old max. */
#define DECODE_BITS(hb_, bits_, update_index_) \
    do { \
        if (UNLIKELY(bits_ != 0)) { \
            static const unsigned long kHighBit = \
                    (unsigned long)1 << (HB_BITS_PER_WORD - 1); \
            const uintptr_t ptrBase = HB_INDEX_TO_OFFSET(i) + hb_->base; \
/*TODO: hold onto ptrBase so we can shrink max later if possible */ \
/*TODO: see if this is likely or unlikely */ \
            while (bits_ != 0) { \
                const int rshift = CLZ(bits_); \
                bits_ &= ~(kHighBit >> rshift); \
                *pb++ = (void *)(ptrBase + rshift * HB_OBJECT_ALIGNMENT); \
            } \
            /* Make sure that there are always enough slots available */ \
            /* for an entire word of 1s. */ \
            if (kPointerBufSize - (pb - pointerBuf) < HB_BITS_PER_WORD) { \
                FLUSH_POINTERBUF(ptrBase + \
                        HB_BITS_PER_WORD * HB_OBJECT_ALIGNMENT); \
                if (update_index_) { \
                    /* The callback may have caused hb_->max to grow. */ \
                    index = HB_OFFSET_TO_INDEX(hb_->max - hb_->base); \
                } \
            } \
        } \
    } while (false)

    assert(hb1 != NULL);
    assert(hb1->bits != NULL);
    assert(hb2 != NULL);
    assert(hb2->bits != NULL);
    assert(callback != NULL);

    /* The walk only makes sense for bitmaps with identical coverage. */
    if (hb1->base != hb2->base) {
        LOGW("dvmHeapBitmapXorWalk: bitmaps cover different heaps "
                "(0x%08x != 0x%08x)\n",
                (uintptr_t)hb1->base, (uintptr_t)hb2->base);
        return false;
    }
    if (hb1->bitsLen != hb2->bitsLen) {
        LOGW("dvmHeapBitmapXorWalk: size of bitmaps differ (%zd != %zd)\n",
                hb1->bitsLen, hb2->bitsLen);
        return false;
    }
    if (hb1->max < hb1->base && hb2->max < hb2->base) {
        /* Easy case; both are obviously empty.
         */
        return true;
    }

    /* First, walk along the section of the bitmaps that may be the same.
     */
    if (hb1->max >= hb1->base && hb2->max >= hb2->base) {
        unsigned long int *p1, *p2;
        uintptr_t offset;

        /* Walk only up to the lower of the two maxes; the remainder of
         * the longer bitmap is handled by the second loop below. */
        offset = ((hb1->max < hb2->max) ? hb1->max : hb2->max) - hb1->base;
        //TODO: keep track of which (and whether) one is longer for later
        index = HB_OFFSET_TO_INDEX(offset);

        p1 = hb1->bits;
        p2 = hb2->bits;
        for (i = 0; i <= index; i++) {
            //TODO: unroll this. pile up a few in locals?
            unsigned long int diff = *p1++ ^ *p2++;
            DECODE_BITS(hb1, diff, false);
            //BUG: if the callback was called, either max could have changed.
        }
        /* The next index to look at.
         */
        index++;
    } else {
        /* One of the bitmaps is empty.
         */
        index = 0;
    }

    /* If one bitmap's max is larger, walk through the rest of the
     * set bits.  Every set bit past the common region is a difference
     * by definition, so the words are decoded directly (no XOR).
     */
    const HeapBitmap *longHb;
    unsigned long int *p;
    //TODO: may be the same size, in which case this is wasted work
    longHb = (hb1->max > hb2->max) ? hb1 : hb2;
    i = index;
    index = HB_OFFSET_TO_INDEX(longHb->max - longHb->base);
    p = longHb->bits + i;
    for (/* i = i */; i <= index; i++) {
        //TODO: unroll this
        unsigned long bits = *p++;
        DECODE_BITS(longHb, bits, true);
    }

    if (pb > pointerBuf) {
        /* Set the finger to the end of the heap (rather than longHb->max)
         * so that the callback doesn't expect to be called again
         * if it happens to change the current max.
         */
        FLUSH_POINTERBUF(longHb->base + HB_MAX_OFFSET(longHb));
    }

    return true;

#undef FLUSH_POINTERBUF
#undef DECODE_BITS
}