void
MM_HeapVirtualMemory::tearDown(MM_EnvironmentBase* env)
{
	MM_MemoryManager* memoryManager = env->getExtensions()->memoryManager;

	MM_HeapRegionManager* manager = getHeapRegionManager();
	if (NULL != manager) {
		manager->destroyRegionTable(env);
	}

	memoryManager->destroyVirtualMemory(env, &_vmemHandle);

	MM_Heap::tearDown(env);
}
void
MM_SweepSchemeSegregated::incrementalCoalesceFreeRegions(MM_EnvironmentBase *env)
{
	resetCoalesceFreeRegionCount(env);

	MM_GCExtensionsBase *ext = env->getExtensions();
	MM_HeapRegionManager *regionManager = ext->heap->getHeapRegionManager();
	uintptr_t regionCount = regionManager->getTableRegionCount();
	MM_RegionPoolSegregated *regionPool = _memoryPool->getRegionPool();
	MM_FreeHeapRegionList *coalesceFreeList = regionPool->getCoalesceFreeList();
	uintptr_t yieldSlackTime = resetCoalesceFreeRegionCount(env);
	yieldFromSweep(env, yieldSlackTime);

	/* Move the existing single- and multi-region free lists onto the coalesce list so adjacent free regions can be merged. */
	coalesceFreeList->push(regionPool->getSingleFreeList());
	coalesceFreeList->push(regionPool->getMultiFreeList());

	MM_HeapRegionDescriptorSegregated *coalescing = NULL;
	MM_HeapRegionDescriptorSegregated *currentRegion = NULL;

	/* Walk the region table by run length, joining adjacent free regions into larger ranges and yielding periodically. */
	for (uintptr_t i = 0; i < regionCount; ) {
		currentRegion = (MM_HeapRegionDescriptorSegregated *)regionManager->mapRegionTableIndexToDescriptor(i);
		uintptr_t range = currentRegion->getRange();
		i += range;
		bool shouldYield = updateCoalesceFreeRegionCount(range);
		bool shouldClose = shouldYield || (i >= regionCount);

		if (currentRegion->isFree()) {
			coalesceFreeList->detach(currentRegion);
			bool joined = (range < MAX_REGION_COALESCE) && (coalescing != NULL && coalescing->joinFreeRangeInit(currentRegion));
			if (joined) {
				currentRegion = NULL;
			} else {
				shouldClose = true;
			}
		} else {
			currentRegion = NULL;
		}

		if (shouldClose && (coalescing != NULL)) {
			coalescing->joinFreeRangeComplete();
			regionPool->addFreeRegion(env, coalescing, true);
			coalescing = NULL;
		}

		if (shouldYield) {
			if (currentRegion != NULL) {
				regionPool->addFreeRegion(env, currentRegion, true);
				currentRegion = NULL;
			}
			yieldFromSweep(env, yieldSlackTime);
		} else {
			if (coalescing == NULL) {
				coalescing = currentRegion;
			}
		}
	}

	if (currentRegion != NULL) {
		regionPool->addFreeRegion(env, currentRegion, true);
		currentRegion = NULL;
	}
	yieldFromSweep(env);
}
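The loop above is dense, so here is a minimal, self-contained sketch of the same coalescing shape: walk the region table by run length, accumulate adjacent free runs, and close the accumulated run when a non-free run, a yield point, or the end of the table is reached. The data model and helper names below are hypothetical stand-ins, and details such as MAX_REGION_COALESCE and the free-list bookkeeping are intentionally omitted.

#include <cstdint>
#include <cstdio>
#include <vector>

/* Hypothetical stand-in for a region table entry: each entry covers `range`
 * consecutive regions and is either free or in use. */
struct RegionRun {
	uintptr_t range;
	bool isFree;
};

/* Merge adjacent free runs into larger runs, "yielding" every few regions. */
static void coalesceFreeRuns(std::vector<RegionRun> &table, uintptr_t yieldThreshold)
{
	uintptr_t processedSinceYield = 0;
	uintptr_t coalescedRange = 0; /* size of the open free run being built; 0 means none */

	for (size_t i = 0; i < table.size(); ++i) {
		RegionRun &run = table[i];
		processedSinceYield += run.range;
		bool shouldYield = (processedSinceYield >= yieldThreshold);

		if (run.isFree) {
			coalescedRange += run.range; /* extend the open free run */
		}

		/* Close the open run at a non-free entry, at a yield point, or at the end of the table. */
		bool shouldClose = shouldYield || !run.isFree || (i + 1 == table.size());
		if (shouldClose && (coalescedRange > 0)) {
			printf("free run of %zu regions returned to the free list\n", (size_t)coalescedRange);
			coalescedRange = 0;
		}

		if (shouldYield) {
			/* In the real sweep this is where yieldFromSweep() is called; here we just reset the counter. */
			processedSinceYield = 0;
		}
	}
}

int main()
{
	std::vector<RegionRun> table = {{1, true}, {2, true}, {1, false}, {3, true}, {1, true}};
	coalesceFreeRuns(table, 4);
	return 0;
}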
MMINLINE uintptr_t
divideUpRegion(uintptr_t size)
{
	/* TODO: use shift operation once region log2 size is introduced */
	return (size + _heapRegionManager->getRegionSize() - 1) / _heapRegionManager->getRegionSize();
}

MMINLINE uintptr_t
roundDownRegion(uintptr_t size)
{
	return (size) & (~(_heapRegionManager->getRegionSize() - 1));
}
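Both helpers rely on the region size being a power of two: roundDownRegion() already masks off the low bits, and the TODO in divideUpRegion() notes that the division could become a shift once the log2 of the region size is available. A small standalone sketch of that arithmetic, using an assumed 64KB region size purely for illustration:

#include <cassert>
#include <cstdint>

int main()
{
	const uintptr_t regionSize = 64 * 1024; /* assumed power-of-two region size */
	const uintptr_t regionLog2 = 16;        /* log2(64 * 1024) */

	uintptr_t size = 100 * 1024;

	/* divideUpRegion: number of regions needed to cover `size`, rounding up */
	uintptr_t regions = (size + regionSize - 1) / regionSize;
	assert(regions == ((size + regionSize - 1) >> regionLog2)); /* the shift form from the TODO */
	assert(regions == 2);

	/* roundDownRegion: truncate `size` to a region boundary */
	uintptr_t rounded = size & ~(regionSize - 1);
	assert(rounded == 64 * 1024);

	return 0;
}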
bool
MM_HeapVirtualMemory::initialize(MM_EnvironmentBase* env, uintptr_t size)
{
	/* call the superclass to initialize before we do any work */
	if (!MM_Heap::initialize(env)) {
		return false;
	}

	MM_GCExtensionsBase* extensions = env->getExtensions();

	uintptr_t padding = extensions->heapTailPadding;

	uintptr_t effectiveHeapAlignment = _heapAlignment;
	/* we need to ensure that we allocate the heap with region alignment since the region table requires that */
	MM_HeapRegionManager* manager = getHeapRegionManager();
	effectiveHeapAlignment = MM_Math::roundToCeiling(manager->getRegionSize(), effectiveHeapAlignment);

	MM_MemoryManager* memoryManager = extensions->memoryManager;
	bool created = false;
	bool forcedOverflowProtection = false;

	/* Under -Xaggressive ensure a full page of padding -- see JAZZ103 45254 */
	if (extensions->padToPageSize) {
#if (defined(AIXPPC) && !defined(PPC64))
		/*
		 * An attempt to allocate the heap with its top at 0xffffffff.
		 * In this case extra padding is not required because the overflow protection padding can be used instead.
		 */
		uintptr_t effectiveSize = MM_Math::roundToCeiling(manager->getRegionSize(), size);
		void *preferredHeapBase = (void *)((uintptr_t)0 - effectiveSize);

		created = memoryManager->createVirtualMemoryForHeap(env, &_vmemHandle, effectiveHeapAlignment, size, padding, preferredHeapBase, (void *)(extensions->heapCeiling));
		if (created) {
			/* overflow protection must be in place to play the role of padding, even if the top is not that close to the end of the memory */
			forcedOverflowProtection = true;
		} else
#endif /* (defined(AIXPPC) && !defined(PPC64)) */
		{
			/* Ignore extra full page padding if page size is too large (hard coded here for 1G or larger) */
#define ONE_GB ((uintptr_t)1 * 1024 * 1024 * 1024)
			if (extensions->requestedPageSize < ONE_GB) {
				if (padding < extensions->requestedPageSize) {
					padding = extensions->requestedPageSize;
				}
			}
		}
	}

	if (!created && !memoryManager->createVirtualMemoryForHeap(env, &_vmemHandle, effectiveHeapAlignment, size, padding, (void*)(extensions->preferredHeapBase), (void*)(extensions->heapCeiling))) {
		return false;
	}

	/* Check we haven't overflowed the address range */
	if (forcedOverflowProtection
		|| (HIGH_ADDRESS - ((uintptr_t)memoryManager->getHeapTop(&_vmemHandle)) < (OVERFLOW_ROUNDING))
		|| extensions->fvtest_alwaysApplyOverflowRounding) {
		/* Address range overflow */
		memoryManager->roundDownTop(&_vmemHandle, OVERFLOW_ROUNDING);
	}
	extensions->overflowSafeAllocSize = ((HIGH_ADDRESS - (uintptr_t)(memoryManager->getHeapTop(&_vmemHandle))) + 1);

	/* The memory returned might be less than we asked for -- get the actual size */
	_maximumMemorySize = memoryManager->getMaximumSize(&_vmemHandle);

	return true;
}
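The overflow check at the end is easy to misread, so here is a minimal standalone sketch of the idea: if the heap top lands within OVERFLOW_ROUNDING bytes of the highest address, pull the top down so that pointer arithmetic of the form top + delta cannot wrap past the end of the address space, then record the largest delta that is still safe. The constant values below are assumptions for illustration only, not the values used by the collector.

#include <cassert>
#include <cstdint>

int main()
{
	const uintptr_t HIGH_ADDRESS = UINTPTR_MAX;
	const uintptr_t OVERFLOW_ROUNDING = 16 * 1024; /* assumed rounding amount */

	uintptr_t heapTop = HIGH_ADDRESS - 4096; /* the heap landed very near the top of the address space */

	if ((HIGH_ADDRESS - heapTop) < OVERFLOW_ROUNDING) {
		heapTop -= OVERFLOW_ROUNDING; /* pull the top down, as roundDownTop() does */
	}

	/* overflowSafeAllocSize: the largest size that can be added to heapTop
	 * without wrapping around the address space. */
	uintptr_t overflowSafeAllocSize = (HIGH_ADDRESS - heapTop) + 1;
	assert(heapTop + (overflowSafeAllocSize - 1) == HIGH_ADDRESS);

	return 0;
}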