/**
 * Allocate and initialize the receiver's internal structures.
 *
 * Depending on @a useVmem the backing store is either reserved/committed
 * virtual memory obtained through the memory manager, or a plain forge
 * allocation.
 *
 * @param env the current environment.
 * @param useVmem true to back the array with virtual memory, false to use the forge.
 * @return true on success, false on failure.
 */
bool
MM_ParallelSweepChunkArray::initialize(MM_EnvironmentBase* env, bool useVmem)
{
	MM_GCExtensionsBase* extensions = env->getExtensions();
	bool success = false;

	_useVmem = useVmem;

	if (extensions->isFvtestForceSweepChunkArrayCommitFailure()) {
		/* Test hook: pretend the commit failed without allocating anything. */
		Trc_MM_SweepHeapSectioning_parallelSweepChunkArrayCommitFailureForced(env->getLanguageVMThread());
	} else if (useVmem) {
		MM_MemoryManager* memoryManager = extensions->memoryManager;
		uintptr_t allocateSize = _size * sizeof(MM_ParallelSweepChunk);
		if (memoryManager->createVirtualMemoryForMetadata(env, &_memoryHandle, extensions->heapAlignment, allocateSize)) {
			void* base = memoryManager->getHeapBase(&_memoryHandle);
			success = memoryManager->commitMemory(&_memoryHandle, base, allocateSize);
			if (!success) {
				Trc_MM_SweepHeapSectioning_parallelSweepChunkArrayCommitFailed(env->getLanguageVMThread(), base, allocateSize);
			}
			/* Record the base even on commit failure so teardown can find the reservation. */
			_array = (MM_ParallelSweepChunk*)base;
		}
	} else if (0 != _size) {
		_array = (MM_ParallelSweepChunk*)env->getForge()->allocate(_size * sizeof(MM_ParallelSweepChunk), MM_AllocationCategory::FIXED, OMR_GET_CALLSITE());
		success = (NULL != _array);
	} else {
		/* Empty array: nothing to allocate. */
		success = true;
	}

	return success;
}
/** * Attach a physical arena of the specified size to the receiver. * This reserves the address space within the receiver for the arena, and connects the arena to the list * of those associated to the receiver (in address order). * * @return true if the arena was attached successfully, false otherwise. * @note The memory reseved is not commited. */ bool MM_HeapVirtualMemory::attachArena(MM_EnvironmentBase* env, MM_PhysicalArena* arena, uintptr_t size) { /* Sanity check of the size */ if (getMaximumMemorySize() < size) { return false; } MM_GCExtensionsBase* extensions = env->getExtensions(); MM_MemoryManager* memoryManager = extensions->memoryManager; /* Find the insertion point for the currentArena */ void* candidateBase = memoryManager->getHeapBase(&_vmemHandle); MM_PhysicalArena* insertionHead = NULL; MM_PhysicalArena* insertionTail = _physicalArena; MM_PhysicalArena* currentArena = arena; while (insertionTail) { if ((((uintptr_t)insertionTail->getLowAddress()) - ((uintptr_t)candidateBase)) >= size) { break; } candidateBase = insertionTail->getHighAddress(); insertionHead = insertionTail; insertionTail = insertionTail->getNextArena(); } /* If we have reached the end of the currentArena list, check if there is room between the candidateBase * and the end of virtual memory */ if (!insertionTail) { if ((memoryManager->calculateOffsetToHeapTop(&_vmemHandle, candidateBase)) < size) { return false; } } /* Connect the physical currentArena into the list at the appropriate point */ currentArena->setPreviousArena(insertionHead); currentArena->setNextArena(insertionTail); if (insertionTail) { insertionTail->setPreviousArena(currentArena); } if (insertionHead) { insertionHead->setNextArena(currentArena); } else { _physicalArena = currentArena; } currentArena->setLowAddress(candidateBase); currentArena->setHighAddress((void*)(((uint8_t*)candidateBase) + size)); /* Set the arena state to being attached */ arena->setAttached(true); return true; }
/**
 * Enable the regions covering the heap range described by @a handle in the region table.
 *
 * @param env the current environment.
 * @param handle the memory handle describing the heap extent to enable.
 * @return true (this operation cannot fail).
 */
bool
MM_HeapRegionManagerTarok::enableRegionsInTable(MM_EnvironmentBase *env, MM_MemoryHandle *handle)
{
	MM_GCExtensionsBase *extensions = env->getExtensions();
	MM_MemoryManager *memoryManager = extensions->memoryManager;
	void *lowHeapEdge = memoryManager->getHeapBase(handle);
	void *highHeapEdge = memoryManager->getHeapTop(handle);

	/* maintained for RTJ */
	setNodeAndLinkRegions(env, lowHeapEdge, highHeapEdge, 0);

	return true;
}
/**
 * Register this heap's contiguous range with the region manager and enable its regions.
 *
 * @param env the current environment.
 * @param manager the region manager (just initialized by super) to configure.
 * @return true if the range was set and the regions were enabled, false otherwise.
 */
bool
MM_HeapVirtualMemory::initializeHeapRegionManager(MM_EnvironmentBase* env, MM_HeapRegionManager* manager)
{
	/* since this kind of heap is backed by contiguous memory, tell the heap region manager
	 * that we want to enable this range of regions for later use.
	 */
	MM_MemoryManager* memoryManager = MM_GCExtensionsBase::getExtensions(_omrVM)->memoryManager;
	void* base = memoryManager->getHeapBase(&_vmemHandle);
	void* top = memoryManager->getHeapTop(&_vmemHandle);

	bool result = false;
	if (manager->setContiguousHeapRange(env, base, top)) {
		result = manager->enableRegionsInTable(env, &_vmemHandle);
	}
	return result;
}
/**
 * Find and return the base address of the backing store.
 * The backing store is the memory held by the base array's memory handle.
 * @return base address of the backing store.
 */
void*
MM_SweepHeapSectioning::getBackingStoreAddress()
{
	return _extensions->memoryManager->getHeapBase(&_baseArray->_memoryHandle);
}
/** * Answer the lowest possible address for the heap that will ever be possible. * @return Lowest address possible for the heap. */ void* MM_HeapVirtualMemory::getHeapBase() { MM_MemoryManager* memoryManager = MM_GCExtensionsBase::getExtensions(_omrVM)->memoryManager; return memoryManager->getHeapBase(&_vmemHandle); }