/** * Attempt to allocate an object in this TLH. */ void * MM_TLHAllocationSupport::allocateFromTLH(MM_EnvironmentBase *env, MM_AllocateDescription *allocDescription, bool shouldCollectOnFailure) { void *memPtr = NULL; Assert_MM_true(!env->getExtensions()->isSegregatedHeap()); uintptr_t sizeInBytesRequired = allocDescription->getContiguousBytes(); /* If there's insufficient space, refresh the current TLH */ if (sizeInBytesRequired > getSize()) { refresh(env, allocDescription, shouldCollectOnFailure); } /* Try to fit the allocate into the current TLH */ if(sizeInBytesRequired <= getSize()) { memPtr = (void *)getAlloc(); setAlloc((void *)((uintptr_t)getAlloc() + sizeInBytesRequired)); #if defined(OMR_GC_TLH_PREFETCH_FTA) if (*_pointerToTlhPrefetchFTA < (intptr_t)sizeInBytesRequired) { *_pointerToTlhPrefetchFTA = 0; } else { *_pointerToTlhPrefetchFTA -= (intptr_t)sizeInBytesRequired; } #endif /* OMR_GC_TLH_PREFETCH_FTA */ allocDescription->setObjectFlags(getObjectFlags()); allocDescription->setMemorySubSpace((MM_MemorySubSpace *)_tlh->memorySubSpace); allocDescription->completedFromTlh(); } return memPtr; };
/*
 * free -- return a previously allocated block to the heap and coalesce it
 * with any free neighbours.
 *
 * ptr is the payload pointer handed out by malloc; NULL and out-of-heap
 * pointers are ignored.  Block layout (per the helpers used here): one-unit
 * header at ptr-1, one-unit footer at header + size + 1.
 * NOTE: arithmetic on void* is a GCC extension (treated as char*).
 */
void free(void *ptr)
{
	unsigned oldS;   /* size of the block being freed */
	unsigned neighS; /* size of an adjacent free block */
	void *nextPtr;   /* header of the following block */

	if (!ptr || !in_heap(ptr))
		return;

	ptr--; /* step back from payload to header */
	setAlloc(ptr, 0);                         /* clear allocated bit in header */
	setAlloc(ptr + (block_size(ptr) + 1), 0); /* ...and in footer */

	/* Coalesce backwards: merge with the preceding block if it is free. */
	if (!is_alloc(ptr - 1)) {
		oldS = block_size(ptr);
		neighS = block_size(ptr - 1);
		ptr -= (neighS + 2); /* move to preceding block's header */
		/* +2: the absorbed footer/header pair becomes payload */
		createBlock(ptr, neighS + oldS + 2, 0);
	}

	/*
	 * Coalesce forwards (independent of the backwards merge; ptr now
	 * names the possibly-enlarged block).
	 */
	nextPtr = ptr + block_size(ptr) + 2;
	if (!is_alloc(nextPtr)) {
		/*
		 * FIX: the merged size must include the current block's size as
		 * well as the neighbour's, mirroring the backwards case above.
		 * The original passed only block_size(nextPtr)+2, silently
		 * shrinking the coalesced block.
		 */
		createBlock(ptr, block_size(ptr) + block_size(nextPtr) + 2, 0);
	}
}
/*
 * Insert newObj_P at slot "offset", shifting later elements up by one.
 * Grows the backing store (by expandBy, falling back to a single slot)
 * when full.  Returns 1 on success, 0 if offset is out of range or no
 * space could be obtained.
 */
uSHORT baseArray_C::addAt(void *newObj_P, uSHORT offset)
{
  uSHORT retVal = 0;

  // If the offset is within bounds...
  if (offset <= objCount)
  {
    // If there is no space left...
    if ((objCount >= totalObjs) && (expandBy > 0))
    {
      // Attempt to allocate space for "expandBy" more objects
      setAlloc(totalObjs + expandBy);

      // If that failed (and a single-slot request would be smaller)...
      if ((objCount >= totalObjs) && (expandBy > 1))
        // Attempt to allocate space for 1 more object
        setAlloc(totalObjs + 1);
    }

    // If there is space for another object...
    if (objCount < totalObjs)
    {
      retVal = 1;
      objCount++;

      // Shift elements [offset, objCount-2] one slot towards the end.
      // FIX: the original indexed with a uSHORT loop counter, which
      // truncates byte offsets above 65535; memmove uses size_t and
      // handles the overlapping ranges correctly.
      size_t tailBytes = (size_t)(objCount - 1 - offset) * objSize;
      if (tailBytes > 0)
        memmove(buff_P + (size_t)(offset + 1) * objSize,
                buff_P + (size_t)offset * objSize,
                tailBytes);

      // Copy the new object into the vacated slot at "offset"
      memcpy(buff_P + (size_t)offset * objSize, newObj_P, objSize);
    }
  }
  return (retVal);
}
/* Given a pointer to a free block's header, mark it allocated and return
 * the payload pointer to hand back from malloc().
 * Assumes the data fits inside the block (blockSize >= size); sets alloc=1.
 * Splits off the remainder as a new free block when it is big enough to
 * hold a header, a footer, and at least one unit of data.
 */
void *malloc_here(void *currentBlock, unsigned size)
{
	unsigned blockSize = block_size(currentBlock);
	unsigned newBlockSize;
	void *workingPtr = currentBlock;

	/*
	 * FIX: the original test was (blockSize - size - 2) > 0, which is
	 * unsigned arithmetic -- when blockSize < size + 2 it wraps to a huge
	 * value and wrongly forces a split with a garbage remainder size.
	 * blockSize > size + 2 expresses the same intent without underflow.
	 */
	if (blockSize > size + 2) {
		newBlockSize = blockSize - size - 2;
		/* createBlock returns a pointer past the block it writes */
		workingPtr = createBlock(workingPtr, size, 1); /* allocated front part */
		createBlock(workingPtr, newBlockSize, 0);      /* free remainder */
		return (currentBlock + 1);
	} else {
		/* Use the whole block: set allocated bit in header and footer. */
		setAlloc(currentBlock, 1);
		setAlloc(currentBlock + blockSize + 1, 1);
		return (currentBlock + 1);
	}
}
void MM_TLHAllocationSupport::setupTLH(MM_EnvironmentBase *env, void *addrBase, void *addrTop, MM_MemorySubSpace *memorySubSpace, MM_MemoryPool *memoryPool) { MM_GCExtensionsBase *extensions = env->getExtensions(); if (extensions->doFrequentObjectAllocationSampling){ updateFrequentObjectsStats(env); } /* Set the new TLH values */ setBase(addrBase); setAlloc(addrBase); setTop(addrTop); if (NULL != memorySubSpace) { setObjectFlags(memorySubSpace->getObjectFlags()); } setMemoryPool(memoryPool); setMemorySubSpace(memorySubSpace); #if defined(OMR_GC_TLH_PREFETCH_FTA) *_pointerToTlhPrefetchFTA = 0; #endif /* OMR_GC_TLH_PREFETCH_FTA */ }