/* Returns the allocation size `allocator` would actually hand out for a
 * request of `size` bytes (with allocation `hint` flags), so callers can
 * grow buffers straight to that size.  Never returns less than `size`. */
CFIndex CFAllocatorGetPreferredSizeForSize(CFAllocatorRef allocator, CFIndex size, CFOptionFlags hint) {
    CFAllocatorPreferredSizeCallBack prefFunc;
    CFIndex newsize = 0;
    // Map the special GC "ref zero" allocators onto a real allocator first;
    // NULL means "the current default allocator".
    if (kCFAllocatorSystemDefaultGCRefZero == allocator) {
        allocator = kCFAllocatorSystemDefault;
    } else if (kCFAllocatorDefaultGCRefZero == allocator) {
        // Under GC, we can't use just any old allocator when the GCRefZero allocator was requested
        allocator = kCFUseCollectableAllocator ? kCFAllocatorSystemDefault : __CFGetDefaultAllocator();
    } else if (NULL == allocator) {
        allocator = __CFGetDefaultAllocator();
    }
#if defined(DEBUG) && (DEPLOYMENT_TARGET_MACOSX || DEPLOYMENT_TARGET_EMBEDDED || DEPLOYMENT_TARGET_EMBEDDED_MINI)
    // Debug builds on Darwin targets: only type-check when the pointer is a
    // genuine CFAllocator (it may instead be a bare malloc zone — see below).
    if (allocator->_base._cfisa == __CFISAForTypeID(__kCFAllocatorTypeID)) {
        __CFGenericValidateType(allocator, __kCFAllocatorTypeID);
    }
#else
    __CFGenericValidateType(allocator, __kCFAllocatorTypeID);
#endif
#if DEPLOYMENT_TARGET_MACOSX || DEPLOYMENT_TARGET_EMBEDDED || DEPLOYMENT_TARGET_EMBEDDED_MINI
    if (allocator->_base._cfisa != __CFISAForTypeID(__kCFAllocatorTypeID)) {
        // Not a CFAllocator at all: treat it as a malloc_zone_t * and let
        // malloc report its rounding.
        return malloc_good_size(size);
    }
#endif
    // Consult the allocator's optional preferred-size callback, if any.
    prefFunc = __CFAllocatorGetPreferredSizeFunction(&allocator->_context);
    if (0 < size && NULL != prefFunc) {
        newsize = (CFIndex)(INVOKE_CALLBACK3(prefFunc, size, hint, allocator->_context.info));
    }
    // The callback may not shrink the request below what was asked for.
    if (newsize < size) newsize = size;
    return newsize;
}
// Round a requested byte count up to the size the underlying allocator
// would actually reserve for it.  When MOZ_MEMORY is not defined,
// malloc_good_size() is unavailable and the request is returned unchanged.
static size_t MallocGoodSize(size_t aSize)
{
#ifdef MOZ_MEMORY
  size_t rounded = malloc_good_size(aSize);
  return rounded;
#else
  return aSize;
#endif
}
/* Returns the allocation size `allocator` would actually hand out for a
 * request of `size` bytes (with allocation `hint` flags), so callers can
 * grow buffers straight to that size.  Never returns less than `size`. */
CFIndex CFAllocatorGetPreferredSizeForSize(CFAllocatorRef allocator, CFIndex size, CFOptionFlags hint) {
    CFAllocatorPreferredSizeCallBack prefFunc;
    CFIndex newsize = 0;
    // NULL means "the current default allocator".
    if (NULL == allocator) {
        allocator = __CFGetDefaultAllocator();
    }
#if defined(DEBUG) && (DEPLOYMENT_TARGET_MACOSX || DEPLOYMENT_TARGET_EMBEDDED || DEPLOYMENT_TARGET_EMBEDDED_MINI)
    // Debug builds on Darwin targets: only type-check when the pointer is a
    // genuine CFAllocator (it may instead be a bare malloc zone — see below).
    if (allocator->_base._cfisa == __CFISAForCFAllocator()) {
        __CFGenericValidateType(allocator, _kCFRuntimeIDCFAllocator);
    }
#else
    __CFGenericValidateType(allocator, _kCFRuntimeIDCFAllocator);
#endif
#if DEPLOYMENT_TARGET_MACOSX || DEPLOYMENT_TARGET_EMBEDDED || DEPLOYMENT_TARGET_EMBEDDED_MINI
    if (allocator->_base._cfisa != __CFISAForCFAllocator()) {
        // Not a CFAllocator at all: treat it as a malloc_zone_t * and let
        // malloc report its rounding.
        return malloc_good_size(size);
    }
#endif
    // Consult the allocator's optional preferred-size callback, if any.
    prefFunc = __CFAllocatorGetPreferredSizeFunction(&allocator->_context);
    if (0 < size && NULL != prefFunc) {
        newsize = (CFIndex)(INVOKE_CALLBACK3(prefFunc, size, hint, allocator->_context.info));
    }
    // The callback may not shrink the request below what was asked for.
    if (newsize < size) newsize = size;
    return newsize;
}
// put these early so they can be inlined inline int UString::expandedSize(int size, int otherSize) const { int s = (size * 11 / 10) + 1 + otherSize; #if APPLE_CHANGES s = malloc_good_size(s * sizeof(UChar)) / sizeof(UChar); #endif return s; }
size_t fastMallocGoodSize(size_t bytes) { #if OS(DARWIN) return malloc_good_size(bytes); #else return bytes; #endif }
// Verify that malloc_good_size() agrees with the allocator's real behavior:
// allocate `size` bytes and check that the advertised "good" size matches
// malloc_usable_size() for that allocation.
// Returns true on success; reports via fail() and returns false otherwise.
static inline bool TestOne(size_t size)
{
    size_t req = size;
    size_t adv = malloc_good_size(req);
    char* p = (char*)malloc(req);
    size_t usable = moz_malloc_usable_size(p);
    if (adv != usable) {
        // %zu is the correct conversion for size_t; the previous %d was
        // undefined behavior on platforms where size_t is not int-sized.
        fail("malloc_good_size(%zu) --> %zu; "
             "malloc_usable_size(%zu) --> %zu",
             req, adv, req, usable);
        free(p);  // don't leak the probe allocation on the failure path
        return false;
    }
    free(p);
    return true;
}
// Grow the arena so at least `bytes` of aligned space are available,
// allocating a fresh power-of-two-sized block chained onto fBlock.
//
// Fix: the block-size computation previously did `1<<fLgSize++`, which
// performs the shift in (signed) int — undefined behavior once fLgSize
// reaches 31, and incapable of representing sizes >= 2 GB even where
// size_t could.  Shift a size_t instead.
void SkVarAlloc::makeSpace(size_t bytes, unsigned flags) {
    SkASSERT(SkIsAlignPtr(bytes));
    // fLgSize advances on each call, so successive blocks keep doubling.
    size_t alloc = (size_t)1 << fLgSize++;
    while (alloc < bytes + sizeof(Block)) {
        alloc *= 2;
    }
    fBlock = Block::Alloc(fBlock, alloc, flags);
    fByte = fBlock->data();
    fRemaining = alloc - sizeof(Block);
#if defined(SK_BUILD_FOR_MAC)
    // Power-of-two sizes should already be "good" malloc sizes on Mac.
    SkASSERT(alloc == malloc_good_size(alloc));
#elif defined(SK_BUILD_FOR_UNIX)
    // TODO(mtklein): tune so we can assert something like this
    //SkASSERT(alloc == malloc_usable_size(fBlock));
#endif
}
// malloc_zone_t "good size" hook for RSAllocator-backed zones: report the
// size a request of `size` bytes would be rounded up to.
static size_t __RSAllocatorCustomGoodSize(malloc_zone_t *zone, size_t size) {
    RSAllocatorRef allocator __unused = (RSAllocatorRef)zone;
    // For now this defers to the system's rounding rather than the
    // allocator's own logic (RSAllocatorGetPreferredSizeForSize(allocator, size, 0)).
    size_t good = malloc_good_size(size);
    return good;
}