/*
 * GC-aware allocation entry point.
 * When the allocator is collectable, the object is carved directly out of
 * the auto zone hanging off the system-default allocator's context, with
 * the layout derived from the allocation hint; both trailing flags are
 * false (presumably "no initial retain" and "no clear" — confirm against
 * the auto_zone headers).  Any non-collectable allocator is simply routed
 * through the ordinary CFAllocatorAllocate path.
 */
__private_extern__ void *_CFAllocatorAllocateGC(CFAllocatorRef allocator, CFIndex size, CFOptionFlags hint) {
    if (!CF_IS_COLLECTABLE_ALLOCATOR(allocator)) {
        return CFAllocatorAllocate(allocator, size, hint);
    }
    auto_zone_t *zone = (auto_zone_t *)kCFAllocatorSystemDefault->_context.info;
    return auto_zone_allocate_object(zone, size, CF_GET_GC_MEMORY_TYPE(hint), false, false);
}
/*
 * Core CF allocation entry point (pre-GCRefZero revision).
 * Resolves a NULL allocator to the default, validates the allocator
 * object (debug builds only validate real CFAllocator instances), and
 * then dispatches: raw malloc zones are called directly, collectable
 * allocators go through the auto zone, and ordinary allocators invoke
 * their registered allocate callback.  A zero-byte request returns NULL.
 * Returns NULL when no allocate callback is registered.
 */
void *CFAllocatorAllocate(CFAllocatorRef allocator, CFIndex size, CFOptionFlags hint) {
    CFAllocatorAllocateCallBack allocateFunc;
    void *newptr = NULL;
    /* NULL means "use the current default allocator". */
    allocator = (NULL == allocator) ? __CFGetDefaultAllocator() : allocator;
#if (DEPLOYMENT_TARGET_MACOSX) && defined(DEBUG)
    /* On Mac debug builds, only validate when the isa marks this as a real
       CFAllocator — a bare malloc_zone_t* is also accepted here. */
    if (allocator->_base._cfisa == __CFISAForTypeID(__kCFAllocatorTypeID)) {
        __CFGenericValidateType(allocator, __kCFAllocatorTypeID);
    }
#else
    __CFGenericValidateType(allocator, __kCFAllocatorTypeID);
#endif
    if (0 == size) return NULL;
#if DEPLOYMENT_TARGET_MACOSX
    /* Not a CFAllocator instance: treat the pointer as a raw malloc zone. */
    if (allocator->_base._cfisa != __CFISAForTypeID(__kCFAllocatorTypeID)) {	// malloc_zone_t *
        return malloc_zone_malloc((malloc_zone_t *)allocator, size);
    }
#endif
    if (CF_IS_COLLECTABLE_ALLOCATOR(allocator)) {
        /* GC path: layout comes from the hint; this revision always asks
           for an initial refcount (4th arg true). */
        newptr = auto_zone_allocate_object((auto_zone_t*)allocator->_context.info, size, CF_GET_GC_MEMORY_TYPE(hint), true, false);
    } else {
        newptr = NULL;
        allocateFunc = __CFAllocatorGetAllocateFunction(&allocator->_context);
        if (allocateFunc) {
            newptr = (void *)INVOKE_CALLBACK3(allocateFunc, size, hint, allocator->_context.info);
        }
    }
    return newptr;
}
static void allocate_objects(long n) { long i; //printf("allocate %ld objects\n", n); for (i = 0; i < n; i++) { pointers[i] = auto_zone_allocate_object(gc_zone, 100, AUTO_OBJECT_SCANNED, 1, 0); pointers[i][0] = 10; } }
/*
 * Allocate a new scanned Ruby object of `size` bytes from the collector
 * zone and stamp its class slot with the NSObject bridge class.
 *
 * The previous code checked the allocation with assert(), which is
 * compiled out under NDEBUG — a NULL result would then fall straight
 * into RBASIC(obj)->klass and dereference NULL.  Raise the Ruby
 * out-of-memory error instead, matching the ruby_xmalloc family.
 */
void *
rb_objc_newobj(size_t size)
{
    void *obj = auto_zone_allocate_object(__auto_zone, size, AUTO_OBJECT_SCANNED, 0, 0);
    if (obj == NULL) {
        rb_memerror();  /* does not return */
    }
    RBASIC(obj)->klass = (VALUE)__nsobject;
    return obj;
}
/*
 * GC-aware reallocation.
 * Under a collectable allocator two edge cases are resolved inline:
 * a NULL pointer behaves as a fresh GC allocation (equivalent to
 * _CFAllocatorAllocateGC), and shrinking a live pointer to zero bytes
 * behaves as a GC deallocation and yields NULL.  All remaining cases —
 * including every non-GC allocator — are forwarded to
 * CFAllocatorReallocate, since auto_realloc() preserves the layout type
 * and refCount on that path.
 */
__private_extern__ void *_CFAllocatorReallocateGC(CFAllocatorRef allocator, void *ptr, CFIndex newsize, CFOptionFlags hint) {
    if (CF_IS_COLLECTABLE_ALLOCATOR(allocator)) {
        if (NULL == ptr) {
            return auto_zone_allocate_object((auto_zone_t *)kCFAllocatorSystemDefault->_context.info, newsize, CF_GET_GC_MEMORY_TYPE(hint), false, false);
        }
        if (0 == newsize) {
            return NULL;  /* equivalent to _CFAllocatorDeallocateGC */
        }
    }
    return CFAllocatorReallocate(allocator, ptr, newsize, hint);
}
/*
 * Common allocation path for the ruby_x* allocators: return `size` bytes
 * from the collector zone with the given auto_zone memory `type`.
 * Raises via rb_objc_no_gc_error() if the collector is not initialized
 * and via rb_memerror() on allocation failure.
 *
 * The previous assert(size > 0) disappeared under NDEBUG, leaving
 * zero-byte requests unchecked; round them up to one byte instead, as
 * ruby_xmalloc does, so a valid unique pointer is always produced.
 */
static inline void *
ruby_xmalloc_memory(size_t size, int type)
{
    if (size == 0) {
        size = 1;  /* never ask the zone for 0 bytes */
    }
    if (__auto_zone == NULL) {
        rb_objc_no_gc_error();
    }
    void *mem = auto_zone_allocate_object(__auto_zone, size, type, 0, 0);
    if (mem == NULL) {
        rb_memerror();
    }
    return mem;
}
/*
 * Allocate a new scanned Ruby object of `size` bytes from the collector
 * zone and stamp its class slot with the NSObject bridge class.  In GC
 * stress mode a full collection is forced before every allocation
 * (unless collection is disabled).
 *
 * The allocation result was previously checked only with assert(),
 * which is compiled out under NDEBUG — a NULL result would then be
 * dereferenced by RBASIC(obj)->klass.  Raise the Ruby out-of-memory
 * error instead, matching the ruby_xmalloc family.
 */
void *
rb_objc_newobj(size_t size)
{
    void *obj;

    if (stress_gc && !dont_gc) {
        garbage_collect();
    }
    obj = auto_zone_allocate_object(__auto_zone, size, AUTO_OBJECT_SCANNED, 0, 0);
    if (obj == NULL) {
        rb_memerror();  /* does not return */
    }
    RBASIC(obj)->klass = (VALUE)__nsobject;
    return obj;
}
// Check __CFDataShouldAllocateCleared before passing true. static void *__CFDataAllocate(CFDataRef data, CFIndex size, Boolean clear) { void *bytes = NULL; if (__CFDataUseAllocator(data)) { CFAllocatorRef allocator = __CFGetAllocator(data); bytes = CFAllocatorAllocate(allocator, size, 0); if (clear) memset((uint8_t *)bytes, 0, size); } else { if (__CFDataAllocatesCollectable(data)) { bytes = auto_zone_allocate_object(objc_collectableZone(), size, AUTO_MEMORY_UNSCANNED, 0, clear); } else { if (clear) { bytes = calloc(1, size); } else { bytes = malloc(size); } } } return bytes; }
/*
 * Core CF allocation entry point (GCRefZero-aware revision).
 * First resolves the sentinel allocators: the GCRefZero sentinels alias
 * to a real allocator and, when that allocator is collectable, request
 * an initial refcount of zero; NULL resolves to the default allocator.
 * Debug builds validate only real CFAllocator instances, because a raw
 * malloc_zone_t* is also accepted and is dispatched straight to
 * malloc_zone_malloc.  Collectable allocators allocate from the auto
 * zone with layout taken from the hint; ordinary allocators invoke
 * their registered allocate callback.  Zero-byte requests return NULL,
 * as does a missing allocate callback.
 */
void *CFAllocatorAllocate(CFAllocatorRef allocator, CFIndex size, CFOptionFlags hint) {
    CFAllocatorAllocateCallBack allocateFunc;
    void *newptr = NULL;
    Boolean initialRefcountOne = true;
    if (kCFAllocatorSystemDefaultGCRefZero == allocator) {
        allocator = kCFAllocatorSystemDefault;
        initialRefcountOne = false;
    } else if (kCFAllocatorDefaultGCRefZero == allocator) {
        // Under GC, we can't use just any old allocator when the GCRefZero allocator was requested
        allocator = kCFUseCollectableAllocator ? kCFAllocatorSystemDefault : __CFGetDefaultAllocator();
        if (CF_IS_COLLECTABLE_ALLOCATOR(allocator)) initialRefcountOne = false;
    } else if (NULL == allocator) {
        allocator = __CFGetDefaultAllocator();
    }
#if defined(DEBUG) && (DEPLOYMENT_TARGET_MACOSX || DEPLOYMENT_TARGET_EMBEDDED || DEPLOYMENT_TARGET_EMBEDDED_MINI)
    /* Only validate real CFAllocator instances on debug builds. */
    if (allocator->_base._cfisa == __CFISAForTypeID(__kCFAllocatorTypeID)) {
        __CFGenericValidateType(allocator, __kCFAllocatorTypeID);
    }
#else
    __CFGenericValidateType(allocator, __kCFAllocatorTypeID);
#endif
    if (0 == size) return NULL;
#if DEPLOYMENT_TARGET_MACOSX || DEPLOYMENT_TARGET_EMBEDDED || DEPLOYMENT_TARGET_EMBEDDED_MINI
    /* Not a CFAllocator instance: treat the pointer as a raw malloc zone. */
    if (allocator->_base._cfisa != __CFISAForTypeID(__kCFAllocatorTypeID)) {	// malloc_zone_t *
        return malloc_zone_malloc((malloc_zone_t *)allocator, size);
    }
#endif
    if (CF_IS_COLLECTABLE_ALLOCATOR(allocator)) {
        newptr = auto_zone_allocate_object((auto_zone_t*)allocator->_context.info, size, CF_GET_GC_MEMORY_TYPE(hint), initialRefcountOne, false);
    } else {
        newptr = NULL;
        allocateFunc = __CFAllocatorGetAllocateFunction(&allocator->_context);
        if (allocateFunc) {
            newptr = (void *)INVOKE_CALLBACK3(allocateFunc, size, hint, allocator->_context.info);
        }
    }
    return newptr;
}
/*
 * Allocate `size` bytes of scanned collector memory for Ruby.
 * Zero-byte requests are rounded up to one byte; negative/oversized
 * requests raise NoMemoryError; allocation failure raises via
 * rb_memerror().
 *
 * Bug fix: `size` is a size_t, so the original `if (size < 0)` compared
 * an unsigned value against zero and was always false — the guard was
 * dead code.  Reinterpret the bit pattern as signed (as the later
 * ruby_xmalloc_memory revision does) so a caller passing a negative
 * length, which wraps to a huge size_t, is rejected.
 */
void *
ruby_xmalloc(size_t size)
{
    void *mem;

    if ((ssize_t)size < 0) {
        rb_raise(rb_eNoMemError, "negative allocation size (or too big)");
    }
    if (size == 0) {
        size = 1;  /* never ask the zone for 0 bytes */
    }
    if (__auto_zone == NULL) {
        rb_objc_no_gc_error();
    }
    mem = auto_zone_allocate_object(__auto_zone, size, AUTO_MEMORY_SCANNED, 0, 0);
    if (mem == NULL) {
        rb_memerror();
    }
    return mem;
}
/*
 * Shared allocator for the ruby_x* family: hand back `size` bytes of
 * collector memory with the requested auto_zone layout `type`.
 * Sizes whose signed interpretation is negative are rejected, zero-byte
 * requests are bumped to one byte, a missing collector raises, GC stress
 * mode forces a collection first, and allocation failure raises rather
 * than returning NULL.
 */
static inline void *
ruby_xmalloc_memory(size_t size, int type)
{
    if ((ssize_t)size < 0) {
        negative_size_allocation_error("negative allocation size (or too big)");
    }
    if (0 == size) {
        size = 1;
    }
    if (__auto_zone == NULL) {
        rb_objc_no_gc_error();
    }
    if (stress_gc && !dont_gc) {
        garbage_collect();
    }

    void *mem = auto_zone_allocate_object(__auto_zone, size, type, 0, 0);
    if (mem == NULL) {
        rb_memerror();
    }
    return mem;
}