static void __CFBinaryHeapGrow(CFBinaryHeapRef heap, CFIndex numNewValues) {
    CFIndex oldCount = __CFBinaryHeapCount(heap);
    CFIndex capacity = __CFBinaryHeapRoundUpCapacity(oldCount + numNewValues);
    CFAllocatorRef allocator = CFGetAllocator(heap);
    __CFBinaryHeapSetCapacity(heap, capacity);
    __CFBinaryHeapSetNumBuckets(heap, __CFBinaryHeapNumBucketsForCapacity(capacity));
    void *buckets = __CFSafelyReallocateWithAllocator(allocator, heap->_buckets, __CFBinaryHeapNumBuckets(heap) * sizeof(struct __CFBinaryHeapBucket), 0, NULL);
    *((void **)&heap->_buckets) = buckets;
    if (__CFOASafe) __CFSetLastAllocationEventName(heap->_buckets, "CFBinaryHeap (store)");
}
static void __CFBinaryHeapGrow(CFBinaryHeapRef heap, CFIndex numNewValues) {
    CFIndex oldCount = __CFBinaryHeapCount(heap);
    CFIndex capacity = __CFBinaryHeapRoundUpCapacity(oldCount + numNewValues);
    CFAllocatorRef allocator = CFGetAllocator(heap);
    __CFBinaryHeapSetCapacity(heap, capacity);
    __CFBinaryHeapSetNumBuckets(heap, __CFBinaryHeapNumBucketsForCapacity(capacity));
    void *buckets = _CFAllocatorReallocateGC(allocator, heap->_buckets, __CFBinaryHeapNumBuckets(heap) * sizeof(struct __CFBinaryHeapBucket), isStrongMemory_Heap(heap) ? __kCFAllocatorGCScannedMemory : 0);
    __CFAssignWithWriteBarrier((void **)&heap->_buckets, buckets);
    if (__CFOASafe) __CFSetLastAllocationEventName(heap->_buckets, "CFBinaryHeap (store)");
    if (NULL == heap->_buckets) HALT;
}
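/*
 * A minimal usage sketch (not the actual CFBinaryHeapAddValue): the grow path is
 * expected to run before a new value is stored, whenever every existing bucket is
 * already occupied. The helper name below is hypothetical, and the getter
 * __CFBinaryHeapNumBucketsUsed is assumed by analogy with the setter
 * __CFBinaryHeapSetNumBucketsUsed used in the initializer; only
 * __CFBinaryHeapNumBuckets appears verbatim in the code above.
 */
static void __CFBinaryHeapEnsureRoomForOneValue(CFBinaryHeapRef heap) {
    /* Grow by one slot when the store is full; __CFBinaryHeapGrow rounds the
       requested capacity up and reallocates the bucket array. */
    if (__CFBinaryHeapNumBucketsUsed(heap) == __CFBinaryHeapNumBuckets(heap)) {
        __CFBinaryHeapGrow(heap, 1);
    }
}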
static CFBinaryHeapRef __CFBinaryHeapInit(CFAllocatorRef allocator, UInt32 flags, CFIndex capacity, const void **values, CFIndex numValues, const CFBinaryHeapCallBacks *callBacks, const CFBinaryHeapCompareContext *compareContext) {
    CFBinaryHeapRef memory;
    CFIndex idx;
    CFIndex size;
    CFAssert2(0 <= capacity, __kCFLogAssertion, "%s(): capacity (%ld) cannot be less than zero", __PRETTY_FUNCTION__, capacity);
    CFAssert2(0 <= numValues, __kCFLogAssertion, "%s(): numValues (%ld) cannot be less than zero", __PRETTY_FUNCTION__, numValues);
    size = sizeof(struct __CFBinaryHeap) - sizeof(CFRuntimeBase);
    if (CF_IS_COLLECTABLE_ALLOCATOR(allocator)) {
        if (!callBacks || (callBacks->retain == NULL && callBacks->release == NULL)) {
            __CFBitfieldSetValue(flags, 4, 4, 1); // setWeak
        }
    }
    memory = (CFBinaryHeapRef)_CFRuntimeCreateInstance(allocator, __kCFBinaryHeapTypeID, size, NULL);
    if (NULL == memory) {
        return NULL;
    }
    __CFBinaryHeapSetCapacity(memory, __CFBinaryHeapRoundUpCapacity(1));
    __CFBinaryHeapSetNumBuckets(memory, __CFBinaryHeapNumBucketsForCapacity(__CFBinaryHeapRoundUpCapacity(1)));
    void *buckets = _CFAllocatorAllocateGC(allocator, __CFBinaryHeapNumBuckets(memory) * sizeof(struct __CFBinaryHeapBucket), isStrongMemory_Heap(memory) ? __kCFAllocatorGCScannedMemory : 0);
    __CFAssignWithWriteBarrier((void **)&memory->_buckets, buckets);
    if (__CFOASafe) __CFSetLastAllocationEventName(memory->_buckets, "CFBinaryHeap (store)");
    if (NULL == memory->_buckets) {
        CFRelease(memory);
        return NULL;
    }
    __CFBinaryHeapSetNumBucketsUsed(memory, 0);
    __CFBinaryHeapSetCount(memory, 0);
    if (NULL != callBacks) {
        memory->_callbacks.retain = callBacks->retain;
        memory->_callbacks.release = callBacks->release;
        memory->_callbacks.copyDescription = callBacks->copyDescription;
        memory->_callbacks.compare = callBacks->compare;
    } else {
        memory->_callbacks.retain = 0;
        memory->_callbacks.release = 0;
        memory->_callbacks.copyDescription = 0;
        memory->_callbacks.compare = 0;
    }
    if (compareContext) memcpy(&memory->_context, compareContext, sizeof(CFBinaryHeapCompareContext)); // CF: retain info for proper operation
    __CFBinaryHeapSetMutableVariety(memory, kCFBinaryHeapMutable);
    for (idx = 0; idx < numValues; idx++) {
        CFBinaryHeapAddValue(memory, values[idx]);
    }
    __CFBinaryHeapSetMutableVariety(memory, __CFBinaryHeapMutableVarietyFromFlags(flags));
    return memory;
}
static CFBinaryHeapRef __CFBinaryHeapInit(CFAllocatorRef allocator, UInt32 flags, CFIndex capacity, const void **values, CFIndex numValues, const CFBinaryHeapCallBacks *callBacks, const CFBinaryHeapCompareContext *compareContext) {
    CFBinaryHeapRef memory;
    CFIndex idx;
    CFIndex size;
    CFAssert2(0 <= capacity, __kCFLogAssertion, "%s(): capacity (%ld) cannot be less than zero", __PRETTY_FUNCTION__, capacity);
    CFAssert2(0 <= numValues, __kCFLogAssertion, "%s(): numValues (%ld) cannot be less than zero", __PRETTY_FUNCTION__, numValues);
    size = sizeof(struct __CFBinaryHeap) - sizeof(CFRuntimeBase);
    memory = (CFBinaryHeapRef)_CFRuntimeCreateInstance(allocator, CFBinaryHeapGetTypeID(), size, NULL);
    if (NULL == memory) {
        return NULL;
    }
    __CFBinaryHeapSetCapacity(memory, __CFBinaryHeapRoundUpCapacity(1));
    __CFBinaryHeapSetNumBuckets(memory, __CFBinaryHeapNumBucketsForCapacity(__CFBinaryHeapRoundUpCapacity(1)));
    void *buckets = CFAllocatorAllocate(allocator, __CFBinaryHeapNumBuckets(memory) * sizeof(struct __CFBinaryHeapBucket), 0);
    *((void **)&memory->_buckets) = buckets;
    if (__CFOASafe) __CFSetLastAllocationEventName(memory->_buckets, "CFBinaryHeap (store)");
    if (NULL == memory->_buckets) {
        CFRelease(memory);
        return NULL;
    }
    __CFBinaryHeapSetNumBucketsUsed(memory, 0);
    __CFBinaryHeapSetCount(memory, 0);
    if (NULL != callBacks) {
        memory->_callbacks.retain = callBacks->retain;
        memory->_callbacks.release = callBacks->release;
        memory->_callbacks.copyDescription = callBacks->copyDescription;
        memory->_callbacks.compare = callBacks->compare;
    } else {
        memory->_callbacks.retain = 0;
        memory->_callbacks.release = 0;
        memory->_callbacks.copyDescription = 0;
        memory->_callbacks.compare = 0;
    }
    if (compareContext) memcpy(&memory->_context, compareContext, sizeof(CFBinaryHeapCompareContext)); // CF: retain info for proper operation
    __CFBinaryHeapSetMutableVariety(memory, kCFBinaryHeapMutable);
    for (idx = 0; idx < numValues; idx++) {
        CFBinaryHeapAddValue(memory, values[idx]);
    }
    __CFBinaryHeapSetMutableVariety(memory, __CFBinaryHeapMutableVarietyFromFlags(flags));
    return memory;
}
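/*
 * A minimal sketch of how the public CFBinaryHeapCreate entry point plausibly
 * wraps __CFBinaryHeapInit. The public signature (allocator, capacity, callBacks,
 * compareContext) matches the documented CFBinaryHeap API; the body below is an
 * assumption about the wiring, not the actual CoreFoundation implementation.
 */
CFBinaryHeapRef CFBinaryHeapCreate(CFAllocatorRef allocator, CFIndex capacity, const CFBinaryHeapCallBacks *callBacks, const CFBinaryHeapCompareContext *compareContext) {
    /* Create with no initial values; the resulting heap starts mutable, so
       callers populate it afterwards with CFBinaryHeapAddValue. */
    return __CFBinaryHeapInit(allocator, kCFBinaryHeapMutable, capacity, NULL, 0, callBacks, compareContext);
}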