static CFBinaryHeapRef __CFBinaryHeapInit(CFAllocatorRef allocator, UInt32 flags, CFIndex capacity, const void **values, CFIndex numValues, const CFBinaryHeapCallBacks *callBacks, const CFBinaryHeapCompareContext *compareContext) { CFBinaryHeapRef memory; CFIndex idx; CFIndex size; CFAssert2(0 <= capacity, __kCFLogAssertion, "%s(): capacity (%d) cannot be less than zero", __PRETTY_FUNCTION__, capacity); CFAssert2(0 <= numValues, __kCFLogAssertion, "%s(): numValues (%d) cannot be less than zero", __PRETTY_FUNCTION__, numValues); size = sizeof(struct __CFBinaryHeap) - sizeof(CFRuntimeBase); if (CF_IS_COLLECTABLE_ALLOCATOR(allocator)) { if (!callBacks || (callBacks->retain == NULL && callBacks->release == NULL)) { __CFBitfieldSetValue(flags, 4, 4, 1); // setWeak } } memory = (CFBinaryHeapRef)_CFRuntimeCreateInstance(allocator, __kCFBinaryHeapTypeID, size, NULL); if (NULL == memory) { return NULL; } __CFBinaryHeapSetCapacity(memory, __CFBinaryHeapRoundUpCapacity(1)); __CFBinaryHeapSetNumBuckets(memory, __CFBinaryHeapNumBucketsForCapacity(__CFBinaryHeapRoundUpCapacity(1))); void *buckets = _CFAllocatorAllocateGC(allocator, __CFBinaryHeapNumBuckets(memory) * sizeof(struct __CFBinaryHeapBucket), isStrongMemory_Heap(memory) ? 
__kCFAllocatorGCScannedMemory : 0); __CFAssignWithWriteBarrier((void **)&memory->_buckets, buckets); if (__CFOASafe) __CFSetLastAllocationEventName(memory->_buckets, "CFBinaryHeap (store)"); if (NULL == memory->_buckets) { CFRelease(memory); return NULL; } __CFBinaryHeapSetNumBucketsUsed(memory, 0); __CFBinaryHeapSetCount(memory, 0); if (NULL != callBacks) { memory->_callbacks.retain = callBacks->retain; memory->_callbacks.release = callBacks->release; memory->_callbacks.copyDescription = callBacks->copyDescription; memory->_callbacks.compare = callBacks->compare; } else { memory->_callbacks.retain = 0; memory->_callbacks.release = 0; memory->_callbacks.copyDescription = 0; memory->_callbacks.compare = 0; } if (compareContext) memcpy(&memory->_context, compareContext, sizeof(CFBinaryHeapCompareContext)); // CF: retain info for proper operation __CFBinaryHeapSetMutableVariety(memory, kCFBinaryHeapMutable); for (idx = 0; idx < numValues; idx++) { CFBinaryHeapAddValue(memory, values[idx]); } __CFBinaryHeapSetMutableVariety(memory, __CFBinaryHeapMutableVarietyFromFlags(flags)); return memory; }
/* Replaces a tree's context, swapping in the callback set that matches
 * `context` (null, CFType, or a freshly allocated custom set) and
 * exchanging the retained info pointer.
 * The new info is retained BEFORE the old one is released, which keeps a
 * shared pointer alive when the caller passes the same info back in.
 * A previously allocated custom callback block is deallocated last. */
void CFTreeSetContext(CFTreeRef tree, const CFTreeContext *context) {
    uint32_t newType;
    uint32_t oldType = __CFTreeGetCallBacksType(tree);
    struct __CFTreeCallBacks *priorCallBacks = (struct __CFTreeCallBacks *)__CFTreeGetCallBacks(tree);
    void *priorInfo = tree->_info;
    CFAllocatorRef allocator = CFGetAllocator(tree);

    /* Classify the incoming callbacks; only a custom set needs storage. */
    if (__CFTreeCallBacksMatchNull(context)) {
        newType = __kCFTreeHasNullCallBacks;
    } else if (__CFTreeCallBacksMatchCFType(context)) {
        newType = __kCFTreeHasCFTypeCallBacks;
    } else {
        newType = __kCFTreeHasCustomCallBacks;
        __CFAssignWithWriteBarrier((void **)&tree->_callbacks, _CFAllocatorAllocateGC(allocator, sizeof(struct __CFTreeCallBacks), 0));
        if (__CFOASafe) __CFSetLastAllocationEventName(tree->_callbacks, "CFTree (callbacks)");
        tree->_callbacks->retain = context->retain;
        tree->_callbacks->release = context->release;
        tree->_callbacks->copyDescription = context->copyDescription;
        FAULT_CALLBACK((void **)&(tree->_callbacks->retain));
        FAULT_CALLBACK((void **)&(tree->_callbacks->release));
        FAULT_CALLBACK((void **)&(tree->_callbacks->copyDescription));
    }
    __CFBitfieldSetValue(tree->_base._cfinfo[CF_INFO_BITS], 1, 0, newType);

    /* Re-fetch through the accessor so the canonical table is used for
     * the null/CFType varieties. */
    struct __CFTreeCallBacks *currentCallBacks = (struct __CFTreeCallBacks *)__CFTreeGetCallBacks(tree);
    if (NULL != currentCallBacks->retain) {
        tree->_info = (void *)INVOKE_CALLBACK1(currentCallBacks->retain, context->info);
    } else {
        __CFAssignWithWriteBarrier((void **)&tree->_info, context->info);
    }
    if (NULL != priorCallBacks->release) {
        INVOKE_CALLBACK1(priorCallBacks->release, priorInfo);
    }

    /* Only a custom set owned heap storage that must now be returned. */
    if (oldType == __kCFTreeHasCustomCallBacks) {
        _CFAllocatorDeallocateGC(allocator, priorCallBacks);
    }
}