__private_extern__ void __CFTypeCollectionRelease(CFAllocatorRef allocator, const void *ptr) {
    CFTypeRef cf = (CFTypeRef)ptr;
    // only collections allocated in the GC zone can opt out of reference counting.
    if (CF_IS_COLLECTABLE_ALLOCATOR(allocator)) {
        if (CFTYPE_IS_OBJC(cf)) return; // do nothing for ObjC objects.
        if (auto_zone_is_valid_pointer(__CFCollectableZone, cf)) {
#if !DEPLOYMENT_TARGET_WINDOWS
            // GC: If this is a CF object in the GC heap that is marked uncollectable, then
            // we must balance the retain done in __CFTypeCollectionRetain().
            // We're basically inlining CFRelease() here, to avoid an extra heap membership test.
            CFRuntimeClass *cfClass = __CFRuntimeClassTable[__CFGenericTypeID_inline(cf)];
            if ((cfClass->version & _kCFRuntimeResourcefulObject) && auto_zone_release(__CFCollectableZone, (void *)cf) == 0) {
                // Resourceful objects trigger 'reclaim' on the transition to zero.
                if (cfClass->reclaim) cfClass->reclaim(cf);
            }
            // Normal CF objects (other collections, for example) are not released here.
            return;
#endif
        } else {
            // support constant CFTypeRef objects.
#if __LP64__
            uint32_t lowBits = ((CFRuntimeBase *)cf)->_rc;
#else
            uint32_t lowBits = ((CFRuntimeBase *)cf)->_cfinfo[CF_RC_BITS];
#endif
            if (lowBits == 0) return; // constant objects are never released.
        }
    }
    CFRelease(cf);
}
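The lowBits == 0 branch above is how CF recognizes constant CFTypeRef objects, whose retain-count bits stay fixed for the life of the process. A minimal sketch of that behavior from the public API side, assuming an ordinary (non-GC) process; the CFSTR() literal is only an illustrative constant:

#include <CoreFoundation/CoreFoundation.h>

int main(void) {
    // CFSTR() literals are constant CFTypeRefs: their retain-count bits are pegged,
    // so retain/release have no effect on their lifetime (the case the lowBits == 0 branch short-circuits).
    CFStringRef constant = CFSTR("a compile-time constant string");
    CFRetain(constant);   // does not change the object's lifetime
    CFRelease(constant);  // likewise; the constant stays valid for the life of the process
    CFShow(constant);
    return 0;
}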
static VALUE
id2ref(VALUE obj, SEL sel, VALUE objid)
{
#if SIZEOF_LONG == SIZEOF_VOIDP
#define NUM2PTR(x) NUM2ULONG(x)
#elif SIZEOF_LONG_LONG == SIZEOF_VOIDP
#define NUM2PTR(x) NUM2ULL(x)
#endif
    VALUE ptr;
    void *p0;

    rb_secure(4);
    ptr = NUM2PTR(objid);
    p0 = (void *)ptr;

    // Immediate values encode the object directly in the id; no heap lookup is needed.
    if (ptr == Qtrue) return Qtrue;
    if (ptr == Qfalse) return Qfalse;
    if (ptr == Qnil) return Qnil;
    if (FIXNUM_P(ptr) || SYMBOL_P(ptr)) return ptr;

    // Heap objects: the id must be a valid pointer in the auto zone with an object layout.
    if (auto_zone_is_valid_pointer(__auto_zone, p0)) {
        auto_memory_type_t type = auto_zone_get_layout_type(__auto_zone, p0);
        if ((type == AUTO_OBJECT_SCANNED || type == AUTO_OBJECT_UNSCANNED)
            && (NATIVE((VALUE)p0)
                || (BUILTIN_TYPE(p0) < T_FIXNUM && BUILTIN_TYPE(p0) != T_ICLASS))) {
            return (VALUE)p0;
        }
    }
    rb_raise(rb_eRangeError, "%p is not id value", p0);
}
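id2ref() is the backing implementation of ObjectSpace._id2ref: immediate values (true, false, nil, Fixnums, Symbols) are decoded directly from the id, and anything else must be a live object pointer in the auto zone. A minimal round-trip sketch using the public Ruby C API, assuming a standard embedding (the variable names and exit-code convention are illustrative only):

#include <ruby.h>

int main(void) {
    ruby_init();

    VALUE str = rb_str_new2("hello");
    VALUE objid = rb_obj_id(str);  // the id that ObjectSpace._id2ref expects

    // ObjectSpace._id2ref routes to id2ref() above and returns the same object.
    VALUE object_space = rb_const_get(rb_cObject, rb_intern("ObjectSpace"));
    VALUE back = rb_funcall(object_space, rb_intern("_id2ref"), 1, objid);

    // Immediates round-trip without any heap lookup, which the early returns in id2ref() handle.
    VALUE t_back = rb_funcall(object_space, rb_intern("_id2ref"), 1, rb_obj_id(Qtrue));

    return ruby_cleanup((back == str && t_back == Qtrue) ? 0 : 1);
}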
__private_extern__ const void *__CFTypeCollectionRetain(CFAllocatorRef allocator, const void *ptr) {
    CFTypeRef cf = (CFTypeRef)ptr;
    // only collections allocated in the GC zone can opt out of reference counting.
    if (CF_IS_COLLECTABLE_ALLOCATOR(allocator)) {
        if (CFTYPE_IS_OBJC(cf)) return cf; // do nothing for ObjC objects.
        if (auto_zone_is_valid_pointer(__CFCollectableZone, ptr)) {
            CFRuntimeClass *cfClass = __CFRuntimeClassTable[__CFGenericTypeID_inline(cf)];
            if (cfClass->version & _kCFRuntimeResourcefulObject) {
                // GC: If this is a CF object in the GC heap that is marked resourceful, then
                // it must be retained to keep it alive in a CF collection.
                // We're basically inlining CFRetain() here, to avoid an extra heap membership test.
                auto_zone_retain(__CFCollectableZone, (void *)cf);
            }
            // Normal CF objects are not retained here.
            return cf;
        } else {
            // support constant CFTypeRef objects.
#if __LP64__
            uint32_t lowBits = ((CFRuntimeBase *)cf)->_rc;
#else
            uint32_t lowBits = ((CFRuntimeBase *)cf)->_cfinfo[CF_RC_BITS];
#endif
            if (lowBits == 0) return cf; // constant objects need no retain.
            // complain about non-GC objects in GC containers.
            CFLog(kCFLogLevelWarning, CFSTR("storing a non-GC object %p in a GC collection, break on CFCollection_non_gc_storage_error to debug."), cf);
            CFCollection_non_gc_storage_error();
            // XXX should halt, except Patrick is using this somewhere.
            // HALT;
        }
    }
    return CFRetain(cf);
}
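Both __CFTypeCollectionRetain() and __CFTypeCollectionRelease() match the CFDictionaryRetainCallBack / CFDictionaryReleaseCallBack signatures and serve as the value callbacks CF uses internally for the standard kCFType collections. A minimal sketch of how such a retain/release pair is wired into a collection through the public API; MyValueRetain and MyValueRelease are hypothetical stand-ins for the private functions above:

#include <CoreFoundation/CoreFoundation.h>

// Hypothetical callbacks with the same shape as __CFTypeCollectionRetain/__CFTypeCollectionRelease.
static const void *MyValueRetain(CFAllocatorRef allocator, const void *value) {
    return CFRetain((CFTypeRef)value);   // called when a value is stored in the collection
}

static void MyValueRelease(CFAllocatorRef allocator, const void *value) {
    CFRelease((CFTypeRef)value);         // called when a value is removed or the collection is destroyed
}

int main(void) {
    CFDictionaryValueCallBacks valueCallBacks = kCFTypeDictionaryValueCallBacks;
    valueCallBacks.retain = MyValueRetain;
    valueCallBacks.release = MyValueRelease;

    CFMutableDictionaryRef dict = CFDictionaryCreateMutable(kCFAllocatorDefault, 0,
                                                            &kCFTypeDictionaryKeyCallBacks,
                                                            &valueCallBacks);

    CFStringRef value = CFStringCreateWithCString(kCFAllocatorDefault, "hello", kCFStringEncodingUTF8);
    CFDictionarySetValue(dict, CFSTR("key"), value);  // MyValueRetain runs here
    CFRelease(value);                                 // the dictionary still holds its own reference
    CFRelease(dict);                                  // MyValueRelease runs for each stored value
    return 0;
}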