/*
 * ObjectSpace._id2ref primitive: map an object ID back to the live object
 * it denotes.  Immediate values (true/false/nil, Fixnums, Symbols) encode
 * themselves; heap values are accepted only when the collector recognizes
 * the pointer as an object of a type safe to hand back.  Raises
 * RangeError for anything else.
 */
static VALUE id2ref(VALUE obj, SEL sel, VALUE objid) {
#if SIZEOF_LONG == SIZEOF_VOIDP
#define NUM2PTR(x) NUM2ULONG(x)
#elif SIZEOF_LONG_LONG == SIZEOF_VOIDP
#define NUM2PTR(x) NUM2ULL(x)
#endif
    rb_secure(4);

    const VALUE id_value = NUM2PTR(objid);
    void *addr = (void *)id_value;

    /* Immediates: the ID is the object itself. */
    if (id_value == Qtrue || id_value == Qfalse || id_value == Qnil) {
        return id_value;
    }
    if (FIXNUM_P(id_value) || SYMBOL_P(id_value)) {
        return id_value;
    }

    /* Heap pointer: must be a collector-known object, and either a native
       (Objective-C) object or a Ruby builtin type ObjectSpace may expose
       (below T_FIXNUM, excluding internal T_ICLASS). */
    if (auto_zone_is_valid_pointer(__auto_zone, addr)) {
        const auto_memory_type_t type =
            auto_zone_get_layout_type(__auto_zone, addr);
        if ((type == AUTO_OBJECT_SCANNED || type == AUTO_OBJECT_UNSCANNED)
            && (NATIVE((VALUE)addr)
                || (BUILTIN_TYPE(addr) < T_FIXNUM
                    && BUILTIN_TYPE(addr) != T_ICLASS))) {
            return (VALUE)addr;
        }
    }
    rb_raise(rb_eRangeError, "%p is not id value", addr);
}
/*
 * Clear every registered __weak reference to entry->referent: NULL each
 * referrer slot that still holds the referent, bump *weak_refs_count per
 * cleared slot, and push any associated callback block onto the *head
 * list for later invocation.  Finally removes the entry from the weak
 * table.  Caller is expected to hold the weak-table lock ("_no_lock").
 */
static void weak_clear_entry_no_lock(azone_t *azone, weak_entry_t *entry, uintptr_t *weak_refs_count, auto_weak_callback_block_t **head) {
    const unsigned total = entry->referrers.num_allocated;

    for (unsigned i = 0; i < total; ++i) {
        weak_referrer_t *wr = &entry->referrers.refs[i];
        if (wr->referrer == NULL) {
            continue;
        }
        if (azone->control.log & AUTO_LOG_WEAK) {
            malloc_printf("%s: WEAK: clearing ref to %p at %p (value was %p)\n",
                          auto_prelude(), entry->referent, wr->referrer,
                          *wr->referrer);
        }
        if (*wr->referrer != entry->referent) {
            /* Slot no longer holds the dying referent (overwritten without
               unregistering) — report it and leave the slot untouched. */
            malloc_printf("__weak value %p at location %p not equal to %p and so will not be cleared\n",
                          *wr->referrer, wr->referrer, entry->referent);
            void **base = (void **)auto_zone_base_pointer((auto_zone_t *)azone,
                                                          wr->referrer);
            if (base != NULL) {
                auto_memory_type_t type =
                    auto_zone_get_layout_type((auto_zone_t *)azone, base);
                malloc_printf("...location is %s starting at %p with first slot value %p\n",
                              (type & AUTO_OBJECT) ? "an object" : "a data block",
                              base, *base);
            }
            continue;
        }
        *wr->referrer = NULL;
        ++*weak_refs_count;
        /* Queue the callout unless it looks chained already (non-NULL
           next) or there is no callback function to call. */
        auto_weak_callback_block_t *cb = wr->block;
        if (cb != NULL && cb->callback_function != NULL && cb->next == NULL) {
            cb->next = *head;
            *head = cb;
        }
    }
    weak_entry_remove_no_lock(azone, entry);
}
/*
 * auto_zone enumeration callback backing ObjectSpace#each_object: yields
 * every live Ruby object found in the given vm ranges (optionally
 * filtered by ctx->class_of, matching the class or any ancestor),
 * counting yields and capturing a `break` value from the yielded block.
 */
static void rb_objc_recorder(task_t task, void *context, unsigned type_mask, vm_range_t *ranges, unsigned range_count) {
    struct rb_objc_recorder_context *ctx =
        (struct rb_objc_recorder_context *)context;

    for (unsigned i = 0; i < range_count; i++) {
        const vm_range_t *range = &ranges[i];
        const auto_memory_type_t type =
            auto_zone_get_layout_type(__auto_zone, (void *)range->address);

        /* Only blocks the collector classifies as objects are candidates. */
        if (type != AUTO_OBJECT_SCANNED && type != AUTO_OBJECT_UNSCANNED) {
            continue;
        }
        /* Skip blocks whose isa slot has not been filled in yet. */
        if (*(Class *)range->address == NULL) {
            continue;
        }
        if (ctx->class_of != 0) {
            /* Walk the class hierarchy looking for the filter class. */
            bool matches = false;
            for (Class k = *(Class *)range->address; k != NULL;
                 k = class_getSuperclass(k)) {
                if (k == (Class)ctx->class_of) {
                    matches = true;
                    break;
                }
            }
            if (!matches) {
                continue;
            }
        }
        switch (TYPE(range->address)) {
            case T_NONE:
            case T_NODE:
                continue;

            case T_ICLASS:
            case T_CLASS:
            case T_MODULE:
                rb_bug("object %p of type %d should not be recorded",
                       (void *)range->address, TYPE(range->address));

            case T_NATIVE:
                /* Placeholders are skipped; real natives fall through
                   to the yield below. */
                if (rb_objc_is_placeholder((void *)range->address)) {
                    continue;
                }
        }
        rb_yield((VALUE)range->address);
        ctx->break_value = rb_vm_pop_broken_value();
        ctx->count++;
    }
}
/*
 * auto_zone enumeration callback for memory-dump debugging: for every GC
 * block in the given ranges whose size is at least the threshold passed
 * via context (*(size_t *)context), print its address, size, retain
 * count, and whether its layout marks it as an object or plain memory.
 */
static void print_memory_object(task_t task, void *context, unsigned type_mask, vm_range_t *ranges, unsigned range_count) {
    const size_t min_size = *(size_t *)context;

    for (vm_range_t *r = ranges, *end = ranges + range_count; r < end; r++) {
        const size_t size = auto_zone_size(__auto_zone, (void *)r->address);
        if (size < min_size) {
            continue;
        }
        /* %zu: `size` is a size_t — the previous %ld was a format/type
           mismatch (undefined behavior where long != size_t). */
        printf("address %p size %zu rc %d layout type ",
               (void *)r->address, size,
               auto_zone_retain_count(__auto_zone, (void *)r->address));

        const auto_memory_type_t type =
            auto_zone_get_layout_type(__auto_zone, (void *)r->address);
        /* Test the AUTO_OBJECT bit, as the weak-clearing code in this file
           does, so AUTO_OBJECT_UNSCANNED objects are reported as objects
           too — the previous `case AUTO_OBJECT:` matched only the exact
           AUTO_OBJECT value. */
        if (type & AUTO_OBJECT) {
            printf("object (class %s)\n",
                   class_getName(object_getClass((void *)r->address)));
        }
        else {
            printf("memory\n");
        }
    }
}