Tuple* VM::new_young_tuple_dirty(size_t fields) {
  State state(this);

  // Exact object size: the offset of the field array plus one pointer per slot.
  size_t bytes = Tuple::fields_offset + (sizeof(Object*) * fields);

  // Too large for the young-generation slab; signal the caller to take
  // the slow path instead.
  if(unlikely(bytes > om->large_object_threshold)) {
    return 0;
  }

  Tuple* tup = local_slab().allocate(bytes).as<Tuple>();

  if(unlikely(!tup)) {
    // Slab exhausted: refill it from the shared object memory, then
    // retry the bump allocation once.
    if(shared.om->refill_slab(&state, local_slab())) {
      tup = local_slab().allocate(bytes).as<Tuple>();
    }

    if(!tup) return 0;
  }

  // The header is initialized here, but the field slots are left dirty:
  // the caller must fill every slot before the object can be scanned.
  tup->init_header(G(tuple), YoungObjectZone, Tuple::type);
  tup->full_size_ = bytes;

#ifdef RBX_GC_STRESS
  state.shared().gc_soon();
#endif

  return tup;
}
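For context, local_slab().allocate() above is thread-local bump-pointer allocation: it hands out bytes from a preallocated region of the young generation and fails (yielding a null address that .as<Tuple>() turns into a null pointer) once the region is exhausted. The following is a minimal, self-contained sketch of that shape; the class and field names are assumptions for illustration, not Rubinius' actual gc::Slab.

#include <cstddef>
#include <cstdint>

// Hypothetical bump-pointer slab, illustrating the contract the fast
// path above relies on; layout and names are assumed.
class BumpSlab {
  uintptr_t current_;  // next free byte in the slab
  uintptr_t end_;      // one past the last usable byte

public:
  BumpSlab(void* start, size_t size)
    : current_(reinterpret_cast<uintptr_t>(start))
    , end_(reinterpret_cast<uintptr_t>(start) + size)
  {}

  // Returns a pointer on success, nullptr when the slab is exhausted;
  // the caller is expected to refill the slab and retry once.
  void* allocate(size_t bytes) {
    uintptr_t addr = current_;
    if(addr + bytes > end_) return nullptr;
    current_ = addr + bytes;
    return reinterpret_cast<void*>(addr);
  }
};

Because the common case is just a compare and an add on thread-local state, no locking is needed; only the refill path (shared.om->refill_slab) touches shared memory.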
Tuple* VM::new_young_tuple_dirty(size_t fields) {
  // This version sizes the allocation with sizeof(Tuple) rather than
  // Tuple::fields_offset, constructs a State only when the refill path
  // actually needs one, and omits the RBX_GC_STRESS hook.
  size_t bytes = sizeof(Tuple) + (sizeof(Object*) * fields);

  if(unlikely(bytes > om->large_object_threshold)) {
    return 0;
  }

  Tuple* tup = local_slab().allocate(bytes).as<Tuple>();

  if(unlikely(!tup)) {
    State state(this);
    if(shared.om->refill_slab(&state, local_slab())) {
      tup = local_slab().allocate(bytes).as<Tuple>();
    }

    if(!tup) return 0;
  }

  tup->init_header(G(tuple), YoungObjectZone, Tuple::type);
  tup->full_size_ = bytes;
  return tup;
}
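Either version returns the tuple "dirty": the header is set up, but the field slots still hold whatever bytes the slab contained. A caller therefore has to write every slot, or fall back to a zeroing allocator when the fast path returns 0, before the object becomes visible to the GC. Below is a hedged sketch of that caller pattern; the helper itself is hypothetical, and the assumption that Tuple::create hands back nil-initialized slots reflects the usual Rubinius convention rather than this excerpt.

// Hypothetical caller, illustrating the contract of the dirty fast path.
Tuple* tuple_filled_with(State* state, VM* vm, size_t fields, Object* fill) {
  Tuple* tup = vm->new_young_tuple_dirty(fields);
  if(!tup) {
    // Fast path declined: the object exceeded the large-object threshold
    // or the slab could not be refilled. Fall back to the general
    // allocator (assumed here to initialize every slot itself).
    return Tuple::create(state, fields);
  }

  // "Dirty" means the slots are uninitialized memory: every one must be
  // stored before the tuple escapes, or the GC may scan garbage pointers.
  for(size_t i = 0; i < fields; i++) {
    tup->field[i] = fill;  // store into a young object: no write barrier needed
  }
  return tup;
}

Skipping the per-slot zeroing is the whole point of the dirty variant: callers that are about to overwrite every slot anyway (e.g. when packing known values into a fresh tuple) avoid paying for initialization twice.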