Example #1
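Allocates a young-generation Tuple with uninitialized ("dirty") fields from the thread-local slab; oversized requests and failed refills return 0 so the caller can fall back to a slower path.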
  Tuple* VM::new_young_tuple_dirty(size_t fields) {
    State state(this);
    size_t bytes = Tuple::fields_offset + (sizeof(Object*) * fields);

    // Tuples past the large-object threshold cannot live in the young
    // generation; return 0 so the caller takes a slower path.
    if(unlikely(bytes > om->large_object_threshold)) {
      return 0;
    }

    Tuple* tup = local_slab().allocate(bytes).as<Tuple>();

    if(unlikely(!tup)) {
      // Slab exhausted: refill from the shared ObjectMemory and retry once.
      if(shared.om->refill_slab(&state, local_slab())) {
        tup = local_slab().allocate(bytes).as<Tuple>();
      }

      if(!tup) return 0;
    }

    tup->init_header(G(tuple), YoungObjectZone, Tuple::type);
    tup->full_size_ = bytes;
#ifdef RBX_GC_STRESS
    // GC stress mode: request a collection soon after every allocation.
    state.shared().gc_soon();
#endif
    return tup;
  }
Example #2
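The generic dirty object allocator: sizes past the large-object threshold are routed to the enduring space, and a slab miss that survives a refill falls back to ObjectMemory's slower allocator.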
  Object* VM::new_object_typed_dirty(Class* cls, size_t size, object_type type) {
    State state(this);

    // Large objects bypass the young generation and go straight to the
    // enduring (mature) space.
    if(unlikely(size > om->large_object_threshold)) {
      return om->new_object_typed_enduring_dirty(&state, cls, size, type);
    }

    Object* obj = local_slab().allocate(size).as<Object>();

    if(unlikely(!obj)) {
      if(shared.om->refill_slab(&state, local_slab())) {
        obj = local_slab().allocate(size).as<Object>();
      }

      // If refill_slab fails, obj will still be NULL.

      if(!obj) {
        // Fall back to ObjectMemory's slower, general-purpose allocator.
        return om->new_object_typed_dirty(&state, cls, size, type);
      }
    }

    obj->init_header(cls, YoungObjectZone, type);
#ifdef RBX_GC_STRESS
    state.shared().gc_soon();
#endif
    return obj;
  }
Example #3
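The non-dirty counterpart of Example #2: the allocation logic is identical, but the object's fields are zeroed with clear_fields() before it is returned, and there is no GC-stress hook.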
  Object* VM::new_object_typed(Class* cls, size_t size, object_type type) {
    State state(this);

    if(unlikely(size > om->large_object_threshold)) {
      return om->new_object_typed_enduring(&state, cls, size, type);
    }

    Object* obj = local_slab().allocate(size).as<Object>();

    if(unlikely(!obj)) {
      if(shared.om->refill_slab(&state, local_slab())) {
        obj = local_slab().allocate(size).as<Object>();
      }

      // If refill_slab fails, obj will still be NULL.

      if(!obj) {
        return om->new_object_typed(&state, cls, size, type);
      }
    }

    obj->init_header(cls, YoungObjectZone, type);
    // Unlike the _dirty variants, zero the fields before returning.
    obj->clear_fields(size);

    return obj;
  }
Example #4
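A fixed-size variant for String headers; the request size is simply sizeof(String), so no large-object check is needed.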
  String* VM::new_young_string_dirty() {
    State state(this);
    String* str = local_slab().allocate(sizeof(String)).as<String>();

    if(unlikely(!str)) {
      // Slab miss: refill and retry once before giving up.
      if(shared.om->refill_slab(&state, local_slab())) {
        str = local_slab().allocate(sizeof(String)).as<String>();
      }

      if(!str) return 0;
    }

    str->init_header(G(string), YoungObjectZone, String::type);
#ifdef RBX_GC_STRESS
    state.shared().gc_soon();
#endif
    return str;
  }
Example #5
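Apparently from a different revision of the source: the raw slab pointer is cast with reinterpret_cast instead of as<>(), and no State object is threaded through refill_slab or the fallback allocator.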
  Object* VM::new_object_typed(Class* cls, size_t size, object_type type) {
    Object* obj = reinterpret_cast<Object*>(local_slab().allocate(size));

    if(unlikely(!obj)) {
      if(shared.om->refill_slab(local_slab())) {
        obj = reinterpret_cast<Object*>(local_slab().allocate(size));
      }

      // If refill_slab fails, obj will still be NULL.

      if(!obj) {
        return om->new_object_typed(cls, size, type);
      }
    }

    obj->init_header(cls, YoungObjectZone, type);
    obj->clear_fields(size);

    return obj;
  }
Example #6
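A variant of Example #1 that computes the size from sizeof(Tuple) rather than Tuple::fields_offset, and constructs the State lazily, only on the refill path.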
  Tuple* VM::new_young_tuple_dirty(size_t fields) {
    size_t bytes = sizeof(Tuple) + (sizeof(Object*) * fields);

    if(unlikely(bytes > om->large_object_threshold)) {
      return 0;
    }

    Tuple* tup = local_slab().allocate(bytes).as<Tuple>();

    if(unlikely(!tup)) {
      // The State is needed only for the refill, so it is constructed lazily.
      State state(this);

      if(shared.om->refill_slab(&state, local_slab())) {
        tup = local_slab().allocate(bytes).as<Tuple>();
      }

      if(!tup) return 0;
    }

    tup->init_header(G(tuple), YoungObjectZone, Tuple::type);
    tup->full_size_ = bytes;

    return tup;
  }
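All six examples share the same fast-path shape: bump-allocate from a thread-local slab, refill the slab once on a miss, retry, and only then fall back to a slower allocator (or return 0). Below is a minimal standalone sketch of that pattern; Slab, refill_slab, and allocate_young are illustrative names, not the Rubinius API.

  #include <cstddef>
  #include <cstdint>

  // Hypothetical bump-pointer slab, standing in for whatever local_slab()
  // returns in the examples above.
  struct Slab {
    uint8_t* cursor = nullptr;
    uint8_t* end = nullptr;

    // Bump allocation: O(1) and lock-free; returns nullptr when exhausted.
    void* allocate(size_t bytes) {
      if(!cursor || size_t(end - cursor) < bytes) return nullptr;
      void* ptr = cursor;
      cursor += bytes;
      return ptr;
    }
  };

  // Stand-in for shared.om->refill_slab(): hands the thread a fresh chunk
  // of young-generation memory. This toy version has a single chunk.
  bool refill_slab(Slab& slab) {
    static uint8_t arena[64 * 1024];
    static bool used = false;
    if(used) return false;
    slab.cursor = arena;
    slab.end = arena + sizeof(arena);
    used = true;
    return true;
  }

  // The shared fast-path shape of all six examples.
  void* allocate_young(Slab& slab, size_t bytes, size_t large_threshold) {
    if(bytes > large_threshold) return nullptr; // caller takes the slow path

    void* obj = slab.allocate(bytes);
    if(!obj) {
      if(refill_slab(slab)) {
        obj = slab.allocate(bytes); // retry once with the fresh slab
      }
      // Still nullptr here means the caller must use a fallback allocator.
    }
    return obj;
  }

  int main() {
    Slab slab;
    // 2 KB request against an arbitrary 8 KB "large object" threshold.
    void* p = allocate_young(slab, 2048, 8192);
    return p ? 0 : 1;
  }

The _dirty variants skip the clear_fields() zeroing step, presumably for callers that immediately initialize every field themselves.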