Example #1
void Allocator::retain(void *ptr, int object_style)
{
#ifdef DEBUG
  AllocatorNode *dbg_ptr = isMine(ptr);
  if(dbg_ptr)
  {
    if(object_style != 2)
      error("Allocator [debug mode]: try to retain a previously retained pointer! You'll destruct an inexistant object.");    
    if(object_style == dbg_ptr->object_style)
      error("Allocator [debug mode]: try to retain a previously retained pointer with same mode [%d]!", object_style);
  }
#endif

  // Create a new node to be placed *before* the root
  AllocatorNode *ptrs_ = (AllocatorNode *)sysAlloc(sizeof(AllocatorNode));
  if(!ptrs_)
    error("Allocator: not enough memory. Buy new ram.");
  ptrs_->prev = NULL;
  ptrs_->next = ptrs;
  if(ptrs)
    ptrs->prev = ptrs_;
  
  // Save the root
  ptrs = ptrs_;

  // Save the pointer
  ptrs->ptr = ptr;
  ptrs->object_style = object_style;
}
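
The AllocatorNode type and the isMine() helper are not part of this example. Below is a minimal sketch of the declarations the function relies on, inferred only from the fields and calls used above; the project's real header may differ.

#include <cstddef>

// Sketch only: names and fields are taken from the code above; anything else
// (access specifiers, extra members) is an assumption.
struct AllocatorNode
{
  void *ptr;              // the tracked pointer
  int object_style;       // mode it was retained with
  AllocatorNode *prev;    // doubly-linked list of tracked pointers
  AllocatorNode *next;
};

class Allocator
{
  public:
    void *alloc(std::size_t size, int object_style);
    void retain(void *ptr, int object_style);

  private:
    AllocatorNode *ptrs;               // list root, updated by retain()
    AllocatorNode *isMine(void *ptr);  // assumed to return the node tracking ptr, or NULL
};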
Example #2
void *Allocator::alloc(size_t size, int object_style)
{
  // Reject a zero-sized request (size_t is unsigned, so it can never be negative)
  if(size == 0)
    return(NULL);

  // Allocate what you need
  void *ptr = sysAlloc(size);
  if(!ptr)
    error("Allocator: not enough memory. Buy new ram.");

  // Save the pointer
  retain(ptr, object_style);
  return(ptr);
}
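
A usage sketch under stated assumptions (it reuses the class sketch above; the object_style value 1 is only for illustration and is not shown in the example). The point it illustrates: a failed allocation never reaches the caller, because error() aborts, while a zero size quietly returns NULL.

#include <cstring>

void fill_buffer(Allocator &allocator)
{
  // alloc() either succeeds or calls error(), so no NULL check is needed
  // for a non-zero size.
  char *buf = (char *)allocator.alloc(64, 1);
  std::strcpy(buf, "hello");

  // A size of 0 is the one case that does return NULL to the caller.
  void *empty = allocator.alloc(0, 1);  // NULL
  (void)empty;
}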
Example #3
File: malloc.c  Project: 8l/golang
void*
runtime·MHeap_SysAlloc(MHeap *h, uintptr n)
{
	byte *p, *p_end;
	uintptr p_size;
	bool reserved;

	if(n > h->arena_end - h->arena_used) {
		// We are in 32-bit mode, maybe we didn't use all possible address space yet.
		// Reserve some more space.
		byte *new_end;

		p_size = ROUND(n + PageSize, 256<<20);
		new_end = h->arena_end + p_size;
		if(new_end <= h->arena_start + MaxArena32) {
			// TODO: It would be bad if part of the arena
			// is reserved and part is not.
			p = runtime·SysReserve(h->arena_end, p_size, &reserved);
			if(p == h->arena_end) {
				h->arena_end = new_end;
				h->arena_reserved = reserved;
			}
			else if(p+p_size <= h->arena_start + MaxArena32) {
				// Keep everything page-aligned.
				// Our pages are bigger than hardware pages.
				h->arena_end = p+p_size;
				h->arena_used = p + (-(uintptr)p&(PageSize-1));
				h->arena_reserved = reserved;
			} else {
				uint64 stat;
				stat = 0;
				runtime·SysFree(p, p_size, &stat);
			}
		}
	}
	if(n <= h->arena_end - h->arena_used) {
		// Keep taking from our reservation.
		p = h->arena_used;
		runtime·SysMap(p, n, h->arena_reserved, &mstats.heap_sys);
		h->arena_used += n;
		runtime·MHeap_MapBits(h);
		runtime·MHeap_MapSpans(h);
		if(raceenabled)
			runtime·racemapshadow(p, n);
		
		if(((uintptr)p & (PageSize-1)) != 0)
			runtime·throw("misrounded allocation in MHeap_SysAlloc");
		return p;
	}
	
	// If using 64-bit, our reservation is all we have.
	if(h->arena_end - h->arena_start >= MaxArena32)
		return nil;

	// On 32-bit, once the reservation is gone we can
	// try to get memory at a location chosen by the OS
	// and hope that it is in the range we allocated bitmap for.
	p_size = ROUND(n, PageSize) + PageSize;
	p = runtime·sysAlloc(p_size, &mstats.heap_sys);
	if(p == nil)
		return nil;

	if(p < h->arena_start || p+p_size - h->arena_start >= MaxArena32) {
		runtime·printf("runtime: memory allocated by OS (%p) not in usable range [%p,%p)\n",
			p, h->arena_start, h->arena_start+MaxArena32);
		runtime·SysFree(p, p_size, &mstats.heap_sys);
		return nil;
	}
	
	p_end = p + p_size;
	p += -(uintptr)p & (PageSize-1);
	if(p+n > h->arena_used) {
		h->arena_used = p+n;
		if(p_end > h->arena_end)
			h->arena_end = p_end;
		runtime·MHeap_MapBits(h);
		runtime·MHeap_MapSpans(h);
		if(raceenabled)
			runtime·racemapshadow(p, n);
	}
	
	if(((uintptr)p & (PageSize-1)) != 0)
		runtime·throw("misrounded allocation in MHeap_SysAlloc");
	return p;
}
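
Two bit tricks in this function are easy to misread: ROUND(x, m) appears to round x up to a multiple of m, and p += -(uintptr)p & (PageSize-1) advances p to the next page boundary (or leaves it unchanged if it is already aligned). Below is a standalone C++ sketch of the same arithmetic, assuming a power-of-two page size; it is an illustration of the idiom, not code from the runtime.

#include <cstdint>
#include <cstdio>

static const std::uintptr_t PageSize = 4096;  // must be a power of two

// The formula ROUND is assumed to use: round n up to a multiple of PageSize.
static std::uintptr_t round_up(std::uintptr_t n)
{
  return (n + PageSize - 1) & ~(PageSize - 1);
}

int main()
{
  std::uintptr_t p = 0x1234567;  // arbitrary, not page-aligned

  // Same trick as `p += -(uintptr)p & (PageSize-1)`: the masked negation is
  // the distance to the next page boundary, and 0 if p is already aligned.
  p += (0 - p) & (PageSize - 1);

  std::printf("aligned p = %#llx, round_up(5000) = %llu\n",
              (unsigned long long)p, (unsigned long long)round_up(5000));
  return 0;
}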