void heap_init(void) {
    LTRACE_ENTRY;

    // create a mutex
    mutex_init(&theheap.lock);

    // initialize the free list
    list_initialize(&theheap.free_list);

    // initialize the delayed free list
    list_initialize(&theheap.delayed_free_list);
    spin_lock_init(&theheap.delayed_free_lock);

    // set the heap range
#if WITH_KERNEL_VM
    theheap.base = pmm_alloc_kpages(HEAP_GROW_SIZE / PAGE_SIZE, NULL);
    theheap.len = HEAP_GROW_SIZE;

    if (theheap.base == 0) {
        panic("HEAP: error allocating initial heap size\n");
    }
#else
    theheap.base = (void *)HEAP_START;
    theheap.len = HEAP_LEN;
#endif
    theheap.remaining = 0; // will get set by heap_insert_free_chunk()
    theheap.low_watermark = theheap.len;

    LTRACEF("base %p size %zd bytes\n", theheap.base, theheap.len);

    // create an initial free chunk
    heap_insert_free_chunk(heap_create_free_chunk(theheap.base, theheap.len, false));
}
void heap_init(void) {
    int z;

    LTRACE_ENTRY;

    // set the heap range in normal zone
    theheap[ZONE_NORMAL].base = (void *)HEAP_START;
    theheap[ZONE_NORMAL].len = HEAP_LEN;

#ifdef WITH_DMA_ZONE
    theheap[ZONE_DMA].base = (void *)DMA_START;
    theheap[ZONE_DMA].len = DMA_LEN;
#endif

    for (z = 0; z < MAX_ZONES; z++) {
        LTRACEF("base %p size %zd bytes (%s)\n", theheap[z].base, theheap[z].len, zone_name(z));

        // initialize the free list
        list_initialize(&theheap[z].free_list);

        // create an initial free chunk
        heap_insert_free_chunk(z, heap_create_free_chunk(theheap[z].base, theheap[z].len));
    }

    // dump heap info
    // heap_dump();

    // dprintf(INFO, "running heap tests\n");
    // heap_test();
}
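/*
 * zone_name() is called by the zoned heap_init() above but is not shown in
 * this section. A minimal sketch of such a helper, assuming the ZONE_NORMAL
 * and ZONE_DMA indices used above; the exact strings and the function shape
 * are assumptions, not the original implementation.
 */
static const char *zone_name(int zone) {
    switch (zone) {
        case ZONE_NORMAL:
            return "normal";
#ifdef WITH_DMA_ZONE
        case ZONE_DMA:
            return "dma";
#endif
        default:
            return "unknown";
    }
}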
static ssize_t heap_grow(size_t size) {
#if WITH_KERNEL_VM
    size = ROUNDUP(size, PAGE_SIZE);
    void *ptr = pmm_alloc_kpages(size / PAGE_SIZE, NULL);
    if (!ptr)
        return ERR_NO_MEMORY;

    LTRACEF("growing heap by 0x%zx bytes, new ptr %p\n", size, ptr);

    heap_insert_free_chunk(heap_create_free_chunk(ptr, size, true));

    /* change the heap start and end variables */
    if ((uintptr_t)ptr < (uintptr_t)theheap.base)
        theheap.base = ptr;

    uintptr_t endptr = (uintptr_t)ptr + size;
    if (endptr > (uintptr_t)theheap.base + theheap.len) {
        theheap.len = (uintptr_t)endptr - (uintptr_t)theheap.base;
    }

    return size;
#else
    return ERR_NO_MEMORY;
#endif
}
void heap_free(void *ptr) {
    if (ptr == 0)
        return;

    LTRACEF("ptr %p\n", ptr);

    // check for the old allocation structure
    struct alloc_struct_begin *as = (struct alloc_struct_begin *)ptr;
    as--;

    DEBUG_ASSERT(as->magic == HEAP_MAGIC);

#if DEBUG_HEAP
    {
        uint i;
        uint8_t *pad = (uint8_t *)as->padding_start;

        for (i = 0; i < as->padding_size; i++) {
            if (pad[i] != PADDING_FILL) {
                printf("free at %p scribbled outside the lines:\n", ptr);
                hexdump(pad, as->padding_size);
                panic("die\n");
            }
        }
    }
#endif

    LTRACEF("allocation was %zd bytes long at ptr %p\n", as->size, as->ptr);

    // looks good, create a free chunk and add it to the pool
    heap_insert_free_chunk(heap_create_free_chunk(as->ptr, as->size, true));
}
void heap_init(void) {
    LTRACE_ENTRY;

    // set the heap range
    theheap.base = (void *)HEAP_START;
    theheap.len = HEAP_LEN;
    theheap.remaining = 0; // will get set by heap_insert_free_chunk()
    theheap.low_watermark = theheap.len;

    LTRACEF("base %p size %zd bytes\n", theheap.base, theheap.len);

    // create a mutex
    mutex_init(&theheap.lock);

    // initialize the free list
    list_initialize(&theheap.free_list);

    // initialize the delayed free list
    list_initialize(&theheap.delayed_free_list);

    // create an initial free chunk
    heap_insert_free_chunk(heap_create_free_chunk(theheap.base, theheap.len, false));

    // dump heap info
    // heap_dump();

    // dprintf(INFO, "running heap tests\n");
    // heap_test();
}
void heap_delayed_free(void *ptr) { LTRACEF("ptr %p\n", ptr); // check for the old allocation structure struct alloc_struct_begin *as = (struct alloc_struct_begin *)ptr; as--; DEBUG_ASSERT(as->magic == HEAP_MAGIC); struct free_heap_chunk *chunk = heap_create_free_chunk(as->ptr, as->size, false); enter_critical_section(); list_add_head(&theheap.delayed_free_list, &chunk->node); exit_critical_section(); }
void heap_delayed_free(void *ptr) { LTRACEF("ptr %p\n", ptr); // check for the old allocation structure struct alloc_struct_begin *as = (struct alloc_struct_begin *)ptr; as--; DEBUG_ASSERT(as->magic == HEAP_MAGIC); struct free_heap_chunk *chunk = heap_create_free_chunk(as->ptr, as->size, false); spin_lock_saved_state_t state; spin_lock_irqsave(&theheap.delayed_free_lock, state); list_add_head(&theheap.delayed_free_list, &chunk->node); spin_unlock_irqrestore(&theheap.delayed_free_lock, state); }
void heap_init(void) {
    LTRACE_ENTRY;

    // set the heap range
    theheap.base = (void *)HEAP_START;
    theheap.len = HEAP_LEN;

    LTRACEF("base %p size %zd bytes\n", theheap.base, theheap.len);

    // initialize the free list
    list_initialize(&theheap.free_list);

    // create an initial free chunk
    heap_insert_free_chunk(heap_create_free_chunk(theheap.base, theheap.len));

    // dump heap info
    // heap_dump();

    // dprintf(INFO, "running heap tests\n");
    // heap_test();
}
void heap_free(zone_type zone, void *ptr) {
    if (ptr == 0)
        return;

    LTRACEF("ptr %p\n", ptr);

    // check for the old allocation structure
    struct alloc_struct_begin *as = (struct alloc_struct_begin *)ptr;
    as--;

    DEBUG_ASSERT(as->magic == HEAP_MAGIC);

    LTRACEF("allocation was %zd bytes long at ptr %p\n", as->size, as->ptr);

    // looks good, create a free chunk and add it to the pool
    enter_critical_section();
    heap_insert_free_chunk(zone, heap_create_free_chunk(as->ptr, as->size));
    exit_critical_section();

    // heap_dump();
}
void libboot_platform_heap_init(void *base, size_t len) {
    LTRACE_ENTRY;

    // set the heap range
    theheap.base = base;
    theheap.len = len;

    LTRACEF("base %p size %zd bytes\n", theheap.base, theheap.len);

    // initialize the free list
    list_initialize(&theheap.free_list);

    // create an initial free chunk
    heap_insert_free_chunk(heap_create_free_chunk(theheap.base, theheap.len));

    // dump heap info
    // heap_dump();

    // dprintf(INFO, "running heap tests\n");
    // heap_test();
}
/* add a new block of memory to the heap */
void heap_add_block(void *ptr, size_t len) {
    heap_insert_free_chunk(heap_create_free_chunk(ptr, len, false));
}
void *heap_alloc(size_t size, unsigned int alignment) { void *ptr; #if DEBUG_HEAP size_t original_size = size; #endif LTRACEF("size %zd, align %d\n", size, alignment); // deal with the pending free list if (unlikely(!list_is_empty(&theheap.delayed_free_list))) { heap_free_delayed_list(); } // alignment must be power of 2 if (alignment & (alignment - 1)) return NULL; // we always put a size field + base pointer + magic in front of the allocation size += sizeof(struct alloc_struct_begin); #if DEBUG_HEAP size += PADDING_SIZE; #endif // make sure we allocate at least the size of a struct free_heap_chunk so that // when we free it, we can create a struct free_heap_chunk struct and stick it // in the spot if (size < sizeof(struct free_heap_chunk)) size = sizeof(struct free_heap_chunk); // round up size to a multiple of native pointer size size = ROUNDUP(size, sizeof(void *)); // deal with nonzero alignments if (alignment > 0) { if (alignment < 16) alignment = 16; // add alignment for worst case fit size += alignment; } #if WITH_KERNEL_VM int retry_count = 0; retry: #endif mutex_acquire(&theheap.lock); // walk through the list ptr = NULL; struct free_heap_chunk *chunk; list_for_every_entry(&theheap.free_list, chunk, struct free_heap_chunk, node) { DEBUG_ASSERT((chunk->len % sizeof(void *)) == 0); // len should always be a multiple of pointer size // is it big enough to service our allocation? if (chunk->len >= size) { ptr = chunk; // remove it from the list struct list_node *next_node = list_next(&theheap.free_list, &chunk->node); list_delete(&chunk->node); if (chunk->len > size + sizeof(struct free_heap_chunk)) { // there's enough space in this chunk to create a new one after the allocation struct free_heap_chunk *newchunk = heap_create_free_chunk((uint8_t *)ptr + size, chunk->len - size, true); // truncate this chunk chunk->len -= chunk->len - size; // add the new one where chunk used to be if (next_node) list_add_before(next_node, &newchunk->node); else list_add_tail(&theheap.free_list, &newchunk->node); } // the allocated size is actually the length of this chunk, not the size requested DEBUG_ASSERT(chunk->len >= size); size = chunk->len; #if DEBUG_HEAP memset(ptr, ALLOC_FILL, size); #endif ptr = (void *)((addr_t)ptr + sizeof(struct alloc_struct_begin)); // align the output if requested if (alignment > 0) { ptr = (void *)ROUNDUP((addr_t)ptr, (addr_t)alignment); } struct alloc_struct_begin *as = (struct alloc_struct_begin *)ptr; as--; #if LK_DEBUGLEVEL > 1 as->magic = HEAP_MAGIC; #endif as->ptr = (void *)chunk; as->size = size; theheap.remaining -= size; if (theheap.remaining < theheap.low_watermark) { theheap.low_watermark = theheap.remaining; } #if DEBUG_HEAP as->padding_start = ((uint8_t *)ptr + original_size); as->padding_size = (((addr_t)chunk + size) - ((addr_t)ptr + original_size)); // printf("padding start %p, size %u, chunk %p, size %u\n", as->padding_start, as->padding_size, chunk, size); memset(as->padding_start, PADDING_FILL, as->padding_size); #endif break; } } mutex_release(&theheap.lock); #if WITH_KERNEL_VM /* try to grow the heap if we can */ if (ptr == NULL && retry_count == 0) { size_t growby = MAX(HEAP_GROW_SIZE, ROUNDUP(size, PAGE_SIZE)); ssize_t err = heap_grow(growby); if (err >= 0) { retry_count++; goto retry; } } #endif LTRACEF("returning ptr %p\n", ptr); return ptr; }
void *heap_alloc(size_t size, unsigned int alignment) { void *ptr; #if DEBUG_HEAP size_t original_size = size; #endif LTRACEF("size %zd, align %d\n", size, alignment); // alignment must be power of 2 if (alignment & (alignment - 1)) return NULL; // we always put a size field + base pointer + magic in front of the allocation size += sizeof(struct alloc_struct_begin); #if DEBUG_HEAP size += PADDING_SIZE; #endif // make sure we allocate at least the size of a struct free_heap_chunk so that // when we free it, we can create a struct free_heap_chunk struct and stick it // in the spot if (size < sizeof(struct free_heap_chunk)) size = sizeof(struct free_heap_chunk); // round up size to a multiple of native pointer size size = ROUNDUP(size, sizeof(void *)); // deal with nonzero alignments if (alignment > 0) { if (alignment < 16) alignment = 16; // add alignment for worst case fit size += alignment; } // critical section enter_critical_section(); // walk through the list ptr = NULL; struct free_heap_chunk *chunk; list_for_every_entry(&theheap.free_list, chunk, struct free_heap_chunk, node) { DEBUG_ASSERT((chunk->len % sizeof(void *)) == 0); // len should always be a multiple of pointer size // is it big enough to service our allocation? if (chunk->len >= size) { ptr = chunk; // remove it from the list struct list_node *next_node = list_next(&theheap.free_list, &chunk->node); list_delete(&chunk->node); if (chunk->len > size + sizeof(struct free_heap_chunk)) { // there's enough space in this chunk to create a new one after the allocation struct free_heap_chunk *newchunk = heap_create_free_chunk((uint8_t *)ptr + size, chunk->len - size); // truncate this chunk chunk->len -= chunk->len - size; // add the new one where chunk used to be if (next_node) list_add_before(next_node, &newchunk->node); else list_add_tail(&theheap.free_list, &newchunk->node); } // the allocated size is actually the length of this chunk, not the size requested DEBUG_ASSERT(chunk->len >= size); size = chunk->len; #if DEBUG_HEAP memset(ptr, ALLOC_FILL, size); #endif ptr = (void *)((addr_t)ptr + sizeof(struct alloc_struct_begin)); // align the output if requested if (alignment > 0) { ptr = (void *)ROUNDUP((addr_t)ptr, alignment); } struct alloc_struct_begin *as = (struct alloc_struct_begin *)ptr; as--; as->magic = HEAP_MAGIC; as->ptr = (void *)chunk; as->size = size; #if DEBUG_HEAP as->padding_start = ((uint8_t *)ptr + original_size); as->padding_size = (((addr_t)chunk + size) - ((addr_t)ptr + original_size)); // printf("padding start %p, size %u, chunk %p, size %u\n", as->padding_start, as->padding_size, chunk, size); memset(as->padding_start, PADDING_FILL, as->padding_size); #endif break; } } LTRACEF("returning ptr %p\n", ptr); // heap_dump(); exit_critical_section(); return ptr; }