/**
 * heap_init - one-time initialization of the kernel heap.
 *
 * Sets up the heap mutex, the free list, and the delayed-free list (with
 * its spinlock), then establishes the heap's backing range:
 *  - WITH_KERNEL_VM: allocates HEAP_GROW_SIZE bytes of kernel pages from
 *    the PMM and panics if the allocation fails.
 *  - otherwise: uses the fixed HEAP_START/HEAP_LEN range.
 * Finally seeds the allocator with a single free chunk spanning the range.
 */
void heap_init(void) {
    LTRACE_ENTRY;

    // create a mutex
    mutex_init(&theheap.lock);

    // initialize the free list
    list_initialize(&theheap.free_list);

    // initialize the delayed free list
    list_initialize(&theheap.delayed_free_list);
    spin_lock_init(&theheap.delayed_free_lock);

    // set the heap range
#if WITH_KERNEL_VM
    theheap.base = pmm_alloc_kpages(HEAP_GROW_SIZE / PAGE_SIZE, NULL);
    theheap.len = HEAP_GROW_SIZE;

    if (theheap.base == 0) {
        panic("HEAP: error allocating initial heap size\n");
    }
#else
    theheap.base = (void *)HEAP_START;
    theheap.len = HEAP_LEN;
#endif
    theheap.remaining = 0; // will get set by heap_insert_free_chunk()
    theheap.low_watermark = theheap.len;

    LTRACEF("base %p size %zd bytes\n", theheap.base, theheap.len);

    // create an initial free chunk
    heap_insert_free_chunk(heap_create_free_chunk(theheap.base, theheap.len, false));
}
void heap_free(void *ptr) { if (ptr == 0) return; LTRACEF("ptr %p\n", ptr); // check for the old allocation structure struct alloc_struct_begin *as = (struct alloc_struct_begin *)ptr; as--; DEBUG_ASSERT(as->magic == HEAP_MAGIC); #if DEBUG_HEAP { uint i; uint8_t *pad = (uint8_t *)as->padding_start; for (i = 0; i < as->padding_size; i++) { if (pad[i] != PADDING_FILL) { printf("free at %p scribbled outside the lines:\n", ptr); hexdump(pad, as->padding_size); panic("die\n"); } } } #endif LTRACEF("allocation was %zd bytes long at ptr %p\n", as->size, as->ptr); // looks good, create a free chunk and add it to the pool heap_insert_free_chunk(heap_create_free_chunk(as->ptr, as->size, true)); }
/*
 * Grow the heap by at least 'size' bytes (rounded up to whole pages).
 * Inserts the new region as a free chunk and widens the recorded
 * [base, base+len) span to cover it. Returns the number of bytes added,
 * or ERR_NO_MEMORY if no pages were available (or the build has no VM).
 */
static ssize_t heap_grow(size_t size) {
#if WITH_KERNEL_VM
    size = ROUNDUP(size, PAGE_SIZE);

    void *mem = pmm_alloc_kpages(size / PAGE_SIZE, NULL);
    if (!mem)
        return ERR_NO_MEMORY;

    LTRACEF("growing heap by 0x%zx bytes, new ptr %p\n", size, mem);

    heap_insert_free_chunk(heap_create_free_chunk(mem, size, true));

    /* change the heap start and end variables */
    uintptr_t new_start = (uintptr_t)mem;
    if (new_start < (uintptr_t)theheap.base)
        theheap.base = mem;

    // note: the current end is computed against the possibly-updated base
    uintptr_t new_end = new_start + size;
    if (new_end > (uintptr_t)theheap.base + theheap.len)
        theheap.len = new_end - (uintptr_t)theheap.base;

    return size;
#else
    return ERR_NO_MEMORY;
#endif
}
void heap_init(void) { int z; LTRACE_ENTRY; // set the heap range in normal zone theheap[ZONE_NORMAL].base = (void *)HEAP_START; theheap[ZONE_NORMAL].len = HEAP_LEN; #ifdef WITH_DMA_ZONE theheap[ZONE_DMA].base = (void *)DMA_START; theheap[ZONE_DMA].len = DMA_LEN; #endif for (z = 0; z < MAX_ZONES; z++) { LTRACEF("base %p size %zd bytes (%s)\n", theheap[z].base, theheap[z].len, zone_name(z)); // initialize the free list list_initialize(&theheap[z].free_list); // create an initial free chunk heap_insert_free_chunk(z, heap_create_free_chunk(theheap[z].base, theheap[z].len)); } // dump heap info // heap_dump(); // dprintf(INFO, "running heap tests\n"); // heap_test(); }
void heap_init(void) { LTRACE_ENTRY; // set the heap range theheap.base = (void *)HEAP_START; theheap.len = HEAP_LEN; theheap.remaining =0; // will get set by heap_insert_free_chunk() theheap.low_watermark = theheap.len; LTRACEF("base %p size %zd bytes\n", theheap.base, theheap.len); // create a mutex mutex_init(&theheap.lock); // initialize the free list list_initialize(&theheap.free_list); // initialize the delayed free list list_initialize(&theheap.delayed_free_list); // create an initial free chunk heap_insert_free_chunk(heap_create_free_chunk(theheap.base, theheap.len, false)); // dump heap info // heap_dump(); // dprintf(INFO, "running heap tests\n"); // heap_test(); }
static void heap_free_delayed_list(void) { struct list_node list; list_initialize(&list); enter_critical_section(); struct free_heap_chunk *chunk; while ((chunk = list_remove_head_type(&theheap.delayed_free_list, struct free_heap_chunk, node))) { list_add_head(&list, &chunk->node); } exit_critical_section(); while ((chunk = list_remove_head_type(&list, struct free_heap_chunk, node))) { LTRACEF("freeing chunk %p\n", chunk); heap_insert_free_chunk(chunk); } }
static void heap_free_delayed_list(void) { struct list_node list; list_initialize(&list); spin_lock_saved_state_t state; spin_lock_irqsave(&theheap.delayed_free_lock, state); struct free_heap_chunk *chunk; while ((chunk = list_remove_head_type(&theheap.delayed_free_list, struct free_heap_chunk, node))) { list_add_head(&list, &chunk->node); } spin_unlock_irqrestore(&theheap.delayed_free_lock, state); while ((chunk = list_remove_head_type(&list, struct free_heap_chunk, node))) { LTRACEF("freeing chunk %p\n", chunk); heap_insert_free_chunk(chunk); } }
void heap_init(void) { LTRACE_ENTRY; // set the heap range theheap.base = (void *)HEAP_START; theheap.len = HEAP_LEN; LTRACEF("base %p size %zd bytes\n", theheap.base, theheap.len); // initialize the free list list_initialize(&theheap.free_list); // create an initial free chunk heap_insert_free_chunk(heap_create_free_chunk(theheap.base, theheap.len)); // dump heap info // heap_dump(); // dprintf(INFO, "running heap tests\n"); // heap_test(); }
/*
 * Return a previously allocated pointer to the given zone's free pool.
 * Freeing NULL is a no-op. The insert is performed inside a critical
 * section.
 */
void heap_free(zone_type zone, void *ptr) {
    if (ptr == 0)
        return;

    LTRACEF("ptr %p\n", ptr);

    // back up to the allocation header that sits just before the user pointer
    struct alloc_struct_begin *header = (struct alloc_struct_begin *)ptr - 1;
    DEBUG_ASSERT(header->magic == HEAP_MAGIC);

    LTRACEF("allocation was %zd bytes long at ptr %p\n", header->size, header->ptr);

    // looks good, create a free chunk and add it to the pool
    enter_critical_section();
    heap_insert_free_chunk(zone, heap_create_free_chunk(header->ptr, header->size));
    exit_critical_section();

    // heap_dump();
}
/*
 * Initialize the libboot heap over the caller-supplied region
 * [base, base + len): set up the free list, record the range, and seed
 * the allocator with one free chunk covering all of it.
 */
void libboot_platform_heap_init(void *base, size_t len) {
    LTRACE_ENTRY;

    // initialize the free list
    list_initialize(&theheap.free_list);

    // set the heap range
    theheap.base = base;
    theheap.len = len;

    LTRACEF("base %p size %zd bytes\n", theheap.base, theheap.len);

    // create an initial free chunk
    heap_insert_free_chunk(heap_create_free_chunk(theheap.base, theheap.len));

    // dump heap info
    // heap_dump();

    // dprintf(INFO, "running heap tests\n");
    // heap_test();
}
/* add a new block of memory to the heap */
void heap_add_block(void *ptr, size_t len) {
    // Wrap [ptr, ptr + len) in a free-chunk descriptor and hand it to the
    // allocator's free list.
    // NOTE(review): the third argument mirrors heap_init's 'false' (while
    // heap_grow passes 'true') — presumably it controls some per-chunk
    // policy in heap_create_free_chunk; confirm against that function's
    // definition before relying on it.
    heap_insert_free_chunk(heap_create_free_chunk(ptr, len, false));
}