void *gc_alloc(machine_uint_t n_bytes, bool has_finaliser) {
    machine_uint_t n_blocks = ((n_bytes + BYTES_PER_BLOCK - 1) & (~(BYTES_PER_BLOCK - 1))) / BYTES_PER_BLOCK;
    DEBUG_printf("gc_alloc(" UINT_FMT " bytes -> " UINT_FMT " blocks)\n", n_bytes, n_blocks);

    // check if GC is locked
    if (gc_lock_depth > 0) {
        return NULL;
    }

    // check for 0 allocation
    if (n_blocks == 0) {
        return NULL;
    }

    machine_uint_t i;
    machine_uint_t end_block;
    machine_uint_t start_block;
    machine_uint_t n_free = 0;
    int collected = 0;
    for (;;) {

        // look for a run of n_blocks available blocks
        for (i = 0; i < gc_alloc_table_byte_len; i++) {
            byte a = gc_alloc_table_start[i];
            if (ATB_0_IS_FREE(a)) { if (++n_free >= n_blocks) { i = i * BLOCKS_PER_ATB + 0; goto found; } } else { n_free = 0; }
            if (ATB_1_IS_FREE(a)) { if (++n_free >= n_blocks) { i = i * BLOCKS_PER_ATB + 1; goto found; } } else { n_free = 0; }
            if (ATB_2_IS_FREE(a)) { if (++n_free >= n_blocks) { i = i * BLOCKS_PER_ATB + 2; goto found; } } else { n_free = 0; }
            if (ATB_3_IS_FREE(a)) { if (++n_free >= n_blocks) { i = i * BLOCKS_PER_ATB + 3; goto found; } } else { n_free = 0; }
        }

        // nothing found!
        if (collected) {
            return NULL;
        }
        DEBUG_printf("gc_alloc(" UINT_FMT "): no free mem, triggering GC\n", n_bytes);
        gc_collect();
        collected = 1;
    }

    // found, ending at block i inclusive
found:
    // get starting and end blocks, both inclusive
    end_block = i;
    start_block = i - n_free + 1;

    // mark first block as used head
    ATB_FREE_TO_HEAD(start_block);

    // mark rest of blocks as used tail
    // TODO for a run of many blocks can make this more efficient
    for (machine_uint_t bl = start_block + 1; bl <= end_block; bl++) {
        ATB_FREE_TO_TAIL(bl);
    }

    // get pointer to first block
    void *ret_ptr = (void*)(gc_pool_start + start_block * WORDS_PER_BLOCK);
    DEBUG_printf("gc_alloc(%p)\n", ret_ptr);

    // zero out the additional bytes of the newly allocated blocks
    // This is needed because the blocks may have previously held pointers
    // to the heap and will not be set to something else if the caller
    // doesn't actually use the entire block. As such they will continue
    // to point to the heap and may prevent other blocks from being reclaimed.
    memset((byte*)ret_ptr + n_bytes, 0, (end_block - start_block + 1) * BYTES_PER_BLOCK - n_bytes);

#if MICROPY_ENABLE_FINALISER
    if (has_finaliser) {
        // clear type pointer in case it is never set
        ((mp_obj_base_t*)ret_ptr)->type = MP_OBJ_NULL;
        // set mp_obj flag only if it has a finaliser
        FTB_SET(start_block);
    }
#endif

    return ret_ptr;
}
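
/*
 * For reference, a sketch of how the allocation-table (ATB) helpers used
 * above could be defined. Each ATB byte packs the state of four heap
 * blocks, two bits per block (free, head, tail, or marked). This is an
 * illustrative reconstruction consistent with the usage in gc_alloc, not
 * necessarily the exact definitions from py/gc.c.
 */
#define AT_FREE (0) // block is unused
#define AT_HEAD (1) // first block of an allocation
#define AT_TAIL (2) // continuation block of an allocation
#define AT_MARK (3) // head block marked live during a collection

#define BLOCKS_PER_ATB (4)

// test whether block 0..3 of an ATB byte is free (its two state bits are 0)
#define ATB_0_IS_FREE(a) (((a) & 0x03) == 0)
#define ATB_1_IS_FREE(a) (((a) & 0x0c) == 0)
#define ATB_2_IS_FREE(a) (((a) & 0x30) == 0)
#define ATB_3_IS_FREE(a) (((a) & 0xc0) == 0)

// shift amount for a block's two state bits within its ATB byte
#define BLOCK_SHIFT(block) (2 * ((block) & (BLOCKS_PER_ATB - 1)))

// transition a free block to the head or tail of a new allocation
// (the free state is 0, so OR-ing in the new state is sufficient)
#define ATB_FREE_TO_HEAD(block) do { gc_alloc_table_start[(block) / BLOCKS_PER_ATB] |= (AT_HEAD << BLOCK_SHIFT(block)); } while (0)
#define ATB_FREE_TO_TAIL(block) do { gc_alloc_table_start[(block) / BLOCKS_PER_ATB] |= (AT_TAIL << BLOCK_SHIFT(block)); } while (0)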
void *gc_alloc(size_t n_bytes, bool has_finaliser) {
    size_t n_blocks = ((n_bytes + BYTES_PER_BLOCK - 1) & (~(BYTES_PER_BLOCK - 1))) / BYTES_PER_BLOCK;
    DEBUG_printf("gc_alloc(" UINT_FMT " bytes -> " UINT_FMT " blocks)\n", n_bytes, n_blocks);

    // check if GC is locked
    if (MP_STATE_MEM(gc_lock_depth) > 0) {
        return NULL;
    }

    // check for 0 allocation
    if (n_blocks == 0) {
        return NULL;
    }

    size_t i;
    size_t end_block;
    size_t start_block;
    size_t n_free = 0;
    int collected = !MP_STATE_MEM(gc_auto_collect_enabled);
    for (;;) {

        // look for a run of n_blocks available blocks
        for (i = MP_STATE_MEM(gc_last_free_atb_index); i < MP_STATE_MEM(gc_alloc_table_byte_len); i++) {
            byte a = MP_STATE_MEM(gc_alloc_table_start)[i];
            if (ATB_0_IS_FREE(a)) { if (++n_free >= n_blocks) { i = i * BLOCKS_PER_ATB + 0; goto found; } } else { n_free = 0; }
            if (ATB_1_IS_FREE(a)) { if (++n_free >= n_blocks) { i = i * BLOCKS_PER_ATB + 1; goto found; } } else { n_free = 0; }
            if (ATB_2_IS_FREE(a)) { if (++n_free >= n_blocks) { i = i * BLOCKS_PER_ATB + 2; goto found; } } else { n_free = 0; }
            if (ATB_3_IS_FREE(a)) { if (++n_free >= n_blocks) { i = i * BLOCKS_PER_ATB + 3; goto found; } } else { n_free = 0; }
        }

        // nothing found!
        if (collected) {
            return NULL;
        }
        DEBUG_printf("gc_alloc(" UINT_FMT "): no free mem, triggering GC\n", n_bytes);
        gc_collect();
        collected = 1;
    }

    // found, ending at block i inclusive
found:
    // get starting and end blocks, both inclusive
    end_block = i;
    start_block = i - n_free + 1;

    // Set last free ATB index to block after last block we found, for start of
    // next scan. To reduce fragmentation, we only do this if we were looking
    // for a single free block, which guarantees that there are no free blocks
    // before this one. Also, whenever we free or shrink a block we must check
    // if this index needs adjusting (see gc_realloc and gc_free).
    if (n_free == 1) {
        MP_STATE_MEM(gc_last_free_atb_index) = (i + 1) / BLOCKS_PER_ATB;
    }

    // mark first block as used head
    ATB_FREE_TO_HEAD(start_block);

    // mark rest of blocks as used tail
    // TODO for a run of many blocks can make this more efficient
    for (size_t bl = start_block + 1; bl <= end_block; bl++) {
        ATB_FREE_TO_TAIL(bl);
    }

    // get pointer to first block
    void *ret_ptr = (void*)(MP_STATE_MEM(gc_pool_start) + start_block * BYTES_PER_BLOCK);
    DEBUG_printf("gc_alloc(%p)\n", ret_ptr);

    // Zero out all the bytes of the newly allocated blocks.
    // This is needed because the blocks may have previously held pointers
    // to the heap and will not be set to something else if the caller
    // doesn't actually use the entire block. As such they will continue
    // to point to the heap and may prevent other blocks from being reclaimed.
    memset((byte*)ret_ptr, 0, (end_block - start_block + 1) * BYTES_PER_BLOCK);

#if MICROPY_ENABLE_FINALISER
    if (has_finaliser) {
        // clear type pointer in case it is never set
        ((mp_obj_base_t*)ret_ptr)->type = NULL;
        // set mp_obj flag only if it has a finaliser
        FTB_SET(start_block);
    }
#else
    (void)has_finaliser;
#endif

#if EXTENSIVE_HEAP_PROFILING
    gc_dump_alloc_table();
#endif

    return ret_ptr;
}
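
/*
 * A minimal usage sketch, assuming the heap has already been initialised
 * with gc_init() and that mp_obj_base_t is the standard MicroPython object
 * header. `example_new_finalisable_obj` and its `example_type` parameter
 * are hypothetical names, not part of the code above. gc_alloc returns
 * NULL when the GC is locked, when n_bytes is 0, or when even a collection
 * cannot produce a long-enough run of free blocks, so every caller must
 * handle that case.
 */
static mp_obj_base_t *example_new_finalisable_obj(const mp_obj_type_t *example_type) {
    mp_obj_base_t *o = gc_alloc(sizeof(mp_obj_base_t), true); // true: reserve an FTB bit
    if (o == NULL) {
        return NULL; // out of memory (or GC locked)
    }
    // gc_alloc cleared the type pointer; the caller sets it so the object
    // can be finalised (via its __del__ method) when the GC frees it
    o->type = example_type;
    return o;
}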