/**********************************************************************
Frees the cached free block of a memory heap, if one is present.
Returns the buffer frame to the buffer pool and clears the cache slot
so a later peek sees NULL. */
void
mem_heap_free_block_free(
/*=====================*/
	mem_heap_t*	heap)	/* in: heap */
{
	if (heap->free_block == NULL) {

		return;
	}

	buf_frame_free(heap->free_block);

	heap->free_block = NULL;
}
/**********************************************************************
Frees a single block of a memory heap: unlinks it from the heap's block
list and returns the underlying storage to its origin (common pool or
buffer pool), depending on the heap type. Reads all needed block fields
BEFORE the debug erase/free so nothing touches freed memory. */
void
mem_heap_block_free(
/*================*/
	mem_heap_t*	heap,	/* in: heap */
	mem_block_t*	block)	/* in: block to free */
{
	ulint	type;
	ulint	len;
	ibool	init_block;

	/* Sanity check: a live block must carry the magic number;
	anything else means heap corruption. */
	if (block->magic_n != MEM_BLOCK_MAGIC_N) {
		mem_analyze_corruption((byte*)block);
	}

	UT_LIST_REMOVE(list, heap->base, block);

#ifdef MEM_PERIODIC_CHECK
	/* Also unlink from the global debug list of all blocks,
	under the pool mutex. */
	mem_pool_mutex_enter();

	UT_LIST_REMOVE(mem_block_list, mem_block_list, block);

	mem_pool_mutex_exit();
#endif
	/* Snapshot the fields we still need: after the erase/free below
	the block contents must not be read again. */
	type = heap->type;
	len = block->len;
	init_block = block->init_block;
	/* Poison the magic number so a double free is detectable. */
	block->magic_n = MEM_FREED_BLOCK_MAGIC_N;

#ifdef UNIV_MEM_DEBUG
	/* In the debug version we set the memory to a random combination
	of hex 0xDE and 0xAD. */

	mem_erase_buf((byte*)block, len);
#endif

	if (init_block) {
		/* Block was embedded in caller-provided storage:
		do not have to free, do nothing */

	} else if (type == MEM_HEAP_DYNAMIC) {

		/* Dynamic heaps allocate from the common memory pool. */
		mem_area_free(block, mem_comm_pool);
	} else {
		ut_ad(type & MEM_HEAP_BUFFER);

		/* Buffer-type heaps: large blocks came straight from the
		buffer pool, small ones from the common pool. */
		if (len >= UNIV_PAGE_SIZE / 2) {
			buf_frame_free((byte*)block);
		} else {
			mem_area_free(block, mem_comm_pool);
		}
	}
}
/********************************************************************* This function should be called before reserving any btr search mutex, if the intended operation might add nodes to the search system hash table. Because of the latching order, once we have reserved the btr search system latch, we cannot allocate a free frame from the buffer pool. Checks that there is a free buffer frame allocated for hash table heap in the btr search system. If not, allocates a free frames for the heap. This check makes it probable that, when have reserved the btr search system latch and we need to allocate a new node to the hash table, it will succeed. However, the check will not guarantee success. */ static void btr_search_check_free_space_in_heap(void) /*=====================================*/ { buf_frame_t* frame; hash_table_t* table; mem_heap_t* heap; #ifdef UNIV_SYNC_DEBUG ut_ad(!rw_lock_own(&btr_search_latch, RW_LOCK_SHARED)); ut_ad(!rw_lock_own(&btr_search_latch, RW_LOCK_EX)); #endif /* UNIV_SYNC_DEBUG */ table = btr_search_sys->hash_index; heap = table->heap; /* Note that we peek the value of heap->free_block without reserving the latch: this is ok, because we will not guarantee that there will be enough free space in the hash table. */ if (heap->free_block == NULL) { frame = buf_frame_alloc(); rw_lock_x_lock(&btr_search_latch); if (heap->free_block == NULL) { heap->free_block = frame; } else { buf_frame_free(frame); } rw_lock_x_unlock(&btr_search_latch); } }