/**
 * Free the chunk.
 *
 * Returns the chunk to the pool by pushing it onto the head of the
 * global free-chunk list and marking its memory inaccessible for Valgrind.
 */
void __attr_always_inline___
mem_pools_free (uint8_t *chunk_p) /**< pointer to the chunk */
{
  mem_check_pools ();

  mem_pool_chunk_t *freed_chunk_p = (mem_pool_chunk_t *) chunk_p;

  /* Link the chunk in as the new head of the free list. */
  freed_chunk_p->u.free.next_p = mem_free_chunk_p;
  mem_free_chunk_p = freed_chunk_p;

  /* Tell Valgrind the chunk is freed and must not be touched until
   * it is handed out again. */
  VALGRIND_FREYA_FREELIKE_SPACE (freed_chunk_p);
  VALGRIND_NOACCESS_SPACE (freed_chunk_p, MEM_POOL_CHUNK_SIZE);

#ifndef JERRY_NDEBUG
  mem_free_chunks_number++;
#endif /* !JERRY_NDEBUG */

  MEM_POOLS_STAT_FREE_CHUNK ();

  mem_check_pools ();
} /* mem_pools_free */
/**
 * Free the memory block.
 *
 * Inserts the block back into the address-ordered free-region list,
 * coalescing it with the adjacent previous and/or next free regions when
 * they are contiguous, then lowers the soft heap limit toward the new
 * allocated size.
 */
void __attr_hot___
jmem_heap_free_block (void *ptr, /**< pointer to beginning of data space of the block */
                      const size_t size) /**< size of allocated region */
{
  VALGRIND_FREYA_CHECK_MEMPOOL_REQUEST;

  /* checking that ptr points to the heap */
  JERRY_ASSERT (jmem_is_heap_pointer (ptr));
  JERRY_ASSERT (size > 0);
  JERRY_ASSERT (JERRY_CONTEXT (jmem_heap_limit) >= JERRY_CONTEXT (jmem_heap_allocated_size));

  VALGRIND_FREYA_FREELIKE_SPACE (ptr);
  VALGRIND_NOACCESS_SPACE (ptr, size);
  JMEM_HEAP_STAT_FREE_ITER ();

  jmem_heap_free_t *block_p = (jmem_heap_free_t *) ptr;
  jmem_heap_free_t *prev_p;
  jmem_heap_free_t *next_p;

  VALGRIND_DEFINED_SPACE (&JERRY_HEAP_CONTEXT (first), sizeof (jmem_heap_free_t));

  /* Start the list walk from the cached skip pointer when the block lies
   * after it; otherwise fall back to the list head. */
  if (block_p > JERRY_CONTEXT (jmem_heap_list_skip_p))
  {
    prev_p = JERRY_CONTEXT (jmem_heap_list_skip_p);
    JMEM_HEAP_STAT_SKIP ();
  }
  else
  {
    prev_p = &JERRY_HEAP_CONTEXT (first);
    JMEM_HEAP_STAT_NONSKIP ();
  }

  JERRY_ASSERT (jmem_is_heap_pointer (block_p));
  const uint32_t block_offset = JMEM_HEAP_GET_OFFSET_FROM_ADDR (block_p);

  VALGRIND_DEFINED_SPACE (prev_p, sizeof (jmem_heap_free_t));
  // Find position of region in the list
  while (prev_p->next_offset < block_offset)
  {
    /* NOTE(review): this declaration intentionally shadows the outer next_p;
     * the outer one is only assigned after the loop terminates. */
    jmem_heap_free_t *const next_p = JMEM_HEAP_GET_ADDR_FROM_OFFSET (prev_p->next_offset);
    JERRY_ASSERT (jmem_is_heap_pointer (next_p));

    /* Only the node currently being inspected is kept Valgrind-accessible. */
    VALGRIND_DEFINED_SPACE (next_p, sizeof (jmem_heap_free_t));
    VALGRIND_NOACCESS_SPACE (prev_p, sizeof (jmem_heap_free_t));
    prev_p = next_p;

    JMEM_HEAP_STAT_FREE_ITER ();
  }

  next_p = JMEM_HEAP_GET_ADDR_FROM_OFFSET (prev_p->next_offset);
  VALGRIND_DEFINED_SPACE (next_p, sizeof (jmem_heap_free_t));

  /* Realign size */
  const size_t aligned_size = (size + JMEM_ALIGNMENT - 1) / JMEM_ALIGNMENT * JMEM_ALIGNMENT;

  VALGRIND_DEFINED_SPACE (block_p, sizeof (jmem_heap_free_t));
  VALGRIND_DEFINED_SPACE (prev_p, sizeof (jmem_heap_free_t));

  // Update prev
  if (jmem_heap_get_region_end (prev_p) == block_p)
  {
    // Can be merged: extend the previous region over the freed block
    prev_p->size += (uint32_t) aligned_size;
    VALGRIND_NOACCESS_SPACE (block_p, sizeof (jmem_heap_free_t));
    block_p = prev_p;
  }
  else
  {
    /* Not adjacent: insert the freed block after prev_p. */
    block_p->size = (uint32_t) aligned_size;
    prev_p->next_offset = block_offset;
  }

  VALGRIND_DEFINED_SPACE (next_p, sizeof (jmem_heap_free_t));
  // Update next
  if (jmem_heap_get_region_end (block_p) == next_p)
  {
    /* next_p is about to be absorbed; keep the skip pointer valid. */
    if (unlikely (next_p == JERRY_CONTEXT (jmem_heap_list_skip_p)))
    {
      JERRY_CONTEXT (jmem_heap_list_skip_p) = block_p;
    }

    // Can be merged: absorb the next region into the freed block
    block_p->size += next_p->size;
    block_p->next_offset = next_p->next_offset;
  }
  else
  {
    block_p->next_offset = JMEM_HEAP_GET_OFFSET_FROM_ADDR (next_p);
  }

  /* Cache the predecessor so a subsequent free at a higher address can
   * skip the initial part of the list walk. */
  JERRY_CONTEXT (jmem_heap_list_skip_p) = prev_p;

  VALGRIND_NOACCESS_SPACE (prev_p, sizeof (jmem_heap_free_t));
  VALGRIND_NOACCESS_SPACE (block_p, size);
  VALGRIND_NOACCESS_SPACE (next_p, sizeof (jmem_heap_free_t));

  JERRY_ASSERT (JERRY_CONTEXT (jmem_heap_allocated_size) > 0);
  JERRY_CONTEXT (jmem_heap_allocated_size) -= aligned_size;

  /* Shrink the soft heap limit back toward the allocated size, one
   * CONFIG_MEM_HEAP_DESIRED_LIMIT step at a time. */
  while (JERRY_CONTEXT (jmem_heap_allocated_size) + CONFIG_MEM_HEAP_DESIRED_LIMIT <= JERRY_CONTEXT (jmem_heap_limit))
  {
    JERRY_CONTEXT (jmem_heap_limit) -= CONFIG_MEM_HEAP_DESIRED_LIMIT;
  }

  VALGRIND_NOACCESS_SPACE (&JERRY_HEAP_CONTEXT (first), sizeof (jmem_heap_free_t));
  JERRY_ASSERT (JERRY_CONTEXT (jmem_heap_limit) >= JERRY_CONTEXT (jmem_heap_allocated_size));
  JMEM_HEAP_STAT_FREE (size);
} /* jmem_heap_free_block */