/**
 * Compress pointer
 *
 * @return packed pointer
 */
inline jmem_cpointer_t JERRY_ATTR_PURE JERRY_ATTR_ALWAYS_INLINE
jmem_compress_pointer (const void *pointer_p) /**< pointer to compress */
{
  JERRY_ASSERT (pointer_p != NULL);
  JERRY_ASSERT (jmem_is_heap_pointer (pointer_p));

  uintptr_t compressed = (uintptr_t) pointer_p;

  JERRY_ASSERT (compressed % JMEM_ALIGNMENT == 0);

#if defined (ECMA_VALUE_CAN_STORE_UINTPTR_VALUE_DIRECTLY) && defined (JERRY_CPOINTER_32_BIT)
  /* The native pointer is stored unchanged; it must fit the compressed type. */
  JERRY_ASSERT (((jmem_cpointer_t) compressed) == compressed);
#else /* !ECMA_VALUE_CAN_STORE_UINTPTR_VALUE_DIRECTLY || !JERRY_CPOINTER_32_BIT */
  /* Encode the pointer as an aligned-unit offset from the heap header. */
  compressed = (compressed - (uintptr_t) &JERRY_HEAP_CONTEXT (first)) >> JMEM_ALIGNMENT_LOG;

#ifdef JERRY_CPOINTER_32_BIT
  JERRY_ASSERT (compressed <= UINT32_MAX);
#else /* !JERRY_CPOINTER_32_BIT */
  JERRY_ASSERT (compressed <= UINT16_MAX);
#endif /* JERRY_CPOINTER_32_BIT */
  /* The offset must not collide with the reserved null value. */
  JERRY_ASSERT (compressed != JMEM_CP_NULL);
#endif /* ECMA_VALUE_CAN_STORE_UINTPTR_VALUE_DIRECTLY && JERRY_CPOINTER_32_BIT */

  return (jmem_cpointer_t) compressed;
} /* jmem_compress_pointer */
/** * Startup initialization of heap */ void jmem_heap_init (void) { #ifndef JERRY_CPOINTER_32_BIT JERRY_STATIC_ASSERT (((UINT16_MAX + 1) << JMEM_ALIGNMENT_LOG) >= JMEM_HEAP_SIZE, maximum_heap_size_for_16_bit_compressed_pointers_is_512K); #endif /* !JERRY_CPOINTER_32_BIT */ JERRY_ASSERT ((uintptr_t) JERRY_HEAP_CONTEXT (area) % JMEM_ALIGNMENT == 0); JERRY_CONTEXT (jmem_heap_limit) = CONFIG_MEM_HEAP_DESIRED_LIMIT; jmem_heap_free_t *const region_p = (jmem_heap_free_t *) JERRY_HEAP_CONTEXT (area); region_p->size = JMEM_HEAP_AREA_SIZE; region_p->next_offset = JMEM_HEAP_END_OF_LIST; JERRY_HEAP_CONTEXT (first).size = 0; JERRY_HEAP_CONTEXT (first).next_offset = JMEM_HEAP_GET_OFFSET_FROM_ADDR (region_p); JERRY_CONTEXT (jmem_heap_list_skip_p) = &JERRY_HEAP_CONTEXT (first); VALGRIND_NOACCESS_SPACE (JERRY_HEAP_CONTEXT (area), JMEM_HEAP_AREA_SIZE); JMEM_HEAP_STAT_INIT (); } /* jmem_heap_init */
jmem_heap_decompress_pointer (uintptr_t compressed_pointer) /**< pointer to decompress */
{
  JERRY_ASSERT (compressed_pointer != JMEM_CP_NULL);

  /* Rebuild the native address: scale the aligned-unit offset back up and
   * rebase it onto the heap header. */
  const uintptr_t heap_start = (uintptr_t) &JERRY_HEAP_CONTEXT (first);
  const uintptr_t restored_ptr = (compressed_pointer << JMEM_ALIGNMENT_LOG) + heap_start;

  JERRY_ASSERT (jmem_is_heap_pointer ((void *) restored_ptr));

  return (void *) restored_ptr;
} /* jmem_heap_decompress_pointer */
jmem_heap_compress_pointer (const void *pointer_p) /**< pointer to compress */
{
  JERRY_ASSERT (pointer_p != NULL);
  JERRY_ASSERT (jmem_is_heap_pointer (pointer_p));

  uintptr_t offset = (uintptr_t) pointer_p;

  JERRY_ASSERT (offset % JMEM_ALIGNMENT == 0);

  /* Encode the pointer as an aligned-unit offset from the heap header. */
  offset = (offset - (uintptr_t) &JERRY_HEAP_CONTEXT (first)) >> JMEM_ALIGNMENT_LOG;

  /* The offset must fit into JMEM_HEAP_OFFSET_LOG bits and must not collide
   * with the reserved null value. */
  JERRY_ASSERT ((offset & ~((1u << JMEM_HEAP_OFFSET_LOG) - 1)) == 0);
  JERRY_ASSERT (offset != JMEM_CP_NULL);

  return offset;
} /* jmem_heap_compress_pointer */
/**
 * Decompress pointer
 *
 * @return unpacked pointer
 */
inline void * JERRY_ATTR_PURE JERRY_ATTR_ALWAYS_INLINE
jmem_decompress_pointer (uintptr_t compressed_pointer) /**< pointer to decompress */
{
  JERRY_ASSERT (compressed_pointer != JMEM_CP_NULL);

  uintptr_t unpacked = compressed_pointer;
  JERRY_ASSERT (((jmem_cpointer_t) unpacked) == unpacked);

#if defined (ECMA_VALUE_CAN_STORE_UINTPTR_VALUE_DIRECTLY) && defined (JERRY_CPOINTER_32_BIT)
  /* The compressed value already is the native pointer. */
  JERRY_ASSERT (unpacked % JMEM_ALIGNMENT == 0);
#else /* !ECMA_VALUE_CAN_STORE_UINTPTR_VALUE_DIRECTLY || !JERRY_CPOINTER_32_BIT */
  /* Scale the stored offset back up and rebase it onto the heap header. */
  unpacked = (unpacked << JMEM_ALIGNMENT_LOG) + (uintptr_t) &JERRY_HEAP_CONTEXT (first);

  JERRY_ASSERT (jmem_is_heap_pointer ((void *) unpacked));
#endif /* ECMA_VALUE_CAN_STORE_UINTPTR_VALUE_DIRECTLY && JERRY_CPOINTER_32_BIT */

  return (void *) unpacked;
} /* jmem_decompress_pointer */
/**
 * Decompress pointer
 *
 * @return unpacked pointer
 */
inline void * __attr_always_inline___
jmem_decompress_pointer (uintptr_t compressed_pointer) /**< pointer to decompress */
{
  JERRY_ASSERT (compressed_pointer != JMEM_CP_NULL);

  uintptr_t unpacked = compressed_pointer;
  JERRY_ASSERT (((jmem_cpointer_t) unpacked) == unpacked);

#ifdef JERRY_CPOINTER_32_BIT
  /* The compressed value already is the native pointer. */
  JERRY_ASSERT (unpacked % JMEM_ALIGNMENT == 0);
#else /* !JERRY_CPOINTER_32_BIT */
  /* Scale the stored offset back up and rebase it onto the heap header. */
  unpacked = (unpacked << JMEM_ALIGNMENT_LOG) + (uintptr_t) &JERRY_HEAP_CONTEXT (first);

  JERRY_ASSERT (jmem_is_heap_pointer ((void *) unpacked));
#endif /* JERRY_CPOINTER_32_BIT */

  return (void *) unpacked;
} /* jmem_decompress_pointer */
/**
 * Compress pointer
 *
 * @return packed pointer
 */
inline jmem_cpointer_t __attr_always_inline___
jmem_compress_pointer (const void *pointer_p) /**< pointer to compress */
{
  JERRY_ASSERT (pointer_p != NULL);
  JERRY_ASSERT (jmem_is_heap_pointer (pointer_p));

  uintptr_t packed = (uintptr_t) pointer_p;

  JERRY_ASSERT (packed % JMEM_ALIGNMENT == 0);

#ifdef JERRY_CPOINTER_32_BIT
  /* The native pointer is stored unchanged; it must fit the compressed type. */
  JERRY_ASSERT (((jmem_cpointer_t) packed) == packed);
#else /* !JERRY_CPOINTER_32_BIT */
  /* Encode the pointer as an aligned-unit offset from the heap header. */
  packed = (packed - (uintptr_t) &JERRY_HEAP_CONTEXT (first)) >> JMEM_ALIGNMENT_LOG;

  JERRY_ASSERT (packed <= UINT16_MAX);
  /* The offset must not collide with the reserved null value. */
  JERRY_ASSERT (packed != JMEM_CP_NULL);
#endif /* JERRY_CPOINTER_32_BIT */

  return (jmem_cpointer_t) packed;
} /* jmem_compress_pointer */
/**
 * Startup initialization of heap
 */
void
jmem_heap_init (void)
{
  /* Free-list offsets are stored in JMEM_HEAP_OFFSET_LOG bits, so the whole
   * heap must be addressable through them. */
  JERRY_STATIC_ASSERT ((1u << JMEM_HEAP_OFFSET_LOG) >= JMEM_HEAP_SIZE,
                       two_pow_mem_heap_offset_should_not_be_less_than_mem_heap_size);

  JERRY_ASSERT ((uintptr_t) JERRY_HEAP_CONTEXT (area) % JMEM_ALIGNMENT == 0);

  JERRY_CONTEXT (jmem_heap_limit) = CONFIG_MEM_HEAP_DESIRED_LIMIT;

  /* Initially the whole heap area forms a single free region. */
  jmem_heap_free_t *const region_p = (jmem_heap_free_t *) JERRY_HEAP_CONTEXT (area);
  region_p->size = JMEM_HEAP_AREA_SIZE;
  /* NOTE(review): the end-of-list sentinel is run through
   * JMEM_HEAP_GET_OFFSET_FROM_ADDR here, unlike other versions of this
   * routine which assign JMEM_HEAP_END_OF_LIST directly — presumably the
   * sentinel is address-valued in this version; verify against the macro
   * definitions. */
  region_p->next_offset = JMEM_HEAP_GET_OFFSET_FROM_ADDR (JMEM_HEAP_END_OF_LIST);

  /* The zero-sized "first" header anchors the free list. */
  JERRY_HEAP_CONTEXT (first).size = 0;
  JERRY_HEAP_CONTEXT (first).next_offset = JMEM_HEAP_GET_OFFSET_FROM_ADDR (region_p);

  JERRY_CONTEXT (jmem_heap_list_skip_p) = &JERRY_HEAP_CONTEXT (first);

  VALGRIND_NOACCESS_SPACE (JERRY_HEAP_CONTEXT (area), JMEM_HEAP_AREA_SIZE);

  JMEM_HEAP_STAT_INIT ();
} /* jmem_heap_init */
/**
 * Check whether the pointer points to the heap
 *
 * Note:
 *      the routine should be used only for assertion checks
 *
 * @return true - if pointer points to the heap,
 *         false - otherwise
 */
bool
jmem_is_heap_pointer (const void *pointer) /**< pointer */
{
  const uint8_t *const byte_p = (uint8_t *) pointer;
  const uint8_t *const heap_start_p = JERRY_HEAP_CONTEXT (area);

  /* The one-past-the-end address is accepted too (<=, not <). */
  return (byte_p >= heap_start_p
          && byte_p <= heap_start_p + JMEM_HEAP_AREA_SIZE);
} /* jmem_is_heap_pointer */
/**
 * Free the memory block.
 *
 * Reinserts the block into the address-ordered free list, coalescing it with
 * the adjacent previous and/or next free region when they touch, then lowers
 * the allocated-size accounting and the soft heap limit.
 */
void __attr_hot___
jmem_heap_free_block (void *ptr, /**< pointer to beginning of data space of the block */
                      const size_t size) /**< size of allocated region */
{
  VALGRIND_FREYA_CHECK_MEMPOOL_REQUEST;

  /* checking that ptr points to the heap */
  JERRY_ASSERT (jmem_is_heap_pointer (ptr));
  JERRY_ASSERT (size > 0);
  JERRY_ASSERT (JERRY_CONTEXT (jmem_heap_limit) >= JERRY_CONTEXT (jmem_heap_allocated_size));

  VALGRIND_FREYA_FREELIKE_SPACE (ptr);
  VALGRIND_NOACCESS_SPACE (ptr, size);
  JMEM_HEAP_STAT_FREE_ITER ();

  jmem_heap_free_t *block_p = (jmem_heap_free_t *) ptr;
  jmem_heap_free_t *prev_p;
  jmem_heap_free_t *next_p;

  VALGRIND_DEFINED_SPACE (&JERRY_HEAP_CONTEXT (first), sizeof (jmem_heap_free_t));

  /* Start the list walk from the cached skip pointer when the block lies
   * after it; otherwise fall back to the list head. */
  if (block_p > JERRY_CONTEXT (jmem_heap_list_skip_p))
  {
    prev_p = JERRY_CONTEXT (jmem_heap_list_skip_p);
    JMEM_HEAP_STAT_SKIP ();
  }
  else
  {
    prev_p = &JERRY_HEAP_CONTEXT (first);
    JMEM_HEAP_STAT_NONSKIP ();
  }

  JERRY_ASSERT (jmem_is_heap_pointer (block_p));
  const uint32_t block_offset = JMEM_HEAP_GET_OFFSET_FROM_ADDR (block_p);

  VALGRIND_DEFINED_SPACE (prev_p, sizeof (jmem_heap_free_t));
  // Find position of region in the list
  while (prev_p->next_offset < block_offset)
  {
    /* NOTE: this inner next_p intentionally shadows the outer declaration;
     * it is only a cursor for the walk. */
    jmem_heap_free_t *const next_p = JMEM_HEAP_GET_ADDR_FROM_OFFSET (prev_p->next_offset);
    JERRY_ASSERT (jmem_is_heap_pointer (next_p));

    VALGRIND_DEFINED_SPACE (next_p, sizeof (jmem_heap_free_t));
    VALGRIND_NOACCESS_SPACE (prev_p, sizeof (jmem_heap_free_t));
    prev_p = next_p;

    JMEM_HEAP_STAT_FREE_ITER ();
  }

  next_p = JMEM_HEAP_GET_ADDR_FROM_OFFSET (prev_p->next_offset);
  VALGRIND_DEFINED_SPACE (next_p, sizeof (jmem_heap_free_t));

  /* Realign size */
  const size_t aligned_size = (size + JMEM_ALIGNMENT - 1) / JMEM_ALIGNMENT * JMEM_ALIGNMENT;

  VALGRIND_DEFINED_SPACE (block_p, sizeof (jmem_heap_free_t));
  VALGRIND_DEFINED_SPACE (prev_p, sizeof (jmem_heap_free_t));

  // Update prev
  if (jmem_heap_get_region_end (prev_p) == block_p)
  {
    // Can be merged
    prev_p->size += (uint32_t) aligned_size;
    VALGRIND_NOACCESS_SPACE (block_p, sizeof (jmem_heap_free_t));
    block_p = prev_p;
  }
  else
  {
    /* Not adjacent: link the freed block in after prev_p. */
    block_p->size = (uint32_t) aligned_size;
    prev_p->next_offset = block_offset;
  }

  VALGRIND_DEFINED_SPACE (next_p, sizeof (jmem_heap_free_t));
  // Update next
  if (jmem_heap_get_region_end (block_p) == next_p)
  {
    /* The skip pointer must not be left pointing at a region that is about
     * to be absorbed. */
    if (unlikely (next_p == JERRY_CONTEXT (jmem_heap_list_skip_p)))
    {
      JERRY_CONTEXT (jmem_heap_list_skip_p) = block_p;
    }

    // Can be merged
    block_p->size += next_p->size;
    block_p->next_offset = next_p->next_offset;
  }
  else
  {
    block_p->next_offset = JMEM_HEAP_GET_OFFSET_FROM_ADDR (next_p);
  }

  JERRY_CONTEXT (jmem_heap_list_skip_p) = prev_p;

  VALGRIND_NOACCESS_SPACE (prev_p, sizeof (jmem_heap_free_t));
  VALGRIND_NOACCESS_SPACE (block_p, size);
  VALGRIND_NOACCESS_SPACE (next_p, sizeof (jmem_heap_free_t));

  JERRY_ASSERT (JERRY_CONTEXT (jmem_heap_allocated_size) > 0);
  JERRY_CONTEXT (jmem_heap_allocated_size) -= aligned_size;

  /* Shrink the soft limit back toward the allocated size in
   * CONFIG_MEM_HEAP_DESIRED_LIMIT steps. */
  while (JERRY_CONTEXT (jmem_heap_allocated_size) + CONFIG_MEM_HEAP_DESIRED_LIMIT <= JERRY_CONTEXT (jmem_heap_limit))
  {
    JERRY_CONTEXT (jmem_heap_limit) -= CONFIG_MEM_HEAP_DESIRED_LIMIT;
  }

  VALGRIND_NOACCESS_SPACE (&JERRY_HEAP_CONTEXT (first), sizeof (jmem_heap_free_t));
  JERRY_ASSERT (JERRY_CONTEXT (jmem_heap_limit) >= JERRY_CONTEXT (jmem_heap_allocated_size));
  JMEM_HEAP_STAT_FREE (size);
} /* jmem_heap_free_block */
/**
 * Allocation of memory region.
 *
 * See also:
 *          jmem_heap_alloc_block
 *
 * @return pointer to allocated memory block - if allocation is successful,
 *         NULL - if there is not enough memory.
 */
static __attr_hot___ void *jmem_heap_alloc_block_internal (const size_t size)
{
  // Align size
  const size_t required_size = ((size + JMEM_ALIGNMENT - 1) / JMEM_ALIGNMENT) * JMEM_ALIGNMENT;
  jmem_heap_free_t *data_space_p = NULL;

  VALGRIND_DEFINED_SPACE (&JERRY_HEAP_CONTEXT (first), sizeof (jmem_heap_free_t));

  // Fast path for 8 byte chunks, first region is guaranteed to be sufficient
  if (required_size == JMEM_ALIGNMENT
      && likely (JERRY_HEAP_CONTEXT (first).next_offset != JMEM_HEAP_END_OF_LIST))
  {
    data_space_p = JMEM_HEAP_GET_ADDR_FROM_OFFSET (JERRY_HEAP_CONTEXT (first).next_offset);
    JERRY_ASSERT (jmem_is_heap_pointer (data_space_p));

    VALGRIND_DEFINED_SPACE (data_space_p, sizeof (jmem_heap_free_t));
    JERRY_CONTEXT (jmem_heap_allocated_size) += JMEM_ALIGNMENT;
    JMEM_HEAP_STAT_ALLOC_ITER ();

    if (data_space_p->size == JMEM_ALIGNMENT)
    {
      /* The first free region is consumed entirely: unlink it. */
      JERRY_HEAP_CONTEXT (first).next_offset = data_space_p->next_offset;
    }
    else
    {
      JERRY_ASSERT (data_space_p->size > JMEM_ALIGNMENT);

      /* Carve one aligned unit off the front; the remainder (pointer + 1,
       * i.e. one jmem_heap_free_t header further) becomes the new head. */
      jmem_heap_free_t *remaining_p;
      remaining_p = JMEM_HEAP_GET_ADDR_FROM_OFFSET (JERRY_HEAP_CONTEXT (first).next_offset) + 1;

      VALGRIND_DEFINED_SPACE (remaining_p, sizeof (jmem_heap_free_t));
      remaining_p->size = data_space_p->size - JMEM_ALIGNMENT;
      remaining_p->next_offset = data_space_p->next_offset;
      VALGRIND_NOACCESS_SPACE (remaining_p, sizeof (jmem_heap_free_t));

      JERRY_HEAP_CONTEXT (first).next_offset = JMEM_HEAP_GET_OFFSET_FROM_ADDR (remaining_p);
    }

    VALGRIND_UNDEFINED_SPACE (data_space_p, sizeof (jmem_heap_free_t));

    /* Keep the skip pointer off the region that was just handed out. */
    if (unlikely (data_space_p == JERRY_CONTEXT (jmem_heap_list_skip_p)))
    {
      JERRY_CONTEXT (jmem_heap_list_skip_p) = JMEM_HEAP_GET_ADDR_FROM_OFFSET (JERRY_HEAP_CONTEXT (first).next_offset);
    }
  }
  // Slow path for larger regions
  else
  {
    /* First-fit walk over the address-ordered free list. */
    uint32_t current_offset = JERRY_HEAP_CONTEXT (first).next_offset;
    jmem_heap_free_t *prev_p = &JERRY_HEAP_CONTEXT (first);

    while (current_offset != JMEM_HEAP_END_OF_LIST)
    {
      jmem_heap_free_t *current_p = JMEM_HEAP_GET_ADDR_FROM_OFFSET (current_offset);
      JERRY_ASSERT (jmem_is_heap_pointer (current_p));
      VALGRIND_DEFINED_SPACE (current_p, sizeof (jmem_heap_free_t));
      JMEM_HEAP_STAT_ALLOC_ITER ();

      const uint32_t next_offset = current_p->next_offset;
      JERRY_ASSERT (next_offset == JMEM_HEAP_END_OF_LIST
                    || jmem_is_heap_pointer (JMEM_HEAP_GET_ADDR_FROM_OFFSET (next_offset)));

      if (current_p->size >= required_size)
      {
        // Region is sufficiently big, store address
        data_space_p = current_p;
        JERRY_CONTEXT (jmem_heap_allocated_size) += required_size;

        // Region was larger than necessary
        if (current_p->size > required_size)
        {
          // Get address of remaining space
          jmem_heap_free_t *const remaining_p = (jmem_heap_free_t *) ((uint8_t *) current_p + required_size);

          // Update metadata
          VALGRIND_DEFINED_SPACE (remaining_p, sizeof (jmem_heap_free_t));
          remaining_p->size = current_p->size - (uint32_t) required_size;
          remaining_p->next_offset = next_offset;
          VALGRIND_NOACCESS_SPACE (remaining_p, sizeof (jmem_heap_free_t));

          // Update list
          VALGRIND_DEFINED_SPACE (prev_p, sizeof (jmem_heap_free_t));
          prev_p->next_offset = JMEM_HEAP_GET_OFFSET_FROM_ADDR (remaining_p);
          VALGRIND_NOACCESS_SPACE (prev_p, sizeof (jmem_heap_free_t));
        }
        // Block is an exact fit
        else
        {
          // Remove the region from the list
          VALGRIND_DEFINED_SPACE (prev_p, sizeof (jmem_heap_free_t));
          prev_p->next_offset = next_offset;
          VALGRIND_NOACCESS_SPACE (prev_p, sizeof (jmem_heap_free_t));
        }

        JERRY_CONTEXT (jmem_heap_list_skip_p) = prev_p;

        // Found enough space
        break;
      }

      VALGRIND_NOACCESS_SPACE (current_p, sizeof (jmem_heap_free_t));
      // Next in list
      prev_p = current_p;
      current_offset = next_offset;
    }
  }

  /* Grow the soft limit until it covers the new allocated size. */
  while (JERRY_CONTEXT (jmem_heap_allocated_size) >= JERRY_CONTEXT (jmem_heap_limit))
  {
    JERRY_CONTEXT (jmem_heap_limit) += CONFIG_MEM_HEAP_DESIRED_LIMIT;
  }

  VALGRIND_NOACCESS_SPACE (&JERRY_HEAP_CONTEXT (first), sizeof (jmem_heap_free_t));

  if (unlikely (!data_space_p))
  {
    return NULL;
  }

  JERRY_ASSERT ((uintptr_t) data_space_p % JMEM_ALIGNMENT == 0);
  VALGRIND_UNDEFINED_SPACE (data_space_p, size);
  JMEM_HEAP_STAT_ALLOC (size);

  return (void *) data_space_p;
} /* jmem_heap_alloc_block_internal */
/**
 * Finalize heap
 */
void
jmem_heap_finalize (void)
{
  /* Every allocation must have been released before teardown. */
  JERRY_ASSERT (JERRY_CONTEXT (jmem_heap_allocated_size) == 0);
  VALGRIND_NOACCESS_SPACE (&JERRY_HEAP_CONTEXT (first), sizeof (jmem_heap_t));
} /* jmem_heap_finalize */