/**
 * Allocate a heap memory region, invoking the registered 'try to give memory
 * back' callbacks with progressively higher severity when memory is scarce.
 *
 * Note:
 *      If even the most severe callback pass fails to free enough memory,
 *      the engine is terminated with ERR_OUT_OF_MEMORY.
 *
 * Note:
 *      To reduce heap fragmentation there are two allocation modes:
 *      short-term allocations prefer the beginning of the heap,
 *      long-term allocations prefer the end.
 *
 *      Short-term allocations are supposed to be used during relatively
 *      short discrete sessions, and freed once the session ends.
 *
 * @return pointer to allocated memory block
 */
static void*
mem_heap_alloc_block_try_give_memory_back (size_t size_in_bytes, /**< size of region to allocate in bytes */
                                           mem_block_length_type_t length_type, /**< length type of the block
                                                                                 *   (one-chunked or general) */
                                           mem_heap_alloc_term_t alloc_term) /**< expected allocation term */
{
  /* Proactively run a low-severity callback pass when this allocation would
   * push the allocated chunk total at or above the configured limit. */
  size_t required_chunks = mem_get_block_chunks_count_from_data_size (size_in_bytes);

  if ((mem_heap.allocated_chunks + required_chunks) * MEM_HEAP_CHUNK_SIZE >= mem_heap.limit)
  {
    mem_run_try_to_give_memory_back_callbacks (MEM_TRY_GIVE_MEMORY_BACK_SEVERITY_LOW);
  }

  /* Fast path: the allocation usually succeeds on the first attempt. */
  void *alloc_p = mem_heap_alloc_block_internal (size_in_bytes, length_type, alloc_term);

  if (likely (alloc_p != NULL))
  {
    return alloc_p;
  }

  /* First attempt failed: escalate through every severity level, retrying
   * the allocation after each callback pass. */
  for (mem_try_give_memory_back_severity_t severity = MEM_TRY_GIVE_MEMORY_BACK_SEVERITY_LOW;
       severity <= MEM_TRY_GIVE_MEMORY_BACK_SEVERITY_CRITICAL;
       severity = (mem_try_give_memory_back_severity_t) (severity + 1))
  {
    mem_run_try_to_give_memory_back_callbacks (severity);

    alloc_p = mem_heap_alloc_block_internal (size_in_bytes, length_type, alloc_term);

    if (alloc_p != NULL)
    {
      return alloc_p;
    }
  }

  /* Even the critical pass could not free enough memory. */
  JERRY_ASSERT (alloc_p == NULL);

  jerry_fatal (ERR_OUT_OF_MEMORY);
} /* mem_heap_alloc_block_try_give_memory_back */
/**
 * Allocate a chunk of specified size
 *
 * @return pointer to allocated chunk, if allocation was successful,
 *         or NULL - if not enough memory.
 */
uint8_t* __attr_always_inline___
mem_pools_alloc (void)
{
#ifdef MEM_GC_BEFORE_EACH_ALLOC
  mem_run_try_to_give_memory_back_callbacks (MEM_TRY_GIVE_MEMORY_BACK_SEVERITY_HIGH);
#endif /* MEM_GC_BEFORE_EACH_ALLOC */

  mem_check_pools ();

  while (true)
  {
    if (mem_free_chunk_p == NULL)
    {
      /* Free list is empty: refill it via the long path. The assertion
       * guarantees that there will be no more than two iterations. */
      mem_pools_alloc_longpath ();

      JERRY_ASSERT (mem_free_chunk_p != NULL);
      continue;
    }

    /* Pop the head chunk off the free list. */
    mem_pool_chunk_t *allocated_chunk_p = mem_free_chunk_p;

    MEM_POOLS_STAT_ALLOC_CHUNK ();

#ifndef JERRY_NDEBUG
    mem_free_chunks_number--;
#endif /* !JERRY_NDEBUG */

    /* Valgrind must be told the chunk is readable while we follow its
     * free-list link, then that its contents are undefined again. */
    VALGRIND_DEFINED_SPACE (allocated_chunk_p, MEM_POOL_CHUNK_SIZE);
    mem_free_chunk_p = allocated_chunk_p->u.free.next_p;
    VALGRIND_UNDEFINED_SPACE (allocated_chunk_p, MEM_POOL_CHUNK_SIZE);

    mem_check_pools ();

    VALGRIND_FREYA_MALLOCLIKE_SPACE (allocated_chunk_p, MEM_POOL_CHUNK_SIZE);

    return (uint8_t *) allocated_chunk_p;
  }
} /* mem_pools_alloc */