/**
 * Free memory occupied by bytecode data
 *
 * Walks the scope tree iteratively: headers awaiting release are kept in a
 * singly-linked work list (chained through their next_header_cp fields).
 * Every child scope of a header being freed is pushed onto the work list
 * before the header's heap block is released.
 */
static void
bc_free_bytecode_data (bytecode_data_header_t *bytecode_data_p) /**< byte-code scope data header */
{
  bytecode_data_header_t *pending_list_p = bytecode_data_p;

  while (pending_list_p != NULL)
  {
    bytecode_data_header_t *iter_p = pending_list_p;

    /* Children discovered below are collected into a fresh work list. */
    pending_list_p = NULL;

    while (iter_p != NULL)
    {
      bytecode_data_header_t *current_p = iter_p;
      iter_p = MEM_CP_GET_POINTER (bytecode_data_header_t, current_p->next_header_cp);

      mem_cpointer_t *declarations_p = MEM_CP_GET_POINTER (mem_cpointer_t, current_p->declarations_cp);

      /* Queue every function scope declared directly in this header. */
      for (uint32_t decl_index = 0; decl_index < current_p->func_scopes_count; decl_index++)
      {
        bytecode_data_header_t *child_p = MEM_CP_GET_NON_NULL_POINTER (bytecode_data_header_t,
                                                                       declarations_p[decl_index]);

        JERRY_ASSERT (child_p->next_header_cp == MEM_CP_NULL);

        MEM_CP_SET_POINTER (child_p->next_header_cp, pending_list_p);
        pending_list_p = child_p;
      }

      mem_heap_free_block (current_p);
    }

    JERRY_ASSERT (iter_p == NULL);
  }
} /* bc_free_bytecode_data */
/** * Free the chunk */ void mem_pools_free (uint8_t *chunk_p) /**< pointer to the chunk */ { mem_pool_state_t *pool_state = mem_pools, *prev_pool_state_p = NULL; /** * Search for the pool containing specified chunk. */ while (!mem_pool_is_chunk_inside (pool_state, chunk_p)) { prev_pool_state_p = pool_state; pool_state = MEM_CP_GET_NON_NULL_POINTER (mem_pool_state_t, pool_state->next_pool_cp); } /** * Free the chunk */ mem_pool_free_chunk (pool_state, chunk_p); mem_free_chunks_number++; MEM_POOLS_STAT_FREE_CHUNK (); /** * If all chunks of the pool are free, free the pool itself. */ if (pool_state->free_chunks_number == MEM_POOL_CHUNKS_NUMBER) { if (prev_pool_state_p != NULL) { prev_pool_state_p->next_pool_cp = pool_state->next_pool_cp; } else { mem_pools = MEM_CP_GET_POINTER (mem_pool_state_t, pool_state->next_pool_cp); } mem_free_chunks_number -= MEM_POOL_CHUNKS_NUMBER; mem_heap_free_block ((uint8_t*) pool_state); MEM_POOLS_STAT_FREE_POOL (); } else if (mem_pools != pool_state) { JERRY_ASSERT (prev_pool_state_p != NULL); prev_pool_state_p->next_pool_cp = pool_state->next_pool_cp; MEM_CP_SET_NON_NULL_POINTER (pool_state->next_pool_cp, mem_pools); mem_pools = pool_state; } } /* mem_pools_free */
/**
 * Long path for mem_pools_alloc
 *
 * @return true - if there is a free chunk in mem_pools,
 *         false - otherwise (not enough memory).
 */
static bool __attr_noinline___
mem_pools_alloc_longpath (void)
{
  if (mem_free_chunks_number == 0)
  {
    /* No free chunk anywhere: carve a new pool out of the heap
     * and make it the head of the pool list. */
    mem_pool_state_t *new_pool_p = (mem_pool_state_t *) mem_heap_alloc_block (MEM_POOL_SIZE,
                                                                              MEM_HEAP_ALLOC_LONG_TERM);

    JERRY_ASSERT (new_pool_p != NULL);

    mem_pool_init (new_pool_p, MEM_POOL_SIZE);

    MEM_CP_SET_POINTER (new_pool_p->next_pool_cp, mem_pools);
    mem_pools = new_pool_p;

    mem_free_chunks_number += MEM_POOL_CHUNKS_NUMBER;

    MEM_POOLS_STAT_ALLOC_POOL ();
  }
  else
  {
    /* At least one pool is guaranteed to have a free chunk, and it is not
     * the first pool (otherwise the fast path would have succeeded).
     * Find that pool and move it to the front of the list. */
    mem_pool_state_t *pool_p = mem_pools;
    mem_pool_state_t *prev_p = NULL;

    while (pool_p->first_free_chunk == MEM_POOL_CHUNKS_NUMBER)
    {
      prev_p = pool_p;
      pool_p = MEM_CP_GET_NON_NULL_POINTER (mem_pool_state_t, pool_p->next_pool_cp);
    }

    JERRY_ASSERT (prev_p != NULL && pool_p != mem_pools);

    prev_p->next_pool_cp = pool_p->next_pool_cp;
    MEM_CP_SET_NON_NULL_POINTER (pool_p->next_pool_cp, mem_pools);
    mem_pools = pool_p;
  }

  return true;
} /* mem_pools_alloc_longpath */
/**
 * Abort (finalize) the current stack context, and remove it.
 *
 * The context type is encoded in the value just below the stack top
 * (vm_stack_top_p[-1]); the slots beneath it are laid out per context type
 * and are released here before the stack top is rewound by the context's
 * stack allocation size.
 *
 * @return new stack top
 */
ecma_value_t *
vm_stack_context_abort (vm_frame_ctx_t *frame_ctx_p, /**< frame context */
                        ecma_value_t *vm_stack_top_p) /**< current stack top */
{
  switch (VM_GET_CONTEXT_TYPE (vm_stack_top_p[-1]))
  {
    case VM_CONTEXT_FINALLY_THROW:
    case VM_CONTEXT_FINALLY_RETURN:
    {
      /* Slot [-2] holds the pending thrown/returned value; release it
       * before dropping the context. */
      ecma_free_value (vm_stack_top_p[-2], true);

      VM_MINUS_EQUAL_U16 (frame_ctx_p->context_depth, PARSER_TRY_CONTEXT_STACK_ALLOCATION);
      vm_stack_top_p -= PARSER_TRY_CONTEXT_STACK_ALLOCATION;
      break;
    }
    case VM_CONTEXT_FINALLY_JUMP:
    case VM_CONTEXT_TRY:
    {
      /* These contexts keep no heap-referencing value on the stack;
       * only the bookkeeping needs to be undone. */
      VM_MINUS_EQUAL_U16 (frame_ctx_p->context_depth, PARSER_TRY_CONTEXT_STACK_ALLOCATION);
      vm_stack_top_p -= PARSER_TRY_CONTEXT_STACK_ALLOCATION;
      break;
    }
    case VM_CONTEXT_CATCH:
    case VM_CONTEXT_WITH:
    {
      /* Drop the lexical environment installed for catch/with and restore
       * the previous one saved in slot [-2]. */
      ecma_deref_object (frame_ctx_p->lex_env_p);
      frame_ctx_p->lex_env_p = ecma_get_object_from_value (vm_stack_top_p[-2]);

      /* Both context kinds must occupy the same number of stack slots for
       * the shared rewind below to be valid. */
      JERRY_ASSERT (PARSER_TRY_CONTEXT_STACK_ALLOCATION == PARSER_WITH_CONTEXT_STACK_ALLOCATION);

      VM_MINUS_EQUAL_U16 (frame_ctx_p->context_depth, PARSER_TRY_CONTEXT_STACK_ALLOCATION);
      vm_stack_top_p -= PARSER_TRY_CONTEXT_STACK_ALLOCATION;
      break;
    }
    case VM_CONTEXT_FOR_IN:
    {
      /* Slot [-2] stores a compressed pointer to the chain of collection
       * chunks holding the not-yet-enumerated property names; free each
       * stored value and its chunk. */
      mem_cpointer_t current = (uint16_t) vm_stack_top_p[-2];

      while (current != MEM_CP_NULL)
      {
        ecma_collection_chunk_t *chunk_p = MEM_CP_GET_NON_NULL_POINTER (ecma_collection_chunk_t, current);

        /* Read the next link before the chunk is deallocated. */
        ecma_free_value (*(ecma_value_t *) chunk_p->data, true);
        current = chunk_p->next_chunk_cp;
        ecma_dealloc_collection_chunk (chunk_p);
      }

      /* Slot [-3] holds a further value owned by the for-in context —
       * presumably the enumerated expression's value; confirm against the
       * for-in context setup code. */
      ecma_free_value (vm_stack_top_p[-3], true);

      VM_MINUS_EQUAL_U16 (frame_ctx_p->context_depth, PARSER_FOR_IN_CONTEXT_STACK_ALLOCATION);
      vm_stack_top_p -= PARSER_FOR_IN_CONTEXT_STACK_ALLOCATION;
      break;
    }
    default:
    {
      JERRY_UNREACHABLE ();
      break;
    }
  }

  return vm_stack_top_p;
} /* vm_stack_context_abort */
/** * Dump bytecode and summplementary data of all existing scopes to snapshot * * @return true if snapshot was dumped successfully * false otherwise */ bool bc_save_bytecode_data (uint8_t *buffer_p, /**< buffer to dump to */ size_t buffer_size, /**< buffer size */ size_t *in_out_buffer_offset_p, /**< in-out: buffer write offset */ const bytecode_data_header_t *bytecode_data_p, /**< byte-code data */ const lit_mem_to_snapshot_id_map_entry_t *lit_map_p, /**< map from literal * identifiers in * literal storage * to literal offsets * in snapshot */ uint32_t literals_num, /**< literals number */ uint32_t *out_scopes_num) /**< number of scopes written */ { bytecode_data_header_t *next_to_handle_list_p = first_bytecode_header_p; while (next_to_handle_list_p != NULL) { if (next_to_handle_list_p == bytecode_data_p) { break; } next_to_handle_list_p = MEM_CP_GET_POINTER (bytecode_data_header_t, next_to_handle_list_p->next_header_cp); } JERRY_ASSERT (next_to_handle_list_p); JERRY_ASSERT (next_to_handle_list_p->next_header_cp == MEM_CP_NULL); *out_scopes_num = 0; while (next_to_handle_list_p!= NULL) { bytecode_data_header_t *bc_header_list_iter_p = next_to_handle_list_p; next_to_handle_list_p = NULL; mem_cpointer_t *declarations_p = MEM_CP_GET_POINTER (mem_cpointer_t, bc_header_list_iter_p->declarations_cp); if (!bc_save_bytecode_with_idx_map (buffer_p, buffer_size, in_out_buffer_offset_p, bc_header_list_iter_p, lit_map_p, literals_num)) { return false; } (*out_scopes_num)++; next_to_handle_list_p = MEM_CP_GET_POINTER (bytecode_data_header_t, bc_header_list_iter_p->next_header_cp); for (uint32_t index = bc_header_list_iter_p->func_scopes_count; index > 0 ; index--) { bytecode_data_header_t *child_scope_header_p = MEM_CP_GET_NON_NULL_POINTER (bytecode_data_header_t, declarations_p[index-1]); JERRY_ASSERT (child_scope_header_p->next_header_cp == MEM_CP_NULL); MEM_CP_SET_POINTER (child_scope_header_p->next_header_cp, next_to_handle_list_p); next_to_handle_list_p = 
child_scope_header_p; } bc_header_list_iter_p->next_header_cp = MEM_CP_NULL; } return true; } /* bc_save_bytecode_data */