/**
 * Helper function to merge argument lists
 *
 * Concatenates the bound arguments stored in the function object's
 * internal "bound args" property (if present) with the caller-supplied
 * argument list, into a freshly allocated short-term heap buffer.
 * The caller owns the returned buffer and must release it with
 * mem_heap_free_block.
 *
 * See also:
 *          ECMA-262 v5, 15.3.4.5.1 step 4
 *          ECMA-262 v5, 15.3.4.5.2 step 4
 *
 * Used by:
 *         - [[Call]] implementation for Function objects.
 *         - [[Construct]] implementation for Function objects.
 *
 * @return ecma_value_t* - pointer to the merged argument list.
 */
static ecma_value_t*
ecma_function_bind_merge_arg_lists (ecma_object_t *func_obj_p, /**< Function object */
                                    const ecma_value_t *arguments_list_p, /**< arguments list */
                                    ecma_length_t arguments_list_len, /**< length of arguments list */
                                    ecma_length_t *total_args_count) /**< out: length of the merged argument list */
{
  ecma_value_t *arg_list_p;
  ecma_length_t bound_args_count = 0;
  ecma_property_t *bound_args_prop_p;
  bound_args_prop_p = ecma_find_internal_property (func_obj_p,
                                                   ECMA_INTERNAL_PROPERTY_BOUND_FUNCTION_BOUND_ARGS);

  if (bound_args_prop_p != NULL)
  {
    /* Bound function: copy the stored bound arguments first. */
    ecma_collection_header_t *bound_arg_list_p = ECMA_GET_POINTER (ecma_collection_header_t,
                                                                   bound_args_prop_p->u.internal_property.value);

    ecma_collection_iterator_t bound_args_iterator;
    ecma_collection_iterator_init (&bound_args_iterator, bound_arg_list_p);

    bound_args_count = bound_arg_list_p->unit_number;
    *total_args_count = bound_args_count + arguments_list_len;

    const size_t arg_list_size = (size_t) *total_args_count * sizeof (ecma_value_t);
    arg_list_p = static_cast <ecma_value_t *> (mem_heap_alloc_block (arg_list_size,
                                                                     MEM_HEAP_ALLOC_SHORT_TERM));

    for (ecma_length_t i = 0; i < bound_args_count; i++)
    {
      bool is_moved = ecma_collection_iterator_next (&bound_args_iterator);
      JERRY_ASSERT (is_moved);

      arg_list_p[i] = *bound_args_iterator.current_value_p;
    }
  }
  else
  {
    /* Not a bound function: the merged list is just the caller's list. */
    *total_args_count = arguments_list_len;

    const size_t arg_list_size = (size_t) *total_args_count * sizeof (ecma_value_t);
    arg_list_p = static_cast <ecma_value_t *> (mem_heap_alloc_block (arg_list_size,
                                                                     MEM_HEAP_ALLOC_SHORT_TERM));
  }

  /* Append the caller-supplied arguments after the bound ones
   * (bound_args_count is 0 in the non-bound case). */
  for (ecma_length_t i = 0; i < arguments_list_len; i++)
  {
    arg_list_p[i + bound_args_count] = arguments_list_p[i];
  }

  return arg_list_p;
} /* ecma_function_bind_merge_arg_lists */
/**
 * Grow the RegExp bytecode container by one block.
 *
 * Allocates a larger buffer, moves the already emitted bytecode into it,
 * releases the previous buffer and updates the context pointers.
 *
 * @return current position in RegExp bytecode
 */
static re_bytecode_t*
re_realloc_regexp_bytecode_block (re_bytecode_ctx_t *bc_ctx_p) /**< RegExp bytecode context */
{
  JERRY_ASSERT (bc_ctx_p->block_end_p - bc_ctx_p->block_start_p >= 0);
  size_t used_capacity = static_cast<size_t> (bc_ctx_p->block_end_p - bc_ctx_p->block_start_p);

  /* Either this is the very first allocation (all pointers NULL) or the
   * container is fully initialized (all pointers non-NULL). */
  JERRY_ASSERT ((!bc_ctx_p->current_p && !bc_ctx_p->block_end_p && !bc_ctx_p->block_start_p)
                || (bc_ctx_p->current_p && bc_ctx_p->block_end_p && bc_ctx_p->block_start_p));

  size_t grown_capacity = used_capacity + REGEXP_BYTECODE_BLOCK_SIZE;

  JERRY_ASSERT (bc_ctx_p->current_p - bc_ctx_p->block_start_p >= 0);
  size_t emit_offset = static_cast<size_t> (bc_ctx_p->current_p - bc_ctx_p->block_start_p);

  re_bytecode_t *grown_buffer_p = (re_bytecode_t *) mem_heap_alloc_block (grown_capacity,
                                                                          MEM_HEAP_ALLOC_SHORT_TERM);

  if (bc_ctx_p->current_p)
  {
    /* Preserve what has been emitted so far, then drop the old buffer. */
    memcpy (grown_buffer_p, bc_ctx_p->block_start_p, static_cast<size_t> (emit_offset));
    mem_heap_free_block (bc_ctx_p->block_start_p);
  }

  bc_ctx_p->block_start_p = grown_buffer_p;
  bc_ctx_p->block_end_p = grown_buffer_p + grown_capacity;
  bc_ctx_p->current_p = grown_buffer_p + emit_offset;

  return bc_ctx_p->current_p;
} /* re_realloc_regexp_bytecode_block */
/**
 * Insert a new bytecode to the bytecode container
 *
 * Opens a gap of 'length' bytes at 'offset' by shifting the tail of the
 * already emitted bytecode towards the end, then copies the input bytecode
 * into the gap. Because source and destination of the shift overlap, the
 * tail is staged through a temporary heap block.
 */
static void
re_bytecode_list_insert (re_bytecode_ctx_t *bc_ctx_p, /**< RegExp bytecode context */
                         size_t offset, /**< distance from the start of the container */
                         re_bytecode_t *bytecode_p, /**< input bytecode */
                         size_t length) /**< length of input */
{
  JERRY_ASSERT (length <= REGEXP_BYTECODE_BLOCK_SIZE);

  re_bytecode_t *current_p = bc_ctx_p->current_p;
  if (current_p + length > bc_ctx_p->block_end_p)
  {
    /* Not enough free space: grow by one block. One block always suffices
     * because of the length assert above. */
    re_realloc_regexp_bytecode_block (bc_ctx_p);
  }

  /* Recomputed from the context: block_start_p may have changed above. */
  re_bytecode_t *src_p = bc_ctx_p->block_start_p + offset;
  if ((re_get_bytecode_length (bc_ctx_p) - offset) > 0)
  {
    /* Shift the tail [offset, end) forward by 'length' bytes.
     * NOTE(review): a single memmove would avoid the temporary allocation
     * and the two copies. */
    re_bytecode_t *dest_p = src_p + length;
    re_bytecode_t *tmp_block_start_p;
    tmp_block_start_p = (re_bytecode_t *) mem_heap_alloc_block ((re_get_bytecode_length (bc_ctx_p) - offset),
                                                                MEM_HEAP_ALLOC_SHORT_TERM);
    memcpy (tmp_block_start_p, src_p, (size_t) (re_get_bytecode_length (bc_ctx_p) - offset));
    memcpy (dest_p, tmp_block_start_p, (size_t) (re_get_bytecode_length (bc_ctx_p) - offset));
    mem_heap_free_block (tmp_block_start_p);
  }

  /* Fill the gap with the new bytecode and advance the write position. */
  memcpy (src_p, bytecode_p, length);
  bc_ctx_p->current_p += length;
} /* re_bytecode_list_insert */
/**
 * The String.prototype object's 'trim' routine
 *
 * Strips leading and trailing whitespace from the 'this' value coerced
 * to a string and returns the trimmed sub-string.
 *
 * See also:
 *          ECMA-262 v5, 15.5.4.20
 *
 * @return completion value
 *         Returned value must be freed with ecma_free_completion_value.
 */
static ecma_completion_value_t
ecma_builtin_string_prototype_object_trim (ecma_value_t this_arg) /**< this argument */
{
  ecma_completion_value_t ret_value = ecma_make_empty_completion_value ();

  /* 1 */
  ECMA_TRY_CATCH (check_coercible_val,
                  ecma_op_check_object_coercible (this_arg),
                  ret_value);

  /* 2 */
  ECMA_TRY_CATCH (to_string_val,
                  ecma_op_to_string (this_arg),
                  ret_value);

  ecma_string_t *original_string_p = ecma_get_string_from_value (to_string_val);

  /* 3 */
  const lit_utf8_size_t size = ecma_string_get_size (original_string_p);
  /* Fix: 'length' is used below as a code-unit index, so it must be the
   * code-unit count (ecma_string_get_length), not the byte size
   * (ecma_string_get_size); the two differ for non-ASCII strings. */
  const ecma_length_t length = ecma_string_get_length (original_string_p);

  /* Workaround: avoid repeated call of ecma_string_get_char_at_pos() because its overhead */
  lit_utf8_byte_t *original_utf8_str_p = (lit_utf8_byte_t *) mem_heap_alloc_block (size + 1,
                                                                                   MEM_HEAP_ALLOC_SHORT_TERM);
  ecma_string_to_utf8_string (original_string_p, original_utf8_str_p, (ssize_t) size);

  uint32_t prefix = 0, postfix = 0;
  uint32_t new_len = 0;

  /* FIXME(review): isspace () handles only the ASCII whitespace set and is
   * undefined for values above UCHAR_MAX (CERT STR37-C); the spec requires
   * the full WhiteSpace/LineTerminator sets (ECMA-262 v5, 15.5.4.20). */

  /* Count leading whitespace code units. */
  while (prefix < length
         && isspace (lit_utf8_string_code_unit_at (original_utf8_str_p, size, prefix)))
  {
    prefix++;
  }

  /* Count trailing whitespace code units (never overlapping the prefix). */
  while (postfix < length - prefix
         && isspace (lit_utf8_string_code_unit_at (original_utf8_str_p, size, length - postfix - 1)))
  {
    postfix++;
  }

  /* Fix: the trimmed length must be computed in code units ('length') to
   * match 'prefix'/'postfix'; the original mixed in the byte size here. */
  new_len = prefix < length ? length - prefix - postfix : 0;

  ecma_string_t *new_str_p = ecma_string_substr (original_string_p, prefix, prefix + new_len);

  /* 4 */
  ret_value = ecma_make_normal_completion_value (ecma_make_string_value (new_str_p));

  mem_heap_free_block (original_utf8_str_p);

  ECMA_FINALIZE (to_string_val);
  ECMA_FINALIZE (check_coercible_val);

  return ret_value;
} /* ecma_builtin_string_prototype_object_trim */
array_list array_list_init (uint8_t element_size) { size_t size = mem_heap_recommend_allocation_size (sizeof (array_list_header)); array_list_header *header = (array_list_header *) mem_heap_alloc_block (size, MEM_HEAP_ALLOC_SHORT_TERM); memset (header, 0, size); header->element_size = element_size; header->len = 0; header->size = size; return (array_list) header; }
/** * Create external magic string record in the literal storage. * * @return pointer to the created record */ lit_record_t * lit_create_magic_literal_ex (const lit_magic_string_ex_id_t id) /**< id of magic string */ { lit_magic_record_t *rec_p = (lit_magic_record_t *) mem_heap_alloc_block (sizeof (lit_magic_record_t)); rec_p->type = LIT_RECORD_TYPE_MAGIC_STR_EX; rec_p->next = (uint16_t) lit_cpointer_compress (lit_storage); lit_storage = (lit_record_t *) rec_p; rec_p->magic_id = (uint32_t) id; return (lit_record_t *) rec_p; } /* lit_create_magic_literal_ex */
/** * Create number record in the literal storage. * * @return pointer to the created record */ lit_record_t * lit_create_number_literal (const ecma_number_t num) /**< numeric value */ { lit_number_record_t *rec_p = (lit_number_record_t *) mem_heap_alloc_block (sizeof (lit_number_record_t)); rec_p->type = (uint8_t) LIT_RECORD_TYPE_NUMBER; rec_p->next = (uint16_t) lit_cpointer_compress (lit_storage); lit_storage = (lit_record_t *) rec_p; rec_p->number = num; return (lit_record_t *) rec_p; } /* lit_create_number_literal */
linked_list linked_list_init (uint16_t element_size) { size_t size = sizeof (linked_list_header) + linked_list_block_size (element_size); linked_list list = (linked_list) mem_heap_alloc_block (size, MEM_HEAP_ALLOC_SHORT_TERM); if (list == null_list) { printf ("Out of memory"); JERRY_UNREACHABLE (); } memset (list, 0, size); linked_list_header* header = (linked_list_header *) list; header->next = null_list; header->element_size = element_size; return list; }
/**
 * Long path for mem_pools_alloc
 *
 * @return true - if there is a free chunk in mem_pools,
 *         false - otherwise (not enough memory).
 */
static bool __attr_noinline___
mem_pools_alloc_longpath (void)
{
  /**
   * If there are no free chunks, allocate new pool.
   */
  if (mem_free_chunks_number == 0)
  {
    /* No free chunk anywhere: allocate and initialize a fresh pool and
     * link it in at the head of the pool list. */
    mem_pool_state_t *pool_state = (mem_pool_state_t*) mem_heap_alloc_block (MEM_POOL_SIZE,
                                                                             MEM_HEAP_ALLOC_LONG_TERM);

    JERRY_ASSERT (pool_state != NULL);

    mem_pool_init (pool_state, MEM_POOL_SIZE);

    MEM_CP_SET_POINTER (pool_state->next_pool_cp, mem_pools);
    mem_pools = pool_state;

    mem_free_chunks_number += MEM_POOL_CHUNKS_NUMBER;

    MEM_POOLS_STAT_ALLOC_POOL ();
  }
  else
  {
    /**
     * There is definitely at least one pool of specified type with at least one free chunk.
     *
     * Search for the pool.
     */
    mem_pool_state_t *pool_state = mem_pools, *prev_pool_state_p = NULL;

    /* first_free_chunk == MEM_POOL_CHUNKS_NUMBER marks a pool with no free
     * chunk; skip over the full pools. Termination is guaranteed by the
     * free-chunk counter check above. */
    while (pool_state->first_free_chunk == MEM_POOL_CHUNKS_NUMBER)
    {
      prev_pool_state_p = pool_state;
      pool_state = MEM_CP_GET_NON_NULL_POINTER (mem_pool_state_t, pool_state->next_pool_cp);
    }

    /* The head pool must have been full, otherwise the fast path would
     * have succeeded. */
    JERRY_ASSERT (prev_pool_state_p != NULL && pool_state != mem_pools);

    /* Move the found pool to the head of the list, so the next allocation
     * finds it immediately (move-to-front heuristic). */
    prev_pool_state_p->next_pool_cp = pool_state->next_pool_cp;
    MEM_CP_SET_NON_NULL_POINTER (pool_state->next_pool_cp, mem_pools);
    mem_pools = pool_state;
  }

  return true;
} /* mem_pools_alloc_longpath */
/** * Create charset record in the literal storage * * @return pointer to the created record */ lit_record_t * lit_create_charset_literal (const lit_utf8_byte_t *str_p, /**< string to be placed into the record */ const lit_utf8_size_t buf_size) /**< size in bytes of the buffer which holds the string */ { lit_charset_record_t *rec_p = (lit_charset_record_t *) mem_heap_alloc_block (buf_size + LIT_CHARSET_HEADER_SIZE); rec_p->type = LIT_RECORD_TYPE_CHARSET; rec_p->next = (uint16_t) lit_cpointer_compress (lit_storage); lit_storage = (lit_record_t *) rec_p; rec_p->hash = (uint8_t) lit_utf8_string_calc_hash (str_p, buf_size); rec_p->size = (uint16_t) buf_size; rec_p->length = (uint16_t) lit_utf8_string_length (str_p, buf_size); memcpy (rec_p + 1, str_p, buf_size); return (lit_record_t *) rec_p; } /* lit_create_charset_literal */
/**
 * Merge scopes tree into bytecode
 *
 * Lays out a single long-term heap block:
 * [bytecode data header][uid->literal hash table][instruction array],
 * and links the header into the global bytecode header list.
 *
 * @return pointer to generated bytecode
 */
const bytecode_data_header_t *
serializer_merge_scopes_into_bytecode (void)
{
  const size_t buckets_count = scopes_tree_count_literals_in_blocks (current_scope);
  const vm_instr_counter_t instrs_count = scopes_tree_count_instructions (current_scope);
  const size_t blocks_count = JERRY_ALIGNUP (instrs_count, BLOCK_SIZE) / BLOCK_SIZE;

  const size_t bytecode_size = JERRY_ALIGNUP (instrs_count * sizeof (vm_instr_t), MEM_ALIGNMENT);
  const size_t hash_table_size = lit_id_hash_table_get_size_for_table (buckets_count, blocks_count);
  const size_t header_and_hash_table_size = JERRY_ALIGNUP (sizeof (bytecode_data_header_t) + hash_table_size,
                                                           MEM_ALIGNMENT);

  uint8_t *buffer_p = (uint8_t*) mem_heap_alloc_block (bytecode_size + header_and_hash_table_size,
                                                       MEM_HEAP_ALLOC_LONG_TERM);

  /* Hash table lives right after the header, inside the same block. */
  lit_id_hash_table *lit_id_hash = lit_id_hash_table_init (buffer_p + sizeof (bytecode_data_header_t),
                                                           hash_table_size,
                                                           buckets_count, blocks_count);

  /* Instructions are dumped after header + hash table. */
  vm_instr_t *bytecode_p = scopes_tree_raw_data (current_scope,
                                                 buffer_p + header_and_hash_table_size,
                                                 bytecode_size,
                                                 lit_id_hash);

  bytecode_data_header_t *header_p = (bytecode_data_header_t *) buffer_p;
  MEM_CP_SET_POINTER (header_p->lit_id_hash_cp, lit_id_hash);
  header_p->instrs_p = bytecode_p;
  header_p->instrs_count = instrs_count;

  /* Prepend to the global list of bytecode headers. */
  MEM_CP_SET_POINTER (header_p->next_header_cp, first_bytecode_header_p);
  first_bytecode_header_p = header_p;

  if (print_instrs)
  {
    lit_dump_literals ();
    serializer_print_instrs (header_p);
  }

  return header_p;
} /* serializer_merge_scopes_into_bytecode */
/**
 * Append one element to the array list, growing the backing block if full.
 *
 * @return the (possibly relocated) array list
 */
array_list
array_list_append (array_list al, void *element)
{
  array_list_header *hdr_p = extract_header (al);

  if ((hdr_p->len + 1) * hdr_p->element_size + sizeof (array_list_header) > hdr_p->size)
  {
    /* No room left: allocate a bigger block, move everything over,
     * zero the new tail and release the old block. */
    size_t grown_size = mem_heap_recommend_allocation_size (hdr_p->size + hdr_p->element_size);
    JERRY_ASSERT (grown_size > hdr_p->size);

    uint8_t *grown_block_p = (uint8_t *) mem_heap_alloc_block (grown_size, MEM_HEAP_ALLOC_SHORT_TERM);
    memcpy (grown_block_p, hdr_p, hdr_p->size);
    memset (grown_block_p + hdr_p->size, 0, grown_size - hdr_p->size);
    mem_heap_free_block ((uint8_t *) hdr_p);

    hdr_p = (array_list_header *) grown_block_p;
    hdr_p->size = grown_size;
    al = (array_list) hdr_p;
  }

  /* Copy the element into the first free slot. */
  memcpy (data (al) + (hdr_p->len * hdr_p->element_size), element, hdr_p->element_size);
  hdr_p->len++;

  return al;
}
/**
 * Merge scopes tree into bytecode (variant returning the raw instruction
 * array; presumably an earlier revision of the routine above - note the
 * duplicate name).
 *
 * @return pointer to the dumped instruction array
 */
const vm_instr_t *
serializer_merge_scopes_into_bytecode (void)
{
  bytecode_data.instrs_count = scopes_tree_count_instructions (current_scope);
  const size_t buckets_count = scopes_tree_count_literals_in_blocks (current_scope);
  /* NOTE(review): this always rounds up by a whole extra block when
   * instrs_count is an exact multiple of BLOCK_SIZE; the other variant uses
   * JERRY_ALIGNUP (...) / BLOCK_SIZE - confirm which is intended. */
  const size_t blocks_count = (size_t) bytecode_data.instrs_count / BLOCK_SIZE + 1;
  /* NOTE(review): second call to scopes_tree_count_instructions; the value
   * is already in bytecode_data.instrs_count. */
  const vm_instr_counter_t instrs_count = scopes_tree_count_instructions (current_scope);

  const size_t bytecode_array_size = JERRY_ALIGNUP (sizeof (insts_data_header_t) + instrs_count * sizeof (vm_instr_t),
                                                    MEM_ALIGNMENT);
  const size_t lit_id_hash_table_size = JERRY_ALIGNUP (lit_id_hash_table_get_size_for_table (buckets_count,
                                                                                             blocks_count),
                                                       MEM_ALIGNMENT);

  /* Single long-term block: [header + instructions][uid->literal hash table]. */
  uint8_t *buffer_p = (uint8_t*) mem_heap_alloc_block (bytecode_array_size + lit_id_hash_table_size,
                                                       MEM_HEAP_ALLOC_LONG_TERM);

  lit_id_hash_table *lit_id_hash = lit_id_hash_table_init (buffer_p + bytecode_array_size,
                                                           lit_id_hash_table_size,
                                                           buckets_count,
                                                           blocks_count);

  const vm_instr_t *instrs_p = scopes_tree_raw_data (current_scope, buffer_p, bytecode_array_size, lit_id_hash);

  /* Chain this header in front of the previously dumped instruction lists. */
  insts_data_header_t *header_p = (insts_data_header_t*) buffer_p;
  MEM_CP_SET_POINTER (header_p->next_instrs_cp, bytecode_data.instrs_p);
  header_p->instructions_number = instrs_count;
  bytecode_data.instrs_p = instrs_p;

  if (print_instrs)
  {
    lit_dump_literals ();
    serializer_print_instrs (instrs_p, bytecode_data.instrs_count);
  }

  return instrs_p;
}
/**
 * Register bytecode and idx map from snapshot
 *
 * NOTE:
 *      If is_copy flag is set, bytecode is copied from snapshot, else bytecode is referenced directly
 *      from snapshot
 *
 * @return pointer to byte-code header, upon success,
 *         NULL - upon failure (i.e., in case snapshot format is not valid)
 */
const bytecode_data_header_t *
serializer_load_bytecode_with_idx_map (const uint8_t *bytecode_and_idx_map_p, /**< buffer with instructions array
                                                                               *   and idx to literals map from
                                                                               *   snapshot */
                                       uint32_t bytecode_size, /**< size of instructions array */
                                       uint32_t idx_to_lit_map_size, /**< size of the idx to literals map */
                                       const lit_mem_to_snapshot_id_map_entry_t *lit_map_p, /**< map of in-snapshot
                                                                                             *   literal offsets
                                                                                             *   to literal identifiers,
                                                                                             *   created in literal
                                                                                             *   storage */
                                       uint32_t literals_num, /**< number of literals */
                                       bool is_copy) /** flag, indicating whether the passed in-snapshot data
                                                      *  should be copied to engine's memory (true),
                                                      *  or it can be referenced until engine is stopped
                                                      *  (i.e. until call to jerry_cleanup) */
{
  /* The idx->literal map immediately follows the instruction array. */
  const uint8_t *idx_to_lit_map_p = bytecode_and_idx_map_p + bytecode_size;

  size_t instructions_number = bytecode_size / sizeof (vm_instr_t);
  size_t blocks_count = JERRY_ALIGNUP (instructions_number, BLOCK_SIZE) / BLOCK_SIZE;

  /* Read the total number of idx->literal entries. */
  uint32_t idx_num_total;
  size_t idx_to_lit_map_offset = 0;
  if (!jrt_read_from_buffer_by_offset (idx_to_lit_map_p,
                                       idx_to_lit_map_size,
                                       &idx_to_lit_map_offset,
                                       &idx_num_total))
  {
    return NULL;
  }

  /* Layout of the runtime block: [header][hash table][bytecode (only if copied)]. */
  const size_t bytecode_alloc_size = JERRY_ALIGNUP (bytecode_size, MEM_ALIGNMENT);
  const size_t hash_table_size = lit_id_hash_table_get_size_for_table (idx_num_total, blocks_count);
  const size_t header_and_hash_table_size = JERRY_ALIGNUP (sizeof (bytecode_data_header_t) + hash_table_size,
                                                           MEM_ALIGNMENT);
  const size_t alloc_size = header_and_hash_table_size + (is_copy ? bytecode_alloc_size : 0);

  uint8_t *buffer_p = (uint8_t*) mem_heap_alloc_block (alloc_size, MEM_HEAP_ALLOC_LONG_TERM);
  bytecode_data_header_t *header_p = (bytecode_data_header_t *) buffer_p;

  vm_instr_t *instrs_p;
  vm_instr_t *snapshot_instrs_p = (vm_instr_t *) bytecode_and_idx_map_p;
  if (is_copy)
  {
    /* Copy the instructions into engine memory, after header + hash table. */
    instrs_p = (vm_instr_t *) (buffer_p + header_and_hash_table_size);
    memcpy (instrs_p, snapshot_instrs_p, bytecode_size);
  }
  else
  {
    /* Reference the snapshot buffer directly. */
    instrs_p = snapshot_instrs_p;
  }

  uint8_t *lit_id_hash_table_buffer_p = buffer_p + sizeof (bytecode_data_header_t);
  if (lit_id_hash_table_load_from_snapshot (blocks_count, idx_num_total,
                                            idx_to_lit_map_p + idx_to_lit_map_offset,
                                            idx_to_lit_map_size - idx_to_lit_map_offset,
                                            lit_map_p, literals_num,
                                            lit_id_hash_table_buffer_p, hash_table_size)
      && (vm_instr_counter_t) instructions_number == instructions_number)
  {
    /* Success: fill the header and prepend it to the global header list. */
    MEM_CP_SET_NON_NULL_POINTER (header_p->lit_id_hash_cp, lit_id_hash_table_buffer_p);
    header_p->instrs_p = instrs_p;
    header_p->instrs_count = (vm_instr_counter_t) instructions_number;
    MEM_CP_SET_POINTER (header_p->next_header_cp, first_bytecode_header_p);
    first_bytecode_header_p = header_p;

    return header_p;
  }
  else
  {
    /* Invalid snapshot (or instruction count overflow): release everything. */
    mem_heap_free_block (buffer_p);
    return NULL;
  }
} /* serializer_load_bytecode_with_idx_map */
/**
 * 'Native call' opcode handler.
 *
 * Evaluates the call arguments and dispatches on the native call id.
 * Only 'print' is implemented here; the device operations are stubs.
 */
ecma_completion_value_t
opfunc_native_call (opcode_t opdata, /**< operation data */
                    int_data_t *int_data) /**< interpreter context */
{
  const idx_t dst_var_idx = opdata.data.native_call.lhs;
  const idx_t native_call_id_idx = opdata.data.native_call.name;
  const idx_t args_number = opdata.data.native_call.arg_list;
  const opcode_counter_t lit_oc = int_data->pos;

  JERRY_ASSERT (native_call_id_idx < OPCODE_NATIVE_CALL__COUNT);

  int_data->pos++;

  /* The call id must fit the bytecode field it is read from. */
  JERRY_STATIC_ASSERT (OPCODE_NATIVE_CALL__COUNT < (1u << (sizeof (native_call_id_idx) * JERRY_BITSINBYTE)));

  ecma_completion_value_t ret_value = ecma_make_empty_completion_value ();

  MEM_DEFINE_LOCAL_ARRAY (arg_values, args_number, ecma_value_t);

  ecma_length_t args_read;
  ecma_completion_value_t get_arg_completion = fill_varg_list (int_data,
                                                               args_number,
                                                               arg_values,
                                                               &args_read);

  if (ecma_is_completion_value_empty (get_arg_completion))
  {
    JERRY_ASSERT (args_read == args_number);

    switch ((opcode_native_call_t)native_call_id_idx)
    {
      case OPCODE_NATIVE_CALL_LED_TOGGLE:
      case OPCODE_NATIVE_CALL_LED_ON:
      case OPCODE_NATIVE_CALL_LED_OFF:
      case OPCODE_NATIVE_CALL_LED_ONCE:
      case OPCODE_NATIVE_CALL_WAIT:
      {
        JERRY_UNIMPLEMENTED ("Device operations are not implemented.");
      }

      case OPCODE_NATIVE_CALL_PRINT:
      {
        /* Print every argument as a string, space-separated, then a newline.
         * Stops early if ToString throws (ret_value becomes non-empty). */
        for (ecma_length_t arg_index = 0;
             ecma_is_completion_value_empty (ret_value) && arg_index < args_read;
             arg_index++)
        {
          ECMA_TRY_CATCH (str_value,
                          ecma_op_to_string (arg_values[arg_index]),
                          ret_value);

          ecma_string_t *str_p = ecma_get_string_from_value (str_value);

          lit_utf8_size_t bytes = ecma_string_get_size (str_p);

          /* +1 for the terminating NUL written below. */
          ssize_t utf8_str_size = (ssize_t) (bytes + 1);

          lit_utf8_byte_t *utf8_str_p = (lit_utf8_byte_t*) mem_heap_alloc_block ((size_t) utf8_str_size,
                                                                                 MEM_HEAP_ALLOC_SHORT_TERM);

          if (utf8_str_p == NULL)
          {
            jerry_fatal (ERR_OUT_OF_MEMORY);
          }

          ecma_string_to_utf8_string (str_p, utf8_str_p, utf8_str_size);
          utf8_str_p[utf8_str_size - 1] = 0;

          FIXME ("Support unicode in printf.");
          if (arg_index < args_read - 1)
          {
            printf ("%s ", (char*) utf8_str_p);
          }
          else
          {
            printf ("%s", (char*) utf8_str_p);
          }

          mem_heap_free_block (utf8_str_p);

          /* print's result value is 'undefined'. */
          ret_value = set_variable_value (int_data, lit_oc,
                                          dst_var_idx,
                                          ecma_make_simple_value (ECMA_SIMPLE_VALUE_UNDEFINED));

          ECMA_FINALIZE (str_value);
        }
        printf ("\n");
        break;
      }

      case OPCODE_NATIVE_CALL__COUNT:
      {
        JERRY_UNREACHABLE ();
      }
    }
  }
  else
  {
    JERRY_ASSERT (!ecma_is_completion_value_normal (get_arg_completion));

    ret_value = get_arg_completion;
  }

  /* Release the evaluated argument values.
   * NOTE(review): assumes fill_varg_list sets args_read even when it
   * returns a non-empty completion - verify against its definition. */
  for (ecma_length_t arg_index = 0;
       arg_index < args_read;
       arg_index++)
  {
    ecma_free_value (arg_values[arg_index], true);
  }

  MEM_FINALIZE_LOCAL_ARRAY (arg_values);

  return ret_value;
} /* opfunc_native_call */
/**
 * Register bytecode and supplementary data of a single scope from snapshot
 *
 * NOTE:
 *      If is_copy flag is set, bytecode is copied from snapshot, else bytecode is referenced directly
 *      from snapshot
 *
 * @return pointer to byte-code header, upon success,
 *         NULL - upon failure (i.e., in case snapshot format is not valid)
 */
static bytecode_data_header_t *
bc_load_bytecode_with_idx_map (const uint8_t *snapshot_data_p, /**< buffer with instructions array
                                                                *   and idx to literals map from
                                                                *   snapshot */
                               size_t snapshot_size, /**< remaining size of snapshot */
                               const lit_mem_to_snapshot_id_map_entry_t *lit_map_p, /**< map of in-snapshot
                                                                                     *   literal offsets
                                                                                     *   to literal identifiers,
                                                                                     *   created in literal
                                                                                     *   storage */
                               uint32_t literals_num, /**< number of literals */
                               bool is_copy, /** flag, indicating whether the passed in-snapshot data
                                              *  should be copied to engine's memory (true),
                                              *  or it can be referenced until engine is stopped
                                              *  (i.e. until call to jerry_cleanup) */
                               uint32_t *out_bytecode_data_size) /**< out: size occupied by bytecode data
                                                                  *   in snapshot */
{
  size_t buffer_offset = 0;
  jerry_snapshot_bytecode_header_t bytecode_header;

  /* Read the per-scope snapshot header. */
  if (!jrt_read_from_buffer_by_offset (snapshot_data_p,
                                       snapshot_size,
                                       &buffer_offset,
                                       &bytecode_header,
                                       sizeof (bytecode_header)))
  {
    return NULL;
  }

  *out_bytecode_data_size = bytecode_header.size;

  /* Skip the alignment padding that follows the snapshot header. */
  buffer_offset += (JERRY_ALIGNUP (sizeof (jerry_snapshot_bytecode_header_t), MEM_ALIGNMENT)
                    - sizeof (jerry_snapshot_bytecode_header_t));

  JERRY_ASSERT (bytecode_header.size <= snapshot_size);

  /* Read uid->lit_cp hash table size */
  /* NOTE(review): the doubled '+' below is a unary plus - harmless, but
   * likely a typo worth removing. */
  const uint8_t *idx_to_lit_map_p = (snapshot_data_p + buffer_offset +
                                     + bytecode_header.instrs_size
                                     + bytecode_header.var_decls_count * sizeof (uint32_t));

  size_t instructions_number = bytecode_header.instrs_size / sizeof (vm_instr_t);
  size_t blocks_count = JERRY_ALIGNUP (instructions_number, BLOCK_SIZE) / BLOCK_SIZE;

  uint32_t idx_num_total;
  size_t idx_to_lit_map_offset = 0;
  if (!jrt_read_from_buffer_by_offset (idx_to_lit_map_p,
                                       bytecode_header.idx_to_lit_map_size,
                                       &idx_to_lit_map_offset,
                                       &idx_num_total,
                                       sizeof (idx_num_total)))
  {
    return NULL;
  }

  /* Alloc bytecode_header for runtime */
  /* Layout: [header][hash table][child-scope decls + var decls][bytecode (only if copied)]. */
  const size_t bytecode_alloc_size = JERRY_ALIGNUP (bytecode_header.instrs_size, MEM_ALIGNMENT);
  const size_t hash_table_size = lit_id_hash_table_get_size_for_table (idx_num_total, blocks_count);
  const size_t declarations_area_size = JERRY_ALIGNUP (bytecode_header.func_scopes_count * sizeof (mem_cpointer_t)
                                                       + bytecode_header.var_decls_count * sizeof (lit_cpointer_t),
                                                       MEM_ALIGNMENT);
  const size_t header_and_tables_size = JERRY_ALIGNUP ((sizeof (bytecode_data_header_t)
                                                        + hash_table_size
                                                        + declarations_area_size),
                                                       MEM_ALIGNMENT);
  const size_t alloc_size = header_and_tables_size + (is_copy ? bytecode_alloc_size : 0);

  uint8_t *buffer_p = (uint8_t*) mem_heap_alloc_block (alloc_size, MEM_HEAP_ALLOC_LONG_TERM);
  bytecode_data_header_t *header_p = (bytecode_data_header_t *) buffer_p;

  vm_instr_t *instrs_p;
  vm_instr_t *snapshot_instrs_p = (vm_instr_t *) (snapshot_data_p + buffer_offset);
  if (is_copy)
  {
    /* Copy the instructions into engine memory, after the tables. */
    instrs_p = (vm_instr_t *) (buffer_p + header_and_tables_size);
    memcpy (instrs_p, snapshot_instrs_p, bytecode_header.instrs_size);
  }
  else
  {
    /* Reference the snapshot buffer directly. */
    instrs_p = snapshot_instrs_p;
  }

  buffer_offset += bytecode_header.instrs_size; /* buffer_offset is now offset of variable declarations */

  /* Read uid->lit_cp hash table */
  uint8_t *lit_id_hash_table_buffer_p = buffer_p + sizeof (bytecode_data_header_t);
  if (!(lit_id_hash_table_load_from_snapshot (blocks_count, idx_num_total,
                                              idx_to_lit_map_p + idx_to_lit_map_offset,
                                              bytecode_header.idx_to_lit_map_size - idx_to_lit_map_offset,
                                              lit_map_p, literals_num,
                                              lit_id_hash_table_buffer_p, hash_table_size)
        && (vm_instr_counter_t) instructions_number == instructions_number))
  {
    mem_heap_free_block (buffer_p);
    return NULL;
  }

  /* Fill with NULLs child scopes declarations for this scope */
  mem_cpointer_t *declarations_p = (mem_cpointer_t *) (buffer_p + sizeof (bytecode_data_header_t) + hash_table_size);
  memset (declarations_p, 0, bytecode_header.func_scopes_count * sizeof (mem_cpointer_t));

  /* Read variable declarations for this scope */
  lit_cpointer_t *var_decls_p = (lit_cpointer_t *) (declarations_p + bytecode_header.func_scopes_count);
  for (uint32_t i = 0; i < bytecode_header.var_decls_count; i++)
  {
    uint32_t lit_offset_from_snapshot;
    if (!jrt_read_from_buffer_by_offset (snapshot_data_p,
                                         buffer_offset + bytecode_header.var_decls_count * sizeof (uint32_t),
                                         &buffer_offset,
                                         &lit_offset_from_snapshot,
                                         sizeof (lit_offset_from_snapshot)))
    {
      mem_heap_free_block (buffer_p);
      return NULL;
    }

    /**
     * TODO: implement binary search here
     */
    lit_cpointer_t lit_cp = NOT_A_LITERAL;
    uint32_t j;
    for (j = 0; j < literals_num; j++)
    {
      if (lit_map_p[j].literal_offset == lit_offset_from_snapshot)
      {
        lit_cp.packed_value = lit_map_p[j].literal_id.packed_value;
        break;
      }
    }

    if (j == literals_num)
    {
      /* Literal offset not present in the map: snapshot is invalid. */
      mem_heap_free_block (buffer_p);
      return NULL;
    }

    var_decls_p[i] = lit_cp;
  }

  /* Fill bytecode_data_header */
  /* NOTE(review): 'bytecode_header.is_args_moved_to_regs' is passed twice in
   * a row below, while bc_dump_single_scope passes two distinct values at
   * these positions. Verify against the declaration of
   * bc_fill_bytecode_data_header - one of the two may need to be a different
   * header field. */
  bc_fill_bytecode_data_header (header_p,
                                (lit_id_hash_table *) lit_id_hash_table_buffer_p,
                                instrs_p,
                                declarations_p,
                                (uint16_t) bytecode_header.func_scopes_count,
                                (uint16_t) bytecode_header.var_decls_count,
                                bytecode_header.is_strict,
                                bytecode_header.is_ref_arguments_identifier,
                                bytecode_header.is_ref_eval_identifier,
                                bytecode_header.is_args_moved_to_regs,
                                bytecode_header.is_args_moved_to_regs,
                                bytecode_header.is_no_lex_env);

  return header_p;
} /* bc_load_bytecode_with_idx_map */
/**
 * Dump single scopes tree into bytecode
 *
 * Lays out one long-term heap block:
 * [bytecode data header][uid->literal hash table]
 * [child scope declaration slots + variable declarations][instruction array].
 *
 * @return pointer to bytecode header of the outer most scope
 */
bytecode_data_header_t *
bc_dump_single_scope (scopes_tree scope_p) /**< a node of scopes tree */
{
  const size_t entries_count = scope_p->max_uniq_literals_num;
  const vm_instr_counter_t instrs_count = scopes_tree_instrs_num (scope_p);
  const size_t blocks_count = JERRY_ALIGNUP (instrs_count, BLOCK_SIZE) / BLOCK_SIZE;
  const size_t func_scopes_count = scopes_tree_child_scopes_num (scope_p);
  const uint16_t var_decls_count = linked_list_get_length (scope_p->var_decls);

  const size_t bytecode_size = JERRY_ALIGNUP (instrs_count * sizeof (vm_instr_t), MEM_ALIGNMENT);
  const size_t hash_table_size = lit_id_hash_table_get_size_for_table (entries_count, blocks_count);
  const size_t declarations_area_size = JERRY_ALIGNUP (func_scopes_count * sizeof (mem_cpointer_t)
                                                       + var_decls_count * sizeof (lit_cpointer_t),
                                                       MEM_ALIGNMENT);
  const size_t header_and_tables_size = JERRY_ALIGNUP ((sizeof (bytecode_data_header_t)
                                                        + hash_table_size
                                                        + declarations_area_size),
                                                       MEM_ALIGNMENT);

  uint8_t *buffer_p = (uint8_t *) mem_heap_alloc_block (bytecode_size + header_and_tables_size,
                                                        MEM_HEAP_ALLOC_LONG_TERM);

  lit_id_hash_table *lit_id_hash_p = lit_id_hash_table_init (buffer_p + sizeof (bytecode_data_header_t),
                                                             hash_table_size,
                                                             entries_count, blocks_count);

  /* Child scope declaration slots start empty; they are filled in later. */
  mem_cpointer_t *declarations_p = (mem_cpointer_t *) (buffer_p + sizeof (bytecode_data_header_t) + hash_table_size);

  for (size_t i = 0; i < func_scopes_count; i++)
  {
    declarations_p[i] = MEM_CP_NULL;
  }

  /* Variable declarations follow the child scope slots. */
  scopes_tree_dump_var_decls (scope_p, (lit_cpointer_t *) (declarations_p + func_scopes_count));

  vm_instr_t *bytecode_p = (vm_instr_t *) (buffer_p + header_and_tables_size);

  JERRY_ASSERT (scope_p->max_uniq_literals_num >= lit_id_hash_p->current_bucket_pos);

  bytecode_data_header_t *header_p = (bytecode_data_header_t *) buffer_p;

  /* func_scopes_count is stored in a uint16_t field; bail out on overflow. */
  if ((uint16_t) func_scopes_count != func_scopes_count)
  {
    jerry_fatal (ERR_OUT_OF_MEMORY);
  }

  bc_fill_bytecode_data_header (header_p,
                                lit_id_hash_p, bytecode_p, declarations_p,
                                (uint16_t) func_scopes_count, var_decls_count,
                                scope_p->strict_mode, scope_p->ref_arguments, scope_p->ref_eval,
                                scope_p->is_vars_and_args_to_regs_possible,
                                false, false);

  /* Back-link the header into the scope node. */
  JERRY_ASSERT (scope_p->bc_header_cp == MEM_CP_NULL);
  MEM_CP_SET_NON_NULL_POINTER (scope_p->bc_header_cp, header_p);

  return header_p;
} /* bc_dump_single_scope */
/**
 * Unit test entry point: stress-tests the heap allocator.
 *
 * Repeatedly allocates a random mix of plain and chunked blocks, zero-fills
 * them, verifies the contents read back as zero and that
 * mem_heap_get_chunked_block_start maps interior pointers back to the block
 * start, then frees everything.
 */
int
main (int __attr_unused___ argc,
      char __attr_unused___ **argv)
{
  TEST_INIT ();

  mem_heap_init ();

  mem_register_a_try_give_memory_back_callback (test_heap_give_some_memory_back);

  mem_heap_print (true, false, true);

  for (uint32_t i = 0; i < test_iters; i++)
  {
    /* Allocation phase: random block kind, size and lifetime. */
    for (uint32_t j = 0; j < test_sub_iters; j++)
    {
      if (rand () % 2)
      {
        size_t size = (size_t) rand () % test_threshold_block_size;
        ptrs[j] = (uint8_t*) mem_heap_alloc_block (size,
                                                   (rand () % 2) ? MEM_HEAP_ALLOC_LONG_TERM : MEM_HEAP_ALLOC_SHORT_TERM);
        sizes[j] = size;
        is_one_chunked[j] = false;
      }
      else
      {
        ptrs[j] = (uint8_t*) mem_heap_alloc_chunked_block ((rand () % 2) ? MEM_HEAP_ALLOC_LONG_TERM : MEM_HEAP_ALLOC_SHORT_TERM);
        sizes[j] = mem_heap_get_chunked_block_data_size ();
        is_one_chunked[j] = true;
      }

      JERRY_ASSERT (sizes[j] == 0 || ptrs[j] != NULL);
      /* Zero-fill so the verification phase can detect corruption. */
      memset (ptrs[j], 0, sizes[j]);

      if (is_one_chunked[j])
      {
        /* An interior pointer must map back to the block start. */
        JERRY_ASSERT (ptrs[j] != NULL
                      && mem_heap_get_chunked_block_start (ptrs[j] + (size_t) rand () % sizes[j]) == ptrs[j]);
      }
    }

    // mem_heap_print (true);

    /* Verification and free phase. */
    for (uint32_t j = 0; j < test_sub_iters; j++)
    {
      if (ptrs[j] != NULL)
      {
        for (size_t k = 0; k < sizes[j]; k++)
        {
          JERRY_ASSERT (ptrs[j][k] == 0);
        }

        if (is_one_chunked[j])
        {
          JERRY_ASSERT (sizes[j] == 0
                        || mem_heap_get_chunked_block_start (ptrs[j] + (size_t) rand () % sizes[j]) == ptrs[j]);
        }

        mem_heap_free_block (ptrs[j]);

        ptrs[j] = NULL;
      }
    }
  }

  mem_heap_print (true, false, true);

  return 0;
} /* main */