Example #1
/**
 * Increase reference counter of Compact
 * Byte Code or regexp byte code.
 */
void
ecma_bytecode_ref (ecma_compiled_code_t *bytecode_p) /**< byte code pointer */
{
  /* Abort program if maximum reference number is reached. */
  if (bytecode_p->refs >= UINT16_MAX)
  {
    jerry_fatal (ERR_REF_COUNT_LIMIT);
  }

  bytecode_p->refs++;
} /* ecma_bytecode_ref */
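The guard runs before the increment, so the 16-bit counter can never silently wrap back to zero, which would otherwise invite a premature free. Below is a minimal standalone sketch of the same saturate-and-abort pattern; the demo_* names are hypothetical stand-ins for the engine's types and fatal handler, not JerryScript API.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical stand-in for the engine's compiled-code header. */
typedef struct
{
  uint16_t refs; /* reference counter, checked against UINT16_MAX */
} demo_compiled_code_t;

/* Hypothetical stand-in for jerry_fatal. */
static void
demo_fatal (const char *msg)
{
  fprintf (stderr, "fatal: %s\n", msg);
  abort ();
}

/* Same pattern as ecma_bytecode_ref: abort before the counter can wrap. */
static void
demo_bytecode_ref (demo_compiled_code_t *bytecode_p)
{
  if (bytecode_p->refs >= UINT16_MAX)
  {
    demo_fatal ("reference count limit reached");
  }

  bytecode_p->refs++;
}

int
main (void)
{
  demo_compiled_code_t code = { 1 };
  demo_bytecode_ref (&code);
  printf ("refs = %u\n", (unsigned int) code.refs); /* prints: refs = 2 */
  return 0;
}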
Example #2
/**
 * Increase reference counter of an object
 */
void
ecma_ref_object (ecma_object_t *object_p) /**< object */
{
  if (likely (object_p->type_flags_refs < ECMA_OBJECT_MAX_REF))
  {
    object_p->type_flags_refs = (uint16_t) (object_p->type_flags_refs + ECMA_OBJECT_REF_ONE);
  }
  else
  {
    jerry_fatal (ERR_REF_COUNT_LIMIT);
  }
} /* ecma_ref_object */
Example #3
/**
 * Allocation of a memory block, running 'try to give memory back' callbacks if there is not enough memory.
 *
 * Note:
 *      if there is still not enough memory after running the callbacks
 *        - NULL is returned if the parameter 'ret_null_on_error' is true
 *        - the engine terminates with ERR_OUT_OF_MEMORY if 'ret_null_on_error' is false
 *
 * @return NULL, if the required memory size is 0
 *         also NULL, if 'ret_null_on_error' is true and the allocation fails because there is not enough memory
 */
static void *
jmem_heap_gc_and_alloc_block (const size_t size,      /**< required memory size */
                              bool ret_null_on_error) /**< indicates whether to return NULL or terminate
                                                           with ERR_OUT_OF_MEMORY on out of memory */
{
  if (unlikely (size == 0))
  {
    return NULL;
  }

  VALGRIND_FREYA_CHECK_MEMPOOL_REQUEST;

#ifdef JMEM_GC_BEFORE_EACH_ALLOC
  jmem_run_free_unused_memory_callbacks (JMEM_FREE_UNUSED_MEMORY_SEVERITY_HIGH);
#endif /* JMEM_GC_BEFORE_EACH_ALLOC */

  if (JERRY_CONTEXT (jmem_heap_allocated_size) + size >= JERRY_CONTEXT (jmem_heap_limit))
  {
    jmem_run_free_unused_memory_callbacks (JMEM_FREE_UNUSED_MEMORY_SEVERITY_LOW);
  }

  void *data_space_p = jmem_heap_alloc_block_internal (size);

  if (likely (data_space_p != NULL))
  {
    VALGRIND_FREYA_MALLOCLIKE_SPACE (data_space_p, size);
    return data_space_p;
  }

  for (jmem_free_unused_memory_severity_t severity = JMEM_FREE_UNUSED_MEMORY_SEVERITY_LOW;
       severity <= JMEM_FREE_UNUSED_MEMORY_SEVERITY_HIGH;
       severity = (jmem_free_unused_memory_severity_t) (severity + 1))
  {
    jmem_run_free_unused_memory_callbacks (severity);

    data_space_p = jmem_heap_alloc_block_internal (size);

    if (likely (data_space_p != NULL))
    {
      VALGRIND_FREYA_MALLOCLIKE_SPACE (data_space_p, size);
      return data_space_p;
    }
  }

  JERRY_ASSERT (data_space_p == NULL);

  if (!ret_null_on_error)
  {
    jerry_fatal (ERR_OUT_OF_MEMORY);
  }

  return data_space_p;
} /* jmem_heap_gc_and_alloc_block */
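The allocator's shape is worth noting: one optimistic attempt, then a loop that re-runs the 'free unused memory' callbacks at increasing severity before each retry, and only then the fatal path. Here is a standalone sketch of that escalation loop; the demo_* names stand in for the engine's allocator and GC hooks and are assumptions, not the real jmem API.

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical severity levels mirroring jmem_free_unused_memory_severity_t. */
typedef enum
{
  DEMO_SEVERITY_LOW,
  DEMO_SEVERITY_HIGH
} demo_severity_t;

/* Stand-ins for the engine's low-level allocator and GC callbacks. */
static void *demo_try_alloc (size_t size) { return malloc (size); }
static void demo_run_gc (demo_severity_t severity) { (void) severity; }

/* Same retry shape as jmem_heap_gc_and_alloc_block: one optimistic attempt,
 * then re-run the GC at increasing severity between further attempts. */
static void *
demo_gc_and_alloc (size_t size, bool ret_null_on_error)
{
  void *p = demo_try_alloc (size);

  if (p != NULL)
  {
    return p;
  }

  for (demo_severity_t severity = DEMO_SEVERITY_LOW;
       severity <= DEMO_SEVERITY_HIGH;
       severity = (demo_severity_t) (severity + 1))
  {
    demo_run_gc (severity);
    p = demo_try_alloc (size);

    if (p != NULL)
    {
      return p;
    }
  }

  if (!ret_null_on_error)
  {
    fprintf (stderr, "fatal: out of memory\n");
    abort ();
  }

  return NULL;
}

int
main (void)
{
  char *buf = demo_gc_and_alloc (64, true);
  printf ("allocated: %s\n", buf != NULL ? "yes" : "no");
  free (buf);
  return 0;
}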
Example #4
/**
 * Handle execution of control path that should be unreachable
 */
void __noreturn
jerry_unreachable (const char *file, /**< file name */
                   const char *function, /**< function name */
                   const uint32_t line) /**< line */
{
  JERRY_ERROR_MSG ("ICE: Unreachable control path at %s(%s):%lu was executed.\n",
                   file,
                   function,
                   (unsigned long) line);

  jerry_fatal (ERR_FAILED_INTERNAL_ASSERTION);
} /* jerry_unreachable */
Example #5
/**
 * Handle failed assertion
 */
void __noreturn
jerry_assert_fail (const char *assertion, /**< assertion condition string */
                   const char *file, /**< file name */
                   const char *function, /**< function name */
                   const uint32_t line) /**< line */
{
  JERRY_ERROR_MSG ("ICE: Assertion '%s' failed at %s(%s):%lu.\n",
                   assertion,
                   file,
                   function,
                   (unsigned long) line);

  jerry_fatal (ERR_FAILED_INTERNAL_ASSERTION);
} /* jerry_assert_fail */
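Both handlers receive the assertion text and the call site explicitly, so presumably a macro such as JERRY_ASSERT stringizes its condition and captures __FILE__, __func__ and __LINE__ at the point of use. The wrapper below is a hypothetical sketch of that plumbing, not the engine's actual macro.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Stand-in with the same signature shape as jerry_assert_fail. */
static void
demo_assert_fail (const char *assertion, const char *file,
                  const char *function, const uint32_t line)
{
  fprintf (stderr, "ICE: Assertion '%s' failed at %s(%s):%lu.\n",
           assertion, file, function, (unsigned long) line);
  abort ();
}

/* Hypothetical wrapper: # stringizes the condition; __FILE__, __func__
 * and __LINE__ capture the call site. */
#define DEMO_ASSERT(expr) \
  do \
  { \
    if (!(expr)) \
    { \
      demo_assert_fail (#expr, __FILE__, __func__, (uint32_t) __LINE__); \
    } \
  } while (0)

int
main (void)
{
  int refs = 1;
  DEMO_ASSERT (refs >= 1); /* passes silently */
  DEMO_ASSERT (refs == 2); /* prints the ICE message and aborts */
  return 0;
}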
Example #6
/**
 * Increase reference counter of ecma-string.
 */
void
ecma_ref_ecma_string (ecma_string_t *string_p) /**< string descriptor */
{
  JERRY_ASSERT (string_p != NULL);
  JERRY_ASSERT (string_p->refs_and_container >= ECMA_STRING_REF_ONE);

  if (likely (string_p->refs_and_container < ECMA_STRING_MAX_REF))
  {
    /* Increase reference counter. */
    string_p->refs_and_container = (uint16_t) (string_p->refs_and_container + ECMA_STRING_REF_ONE);
  }
  else
  {
    jerry_fatal (ERR_REF_COUNT_LIMIT);
  }
} /* ecma_ref_ecma_string */
Example #7
/**
 * Handle failed assertion
 */
void __noreturn
jerry_assert_fail (const char *assertion, /**< assertion condition string */
                   const char *file, /**< file name */
                   const char *function, /**< function name */
                   const uint32_t line) /**< line */
{
#if !defined (JERRY_NDEBUG) || !defined (JERRY_DISABLE_HEAVY_DEBUG)
    printf ("ICE: Assertion '%s' failed at %s(%s):%lu.\n",
            assertion, file, function, (unsigned long) line);
#else /* !JERRY_NDEBUG || !JERRY_DISABLE_HEAVY_DEBUG */
    (void) assertion;
    (void) file;
    (void) function;
    (void) line;
#endif /* JERRY_NDEBUG && JERRY_DISABLE_HEAVY_DEBUG */

    jerry_fatal (ERR_FAILED_INTERNAL_ASSERTION);
} /* jerry_assert_fail */
Example #8
/**
 * Handle unimplemented case execution
 */
void __noreturn
jerry_unimplemented (const char *comment, /**< comment attached to the unimplemented mark if it exists,
                                               NULL otherwise */
                     const char *file, /**< file name */
                     const char *function, /**< function name */
                     const uint32_t line) /**< line */
{
#ifndef JERRY_NDEBUG
    printf ("SORRY: Unimplemented case at %s(%s):%lu was executed", file, function, (unsigned long) line);
#else /* !JERRY_NDEBUG */
    (void) file;
    (void) function;
    (void) line;
#endif /* JERRY_NDEBUG */

    if (comment != NULL)
    {
        printf ("(%s)", comment);
    }
    printf (".\n");

    jerry_fatal (ERR_UNIMPLEMENTED_CASE);
} /* jerry_unimplemented */
Example #9
/**
 * Handle execution of control path that should be unreachable
 */
void __noreturn
jerry_unreachable (const char *comment, /**< comment attached to the unreachable mark if it exists,
                                             NULL otherwise */
                   const char *file, /**< file name */
                   const char *function, /**< function name */
                   const uint32_t line) /**< line */
{
#ifndef JERRY_NDEBUG
    printf ("ICE: Unreachable control path at %s(%s):%lu was executed", file, function, (unsigned long) line);
#else /* !JERRY_NDEBUG */
    (void) file;
    (void) function;
    (void) line;
#endif /* JERRY_NDEBUG */

    if (comment != NULL)
    {
        printf ("(%s)", comment);
    }
    printf (".\n");

    jerry_fatal (ERR_FAILED_INTERNAL_ASSERTION);
} /* jerry_unreachable */
Example #10
/**
 * Allocation of a memory region, running 'try to give memory back' callbacks if there is not enough memory.
 *
 * Note:
 *      if there is still not enough memory after running the callbacks, the engine is terminated
 *      with ERR_OUT_OF_MEMORY.
 *
 * Note:
 *      To reduce heap fragmentation there are two allocation modes: short-term and long-term.
 *
 *      If the allocation is short-term, the beginning of the heap is preferred; otherwise, the end of the heap.
 *
 *      It is assumed that all short-term allocations are used during relatively short, discrete sessions.
 *      After the end of a session, all short-term allocated regions are supposed to be freed.
 *
 * @return pointer to allocated memory block
 */
static void*
mem_heap_alloc_block_try_give_memory_back (size_t size_in_bytes, /**< size of region to allocate in bytes */
                                           mem_block_length_type_t length_type, /**< length type of the block
                                                                                 *   (one-chunked or general) */
                                           mem_heap_alloc_term_t alloc_term) /**< expected allocation term */
{
  size_t chunks = mem_get_block_chunks_count_from_data_size (size_in_bytes);
  if ((mem_heap.allocated_chunks + chunks) * MEM_HEAP_CHUNK_SIZE >= mem_heap.limit)
  {
    mem_run_try_to_give_memory_back_callbacks (MEM_TRY_GIVE_MEMORY_BACK_SEVERITY_LOW);
  }

  void *data_space_p = mem_heap_alloc_block_internal (size_in_bytes, length_type, alloc_term);

  if (likely (data_space_p != NULL))
  {
    return data_space_p;
  }

  for (mem_try_give_memory_back_severity_t severity = MEM_TRY_GIVE_MEMORY_BACK_SEVERITY_LOW;
       severity <= MEM_TRY_GIVE_MEMORY_BACK_SEVERITY_CRITICAL;
       severity = (mem_try_give_memory_back_severity_t) (severity + 1))
  {
    mem_run_try_to_give_memory_back_callbacks (severity);

    data_space_p = mem_heap_alloc_block_internal (size_in_bytes, length_type, alloc_term);

    if (data_space_p != NULL)
    {
      return data_space_p;
    }
  }

  JERRY_ASSERT (data_space_p == NULL);

  jerry_fatal (ERR_OUT_OF_MEMORY);
} /* mem_heap_alloc_block_try_give_memory_back */
Example #11
/**
 * Append a new value to an ecma-values collection
 */
void
ecma_append_to_values_collection (ecma_collection_header_t *header_p, /**< collection's header */
                                  ecma_value_t v, /**< ecma-value to append */
                                  bool do_ref_if_object) /**< if the value is an object value,
                                                              increase the reference counter of the object */
{
  const size_t values_in_chunk = sizeof (ecma_collection_chunk_t::data) / sizeof (ecma_value_t);

  size_t values_number = header_p->unit_number;
  size_t pos_of_new_value_in_chunk = values_number % values_in_chunk;

  values_number++;

  if ((ecma_length_t) values_number == values_number)
  {
    header_p->unit_number = (ecma_length_t) values_number;
  }
  else
  {
    jerry_fatal (ERR_OUT_OF_MEMORY);
  }

  ecma_collection_chunk_t *chunk_p = ECMA_GET_POINTER (ecma_collection_chunk_t,
                                                       header_p->last_chunk_cp);

  if (pos_of_new_value_in_chunk == 0)
  {
    /* all chunks are currently filled with values */

    chunk_p = ecma_alloc_collection_chunk ();
    chunk_p->next_chunk_cp = ECMA_NULL_POINTER;

    if (header_p->last_chunk_cp == ECMA_NULL_POINTER)
    {
      JERRY_ASSERT (header_p->first_chunk_cp == ECMA_NULL_POINTER);

      ECMA_SET_NON_NULL_POINTER (header_p->first_chunk_cp, chunk_p);
    }
    else
    {
      ecma_collection_chunk_t *last_chunk_p = ECMA_GET_NON_NULL_POINTER (ecma_collection_chunk_t,
                                                                         header_p->last_chunk_cp);

      JERRY_ASSERT (last_chunk_p->next_chunk_cp == ECMA_NULL_POINTER);

      ECMA_SET_NON_NULL_POINTER (last_chunk_p->next_chunk_cp, chunk_p);
    }

    ECMA_SET_NON_NULL_POINTER (header_p->last_chunk_cp, chunk_p);
  }
  else
  {
    /* the new value can be appended to the last chunk */
    JERRY_ASSERT (chunk_p != NULL);
  }

  ecma_value_t *values_p = (ecma_value_t *) chunk_p->data;

  JERRY_ASSERT ((uint8_t *) (values_p + pos_of_new_value_in_chunk + 1) <= (uint8_t *) (chunk_p + 1));

  values_p[pos_of_new_value_in_chunk] = ecma_copy_value (v, do_ref_if_object);
} /* ecma_append_to_values_collection */
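The append logic hinges on one modulus: when the write position within the current chunk wraps to 0, every existing chunk is full and a fresh chunk must be linked to the tail. Below is a self-contained sketch of the same chunked append, with plain pointers in place of the engine's compressed pointers and hypothetical demo_* names and capacity.

#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>

#define DEMO_VALUES_IN_CHUNK 4 /* assumed chunk capacity */

/* Hypothetical chunk: fixed-size payload plus a link to the next chunk. */
typedef struct demo_chunk
{
  int data[DEMO_VALUES_IN_CHUNK];
  struct demo_chunk *next_p;
} demo_chunk_t;

typedef struct
{
  size_t unit_number; /* number of stored values */
  demo_chunk_t *first_chunk_p;
  demo_chunk_t *last_chunk_p;
} demo_collection_t;

/* Same shape as ecma_append_to_values_collection: when the write position
 * wraps to 0, all chunks are full and a new tail chunk is linked in. */
static void
demo_append (demo_collection_t *header_p, int value)
{
  size_t pos = header_p->unit_number % DEMO_VALUES_IN_CHUNK;
  header_p->unit_number++;

  demo_chunk_t *chunk_p = header_p->last_chunk_p;

  if (pos == 0)
  {
    chunk_p = calloc (1, sizeof (demo_chunk_t));

    if (chunk_p == NULL)
    {
      abort (); /* out of memory */
    }

    if (header_p->last_chunk_p == NULL)
    {
      header_p->first_chunk_p = chunk_p; /* very first chunk */
    }
    else
    {
      header_p->last_chunk_p->next_p = chunk_p;
    }

    header_p->last_chunk_p = chunk_p;
  }

  chunk_p->data[pos] = value;
}

int
main (void)
{
  demo_collection_t coll = { 0, NULL, NULL };

  for (int i = 0; i < 10; i++)
  {
    demo_append (&coll, i); /* new chunks are allocated at i = 0, 4, 8 */
  }

  size_t chunks = 0;
  for (demo_chunk_t *c = coll.first_chunk_p; c != NULL; c = c->next_p)
  {
    chunks++;
  }

  printf ("%zu values in %zu chunks\n", coll.unit_number, chunks); /* 10 values in 3 chunks */
  return 0;
}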
Example #12
/**
 * 'Native call' opcode handler.
 */
ecma_completion_value_t
opfunc_native_call (opcode_t opdata, /**< operation data */
                    int_data_t *int_data) /**< interpreter context */
{
  const idx_t dst_var_idx = opdata.data.native_call.lhs;
  const idx_t native_call_id_idx = opdata.data.native_call.name;
  const idx_t args_number = opdata.data.native_call.arg_list;
  const opcode_counter_t lit_oc = int_data->pos;

  JERRY_ASSERT (native_call_id_idx < OPCODE_NATIVE_CALL__COUNT);

  int_data->pos++;

  JERRY_STATIC_ASSERT (OPCODE_NATIVE_CALL__COUNT < (1u << (sizeof (native_call_id_idx) * JERRY_BITSINBYTE)));

  ecma_completion_value_t ret_value = ecma_make_empty_completion_value ();

  MEM_DEFINE_LOCAL_ARRAY (arg_values, args_number, ecma_value_t);

  ecma_length_t args_read;
  ecma_completion_value_t get_arg_completion = fill_varg_list (int_data,
                                                               args_number,
                                                               arg_values,
                                                               &args_read);

  if (ecma_is_completion_value_empty (get_arg_completion))
  {
    JERRY_ASSERT (args_read == args_number);

    switch ((opcode_native_call_t)native_call_id_idx)
    {
      case OPCODE_NATIVE_CALL_LED_TOGGLE:
      case OPCODE_NATIVE_CALL_LED_ON:
      case OPCODE_NATIVE_CALL_LED_OFF:
      case OPCODE_NATIVE_CALL_LED_ONCE:
      case OPCODE_NATIVE_CALL_WAIT:
      {
        JERRY_UNIMPLEMENTED ("Device operations are not implemented.");
      }

      case OPCODE_NATIVE_CALL_PRINT:
      {
        for (ecma_length_t arg_index = 0;
             ecma_is_completion_value_empty (ret_value) && arg_index < args_read;
             arg_index++)
        {
          ECMA_TRY_CATCH (str_value,
                          ecma_op_to_string (arg_values[arg_index]),
                          ret_value);

          ecma_string_t *str_p = ecma_get_string_from_value (str_value);

          lit_utf8_size_t bytes = ecma_string_get_size (str_p);

          ssize_t utf8_str_size = (ssize_t) (bytes + 1);
          lit_utf8_byte_t *utf8_str_p = (lit_utf8_byte_t*) mem_heap_alloc_block ((size_t) utf8_str_size,
                                                                               MEM_HEAP_ALLOC_SHORT_TERM);
          if (utf8_str_p == NULL)
          {
            jerry_fatal (ERR_OUT_OF_MEMORY);
          }

          ecma_string_to_utf8_string (str_p, utf8_str_p, utf8_str_size);
          utf8_str_p[utf8_str_size - 1] = 0;

          FIXME ("Support unicode in printf.");
          if (arg_index < args_read - 1)
          {
            printf ("%s ", (char*) utf8_str_p);
          }
          else
          {
            printf ("%s", (char*) utf8_str_p);
          }

          mem_heap_free_block (utf8_str_p);

          ret_value = set_variable_value (int_data, lit_oc, dst_var_idx,
                                          ecma_make_simple_value (ECMA_SIMPLE_VALUE_UNDEFINED));

          ECMA_FINALIZE (str_value);
        }
        printf ("\n");
        break;
      }

      case OPCODE_NATIVE_CALL__COUNT:
      {
        JERRY_UNREACHABLE ();
      }
    }
  }
  else
  {
    JERRY_ASSERT (!ecma_is_completion_value_normal (get_arg_completion));

    ret_value = get_arg_completion;
  }

  for (ecma_length_t arg_index = 0;
       arg_index < args_read;
       arg_index++)
  {
    ecma_free_value (arg_values[arg_index], true);
  }

  MEM_FINALIZE_LOCAL_ARRAY (arg_values);

  return ret_value;
} /* opfunc_native_call */
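The print branch has to bridge two representations: ecma strings carry an explicit size, while printf wants a NUL-terminated buffer, so each argument is copied into a temporary allocation one byte larger and terminated by hand. A standalone sketch of just that copy-and-terminate step follows; demo_to_c_string is a hypothetical helper, not engine API.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Copy a sized, possibly non-NUL-terminated byte string into a fresh
 * buffer one byte larger, and terminate it so printf can consume it. */
static char *
demo_to_c_string (const char *bytes, size_t size)
{
  char *buf = malloc (size + 1); /* + 1 for the terminating NUL */

  if (buf == NULL)
  {
    fprintf (stderr, "fatal: out of memory\n");
    abort ();
  }

  memcpy (buf, bytes, size);
  buf[size] = '\0';
  return buf;
}

int
main (void)
{
  const char raw[2] = { 'h', 'i' }; /* sized string, no terminator */
  char *s = demo_to_c_string (raw, sizeof (raw));
  printf ("%s\n", s); /* prints: hi */
  free (s);
  return 0;
}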
Example #13
/**
 * Dump a single scopes tree into bytecode
 *
 * @return pointer to the bytecode header of the outermost scope
 */
bytecode_data_header_t *
bc_dump_single_scope (scopes_tree scope_p) /**< a node of scopes tree */
{
  const size_t entries_count = scope_p->max_uniq_literals_num;
  const vm_instr_counter_t instrs_count = scopes_tree_instrs_num (scope_p);
  const size_t blocks_count = JERRY_ALIGNUP (instrs_count, BLOCK_SIZE) / BLOCK_SIZE;
  const size_t func_scopes_count = scopes_tree_child_scopes_num (scope_p);
  const uint16_t var_decls_count = linked_list_get_length (scope_p->var_decls);
  const size_t bytecode_size = JERRY_ALIGNUP (instrs_count * sizeof (vm_instr_t), MEM_ALIGNMENT);
  const size_t hash_table_size = lit_id_hash_table_get_size_for_table (entries_count, blocks_count);
  const size_t declarations_area_size = JERRY_ALIGNUP (func_scopes_count * sizeof (mem_cpointer_t)
                                                       + var_decls_count * sizeof (lit_cpointer_t),
                                                       MEM_ALIGNMENT);
  const size_t header_and_tables_size = JERRY_ALIGNUP ((sizeof (bytecode_data_header_t)
                                                        + hash_table_size
                                                        + declarations_area_size),
                                                       MEM_ALIGNMENT);

  uint8_t *buffer_p = (uint8_t *) mem_heap_alloc_block (bytecode_size + header_and_tables_size,
                                                        MEM_HEAP_ALLOC_LONG_TERM);

  lit_id_hash_table *lit_id_hash_p = lit_id_hash_table_init (buffer_p + sizeof (bytecode_data_header_t),
                                                             hash_table_size,
                                                             entries_count, blocks_count);

  mem_cpointer_t *declarations_p = (mem_cpointer_t *) (buffer_p + sizeof (bytecode_data_header_t) + hash_table_size);

  for (size_t i = 0; i < func_scopes_count; i++)
  {
    declarations_p[i] = MEM_CP_NULL;
  }

  scopes_tree_dump_var_decls (scope_p, (lit_cpointer_t *) (declarations_p + func_scopes_count));

  vm_instr_t *bytecode_p = (vm_instr_t *) (buffer_p + header_and_tables_size);

  JERRY_ASSERT (scope_p->max_uniq_literals_num >= lit_id_hash_p->current_bucket_pos);

  bytecode_data_header_t *header_p = (bytecode_data_header_t *) buffer_p;

  if ((uint16_t) func_scopes_count != func_scopes_count)
  {
    jerry_fatal (ERR_OUT_OF_MEMORY);
  }

  bc_fill_bytecode_data_header (header_p,
                                lit_id_hash_p, bytecode_p,
                                declarations_p,
                                (uint16_t) func_scopes_count,
                                var_decls_count,
                                scope_p->strict_mode,
                                scope_p->ref_arguments,
                                scope_p->ref_eval,
                                scope_p->is_vars_and_args_to_regs_possible,
                                false,
                                false);

  JERRY_ASSERT (scope_p->bc_header_cp == MEM_CP_NULL);
  MEM_CP_SET_NON_NULL_POINTER (scope_p->bc_header_cp, header_p);

  return header_p;
} /* bc_dump_single_scope */
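Everything lands in one allocation here: the header, the literal-id hash table, the declarations area, and the instruction array are laid out back to back, each region starting at an offset computed up front and rounded up to the allocator's alignment. The sketch below reproduces only that offset arithmetic; the constants are illustrative assumptions, not the engine's real values.

#include <stddef.h>
#include <stdio.h>

#define DEMO_MEM_ALIGNMENT 8 /* assumed allocator alignment */
#define DEMO_ALIGNUP(v, a) ((((v) + (a) - 1) / (a)) * (a))

int
main (void)
{
  /* Illustrative region sizes; the real ones come from the scope being dumped. */
  const size_t header_size = 32; /* sizeof (bytecode_data_header_t), assumed */
  const size_t hash_table_size = 104;
  const size_t declarations_size = DEMO_ALIGNUP (6 * 4 + 10 * 2, DEMO_MEM_ALIGNMENT);
  const size_t bytecode_size = DEMO_ALIGNUP (50 * 4, DEMO_MEM_ALIGNMENT);

  /* Header, hash table and declarations are packed first; the instruction
   * array starts at the next aligned boundary after them. */
  const size_t header_and_tables_size = DEMO_ALIGNUP (header_size + hash_table_size + declarations_size,
                                                      DEMO_MEM_ALIGNMENT);

  printf ("hash table at offset   %zu\n", header_size);
  printf ("declarations at offset %zu\n", header_size + hash_table_size);
  printf ("bytecode at offset     %zu\n", header_and_tables_size);
  printf ("total allocation       %zu bytes\n", header_and_tables_size + bytecode_size);
  return 0;
}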