Example #1
/**
 * Allocate a chunk of specified size
 *
 * @return pointer to allocated chunk, if allocation was successful,
 *         or NULL - if not enough memory.
 */
void * __attr_hot___ __attr_always_inline___
jmem_pools_alloc (size_t size) /**< size of the chunk */
{
#ifdef JMEM_GC_BEFORE_EACH_ALLOC
  jmem_run_free_unused_memory_callbacks (JMEM_FREE_UNUSED_MEMORY_SEVERITY_HIGH);
#endif /* JMEM_GC_BEFORE_EACH_ALLOC */

  if (size <= 8)
  {
    if (JERRY_CONTEXT (jmem_free_8_byte_chunk_p) != NULL)
    {
      const jmem_pools_chunk_t *const chunk_p = JERRY_CONTEXT (jmem_free_8_byte_chunk_p);

      JMEM_POOLS_STAT_REUSE ();

      VALGRIND_DEFINED_SPACE (chunk_p, sizeof (jmem_pools_chunk_t));

      JERRY_CONTEXT (jmem_free_8_byte_chunk_p) = chunk_p->next_p;

      VALGRIND_UNDEFINED_SPACE (chunk_p, sizeof (jmem_pools_chunk_t));

      return (void *) chunk_p;
    }
    else
    {
      JMEM_POOLS_STAT_NEW_ALLOC ();
      return (void *) jmem_heap_alloc_block (8);
    }
  }

#ifdef JERRY_CPOINTER_32_BIT
  JERRY_ASSERT (size <= 16);

  if (JERRY_CONTEXT (jmem_free_16_byte_chunk_p) != NULL)
  {
    const jmem_pools_chunk_t *const chunk_p = JERRY_CONTEXT (jmem_free_16_byte_chunk_p);

    JMEM_POOLS_STAT_REUSE ();

    VALGRIND_DEFINED_SPACE (chunk_p, sizeof (jmem_pools_chunk_t));

    JERRY_CONTEXT (jmem_free_16_byte_chunk_p) = chunk_p->next_p;

    VALGRIND_UNDEFINED_SPACE (chunk_p, sizeof (jmem_pools_chunk_t));

    return (void *) chunk_p;
  }
  else
  {
    JMEM_POOLS_STAT_NEW_ALLOC ();
    return (void *) jmem_heap_alloc_block (16);
  }
#else /* !JERRY_CPOINTER_32_BIT */
  JERRY_UNREACHABLE ();
  return NULL;
#endif /* JERRY_CPOINTER_32_BIT */
} /* jmem_pools_alloc */
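
The allocator above is a size-segregated pool: freed chunks sit on intrusive singly-linked free lists (the link pointer lives inside the freed chunk itself), so allocation is a constant-time pop of the list head with a heap fallback when the list is empty. Below is a minimal self-contained sketch of that technique; all names (tiny_pool_alloc, tiny_pool_free, free_8_p, free_16_p) are hypothetical stand-ins, and malloc stands in for jmem_heap_alloc_block.

#include <stddef.h>
#include <stdlib.h>

typedef struct pool_chunk
{
  struct pool_chunk *next_p; /* valid only while the chunk is on the free list */
} pool_chunk_t;

static pool_chunk_t *free_8_p = NULL;  /* free list of 8-byte chunks */
static pool_chunk_t *free_16_p = NULL; /* free list of 16-byte chunks */

/* Pop a chunk from the matching free list, or fall back to the heap. */
static void *
tiny_pool_alloc (size_t size)
{
  pool_chunk_t **list_p = (size <= 8) ? &free_8_p : &free_16_p;

  if (*list_p != NULL)
  {
    pool_chunk_t *chunk_p = *list_p;
    *list_p = chunk_p->next_p; /* unlink the head */
    return chunk_p;
  }

  return malloc (size <= 8 ? 8 : 16); /* stand-in for jmem_heap_alloc_block */
}

/* Push a chunk back: the freed memory itself stores the list link. */
static void
tiny_pool_free (void *ptr, size_t size)
{
  pool_chunk_t **list_p = (size <= 8) ? &free_8_p : &free_16_p;
  pool_chunk_t *chunk_p = (pool_chunk_t *) ptr;

  chunk_p->next_p = *list_p;
  *list_p = chunk_p;
}

int
main (void)
{
  void *a = tiny_pool_alloc (8);
  tiny_pool_free (a, 8);
  void *b = tiny_pool_alloc (8); /* reuses the chunk just freed */
  return (a == b) ? 0 : 1;
}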
Example #2
/**
 * Allocate a pool chunk of fixed size (JMEM_POOL_CHUNK_SIZE)
 *
 * @return pointer to allocated chunk, if allocation was successful,
 *         or NULL - if not enough memory.
 */
inline void * __attr_hot___ __attr_always_inline___
jmem_pools_alloc (void)
{
#ifdef JMEM_GC_BEFORE_EACH_ALLOC
  jmem_run_free_unused_memory_callbacks (JMEM_FREE_UNUSED_MEMORY_SEVERITY_HIGH);
#endif /* JMEM_GC_BEFORE_EACH_ALLOC */

  if (JERRY_CONTEXT (jmem_free_chunk_p) != NULL)
  {
    const jmem_pools_chunk_t *const chunk_p = JERRY_CONTEXT (jmem_free_chunk_p);

    JMEM_POOLS_STAT_REUSE ();

    VALGRIND_DEFINED_SPACE (chunk_p, JMEM_POOL_CHUNK_SIZE);

    JERRY_CONTEXT (jmem_free_chunk_p) = chunk_p->next_p;

    VALGRIND_UNDEFINED_SPACE (chunk_p, JMEM_POOL_CHUNK_SIZE);

    return (void *) chunk_p;
  }
  else
  {
    JMEM_POOLS_STAT_NEW_ALLOC ();
    return (void *) jmem_heap_alloc_block (JMEM_POOL_CHUNK_SIZE);
  }
} /* jmem_pools_alloc */
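
The VALGRIND_DEFINED_SPACE / VALGRIND_UNDEFINED_SPACE pairs in these functions exist because a chunk on the free list is, from Memcheck's point of view, freed memory: the allocator marks it defined just long enough to read the embedded next_p link, then marks it undefined again before handing it to the caller, so any read of uninitialized pool memory is still reported. A sketch of that bracketing using the underlying Memcheck client requests from <valgrind/memcheck.h> (they compile to cheap no-ops when not running under Valgrind); pool_chunk_t and free_chunk_p are hypothetical stand-ins.

#include <stddef.h>
#include <valgrind/memcheck.h>

typedef struct pool_chunk
{
  struct pool_chunk *next_p;
} pool_chunk_t;

static pool_chunk_t *free_chunk_p;

static void *
pool_alloc_annotated (void)
{
  pool_chunk_t *chunk_p = free_chunk_p;

  if (chunk_p == NULL)
  {
    return NULL;
  }

  /* The chunk is conceptually freed memory, so Memcheck treats it as
   * undefined. Temporarily mark it defined so reading next_p is legal. */
  VALGRIND_MAKE_MEM_DEFINED (chunk_p, sizeof (pool_chunk_t));

  free_chunk_p = chunk_p->next_p;

  /* Hand it out as freshly allocated, i.e. undefined again: any read
   * before the caller writes will be reported by Memcheck. */
  VALGRIND_MAKE_MEM_UNDEFINED (chunk_p, sizeof (pool_chunk_t));

  return chunk_p;
}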
Example #3
/**
 * Allocate a pool chunk of fixed size (MEM_POOL_CHUNK_SIZE)
 *
 * @return pointer to allocated chunk, if allocation was successful,
 *         or NULL - if not enough memory.
 */
uint8_t* __attr_always_inline___
mem_pools_alloc (void)
{
#ifdef MEM_GC_BEFORE_EACH_ALLOC
    mem_run_try_to_give_memory_back_callbacks (MEM_TRY_GIVE_MEMORY_BACK_SEVERITY_HIGH);
#endif /* MEM_GC_BEFORE_EACH_ALLOC */

    mem_check_pools ();

    do
    {
        if (mem_free_chunk_p != NULL)
        {
            mem_pool_chunk_t *chunk_p = mem_free_chunk_p;

            MEM_POOLS_STAT_ALLOC_CHUNK ();

#ifndef JERRY_NDEBUG
            mem_free_chunks_number--;
#endif /* !JERRY_NDEBUG */

            VALGRIND_DEFINED_SPACE (chunk_p, MEM_POOL_CHUNK_SIZE);

            mem_free_chunk_p = chunk_p->u.free.next_p;

            VALGRIND_UNDEFINED_SPACE (chunk_p, MEM_POOL_CHUNK_SIZE);

            mem_check_pools ();

            VALGRIND_FREYA_MALLOCLIKE_SPACE (chunk_p, MEM_POOL_CHUNK_SIZE);
            return (uint8_t *) chunk_p;
        }
        else
        {
            mem_pools_alloc_longpath ();

            /* the assertion guarantees that there will be no more than two iterations */
            JERRY_ASSERT (mem_free_chunk_p != NULL);
        }
    } while (true);
} /* mem_pools_alloc */
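
The do/while loop above is a refill-then-retry pattern: the fast path pops the free list, and when the list is empty a slow path (mem_pools_alloc_longpath) replenishes it, after which the assertion guarantees the second iteration succeeds. A hedged sketch of the same pattern follows; refill_pool is a hypothetical stand-in for the longpath, with malloc standing in for the underlying heap.

#include <assert.h>
#include <stddef.h>
#include <stdlib.h>

#define CHUNK_SIZE 16
#define CHUNKS_PER_PAGE 64

typedef struct chunk
{
  struct chunk *next_p;
} chunk_t;

static chunk_t *free_list_p = NULL;

/* Slow path: allocate one page and push every chunk onto the free list. */
static void
refill_pool (void)
{
  unsigned char *page_p = malloc (CHUNK_SIZE * CHUNKS_PER_PAGE);
  assert (page_p != NULL);

  for (int i = 0; i < CHUNKS_PER_PAGE; i++)
  {
    chunk_t *chunk_p = (chunk_t *) (page_p + i * CHUNK_SIZE);
    chunk_p->next_p = free_list_p;
    free_list_p = chunk_p;
  }
}

static void *
pool_alloc (void)
{
  do
  {
    if (free_list_p != NULL)
    {
      chunk_t *chunk_p = free_list_p;
      free_list_p = chunk_p->next_p;
      return chunk_p;
    }

    refill_pool ();
    /* As in the original, this guarantees at most two iterations. */
    assert (free_list_p != NULL);
  } while (1);
}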
Example #4
/**
 * Allocate a chunk in the pool
 *
 * @return pointer to the allocated chunk
 */
uint8_t*
mem_pool_alloc_chunk (mem_pool_state_t *pool_p) /**< pool */
{
  mem_check_pool (pool_p);

  JERRY_ASSERT (pool_p->free_chunks_number != 0);
  JERRY_ASSERT (pool_p->first_free_chunk < MEM_POOL_CHUNKS_NUMBER);

  mem_pool_chunk_index_t chunk_index = pool_p->first_free_chunk;
  uint8_t *chunk_p = MEM_POOL_CHUNK_ADDRESS (pool_p, chunk_index);

  VALGRIND_DEFINED_SPACE (chunk_p, MEM_POOL_CHUNK_SIZE);

  mem_pool_chunk_index_t *next_free_chunk_index_p = (mem_pool_chunk_index_t*) chunk_p;
  pool_p->first_free_chunk = *next_free_chunk_index_p;
  pool_p->free_chunks_number--;

  VALGRIND_UNDEFINED_SPACE (chunk_p, MEM_POOL_CHUNK_SIZE);

  mem_check_pool (pool_p);

  return chunk_p;
} /* mem_pool_alloc_chunk */
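
Unlike the pointer-linked lists in the earlier examples, this pool threads its free list through chunk indices: the first bytes of each free chunk store the index of the next free chunk, so a one-byte index suffices where a full pointer would not fit in a small chunk. A self-contained sketch of that layout; the pool_state_t type and its sizes are hypothetical.

#include <stddef.h>
#include <stdint.h>

#define POOL_CHUNKS 32
#define CHUNK_SIZE 8

typedef struct
{
  uint8_t chunks[POOL_CHUNKS][CHUNK_SIZE];
  uint8_t first_free_chunk; /* POOL_CHUNKS means "pool is full" */
  uint8_t free_chunks_number;
} pool_state_t;

static void
pool_init (pool_state_t *pool_p)
{
  /* Thread every chunk onto the free list: chunk i points to chunk i + 1. */
  for (uint8_t i = 0; i < POOL_CHUNKS; i++)
  {
    pool_p->chunks[i][0] = (uint8_t) (i + 1);
  }
  pool_p->first_free_chunk = 0;
  pool_p->free_chunks_number = POOL_CHUNKS;
}

static uint8_t *
pool_alloc_chunk (pool_state_t *pool_p)
{
  if (pool_p->free_chunks_number == 0)
  {
    return NULL;
  }

  uint8_t index = pool_p->first_free_chunk;
  uint8_t *chunk_p = pool_p->chunks[index];

  /* The next-free index lives in the chunk's first byte. */
  pool_p->first_free_chunk = chunk_p[0];
  pool_p->free_chunks_number--;

  return chunk_p;
}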
Example #5
/**
 * Allocation of memory region.
 *
 * See also:
 *          mem_heap_alloc_block
 *
 * @return pointer to allocated memory block - if allocation is successful,
 *         NULL - if there is not enough memory.
 */
static
void* mem_heap_alloc_block_internal (size_t size_in_bytes, /**< size of region to allocate in bytes */
                                     mem_block_length_type_t length_type, /**< length type of the block
                                                                           *   (one-chunked or general) */
                                     mem_heap_alloc_term_t alloc_term) /**< expected allocation term */
{
  mem_block_header_t *block_p;
  mem_direction_t direction;

  JERRY_ASSERT (size_in_bytes != 0);
  JERRY_ASSERT (length_type != mem_block_length_type_t::ONE_CHUNKED
                || size_in_bytes == mem_heap_get_chunked_block_data_size ());

  mem_check_heap ();

  if (alloc_term == MEM_HEAP_ALLOC_LONG_TERM)
  {
    block_p = mem_heap.first_block_p;
    direction = MEM_DIRECTION_NEXT;
  }
  else
  {
    JERRY_ASSERT (alloc_term == MEM_HEAP_ALLOC_SHORT_TERM);

    block_p = mem_heap.last_block_p;
    direction = MEM_DIRECTION_PREV;
  }

  /* searching for appropriate block */
  while (block_p != NULL)
  {
    VALGRIND_DEFINED_STRUCT (block_p);

    if (mem_is_block_free (block_p))
    {
      if (mem_get_block_data_space_size (block_p) >= size_in_bytes)
      {
        break;
      }
    }
    else
    {
      JERRY_ASSERT (!mem_is_block_free (block_p));
    }

    mem_block_header_t *next_block_p = mem_get_next_block_by_direction (block_p, direction);

    VALGRIND_NOACCESS_STRUCT (block_p);

    block_p = next_block_p;
  }

  if (block_p == NULL)
  {
    /* not enough free space */
    return NULL;
  }

  /* appropriate block found, allocating space */
  size_t new_block_size_in_chunks = mem_get_block_chunks_count_from_data_size (size_in_bytes);
  size_t found_block_size_in_chunks = mem_get_block_chunks_count (block_p);

  JERRY_ASSERT (new_block_size_in_chunks <= found_block_size_in_chunks);

  mem_heap.allocated_chunks += new_block_size_in_chunks;

  JERRY_ASSERT (mem_heap.allocated_chunks * MEM_HEAP_CHUNK_SIZE <= mem_heap.heap_size);

  if (mem_heap.allocated_chunks * MEM_HEAP_CHUNK_SIZE >= mem_heap.limit)
  {
    mem_heap.limit = JERRY_MIN (mem_heap.heap_size,
                                JERRY_MAX (mem_heap.limit + CONFIG_MEM_HEAP_DESIRED_LIMIT,
                                           mem_heap.allocated_chunks * MEM_HEAP_CHUNK_SIZE));
    JERRY_ASSERT (mem_heap.limit >= mem_heap.allocated_chunks * MEM_HEAP_CHUNK_SIZE);
  }

  mem_block_header_t *prev_block_p = mem_get_next_block_by_direction (block_p, MEM_DIRECTION_PREV);
  mem_block_header_t *next_block_p = mem_get_next_block_by_direction (block_p, MEM_DIRECTION_NEXT);

  if (new_block_size_in_chunks < found_block_size_in_chunks)
  {
    MEM_HEAP_STAT_FREE_BLOCK_SPLIT ();

    if (direction == MEM_DIRECTION_PREV)
    {
      prev_block_p = block_p;
      uint8_t *block_end_p = (uint8_t*) block_p + found_block_size_in_chunks * MEM_HEAP_CHUNK_SIZE;
      block_p = (mem_block_header_t*) (block_end_p - new_block_size_in_chunks * MEM_HEAP_CHUNK_SIZE);

      VALGRIND_DEFINED_STRUCT (prev_block_p);

      mem_set_block_next (prev_block_p, block_p);

      VALGRIND_NOACCESS_STRUCT (prev_block_p);

      if (next_block_p == NULL)
      {
        mem_heap.last_block_p = block_p;
      }
      else
      {
        VALGRIND_DEFINED_STRUCT (next_block_p);

        mem_set_block_prev (next_block_p, block_p);

        VALGRIND_NOACCESS_STRUCT (next_block_p);
      }
    }
    else
    {
      uint8_t *new_free_block_first_chunk_p = (uint8_t*) block_p + new_block_size_in_chunks * MEM_HEAP_CHUNK_SIZE;
      mem_init_block_header (new_free_block_first_chunk_p,
                             0,
                             MEM_BLOCK_FREE,
                             mem_block_length_type_t::GENERAL,
                             block_p,
                             next_block_p);

      mem_block_header_t *new_free_block_p = (mem_block_header_t*) new_free_block_first_chunk_p;

      if (next_block_p == NULL)
      {
        mem_heap.last_block_p = new_free_block_p;
      }
      else
      {
        VALGRIND_DEFINED_STRUCT (next_block_p);

        mem_set_block_prev (next_block_p, new_free_block_p);

        VALGRIND_NOACCESS_STRUCT (next_block_p);
      }

      next_block_p = new_free_block_p;
    }
  }

  mem_init_block_header ((uint8_t*) block_p,
                         size_in_bytes,
                         MEM_BLOCK_ALLOCATED,
                         length_type,
                         prev_block_p,
                         next_block_p);

  VALGRIND_DEFINED_STRUCT (block_p);

  MEM_HEAP_STAT_ALLOC_BLOCK (block_p);

  JERRY_ASSERT (mem_get_block_data_space_size (block_p) >= size_in_bytes);

  VALGRIND_NOACCESS_STRUCT (block_p);

  /* return data space beginning address */
  uint8_t *data_space_p = (uint8_t*) (block_p + 1);
  JERRY_ASSERT ((uintptr_t) data_space_p % MEM_ALIGNMENT == 0);

  VALGRIND_UNDEFINED_SPACE (data_space_p, size_in_bytes);

  mem_check_heap ();

  return data_space_p;
} /* mem_heap_alloc_block_internal */
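
Two details in the function above are worth isolating: a found free block is split when it is larger than needed, and the side it is carved from depends on the search direction, so short-term allocations (searched from the last block backwards) land at the end of the heap while long-term ones stay at the front. A minimal sketch of just that placement computation; the names, the direction enum, and CHUNK_SIZE are hypothetical.

#include <stddef.h>
#include <stdint.h>

#define CHUNK_SIZE 64

typedef enum { DIR_NEXT, DIR_PREV } direction_t;

/* Given a free block at block_p spanning found_chunks chunks, return where
 * a needed_chunks-chunk allocation should start after the split. */
static uint8_t *
split_position (uint8_t *block_p,
                size_t found_chunks,
                size_t needed_chunks,
                direction_t direction)
{
  if (direction == DIR_PREV)
  {
    /* Carve from the tail: the leftover free space stays at the front,
     * keeping short-term blocks packed at the end of the heap. */
    uint8_t *block_end_p = block_p + found_chunks * CHUNK_SIZE;
    return block_end_p - needed_chunks * CHUNK_SIZE;
  }

  /* Carve from the head: the leftover free space follows the allocation. */
  (void) found_chunks;
  return block_p;
}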
Example #6
/**
 * Allocation of memory region.
 *
 * See also:
 *          jmem_heap_alloc_block
 *
 * @return pointer to allocated memory block - if allocation is successful,
 *         NULL - if there is not enough memory.
 */
static __attr_hot___
void *jmem_heap_alloc_block_internal (const size_t size) /**< size of the region to allocate in bytes */
{
  // Align size
  const size_t required_size = ((size + JMEM_ALIGNMENT - 1) / JMEM_ALIGNMENT) * JMEM_ALIGNMENT;
  jmem_heap_free_t *data_space_p = NULL;

  VALGRIND_DEFINED_SPACE (&JERRY_HEAP_CONTEXT (first), sizeof (jmem_heap_free_t));

  // Fast path for 8 byte chunks, first region is guaranteed to be sufficient
  if (required_size == JMEM_ALIGNMENT
      && likely (JERRY_HEAP_CONTEXT (first).next_offset != JMEM_HEAP_END_OF_LIST))
  {
    data_space_p = JMEM_HEAP_GET_ADDR_FROM_OFFSET (JERRY_HEAP_CONTEXT (first).next_offset);
    JERRY_ASSERT (jmem_is_heap_pointer (data_space_p));

    VALGRIND_DEFINED_SPACE (data_space_p, sizeof (jmem_heap_free_t));
    JERRY_CONTEXT (jmem_heap_allocated_size) += JMEM_ALIGNMENT;
    JMEM_HEAP_STAT_ALLOC_ITER ();

    if (data_space_p->size == JMEM_ALIGNMENT)
    {
      JERRY_HEAP_CONTEXT (first).next_offset = data_space_p->next_offset;
    }
    else
    {
      JERRY_ASSERT (data_space_p->size > JMEM_ALIGNMENT);

      jmem_heap_free_t *remaining_p;
      remaining_p = JMEM_HEAP_GET_ADDR_FROM_OFFSET (JERRY_HEAP_CONTEXT (first).next_offset) + 1;

      VALGRIND_DEFINED_SPACE (remaining_p, sizeof (jmem_heap_free_t));
      remaining_p->size = data_space_p->size - JMEM_ALIGNMENT;
      remaining_p->next_offset = data_space_p->next_offset;
      VALGRIND_NOACCESS_SPACE (remaining_p, sizeof (jmem_heap_free_t));

      JERRY_HEAP_CONTEXT (first).next_offset = JMEM_HEAP_GET_OFFSET_FROM_ADDR (remaining_p);
    }

    VALGRIND_UNDEFINED_SPACE (data_space_p, sizeof (jmem_heap_free_t));

    if (unlikely (data_space_p == JERRY_CONTEXT (jmem_heap_list_skip_p)))
    {
      JERRY_CONTEXT (jmem_heap_list_skip_p) = JMEM_HEAP_GET_ADDR_FROM_OFFSET (JERRY_HEAP_CONTEXT (first).next_offset);
    }
  }
  // Slow path for larger regions
  else
  {
    uint32_t current_offset = JERRY_HEAP_CONTEXT (first).next_offset;
    jmem_heap_free_t *prev_p = &JERRY_HEAP_CONTEXT (first);

    while (current_offset != JMEM_HEAP_END_OF_LIST)
    {
      jmem_heap_free_t *current_p = JMEM_HEAP_GET_ADDR_FROM_OFFSET (current_offset);
      JERRY_ASSERT (jmem_is_heap_pointer (current_p));
      VALGRIND_DEFINED_SPACE (current_p, sizeof (jmem_heap_free_t));
      JMEM_HEAP_STAT_ALLOC_ITER ();

      const uint32_t next_offset = current_p->next_offset;
      JERRY_ASSERT (next_offset == JMEM_HEAP_END_OF_LIST
                    || jmem_is_heap_pointer (JMEM_HEAP_GET_ADDR_FROM_OFFSET (next_offset)));

      if (current_p->size >= required_size)
      {
        // Region is sufficiently big, store address
        data_space_p = current_p;
        JERRY_CONTEXT (jmem_heap_allocated_size) += required_size;

        // Region was larger than necessary
        if (current_p->size > required_size)
        {
          // Get address of remaining space
          jmem_heap_free_t *const remaining_p = (jmem_heap_free_t *) ((uint8_t *) current_p + required_size);

          // Update metadata
          VALGRIND_DEFINED_SPACE (remaining_p, sizeof (jmem_heap_free_t));
          remaining_p->size = current_p->size - (uint32_t) required_size;
          remaining_p->next_offset = next_offset;
          VALGRIND_NOACCESS_SPACE (remaining_p, sizeof (jmem_heap_free_t));

          // Update list
          VALGRIND_DEFINED_SPACE (prev_p, sizeof (jmem_heap_free_t));
          prev_p->next_offset = JMEM_HEAP_GET_OFFSET_FROM_ADDR (remaining_p);
          VALGRIND_NOACCESS_SPACE (prev_p, sizeof (jmem_heap_free_t));
        }
        // Block is an exact fit
        else
        {
          // Remove the region from the list
          VALGRIND_DEFINED_SPACE (prev_p, sizeof (jmem_heap_free_t));
          prev_p->next_offset = next_offset;
          VALGRIND_NOACCESS_SPACE (prev_p, sizeof (jmem_heap_free_t));
        }

        JERRY_CONTEXT (jmem_heap_list_skip_p) = prev_p;

        // Found enough space
        break;
      }

      VALGRIND_NOACCESS_SPACE (current_p, sizeof (jmem_heap_free_t));
      // Next in list
      prev_p = current_p;
      current_offset = next_offset;
    }
  }

  while (JERRY_CONTEXT (jmem_heap_allocated_size) >= JERRY_CONTEXT (jmem_heap_limit))
  {
    JERRY_CONTEXT (jmem_heap_limit) += CONFIG_MEM_HEAP_DESIRED_LIMIT;
  }

  VALGRIND_NOACCESS_SPACE (&JERRY_HEAP_CONTEXT (first), sizeof (jmem_heap_free_t));

  if (unlikely (!data_space_p))
  {
    return NULL;
  }

  JERRY_ASSERT ((uintptr_t) data_space_p % JMEM_ALIGNMENT == 0);
  VALGRIND_UNDEFINED_SPACE (data_space_p, size);
  JMEM_HEAP_STAT_ALLOC (size);

  return (void *) data_space_p;
} /* jmem_heap_alloc_block_internal */
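
This later version replaces the block headers and pointer links of Example #5 with a free list of {size, next_offset} records, where offsets are 32-bit distances from the heap base (halving link overhead on 64-bit targets), and every request is first rounded up to JMEM_ALIGNMENT. A sketch of those two mechanics under assumed names (heap_area, HEAP_SIZE, END_OF_LIST); the aligned attribute is GCC syntax, matching the surrounding code.

#include <stddef.h>
#include <stdint.h>

#define ALIGNMENT 8u
#define HEAP_SIZE 4096u
#define END_OF_LIST 0xFFFFFFFFu /* sentinel, like JMEM_HEAP_END_OF_LIST */

typedef struct
{
  uint32_t size;        /* size of this free region in bytes */
  uint32_t next_offset; /* offset of the next free region, or END_OF_LIST */
} free_region_t;

static uint8_t heap_area[HEAP_SIZE] __attribute__ ((aligned (8)));

/* Offsets are stored relative to the start of the heap area, so a link
 * costs 4 bytes regardless of pointer width. */
static free_region_t *
addr_from_offset (uint32_t offset)
{
  return (free_region_t *) (heap_area + offset);
}

static uint32_t
offset_from_addr (free_region_t *region_p)
{
  return (uint32_t) ((uint8_t *) region_p - heap_area);
}

/* Round a request up to a multiple of ALIGNMENT, exactly as the original
 * computes required_size = ((size + A - 1) / A) * A. */
static size_t
align_up (size_t size)
{
  return ((size + ALIGNMENT - 1) / ALIGNMENT) * ALIGNMENT;
}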