/** * Startup initialization of heap * * Note: * heap start and size should be aligned on MEM_HEAP_CHUNK_SIZE */ void mem_heap_init (uint8_t *heap_start, /**< first address of heap space */ size_t heap_size) /**< heap space size */ { JERRY_ASSERT (heap_start != NULL); JERRY_ASSERT (heap_size != 0); JERRY_STATIC_ASSERT ((MEM_HEAP_CHUNK_SIZE & (MEM_HEAP_CHUNK_SIZE - 1u)) == 0); JERRY_ASSERT ((uintptr_t) heap_start % MEM_ALIGNMENT == 0); JERRY_ASSERT ((uintptr_t) heap_start % MEM_HEAP_CHUNK_SIZE == 0); JERRY_ASSERT (heap_size % MEM_HEAP_CHUNK_SIZE == 0); JERRY_ASSERT (heap_size <= (1u << MEM_HEAP_OFFSET_LOG)); mem_heap.heap_start = heap_start; mem_heap.heap_size = heap_size; mem_heap.limit = CONFIG_MEM_HEAP_DESIRED_LIMIT; VALGRIND_NOACCESS_SPACE (heap_start, heap_size); mem_init_block_header (mem_heap.heap_start, 0, MEM_BLOCK_FREE, mem_block_length_type_t::GENERAL, NULL, NULL); mem_heap.first_block_p = (mem_block_header_t*) mem_heap.heap_start; mem_heap.last_block_p = mem_heap.first_block_p; MEM_HEAP_STAT_INIT (); } /* mem_heap_init */
/** * Allocation of memory region. * * See also: * mem_heap_alloc_block * * @return pointer to allocated memory block - if allocation is successful, * NULL - if there is not enough memory. */ static void* mem_heap_alloc_block_internal (size_t size_in_bytes, /**< size of region to allocate in bytes */ mem_block_length_type_t length_type, /**< length type of the block * (one-chunked or general) */ mem_heap_alloc_term_t alloc_term) /**< expected allocation term */ { mem_block_header_t *block_p; mem_direction_t direction; JERRY_ASSERT (size_in_bytes != 0); JERRY_ASSERT (length_type != mem_block_length_type_t::ONE_CHUNKED || size_in_bytes == mem_heap_get_chunked_block_data_size ()); mem_check_heap (); if (alloc_term == MEM_HEAP_ALLOC_LONG_TERM) { block_p = mem_heap.first_block_p; direction = MEM_DIRECTION_NEXT; } else { JERRY_ASSERT (alloc_term == MEM_HEAP_ALLOC_SHORT_TERM); block_p = mem_heap.last_block_p; direction = MEM_DIRECTION_PREV; } /* searching for appropriate block */ while (block_p != NULL) { VALGRIND_DEFINED_STRUCT (block_p); if (mem_is_block_free (block_p)) { if (mem_get_block_data_space_size (block_p) >= size_in_bytes) { break; } } else { JERRY_ASSERT (!mem_is_block_free (block_p)); } mem_block_header_t *next_block_p = mem_get_next_block_by_direction (block_p, direction); VALGRIND_NOACCESS_STRUCT (block_p); block_p = next_block_p; } if (block_p == NULL) { /* not enough free space */ return NULL; } /* appropriate block found, allocating space */ size_t new_block_size_in_chunks = mem_get_block_chunks_count_from_data_size (size_in_bytes); size_t found_block_size_in_chunks = mem_get_block_chunks_count (block_p); JERRY_ASSERT (new_block_size_in_chunks <= found_block_size_in_chunks); mem_heap.allocated_chunks += new_block_size_in_chunks; JERRY_ASSERT (mem_heap.allocated_chunks * MEM_HEAP_CHUNK_SIZE <= mem_heap.heap_size); if (mem_heap.allocated_chunks * MEM_HEAP_CHUNK_SIZE >= mem_heap.limit) { mem_heap.limit = JERRY_MIN (mem_heap.heap_size, JERRY_MAX 
(mem_heap.limit + CONFIG_MEM_HEAP_DESIRED_LIMIT, mem_heap.allocated_chunks * MEM_HEAP_CHUNK_SIZE)); JERRY_ASSERT (mem_heap.limit >= mem_heap.allocated_chunks * MEM_HEAP_CHUNK_SIZE); } mem_block_header_t *prev_block_p = mem_get_next_block_by_direction (block_p, MEM_DIRECTION_PREV); mem_block_header_t *next_block_p = mem_get_next_block_by_direction (block_p, MEM_DIRECTION_NEXT); if (new_block_size_in_chunks < found_block_size_in_chunks) { MEM_HEAP_STAT_FREE_BLOCK_SPLIT (); if (direction == MEM_DIRECTION_PREV) { prev_block_p = block_p; uint8_t *block_end_p = (uint8_t*) block_p + found_block_size_in_chunks * MEM_HEAP_CHUNK_SIZE; block_p = (mem_block_header_t*) (block_end_p - new_block_size_in_chunks * MEM_HEAP_CHUNK_SIZE); VALGRIND_DEFINED_STRUCT (prev_block_p); mem_set_block_next (prev_block_p, block_p); VALGRIND_NOACCESS_STRUCT (prev_block_p); if (next_block_p == NULL) { mem_heap.last_block_p = block_p; } else { VALGRIND_DEFINED_STRUCT (next_block_p); mem_set_block_prev (next_block_p, block_p); VALGRIND_NOACCESS_STRUCT (next_block_p); } } else { uint8_t *new_free_block_first_chunk_p = (uint8_t*) block_p + new_block_size_in_chunks * MEM_HEAP_CHUNK_SIZE; mem_init_block_header (new_free_block_first_chunk_p, 0, MEM_BLOCK_FREE, mem_block_length_type_t::GENERAL, block_p, next_block_p); mem_block_header_t *new_free_block_p = (mem_block_header_t*) new_free_block_first_chunk_p; if (next_block_p == NULL) { mem_heap.last_block_p = new_free_block_p; } else { VALGRIND_DEFINED_STRUCT (next_block_p); mem_block_header_t* new_free_block_p = (mem_block_header_t*) new_free_block_first_chunk_p; mem_set_block_prev (next_block_p, new_free_block_p); VALGRIND_NOACCESS_STRUCT (next_block_p); } next_block_p = new_free_block_p; } } mem_init_block_header ((uint8_t*) block_p, size_in_bytes, MEM_BLOCK_ALLOCATED, length_type, prev_block_p, next_block_p); VALGRIND_DEFINED_STRUCT (block_p); MEM_HEAP_STAT_ALLOC_BLOCK (block_p); JERRY_ASSERT (mem_get_block_data_space_size (block_p) >= size_in_bytes); 
VALGRIND_NOACCESS_STRUCT (block_p); /* return data space beginning address */ uint8_t *data_space_p = (uint8_t*) (block_p + 1); JERRY_ASSERT ((uintptr_t) data_space_p % MEM_ALIGNMENT == 0); VALGRIND_UNDEFINED_SPACE (data_space_p, size_in_bytes); mem_check_heap (); return data_space_p; } /* mem_heap_alloc_block_internal */
/*lint -sem(mem_malloc,1p,2n>=0&&(@p==0||@P==2n)) */
/**
 * Allocate 'size' bytes from 'heap_ptr'.
 *
 * Rounds the request (plus block header) up to a multiple of
 * kMinChunkSizeLite, finds a free block of at least that size, splits it if
 * it is larger, and returns a pointer to the data area just past the block
 * header. Guard bytes are stamped into the headers for corruption detection.
 * The whole operation is serialized on sns_uheap_lock.
 *
 * Returns NULL for a zero-size request, a request larger than the heap, a
 * request that would overflow the chunk arithmetic, or when no sufficiently
 * large free block exists.
 */
void* sns_mem_malloc(
  sns_mem_heap_type *heap_ptr,              /* Heap from which to allocate */
  mem_magic_number_struct *mem_magic_number, /* Magic numbers used to validate the heap */
  unsigned int size                          /* Number of bytes to allocate */
)
{
  unsigned long chunks;
    /* the computed minimum size of the memory block in chunks
       needed to satisfy the request */
  unsigned long actualSize;
    /* the computed minimum size of the memory block in bytes
       needed to satisfy the request */
  unsigned char bonusBytes;
    /* the computed number of unused bytes at the end of
       the allocated memory block.  Will always be < kMinChunkSizeLite */
  sns_mem_block_header_type *freeBlock = NULL;
    /* the free block found of size >= actualSize */
  void *answer = NULL;
    /* the address of memory to be returned to the caller */
  uint16 * pblk = NULL;
  uint32 blockHeaderSize=sizeof(sns_mem_block_header_type);
  /*- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*/

  MEMHEAP_ASSERT(heap_ptr != NULL);
  MEMHEAP_ASSERT(mem_magic_number);
  MEMHEAP_ASSERT(mem_magic_number->magic_num);
  MEMHEAP_ASSERT(mem_magic_number->magic_num_index_array);

  if (!size)
    return NULL;

  /* quick check if requested size of memory is available */
  if( (unsigned long) size > heap_ptr->total_bytes )
    return NULL;

  /* chunks overflow check : check max memory that can be malloc'd at a time */
  if( (0xFFFFFFFF - ( kMinChunkSizeLite + sizeof(sns_mem_block_header_type)) ) < ((unsigned long) size))
    return NULL;

#if defined FEATURE_MEM_DEBUG && defined FEATURE_QDSP6
  /* NOTE(review): for tiny (<=4 byte) requests the effective header is
     assumed to be 4 bytes smaller — presumably the debug header's trailing
     field overlaps the data; sns_mem_free makes the matching guess via
     header_guard. Verify against sns_mem_block_header_type's layout. */
  if(size <= 4)
  {
    blockHeaderSize=blockHeaderSize-4;
  }
#endif

  /* Round (size + header) up to whole chunks; bonusBytes is the resulting
     slack at the end of the block, by construction < kMinChunkSizeLite. */
  chunks = ((unsigned long) size + blockHeaderSize + kMinChunkSizeLite - 1) / kMinChunkSizeLite;
  actualSize = chunks * kMinChunkSizeLite;
  bonusBytes = (unsigned char) (actualSize - size - blockHeaderSize);

  qurt_rmutex_lock(&sns_uheap_lock);

  MEMHEAP_ASSERT(heap_ptr->magic_num == mem_magic_number->magic_num[heap_ptr->magic_num_index]);

  freeBlock = mem_find_free_block(heap_ptr, actualSize);
  if (freeBlock)
  {
    /* split the block (if necessary) and return the new block */
    MEMHEAP_ASSERT(freeBlock->forw_offset > 0);

    /* forw_offset and actualSize are both chunk aligned */
    if (freeBlock->forw_offset > actualSize)
    {
      /* must split into two free blocks */
      sns_mem_block_header_type *newBlock = (sns_mem_block_header_type *) ((char *) freeBlock + actualSize);
      mem_init_block_header(newBlock, freeBlock->forw_offset - actualSize, heap_ptr);
      /* the remainder inherits "last block in heap" status from the original */
      newBlock->last_flag = freeBlock->last_flag;
      freeBlock->forw_offset = actualSize;
      freeBlock->last_flag = 0;
      ++heap_ptr->total_blocks;
      pblk = (uint16*)newBlock;
      ADD_GUARD_BYTES_TO_FREE_HEADER(heap_ptr->magic_num_free, pblk);
    }

    /* mark the block as used and return it */
    freeBlock->free_flag = kBlockUsedLite;
    freeBlock->extra = bonusBytes;

    /* set up next block to search for next allocation request */
    heap_ptr->next_block = mem_get_next_block(heap_ptr, freeBlock);

    /* accounting: used_bytes tracks requested (not rounded) sizes */
    heap_ptr->used_bytes += size;
    MEMHEAP_ASSERT(heap_ptr->total_bytes >= heap_ptr->used_bytes);
    if (heap_ptr->used_bytes > heap_ptr->max_used)
    {
      heap_ptr->max_used = heap_ptr->used_bytes;
    }
    if (size > heap_ptr->max_request)
    {
      heap_ptr->max_request = size;
    }

    pblk = (uint16*)freeBlock;
    ADD_GUARD_BYTES_TO_USED_HEADER(heap_ptr->magic_num_used, pblk);

    /* data area starts immediately after the (possibly shrunken) header */
    answer = (char *) freeBlock + blockHeaderSize;
  }

#ifdef FEATURE_MEM_DEBUG
  if(answer != NULL)
  {
    MEMHEAP_ASSERT( freeBlock != NULL );
    /* record who allocated this block, for heap debugging */
    freeBlock->caller_ptr=MEM_HEAP_CALLER_ADDRESS(MEM_HEAP_CALLER_ADDRESS_LEVEL);
  }
#endif
  qurt_rmutex_unlock(&sns_uheap_lock);

  return answer;
} /* END mem_malloc */
/*lint -sem(mem_free,1p) */
/**
 * Return a block previously obtained from sns_mem_malloc to 'heap_ptr'.
 *
 * Recovers the block header from 'ptr', runs a battery of corruption checks
 * (alignment, heap bounds, guard bytes, sane offsets), marks the block free,
 * and updates the accounting. Freeing NULL is a no-op. When used_bytes drops
 * to zero the heap is collapsed back to a single free block. Serialized on
 * sns_uheap_lock.
 *
 * Returns SNS_MEMHEAP_SUCCESS (the only value returned by the visible code).
 */
int sns_mem_free(
  sns_mem_heap_type *heap_ptr,               /* Heap in which to free memory */
  mem_magic_number_struct *mem_magic_number, /* Magic numbers used to validate the heap */
  void *ptr                                  /* Memory to free */
)
{
  sns_mem_block_header_type *theBlock;
    /* The computed address of the memory header block in the heap
       that controls the memory referenced by ptr */
  uint32 sizeBlockHeader=sizeof(sns_mem_block_header_type);
  uint16 *pblk = NULL;
  /*- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*/

  MEMHEAP_ASSERT(heap_ptr != NULL);
  MEMHEAP_ASSERT(mem_magic_number);
  MEMHEAP_ASSERT(mem_magic_number->magic_num);
  MEMHEAP_ASSERT(mem_magic_number->magic_num_index_array);

  /* verify this heap was initialized with the expected magic number */
  MEMHEAP_ASSERT(heap_ptr->magic_num == mem_magic_number->magic_num[heap_ptr->magic_num_index]);

  /* NULL frees are tolerated (callers were found passing NULL); treated as
     success rather than asserting */
  if(ptr == NULL)
  {
    //MEMHEAP_ERROR(" NULL ptr occurenaces in mem_free()",0,0,0);
    //MEMHEAP_ASSERT(0);
    return SNS_MEMHEAP_SUCCESS;
  }

  /* free the passed in block */
  MEMHEAP_ASSERT(heap_ptr->first_block);

  qurt_rmutex_lock(&sns_uheap_lock);

  theBlock = (sns_mem_block_header_type *) ((char *) ptr - sizeBlockHeader);

#if defined FEATURE_MEM_DEBUG && defined FEATURE_QDSP6
  /* The header may have been shrunk by 4 bytes at allocation time (tiny
     requests — see sns_mem_malloc); detect which layout applies by probing
     the header guard at the assumed offset. */
  if ( theBlock->header_guard != heap_ptr->block_header_guard )
  {
    theBlock=(sns_mem_block_header_type *)((char *)theBlock+4);
    sizeBlockHeader=sizeBlockHeader-4;
  }
#endif

  /* block headers must sit on a kMinChunkSizeLite boundary */
  MEMHEAP_ASSERT((((uint32)theBlock)%kMinChunkSizeLite) == 0);

  /* boundry check for the ptr passed to free */
  MEMHEAP_ASSERT(BOUNDARY_CHECK(theBlock, heap_ptr));

  /* Try to detect corruption. */
  MEMHEAP_ASSERT(!theBlock->free_flag); /* Attempt to detect multiple frees of same block */

  /* Make sure forw_offset is reasonable */
  MEMHEAP_ASSERT(theBlock->forw_offset >= sizeBlockHeader);

  /* Make sure extra is reasonable */
  MEMHEAP_ASSERT(theBlock->extra < kMinChunkSizeLite);

  /* Make sure forw_offset is not spiling over the heap boundry */
  MEMHEAP_ASSERT(FRD_OFFSET_CHECK(theBlock, heap_ptr));

  //check for heap canary
  pblk = (uint16*)(theBlock);
  MEMHEAP_ASSERT(!INTEGRITY_CHECK_ON_USED_HEADER(heap_ptr->magic_num_used, pblk));

  /* Defensive re-check of free_flag (beyond the assert above): if asserts
     are compiled out, this avoids munging the heap on a double free. */
  if (!theBlock->free_flag)
  {
    /* used_bytes was charged with the originally requested size, which is
       forw_offset minus header minus the 'extra' slack bytes */
    MEMHEAP_ASSERT((theBlock->forw_offset - sizeBlockHeader - theBlock->extra) <= heap_ptr->used_bytes);
    heap_ptr->used_bytes -= theBlock->forw_offset - sizeBlockHeader - theBlock->extra;
    MEMHEAP_ASSERT(heap_ptr->total_bytes >= heap_ptr->used_bytes);

    theBlock->free_flag = (char) kBlockFreeLite;

    /* now backup the next pointer if applicable */
    //next_block = mem_get_next_block(heap_ptr, theBlock);
    if (theBlock < heap_ptr->next_block)
    {
      /* Backup now to lessen possible fragmentation */
      heap_ptr->next_block = theBlock;
    }

    pblk = (uint16*)theBlock;
    ADD_GUARD_BYTES_TO_FREE_HEADER(heap_ptr->magic_num_free, pblk);

    /* reset heap to initial state if everything is now freed */
    if (!heap_ptr->used_bytes)
    {
      /* reset heap now, but retain statistics */
      heap_ptr->next_block = heap_ptr->first_block;
      mem_init_block_header(heap_ptr->first_block, heap_ptr->total_bytes, heap_ptr);
      heap_ptr->first_block->last_flag = (char) kLastBlockLite;
      heap_ptr->total_blocks = 1;
      pblk = (uint16*)(heap_ptr->first_block);
      ADD_GUARD_BYTES_TO_FREE_HEADER(heap_ptr->magic_num_free, pblk);
    }
  }
  qurt_rmutex_unlock(&sns_uheap_lock);
  return SNS_MEMHEAP_SUCCESS;
} /* END mem_free */
/*lint -sem(mem_init_heap,1p,2p,2P>=3n) */
/**
 * Initialize 'heap_ptr' to manage the memory region at 'heap_mem_ptr'.
 *
 * The region start is rounded up to the next 16-byte paragraph boundary and
 * the usable size is truncated to whole kMinChunkSizeLite chunks. Fresh
 * random magic numbers (heap identity plus free/used header guards) are
 * drawn, the whole region is set up as a single free block, and guard bytes
 * are stamped into its header. Re-initializing an already-valid heap is a
 * no-op returning SNS_MEMHEAP_SUCCESS.
 */
int sns_mem_init_heap(
  sns_mem_heap_type *heap_ptr,               /* Statically allocated heap structure */
  mem_magic_number_struct *mem_magic_number, /* Base pointer for magic number array,index */
  void *heap_mem_ptr,                        /* Pointer to contiguous block of memory used for this heap */
  unsigned long heap_mem_size                /* The size in bytes of the memory pointed to by heap_mem_ptr */
)
{
  char *region_end_ptr;       /* one past the end of the donated region */
  char *aligned_start_ptr;    /* region start rounded up to a paragraph boundary */
  unsigned long whole_chunks; /* number of complete kMinChunkSizeLite chunks that fit */
  uint16 *header_words = NULL;

  MEMHEAP_ASSERT(heap_ptr);
  MEMHEAP_ASSERT(mem_magic_number);
  MEMHEAP_ASSERT(mem_magic_number->magic_num);
  MEMHEAP_ASSERT(mem_magic_number->magic_num_index_array);
  MEMHEAP_ASSERT(mem_magic_number->magic_num_index < SNS_MAX_HEAP_INIT); /* support at the most 30 heaps */

  /* Already carrying a valid magic number? Then this heap is live — leave it alone. */
  if( (heap_ptr->magic_num) &&
      (heap_ptr->magic_num == mem_magic_number->magic_num[heap_ptr->magic_num_index])){
    return SNS_MEMHEAP_SUCCESS;
  }

  memset(heap_ptr, 0, sizeof(sns_mem_heap_type));

  MEMHEAP_ASSERT(heap_mem_ptr);
  MEMHEAP_ASSERT(heap_mem_size);
  /* need room for at least one chunk even after worst-case alignment loss */
  MEMHEAP_ASSERT(heap_mem_size >= (2*kMinChunkSizeLite-1));

  region_end_ptr = ((char *)heap_mem_ptr) + heap_mem_size;

  /* Advance the start to the next paragraph (16-byte) boundary. Only the low
     bits of the pointer are inspected, so the cast to unsigned long is safe
     regardless of pointer width. */ /*lint --e(507)*/
  for (aligned_start_ptr = (char *)heap_mem_ptr;
       (((unsigned long)aligned_start_ptr) & 0x000FUL) != 0;
       ++aligned_start_ptr)
  {
    /* empty */
  }

  /* Initilize the heap mutex */
  qurt_rmutex_init(&sns_uheap_lock);

  whole_chunks = (unsigned long) ((region_end_ptr - aligned_start_ptr) / kMinChunkSizeLite);

  heap_ptr->first_block = (sns_mem_block_header_type *) aligned_start_ptr;
  heap_ptr->next_block = heap_ptr->first_block;

  /* Draw a fresh 4-byte heap magic number into the shared table, then copy
     it (and its index) into this heap. */
  sns_mem_heap_get_random_num((&(mem_magic_number->magic_num[mem_magic_number->magic_num_index_array[mem_magic_number->magic_num_index]])), 4);
  heap_ptr->magic_num = mem_magic_number->magic_num[mem_magic_number->magic_num_index_array[mem_magic_number->magic_num_index]];
  heap_ptr->magic_num_index = mem_magic_number->magic_num_index_array[mem_magic_number->magic_num_index];

  /* Per-heap 2-byte guard seeds for free and used block headers. */
  sns_mem_heap_get_random_num(&(heap_ptr->magic_num_free), 2);
  sns_mem_heap_get_random_num(&(heap_ptr->magic_num_used), 2);
  mem_magic_number->magic_num_index++;

  /* The entire usable region starts out as one free block. */
  mem_init_block_header(heap_ptr->first_block, whole_chunks * kMinChunkSizeLite, heap_ptr);
  heap_ptr->first_block->last_flag = (char) kLastBlockLite;

  heap_ptr->total_blocks = 1;
  heap_ptr->max_used = 0;
  heap_ptr->max_request = 0;
  heap_ptr->used_bytes = 0;
  heap_ptr->total_bytes = whole_chunks * kMinChunkSizeLite;

  header_words = (uint16*)(heap_ptr->first_block);
  ADD_GUARD_BYTES_TO_FREE_HEADER(heap_ptr->magic_num_free, header_words);

  return SNS_MEMHEAP_SUCCESS;
} /* END mem_init_heap */