Example #1
	// return a previously allocated block of the given size to the allocator
	void Deallocate(u8* p, size_t size)
	{
		// the pointer must be aligned, the size valid, and the whole
		// block [p, p+size) must lie within the pool
		ENSURE((uintptr_t)p % allocationAlignment == 0);
		ENSURE(IsValidSize(size));
		ENSURE(pool_contains(&m_pool, p));
		ENSURE(pool_contains(&m_pool, p+size-1));

		Validate();

		m_stats.OnDeallocate(size);
		Coalesce(p, size);	// merge with any adjacent free regions
		AddToFreelist(p, size);

		Validate();
	}
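
Deallocate takes both the pointer and the size of the block, so the caller is responsible for remembering how large each allocation was. A minimal usage sketch follows; the enclosing class name and its Allocate(size) counterpart are not shown in this excerpt and are assumptions here.

// Hedged sketch, not part of the excerpt: assumes the class that defines
// Deallocate above is a HeaderlessAllocator-style class that also exposes
// a matching void* Allocate(size_t) member (both names are assumptions).
void AllocatorUsageSketch(HeaderlessAllocator& alloc)
{
	const size_t size = 256;	// the caller must remember this value;
					// Deallocate needs the same size back
	u8* p = (u8*)alloc.Allocate(size);	// assumed counterpart of Deallocate
	if(!p)
		return;	// allocation failed (e.g. pool exhausted)

	// ... use the 256-byte block ...

	alloc.Deallocate(p, size);	// same pointer, same size as requested
}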
Example #2
void pool_free(Pool* p, void* el)
{
	// only allowed to free items if we were initialized with
	// fixed el_size. (this avoids having to pass el_size here and
	// check if requested_size matches that when allocating)
	if(p->el_size == 0)
	{
		DEBUG_WARN_ERR(ERR::LOGIC);	// cannot free variable-size items
		return;
	}

	if(pool_contains(p, el))
		mem_freelist_AddToFront(p->freelist, el);
	else
		DEBUG_WARN_ERR(ERR::LOGIC);	// invalid pointer (not in pool)
}
Example #3
void* pool_alloc(Pool* p, size_t size)
{
	TIMER_ACCRUE(tc_pool_alloc);
	// if pool allows variable sizes, go with the size parameter,
	// otherwise the pool el_size setting.
	const size_t el_size = p->el_size? p->el_size : Align<allocationAlignment>(size);
	ASSERT(el_size != 0);

	// note: freelist is always empty in pools with variable-sized elements
	// because they disallow pool_free.
	void* el = mem_freelist_Detach(p->freelist);
	if(!el)	// freelist empty, need to allocate a new entry
	{
		// expand, if necessary
		if(da_reserve(&p->da, el_size) < 0)
			return 0;

		el = p->da.base + p->da.pos;
		p->da.pos += el_size;
	}

	ASSERT(pool_contains(p, el));	// paranoia
	return el;
}
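
Taken together, Examples #2 and #3 show the lifecycle of a fixed-size pool: elements are handed out by pool_alloc (reusing freelist entries when possible) and returned by pool_free, with pool_contains guarding both paths. The sketch below ties those calls together; pool_create and pool_destroy, along with their parameter order and return convention, are assumptions inferred from the calls visible above, not confirmed signatures.

// Hedged sketch (not from the excerpts). pool_create/pool_destroy and their
// signatures are assumed; only pool_alloc/pool_free/pool_contains appear above.
struct Node
{
	Node* next;
	int value;
};

void PoolUsageSketch()
{
	Pool nodePool;
	// assumed signature: (pool, bytes to reserve, fixed element size);
	// assumed to return a negative error code on failure, like da_reserve
	if(pool_create(&nodePool, 64*1024, sizeof(Node)) < 0)
		return;

	// the pool was created with a fixed el_size, so the size argument is
	// only a fallback (see Example #3) and pool_free is permitted
	Node* n = (Node*)pool_alloc(&nodePool, sizeof(Node));
	if(n)
	{
		ASSERT(pool_contains(&nodePool, n));	// same sanity check as Example #3
		n->value = 42;
		pool_free(&nodePool, n);	// returns the element to the freelist
	}

	(void)pool_destroy(&nodePool);	// assumed cleanup counterpart of pool_create
}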
Example #4
// map a handle's data pointer back to its index within the handle pool
static ssize_t h_idx_from_data(HDATA* hd)
{
	if(!pool_contains(&hpool, hd))
		WARN_RETURN(ERR::INVALID_POINTER);
	return (uintptr_t(hd) - uintptr_t(hpool.da.base))/hpool.el_size;
}
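
The index is simply the element's byte offset from the pool base divided by the fixed element size, so the mapping can be inverted. A hedged sketch of that inverse follows; the handle manager presumably has its own accessor for this, which is not shown in the excerpt.

// Hedged sketch: inverse of h_idx_from_data, derived from the same formula.
// Treating da.base as u8* and skipping further bounds checks are assumptions,
// since the pool's internals beyond da.base and el_size are not shown above.
static HDATA* h_data_from_idx_sketch(size_t idx)
{
	u8* p = hpool.da.base + idx*hpool.el_size;
	if(!pool_contains(&hpool, p))	// same guard as h_idx_from_data
		return 0;
	return (HDATA*)p;
}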