/* Return a linked chain of buffer descriptors to the pool.
 * Walks the p_next_desc chain, handing each descriptor back through
 * put_buffer_helper(), then sanity-checks the pool accounting: holding
 * more buffers than were ever created indicates a double-free. */
inline void buffer_pool::put_buffers(mem_buf_desc_t *buff_list)
{
	__log_info_funcall("returning list, present %lu, created %lu", m_n_buffers, m_n_buffers_created);
	for (mem_buf_desc_t *cur = buff_list; cur != NULL; ) {
		mem_buf_desc_t *following = cur->p_next_desc;
		put_buffer_helper(cur);
		cur = following;
	}
	if (unlikely(m_n_buffers > m_n_buffers_created)) {
		buffersPanic();
	}
}
/**
 * Construct a buffer pool backed by a single allocated-and-registered
 * memory region.
 *
 * @param buffer_count         number of buffers to carve from the region;
 *                             0 means "just register a raw region of
 *                             buf_size bytes" with no descriptors.
 * @param buf_size             payload size of each buffer (or, when
 *                             buffer_count is 0, the total region size).
 * @param p_ib_ctx_h           IB context the memory is registered against.
 * @param owner                owner recorded in every descriptor.
 * @param custom_free_function lwip custom-pbuf free callback stored in
 *                             every descriptor.
 */
buffer_pool::buffer_pool(size_t buffer_count, size_t buf_size, ib_ctx_handler *p_ib_ctx_h,
			 mem_buf_desc_owner *owner, pbuf_free_custom_fn custom_free_function) :
	m_lock_spin("buffer_pool"), m_n_buffers(0), m_n_buffers_created(buffer_count), m_p_head(NULL)
{
	size_t sz_aligned_element = 0;
	uint8_t *ptr_buff, *ptr_desc;

	// BUGFIX: buffer_count is size_t — "%d" was undefined behavior on LP64;
	// use "%lu" to match the other counters logged in this file.
	__log_info_func("count = %lu", buffer_count);

	m_p_bpool_stat = &m_bpool_stat_static;
	memset(m_p_bpool_stat, 0, sizeof(*m_p_bpool_stat));
	vma_stats_instance_create_bpool_block(m_p_bpool_stat);

	size_t size;
	if (buffer_count) {
		// Round each payload slot up to the alignment boundary
		// (MCE_ALIGNMENT is presumably alignment-1 — the classic
		// (x + mask) & ~mask round-up; verify against its definition).
		sz_aligned_element = (buf_size + MCE_ALIGNMENT) & (~MCE_ALIGNMENT);
		// One descriptor + one aligned payload per buffer, plus slack
		// so the payload area itself can be aligned below.
		size = (sizeof(mem_buf_desc_t) + sz_aligned_element) * buffer_count + MCE_ALIGNMENT;
	} else {
		size = buf_size;
	}
	void *data_block = m_allocator.alloc_and_reg_mr(size, p_ib_ctx_h);

	if (!buffer_count) return;

	// Align the payload base; all descriptors live after the payload slots.
	ptr_buff = (uint8_t *)((unsigned long)((char*)data_block + MCE_ALIGNMENT) & (~MCE_ALIGNMENT));
	ptr_desc = ptr_buff + sz_aligned_element * buffer_count;

	// Split the block to buffers
	for (size_t i = 0; i < buffer_count; ++i) {
		// Zero the descriptor storage first: the two-argument
		// mem_buf_desc_t ctor may not initialize every field.
		memset(ptr_desc, 0, sizeof(mem_buf_desc_t));
		mem_buf_desc_t *desc = new (ptr_desc) mem_buf_desc_t(ptr_buff, buf_size);
		desc->p_desc_owner = owner;
		desc->lwip_pbuf.custom_free_function = custom_free_function;
		put_buffer_helper(desc);
#ifdef DEFINED_VMAPOLL
		desc->rx.vma_polled = false;
#endif
		ptr_buff += sz_aligned_element;
		ptr_desc += sizeof(mem_buf_desc_t);
	}

	__log_info_func("done");
}
/* Return up to 'count' descriptor entries from 'buffers' back to the pool.
 * Each entry popped off the back of the queue may itself be a
 * p_next_desc-linked chain; every element of every chain is returned
 * individually, then pool accounting is sanity-checked. */
void buffer_pool::put_buffers(descq_t *buffers, size_t count)
{
	__log_info_funcall("returning %lu, present %lu, created %lu", count, m_n_buffers, m_n_buffers_created);
	size_t todo = MIN(count, buffers->size());
	while (todo--) {
		mem_buf_desc_t *chain = buffers->get_and_pop_back();
		while (chain) {
			mem_buf_desc_t *following = chain->p_next_desc;
			put_buffer_helper(chain);
			chain = following;
		}
	}
	if (unlikely(m_n_buffers > m_n_buffers_created)) {
		buffersPanic();
	}
}
/**
 * Construct a buffer pool backed by a single allocated-and-registered
 * memory region (no explicit IB context — registration uses NULL).
 * The total region size is recorded in m_size.
 *
 * @param buffer_count         number of buffers to carve from the region;
 *                             0 means "just register a raw region of
 *                             buf_size bytes" with no descriptors.
 * @param buf_size             payload size of each buffer (or, when
 *                             buffer_count is 0, the total region size).
 * @param custom_free_function lwip custom-pbuf free callback passed to
 *                             every descriptor's constructor.
 */
buffer_pool::buffer_pool(size_t buffer_count, size_t buf_size, pbuf_free_custom_fn custom_free_function) :
	m_lock_spin("buffer_pool"), m_n_buffers(0), m_n_buffers_created(buffer_count), m_p_head(NULL)
{
	size_t sz_aligned_element = 0;
	uint8_t *ptr_buff, *ptr_desc;

	// BUGFIX: buffer_count is size_t — "%d" was undefined behavior on LP64;
	// use "%lu" to match the other counters logged in this file.
	__log_info_func("count = %lu", buffer_count);

	m_p_bpool_stat = &m_bpool_stat_static;
	memset(m_p_bpool_stat, 0, sizeof(*m_p_bpool_stat));
	vma_stats_instance_create_bpool_block(m_p_bpool_stat);

	if (buffer_count) {
		// Round each payload slot up to the alignment boundary
		// (MCE_ALIGNMENT is presumably alignment-1 — the classic
		// (x + mask) & ~mask round-up; verify against its definition).
		sz_aligned_element = (buf_size + MCE_ALIGNMENT) & (~MCE_ALIGNMENT);
		// One descriptor + one aligned payload per buffer, plus slack
		// so the payload area itself can be aligned below.
		m_size = (sizeof(mem_buf_desc_t) + sz_aligned_element) * buffer_count + MCE_ALIGNMENT;
	} else {
		m_size = buf_size;
	}
	void *data_block = m_allocator.alloc_and_reg_mr(m_size, NULL);

	if (!buffer_count) return;

	// Align the payload base; all descriptors live after the payload slots.
	ptr_buff = (uint8_t *)((unsigned long)((char*)data_block + MCE_ALIGNMENT) & (~MCE_ALIGNMENT));
	ptr_desc = ptr_buff + sz_aligned_element * buffer_count;

	// Split the block to buffers
	for (size_t i = 0; i < buffer_count; ++i) {
		mem_buf_desc_t *desc = new (ptr_desc) mem_buf_desc_t(ptr_buff, buf_size, custom_free_function);
		put_buffer_helper(desc);
		ptr_buff += sz_aligned_element;
		ptr_desc += sizeof(mem_buf_desc_t);
	}

	__log_info_func("done");
}