Example 1
/*
 * igb_alloc_rcb_lists - Memory allocation for the receive control blocks
 * of one ring.
 */
static int
igb_alloc_rcb_lists(igb_rx_data_t *rx_data)
{
	int i;
	int ret;
	rx_control_block_t *rcb;
	igb_t *igb = rx_data->rx_ring->igb;
	dma_buffer_t *rx_buf;
	uint32_t rcb_count;

	/*
	 * Allocate memory for the rx control blocks for work list and
	 * free list.
	 */
	rcb_count = rx_data->ring_size + rx_data->free_list_size;
	rcb = rx_data->rcb_area;

	for (i = 0; i < rcb_count; i++, rcb++) {
		ASSERT(rcb != NULL);

		if (i < rx_data->ring_size) {
			/* Attach the rx control block to the work list */
			rx_data->work_list[i] = rcb;
		} else {
			/* Attach the rx control block to the free list */
			rx_data->free_list[i - rx_data->ring_size] = rcb;
		}

		rx_buf = &rcb->rx_buf;
		ret = igb_alloc_dma_buffer(igb,
		    rx_buf, igb->rx_buf_size);

		if (ret != IGB_SUCCESS) {
			igb_log(igb, IGB_LOG_ERROR,
			    "Allocate rx dma buffer failed");
			goto alloc_rcb_lists_fail;
		}

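		/*
		 * Reserve IPHDR_ALIGN_ROOM bytes at the front of the DMA
		 * buffer so that the IP header of a received frame lands
		 * on a better-aligned boundary.
		 */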
		rx_buf->size -= IPHDR_ALIGN_ROOM;
		rx_buf->address += IPHDR_ALIGN_ROOM;
		rx_buf->dma_address += IPHDR_ALIGN_ROOM;

		rcb->ref_cnt = 1;
		rcb->rx_data = (igb_rx_data_t *)rx_data;
		rcb->free_rtn.free_func = igb_rx_recycle;
		rcb->free_rtn.free_arg = (char *)rcb;

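		/*
		 * Wrap the DMA buffer in an mblk.  igb_rx_recycle() is
		 * registered as the free routine, so the buffer is
		 * reclaimed when the upper layer frees the mblk (a sketch
		 * of this recycle pattern follows the example).
		 */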
		rcb->mp = desballoc((unsigned char *)
		    rx_buf->address,
		    rx_buf->size,
		    0, &rcb->free_rtn);
	}

	return (IGB_SUCCESS);

alloc_rcb_lists_fail:
	igb_free_rcb_lists(rx_data);

	return (IGB_FAILURE);
}
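
The free routine registered with desballoc() above is what lets the driver
loan its receive DMA buffers up the stack and get them back without copying.
Below is a minimal sketch of that recycle pattern; it is not the driver's
actual igb_rx_recycle(), and example_put_free_rcb() is a hypothetical helper
standing in for whatever returns the control block to the ring's free list.

/*
 * Sketch only: illustrates the desballoc() recycle pattern used in
 * Example 1.  Not the driver's actual igb_rx_recycle();
 * example_put_free_rcb() is a hypothetical helper.
 */
static void
example_rx_recycle(caddr_t arg)
{
	rx_control_block_t *rcb = (rx_control_block_t *)arg;
	dma_buffer_t *rx_buf = &rcb->rx_buf;

	/*
	 * Re-arm the control block with a fresh mblk around the same
	 * DMA buffer so it can be loaned up the stack again.
	 */
	rcb->mp = desballoc((unsigned char *)rx_buf->address,
	    rx_buf->size, 0, &rcb->free_rtn);

	/*
	 * Hypothetical helper: return the rcb to the ring's free list.
	 * The real callback also handles reference counting and the
	 * case where the ring is being torn down.
	 */
	example_put_free_rcb(rcb->rx_data, rcb);
}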
Example 2
/*
 * igb_alloc_tcb_lists - Memory allocation for the transmit control blocks
 * of one ring.
 */
static int
igb_alloc_tcb_lists(igb_tx_ring_t *tx_ring)
{
	int i;
	int ret;
	tx_control_block_t *tcb;
	dma_buffer_t *tx_buf;
	igb_t *igb = tx_ring->igb;
	dev_info_t *devinfo = igb->dip;

	/*
	 * Allocate memory for the work list.
	 */
	tx_ring->work_list = kmem_zalloc(sizeof (tx_control_block_t *) *
	    tx_ring->ring_size, KM_NOSLEEP);

	if (tx_ring->work_list == NULL) {
		igb_error(igb,
		    "Cound not allocate memory for tx work list");
		return (IGB_FAILURE);
	}

	/*
	 * Allocate memory for the free list.
	 */
	tx_ring->free_list = kmem_zalloc(sizeof (tx_control_block_t *) *
	    tx_ring->free_list_size, KM_NOSLEEP);

	if (tx_ring->free_list == NULL) {
		kmem_free(tx_ring->work_list,
		    sizeof (tx_control_block_t *) * tx_ring->ring_size);
		tx_ring->work_list = NULL;

		igb_error(igb,
		    "Cound not allocate memory for tx free list");
		return (IGB_FAILURE);
	}

	/*
	 * Allocate memory for the tx control blocks of the free list.
	 */
	tx_ring->tcb_area =
	    kmem_zalloc(sizeof (tx_control_block_t) *
	    tx_ring->free_list_size, KM_NOSLEEP);

	if (tx_ring->tcb_area == NULL) {
		kmem_free(tx_ring->work_list,
		    sizeof (tx_control_block_t *) * tx_ring->ring_size);
		tx_ring->work_list = NULL;

		kmem_free(tx_ring->free_list,
		    sizeof (tx_control_block_t *) * tx_ring->free_list_size);
		tx_ring->free_list = NULL;

		igb_error(igb,
		    "Cound not allocate memory for tx control blocks");
		return (IGB_FAILURE);
	}

	/*
	 * Allocate dma handles and buffers for the tx control blocks of
	 * the free list.
	 */
	tcb = tx_ring->tcb_area;
	for (i = 0; i < tx_ring->free_list_size; i++, tcb++) {
		ASSERT(tcb != NULL);

		tx_ring->free_list[i] = tcb;

		/*
		 * Pre-allocate dma handles for transmit. These dma handles
		 * will be dynamically bound to the data buffers passed down
		 * from the upper layers at transmit time.
		 */
		ret = ddi_dma_alloc_handle(devinfo,
		    &igb_tx_dma_attr,
		    DDI_DMA_DONTWAIT, NULL,
		    &tcb->tx_dma_handle);
		if (ret != DDI_SUCCESS) {
			tcb->tx_dma_handle = NULL;
			igb_error(igb,
			    "Could not allocate tx dma handle: %x", ret);
			goto alloc_tcb_lists_fail;
		}

		/*
		 * Pre-allocate transmit buffers for packets whose size is
		 * less than bcopy_thresh; such packets are bcopy'd into
		 * this buffer rather than DMA-bound at transmit time (see
		 * the sketch after this example).
		 */
		tx_buf = &tcb->tx_buf;

		ret = igb_alloc_dma_buffer(igb,
		    tx_buf, igb->tx_buf_size);

		if (ret != IGB_SUCCESS) {
			ASSERT(tcb->tx_dma_handle != NULL);
			ddi_dma_free_handle(&tcb->tx_dma_handle);
			tcb->tx_dma_handle = NULL;
			igb_error(igb, "Allocate tx dma buffer failed");
			goto alloc_tcb_lists_fail;
		}
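		/*
		 * MAX_TX_RING_SIZE is outside the valid descriptor index
		 * range; it marks the tcb as not yet tied to any tx
		 * descriptor.
		 */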
		tcb->last_index = MAX_TX_RING_SIZE;
	}

	return (IGB_SUCCESS);

alloc_tcb_lists_fail:
	igb_free_tcb_lists(tx_ring);

	return (IGB_FAILURE);
}
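
The comments in Example 2 explain why each tx control block carries both a
pre-allocated dma handle and a small pre-allocated copy buffer. The sketch
below shows how such a tcb might be consumed at transmit time under that
scheme; it is not the driver's actual transmit path, and example_tcb_fill()
together with its bcopy_thresh parameter is a hypothetical illustration.

/*
 * Sketch only: one way a pre-allocated tcb from Example 2 could be
 * filled for a single packet fragment.  Not the driver's actual tx
 * path; example_tcb_fill() is a hypothetical helper.
 */
static int
example_tcb_fill(tx_control_block_t *tcb, caddr_t buf, size_t len,
    uint32_t bcopy_thresh)
{
	ddi_dma_cookie_t cookie;
	uint_t ccount;
	int ret;

	if (len < bcopy_thresh) {
		/*
		 * Small fragment: copy it into the pre-allocated tx
		 * buffer; no DMA binding is needed.
		 */
		bcopy(buf, tcb->tx_buf.address, len);
		tcb->tx_buf.len = len;
		return (IGB_SUCCESS);
	}

	/*
	 * Large fragment: bind the pre-allocated dma handle to the
	 * caller's buffer.
	 */
	ret = ddi_dma_addr_bind_handle(tcb->tx_dma_handle, NULL,
	    buf, len, DDI_DMA_WRITE | DDI_DMA_STREAMING,
	    DDI_DMA_DONTWAIT, NULL, &cookie, &ccount);

	return ((ret == DDI_DMA_MAPPED) ? IGB_SUCCESS : IGB_FAILURE);
}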