/*
 * ixgbe_alloc_rcb_lists - Memory allocation for the receive control blocks
 * of one ring.
 *
 * Populates the ring's work list and free list with the rx control blocks
 * pre-allocated in rx_data->rcb_area, allocates a DMA receive buffer for
 * each, and wraps each buffer in an mblk via desballoc() so it can be
 * loaned upstream and later reclaimed by ixgbe_rx_recycle().
 *
 * Returns IXGBE_SUCCESS, or IXGBE_FAILURE after tearing down any
 * partially-built lists via ixgbe_free_rcb_lists().
 */
static int
ixgbe_alloc_rcb_lists(ixgbe_rx_data_t *rx_data)
{
	uint32_t i;		/* unsigned: compared against rcb_count */
	int ret;
	rx_control_block_t *rcb;
	ixgbe_t *ixgbe = rx_data->rx_ring->ixgbe;
	dma_buffer_t *rx_buf;
	uint32_t rcb_count;

	/*
	 * Allocate memory for the rx control blocks for work list and
	 * free list.
	 */
	rcb_count = rx_data->ring_size + rx_data->free_list_size;
	rcb = rx_data->rcb_area;

	for (i = 0; i < rcb_count; i++, rcb++) {
		ASSERT(rcb != NULL);

		if (i < rx_data->ring_size) {
			/* Attach the rx control block to the work list */
			rx_data->work_list[i] = rcb;
		} else {
			/* Attach the rx control block to the free list */
			rx_data->free_list[i - rx_data->ring_size] = rcb;
		}

		rx_buf = &rcb->rx_buf;
		ret = ixgbe_alloc_dma_buffer(ixgbe,
		    rx_buf, ixgbe->rx_buf_size);

		if (ret != IXGBE_SUCCESS) {
			ixgbe_error(ixgbe, "Allocate rx dma buffer failed");
			goto alloc_rcb_lists_fail;
		}

		/*
		 * Shift the usable region of the buffer forward by
		 * IPHDR_ALIGN_ROOM so the IP header of a received frame
		 * ends up suitably aligned.
		 */
		rx_buf->size -= IPHDR_ALIGN_ROOM;
		rx_buf->address += IPHDR_ALIGN_ROOM;
		rx_buf->dma_address += IPHDR_ALIGN_ROOM;

		rcb->ref_cnt = 1;
		rcb->rx_data = rx_data;
		rcb->free_rtn.free_func = ixgbe_rx_recycle;
		rcb->free_rtn.free_arg = (char *)rcb;

		/*
		 * NOTE(review): the desballoc() return value is not checked
		 * here; presumably a NULL rcb->mp is tolerated and handled
		 * later on the receive path -- confirm with the callers.
		 */
		rcb->mp = desballoc((unsigned char *)
		    rx_buf->address,
		    rx_buf->size,
		    0, &rcb->free_rtn);
	}

	return (IXGBE_SUCCESS);

alloc_rcb_lists_fail:
	ixgbe_free_rcb_lists(rx_data);

	return (IXGBE_FAILURE);
}
/*
 * ixgbe_alloc_tcb_lists - Memory allocation for the transmit control blocks
 * of one ring.
 *
 * Allocates the ring's work list, free list, and the tx control block
 * array itself, then for each control block pre-allocates a DMA handle
 * (bound later to upper-layer data buffers at transmit time) and a
 * bcopy-path transmit buffer.
 *
 * Returns IXGBE_SUCCESS, or IXGBE_FAILURE after undoing any partial
 * allocations.
 */
static int
ixgbe_alloc_tcb_lists(ixgbe_tx_ring_t *tx_ring)
{
	int i;
	int ret;
	tx_control_block_t *tcb;
	dma_buffer_t *tx_buf;
	ixgbe_t *ixgbe = tx_ring->ixgbe;
	dev_info_t *devinfo = ixgbe->dip;

	/*
	 * Allocate memory for the work list.
	 * KM_NOSLEEP: fail rather than block if memory is tight.
	 */
	tx_ring->work_list = kmem_zalloc(sizeof (tx_control_block_t *) *
	    tx_ring->ring_size, KM_NOSLEEP);

	if (tx_ring->work_list == NULL) {
		ixgbe_error(ixgbe,
		    "Could not allocate memory for tx work list");
		return (IXGBE_FAILURE);
	}

	/*
	 * Allocate memory for the free list.
	 */
	tx_ring->free_list = kmem_zalloc(sizeof (tx_control_block_t *) *
	    tx_ring->free_list_size, KM_NOSLEEP);

	if (tx_ring->free_list == NULL) {
		kmem_free(tx_ring->work_list,
		    sizeof (tx_control_block_t *) * tx_ring->ring_size);
		tx_ring->work_list = NULL;

		ixgbe_error(ixgbe,
		    "Could not allocate memory for tx free list");
		return (IXGBE_FAILURE);
	}

	/*
	 * Allocate memory for the tx control blocks of free list.
	 */
	tx_ring->tcb_area =
	    kmem_zalloc(sizeof (tx_control_block_t) *
	    tx_ring->free_list_size, KM_NOSLEEP);

	if (tx_ring->tcb_area == NULL) {
		kmem_free(tx_ring->work_list,
		    sizeof (tx_control_block_t *) * tx_ring->ring_size);
		tx_ring->work_list = NULL;

		kmem_free(tx_ring->free_list,
		    sizeof (tx_control_block_t *) * tx_ring->free_list_size);
		tx_ring->free_list = NULL;

		ixgbe_error(ixgbe,
		    "Could not allocate memory for tx control blocks");
		return (IXGBE_FAILURE);
	}

	/*
	 * Allocate dma memory for the tx control block of free list.
	 */
	tcb = tx_ring->tcb_area;
	for (i = 0; i < tx_ring->free_list_size; i++, tcb++) {
		ASSERT(tcb != NULL);

		tx_ring->free_list[i] = tcb;

		/*
		 * Pre-allocate dma handles for transmit. These dma handles
		 * will be dynamically bound to the data buffers passed down
		 * from the upper layers at the time of transmitting.
		 */
		ret = ddi_dma_alloc_handle(devinfo,
		    &ixgbe_tx_dma_attr,
		    DDI_DMA_DONTWAIT, NULL,
		    &tcb->tx_dma_handle);
		if (ret != DDI_SUCCESS) {
			tcb->tx_dma_handle = NULL;
			ixgbe_error(ixgbe,
			    "Could not allocate tx dma handle: %x", ret);
			goto alloc_tcb_lists_fail;
		}

		/*
		 * Pre-allocate transmit buffers for packets that the
		 * size is less than bcopy_thresh.
		 */
		tx_buf = &tcb->tx_buf;

		ret = ixgbe_alloc_dma_buffer(ixgbe,
		    tx_buf, ixgbe->tx_buf_size);

		if (ret != IXGBE_SUCCESS) {
			ASSERT(tcb->tx_dma_handle != NULL);
			ddi_dma_free_handle(&tcb->tx_dma_handle);
			tcb->tx_dma_handle = NULL;
			ixgbe_error(ixgbe, "Allocate tx dma buffer failed");
			goto alloc_tcb_lists_fail;
		}

		tcb->last_index = MAX_TX_RING_SIZE;
	}

	return (IXGBE_SUCCESS);

alloc_tcb_lists_fail:
	/*
	 * NOTE(review): work_list, free_list and tcb_area are not released
	 * on this path; presumably ixgbe_free_tcb_lists() or the caller
	 * takes care of them -- confirm against that function.
	 */
	ixgbe_free_tcb_lists(tx_ring);

	return (IXGBE_FAILURE);
}