static inline int
ixgbe_tx_free_bufs(struct ixgbe_tx_queue *txq)
{
    struct ixgbe_tx_entry_v *txep;
    uint32_t status;
    uint32_t n;
    uint32_t i;
    int nb_free = 0;
    struct rte_mbuf *m, *free[RTE_IXGBE_TX_MAX_FREE_BUF_SZ];

    /* check DD bit on threshold descriptor */
    status = txq->tx_ring[txq->tx_next_dd].wb.status;
    if (!(status & IXGBE_ADVTXD_STAT_DD))
        return 0;

    n = txq->tx_rs_thresh;

    /*
     * first buffer to free from S/W ring is at index
     * tx_next_dd - (tx_rs_thresh - 1)
     */
    txep = &((struct ixgbe_tx_entry_v *)txq->sw_ring)[txq->tx_next_dd -
            (n - 1)];
    m = __rte_pktmbuf_prefree_seg(txep[0].mbuf);
    if (likely(m != NULL)) {
        free[0] = m;
        nb_free = 1;
        for (i = 1; i < n; i++) {
            m = __rte_pktmbuf_prefree_seg(txep[i].mbuf);
            if (likely(m != NULL)) {
                if (likely(m->pool == free[0]->pool)) {
                    free[nb_free++] = m;
                } else {
                    rte_mempool_put_bulk(free[0]->pool,
                            (void **)free, nb_free);
                    free[0] = m;
                    nb_free = 1;
                }
            }
        }
        rte_mempool_put_bulk(free[0]->pool, (void **)free, nb_free);
    } else {
        for (i = 1; i < n; i++) {
            m = __rte_pktmbuf_prefree_seg(txep[i].mbuf);
            if (m != NULL)
                rte_mempool_put(m->pool, m);
        }
    }

    /* buffers were freed, update counters */
    txq->nb_tx_free = (uint16_t)(txq->nb_tx_free + txq->tx_rs_thresh);
    txq->tx_next_dd = (uint16_t)(txq->tx_next_dd + txq->tx_rs_thresh);
    if (txq->tx_next_dd >= txq->nb_tx_desc)
        txq->tx_next_dd = (uint16_t)(txq->tx_rs_thresh - 1);

    return txq->tx_rs_thresh;
}

static inline int
i40e_tx_free_bufs(struct i40e_tx_queue *txq)
{
    struct i40e_tx_entry *txep;
    uint32_t n;
    uint32_t i;
    int nb_free = 0;
    struct rte_mbuf *m, *free[RTE_I40E_TX_MAX_FREE_BUF_SZ];

    /* check DD bits on threshold descriptor */
    if ((txq->tx_ring[txq->tx_next_dd].cmd_type_offset_bsz &
            rte_cpu_to_le_64(I40E_TXD_QW1_DTYPE_MASK)) !=
            rte_cpu_to_le_64(I40E_TX_DESC_DTYPE_DESC_DONE))
        return 0;

    n = txq->tx_rs_thresh;

    /* first buffer to free from S/W ring is at index
     * tx_next_dd - (tx_rs_thresh - 1)
     */
    txep = &txq->sw_ring[txq->tx_next_dd - (n - 1)];
    m = __rte_pktmbuf_prefree_seg(txep[0].mbuf);
    if (likely(m != NULL)) {
        free[0] = m;
        nb_free = 1;
        for (i = 1; i < n; i++) {
            m = __rte_pktmbuf_prefree_seg(txep[i].mbuf);
            if (likely(m != NULL)) {
                if (likely(m->pool == free[0]->pool)) {
                    free[nb_free++] = m;
                } else {
                    rte_mempool_put_bulk(free[0]->pool,
                            (void **)free, nb_free);
                    free[0] = m;
                    nb_free = 1;
                }
            }
        }
        rte_mempool_put_bulk(free[0]->pool, (void **)free, nb_free);
    } else {
        for (i = 1; i < n; i++) {
            m = __rte_pktmbuf_prefree_seg(txep[i].mbuf);
            if (m != NULL)
                rte_mempool_put(m->pool, m);
        }
    }

    /* buffers were freed, update counters */
    txq->nb_tx_free = (uint16_t)(txq->nb_tx_free + txq->tx_rs_thresh);
    txq->tx_next_dd = (uint16_t)(txq->tx_next_dd + txq->tx_rs_thresh);
    if (txq->tx_next_dd >= txq->nb_tx_desc)
        txq->tx_next_dd = (uint16_t)(txq->tx_rs_thresh - 1);

    return txq->tx_rs_thresh;
}

static inline int
fm10k_tx_free_bufs(struct fm10k_tx_queue *txq)
{
    struct rte_mbuf **txep;
    uint8_t flags;
    uint32_t n;
    uint32_t i;
    int nb_free = 0;
    struct rte_mbuf *m, *free[RTE_FM10K_TX_MAX_FREE_BUF_SZ];

    /* check DD bit on threshold descriptor */
    flags = txq->hw_ring[txq->next_dd].flags;
    if (!(flags & FM10K_TXD_FLAG_DONE))
        return 0;

    n = txq->rs_thresh;

    /* First buffer to free from S/W ring is at index
     * next_dd - (rs_thresh - 1)
     */
    txep = &txq->sw_ring[txq->next_dd - (n - 1)];
    m = __rte_pktmbuf_prefree_seg(txep[0]);
    if (likely(m != NULL)) {
        free[0] = m;
        nb_free = 1;
        for (i = 1; i < n; i++) {
            m = __rte_pktmbuf_prefree_seg(txep[i]);
            if (likely(m != NULL)) {
                if (likely(m->pool == free[0]->pool)) {
                    free[nb_free++] = m;
                } else {
                    rte_mempool_put_bulk(free[0]->pool,
                            (void **)free, nb_free);
                    free[0] = m;
                    nb_free = 1;
                }
            }
        }
        rte_mempool_put_bulk(free[0]->pool, (void **)free, nb_free);
    } else {
        for (i = 1; i < n; i++) {
            m = __rte_pktmbuf_prefree_seg(txep[i]);
            if (m != NULL)
                rte_mempool_put(m->pool, m);
        }
    }

    /* buffers were freed, update counters */
    txq->nb_free = (uint16_t)(txq->nb_free + txq->rs_thresh);
    txq->next_dd = (uint16_t)(txq->next_dd + txq->rs_thresh);
    if (txq->next_dd >= txq->nb_desc)
        txq->next_dd = (uint16_t)(txq->rs_thresh - 1);

    return txq->rs_thresh;
}
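
/*
 * All three functions above implement the same recycle pattern: poll the DD
 * ("descriptor done") flag on the threshold descriptor, then return rs_thresh
 * mbufs to their mempool with one bulk put, restarting the batch whenever an
 * mbuf belongs to a different pool. The sketch below isolates just that
 * batching idiom. It is a minimal illustration, not part of any driver:
 * bulk_free_mbufs() and BULK_SZ are hypothetical names, and it uses the
 * public rte_pktmbuf_prefree_seg() in place of the internal
 * __rte_pktmbuf_prefree_seg() seen above.
 */
#include <rte_mbuf.h>
#include <rte_mempool.h>

#define BULK_SZ 64 /* plays the role of the per-driver *_TX_MAX_FREE_BUF_SZ */

static inline void
bulk_free_mbufs(struct rte_mbuf **txep, uint32_t n)
{
    struct rte_mbuf *m, *free[BULK_SZ];
    int nb_free = 0;
    uint32_t i;

    for (i = 0; i < n; i++) {
        /* returns NULL while other references to the mbuf remain */
        m = rte_pktmbuf_prefree_seg(txep[i]);
        if (m == NULL)
            continue;
        /* flush the batch when the pool changes or the array is full */
        if (nb_free > 0 &&
                (m->pool != free[0]->pool || nb_free == BULK_SZ)) {
            rte_mempool_put_bulk(free[0]->pool, (void **)free, nb_free);
            nb_free = 0;
        }
        free[nb_free++] = m;
    }
    if (nb_free > 0)
        rte_mempool_put_bulk(free[0]->pool, (void **)free, nb_free);
}

/*
 * Grouping by pool keeps the common single-mempool case on the bulk-put fast
 * path, amortizing the per-operation mempool cost across the whole batch,
 * while mbufs from multiple pools are still returned to the right one.
 */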