/*
 * Map a single kernel buffer for DMA.
 *
 * Dispatches to the software bounce-buffer (swiotlb) implementation when
 * use_swiotlb() selects it for this device, otherwise to the hardware
 * IOMMU implementation.  Returns the bus address to hand to the device.
 */
dma_addr_t
hwsw_map_single (struct device *dev, void *addr, size_t size, int dir)
{
	if (use_swiotlb(dev))
		return swiotlb_map_single(dev, addr, size, dir);
	return hwiommu_map_single(dev, addr, size, dir);
}
/*
 * Allocate a coherent DMA buffer of @size bytes for @dev.
 *
 * Chooses between the swiotlb and hardware-IOMMU back ends via
 * use_swiotlb().  On success returns the CPU virtual address and stores
 * the corresponding bus address in *@dma_handle.
 */
void *
hwsw_alloc_coherent (struct device *dev, size_t size, dma_addr_t *dma_handle, gfp_t flags)
{
	if (use_swiotlb(dev))
		return swiotlb_alloc_coherent(dev, size, dma_handle, flags);
	return hwiommu_alloc_coherent(dev, size, dma_handle, flags);
}
/*
 * Free a coherent DMA buffer previously obtained from
 * hwsw_alloc_coherent(), routing the release to whichever back end
 * (swiotlb or hardware IOMMU) use_swiotlb() selects for @dev.
 */
void
hwsw_free_coherent (struct device *dev, size_t size, void *vaddr, dma_addr_t dma_handle)
{
	if (use_swiotlb(dev))
		swiotlb_free_coherent(dev, size, vaddr, dma_handle);
	else
		hwiommu_free_coherent(dev, size, vaddr, dma_handle);
}
/*
 * Make a scatter-gather list visible to the device before it performs
 * DMA, using the swiotlb or hardware-IOMMU sync routine as selected by
 * use_swiotlb() for @dev.
 */
void
hwsw_sync_sg_for_device (struct device *dev, struct scatterlist *sg, int nelems, int dir)
{
	if (use_swiotlb(dev))
		swiotlb_sync_sg_for_device(dev, sg, nelems, dir);
	else
		hwiommu_sync_sg_for_device(dev, sg, nelems, dir);
}
/*
 * Make a single mapped region visible to the device before it performs
 * DMA, dispatching to the swiotlb or hardware-IOMMU sync routine per
 * use_swiotlb().
 */
void
hwsw_sync_single_for_device (struct device *dev, dma_addr_t addr, size_t size, int dir)
{
	if (use_swiotlb(dev))
		swiotlb_sync_single_for_device(dev, addr, size, dir);
	else
		hwiommu_sync_single_for_device(dev, addr, size, dir);
}
/*
 * Unmap a scatter-gather list previously mapped for DMA, routing to the
 * swiotlb or hardware-IOMMU back end per use_swiotlb().
 *
 * Fix: the original wrote "return swiotlb_unmap_sg(...)" — a return
 * with an expression inside a void function, which is an ISO C
 * constraint violation (C99 6.8.6.4p1; only a GNU extension) and
 * inconsistent with the other void wrappers in this file.  The calls
 * are now plain statements.
 */
void
hwsw_unmap_sg (struct device *dev, struct scatterlist *sglist, int nents, int dir)
{
	if (use_swiotlb(dev))
		swiotlb_unmap_sg(dev, sglist, nents, dir);
	else
		hwiommu_unmap_sg(dev, sglist, nents, dir);
}
/*
 * Unmap a single DMA mapping identified by @iova, routing to the
 * swiotlb or hardware-IOMMU back end per use_swiotlb().
 *
 * Fix: dropped "return <void expr>;" — returning an expression from a
 * void function violates ISO C (C99 6.8.6.4p1; only a GNU extension)
 * and diverged from the style of the other void wrappers here.
 */
void
hwsw_unmap_single (struct device *dev, dma_addr_t iova, size_t size, int dir)
{
	if (use_swiotlb(dev))
		swiotlb_unmap_single(dev, iova, size, dir);
	else
		hwiommu_unmap_single(dev, iova, size, dir);
}
/*
 * Unmap a scatter-gather list, passing through the DMA attributes in
 * @attrs, routed to the swiotlb or hardware-IOMMU back end per
 * use_swiotlb().
 *
 * Fix: dropped "return <void expr>;" — returning an expression from a
 * void function violates ISO C (C99 6.8.6.4p1; only a GNU extension)
 * and was inconsistent with the sibling void wrappers in this file.
 */
void
hwsw_unmap_sg_attrs(struct device *dev, struct scatterlist *sglist, int nents, int dir, struct dma_attrs *attrs)
{
	if (use_swiotlb(dev))
		swiotlb_unmap_sg_attrs(dev, sglist, nents, dir, attrs);
	else
		hwiommu_unmap_sg_attrs(dev, sglist, nents, dir, attrs);
}
/*
 * Return the dma_map_ops table for @dev: NULL when the device uses the
 * swiotlb path (caller falls back to the default ops), otherwise the
 * SBA hardware-IOMMU ops.
 */
const struct dma_map_ops *
hwsw_dma_get_ops(struct device *dev)
{
	return use_swiotlb(dev) ? NULL : &sba_dma_ops;
}