void dma_sync_single_range_for_cpu(struct device *dev, dma_addr_t dma_handle,
	unsigned long offset, size_t size, enum dma_data_direction direction)
{
	BUG_ON(direction == DMA_NONE);

	if (cpu_is_noncoherent_r10000(dev)) {
		unsigned long addr;

		addr = dma_addr_to_virt(dev, dma_handle);
		__dma_sync(addr + offset, size, direction);
	}
#ifdef CONFIG_BRCMSTB
	brcm_sync_for_cpu(dma_addr_to_virt(dev, dma_handle) + offset, size,
			  direction);
#endif
}
void dma_unmap_single(struct device *dev, dma_addr_t dma_addr, size_t size,
	enum dma_data_direction direction)
{
	if (cpu_is_noncoherent_r10000(dev))
		__dma_sync(dma_addr_to_virt(dev, dma_addr), size, direction);

	plat_unmap_dma_mem(dev, dma_addr, size, direction);
}
static void mips_dma_unmap_page(struct device *dev, dma_addr_t dma_addr,
	size_t size, enum dma_data_direction direction, struct dma_attrs *attrs)
{
	if (cpu_is_noncoherent_r10000(dev))
		__dma_sync(dma_addr_to_virt(dev, dma_addr), size, direction);

	plat_unmap_dma_mem(dev, dma_addr, size, direction);
}
static void mips_dma_sync_single_for_cpu(struct device *dev,
	dma_addr_t dma_handle, size_t size, enum dma_data_direction direction)
{
	if (cpu_is_noncoherent_r10000(dev)) {
		unsigned long addr;

		addr = dma_addr_to_virt(dev, dma_handle);
		__dma_sync(addr, size, direction);
	}
}
static int hexagon_map_sg(struct device *hwdev, struct scatterlist *sg,
			  int nents, enum dma_data_direction dir,
			  struct dma_attrs *attrs)
{
	struct scatterlist *s;
	int i;

	WARN_ON(nents == 0 || sg[0].length == 0);

	for_each_sg(sg, s, nents, i) {
		s->dma_address = sg_phys(s);
		if (!check_addr("map_sg", hwdev, s->dma_address, s->length))
			return 0;

		s->dma_length = s->length;

		flush_dcache_range(dma_addr_to_virt(s->dma_address),
				   dma_addr_to_virt(s->dma_address + s->length));
	}

	return nents;
}
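/*
 * Hypothetical caller-side sketch, not part of the source above: how a
 * driver typically builds a scatterlist before an arch map_sg hook such
 * as hexagon_map_sg() runs.  "my_map_two_buffers", "my_dev", "buf_a" and
 * "buf_b" are illustrative names; the scatterlist and DMA calls are the
 * standard kernel APIs.  Buffers must be physically contiguous kernel
 * memory (e.g. from kmalloc), not stack or vmalloc memory.
 */
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

static int my_map_two_buffers(struct device *my_dev,
			      void *buf_a, void *buf_b, size_t len)
{
	struct scatterlist sgl[2];
	int nents;

	sg_init_table(sgl, 2);		/* zero entries, mark end of table */
	sg_set_buf(&sgl[0], buf_a, len);
	sg_set_buf(&sgl[1], buf_b, len);

	/* Invokes the arch's map_sg hook; 0 means the mapping failed. */
	nents = dma_map_sg(my_dev, sgl, 2, DMA_TO_DEVICE);
	if (nents == 0)
		return -ENOMEM;

	/* ... program the device with sg_dma_address()/sg_dma_len() ... */

	dma_unmap_sg(my_dev, sgl, 2, DMA_TO_DEVICE);
	return 0;
}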
static void mips_dma_sync_single_for_device(struct device *dev,
	dma_addr_t dma_handle, size_t size, enum dma_data_direction direction)
{
	plat_extra_sync_for_device(dev);
	if (!plat_device_is_coherent(dev)) {
		unsigned long addr;

		addr = dma_addr_to_virt(dev, dma_handle);
		__dma_sync(addr, size, direction);
	}
}
void dma_sync_single_range_for_device(struct device *dev,
	dma_addr_t dma_handle, unsigned long offset, size_t size,
	enum dma_data_direction direction)
{
	BUG_ON(direction == DMA_NONE);

	if (!plat_device_is_coherent(dev)) {
		unsigned long addr;

		addr = dma_addr_to_virt(dma_handle);
		__dma_sync(addr + offset, size, direction);
	}
}
void dma_unmap_page(struct device *dev, dma_addr_t dma_address, size_t size,
	enum dma_data_direction direction)
{
	BUG_ON(direction == DMA_NONE);

	if (!plat_device_is_coherent(dev) && direction != DMA_TO_DEVICE) {
		unsigned long addr;

		addr = dma_addr_to_virt(dma_address);
		dma_cache_wback_inv(addr, size);
	}

	plat_unmap_dma_mem(dma_address);
}
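/*
 * Hypothetical caller-side sketch, not part of the source above: the
 * streaming-DMA pattern the helpers above implement.  On a non-coherent
 * platform, each call below turns into the cache writebacks and
 * invalidations shown in the arch code.  "my_rx_once" and "my_dev" are
 * illustrative names; the dma_* calls are the standard kernel API.
 */
#include <linux/dma-mapping.h>
#include <linux/slab.h>

static int my_rx_once(struct device *my_dev, size_t len)
{
	void *buf = kmalloc(len, GFP_KERNEL);
	dma_addr_t handle;

	if (!buf)
		return -ENOMEM;

	handle = dma_map_single(my_dev, buf, len, DMA_FROM_DEVICE);
	if (dma_mapping_error(my_dev, handle)) {
		kfree(buf);
		return -EIO;
	}

	/* ... tell the device to DMA into "handle", wait for completion ... */

	/* Invalidate stale cache lines before the CPU reads the data. */
	dma_sync_single_for_cpu(my_dev, handle, len, DMA_FROM_DEVICE);
	/* ... CPU reads buf ... */

	/* Hand the buffer back to the device for another transfer. */
	dma_sync_single_for_device(my_dev, handle, len, DMA_FROM_DEVICE);
	/* ... second transfer; unmap also performs the final CPU sync: */
	dma_unmap_single(my_dev, handle, len, DMA_FROM_DEVICE);

	kfree(buf);
	return 0;
}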