/*
 * Make a DMA buffer visible to the CPU after the device may have written
 * it: drop any stale dcache lines covering [paddr, paddr + size) so the
 * next CPU read fetches fresh data from memory.
 *
 * Only DMA_FROM_DEVICE and DMA_BIDIRECTIONAL need maintenance here;
 * DMA_TO_DEVICE is a no-op and DMA_NONE is a caller bug.
 */
void arch_sync_dma_for_cpu(struct device *dev, phys_addr_t paddr,
			   size_t size, enum dma_data_direction dir)
{
	if (dir == DMA_NONE)
		BUG();

	if (dir == DMA_FROM_DEVICE || dir == DMA_BIDIRECTIONAL)
		do_cache_op(paddr, size, __invalidate_dcache_range);
}
/*
 * Make a DMA buffer visible to the device before it starts reading:
 * write back any dirty dcache lines covering [paddr, paddr + size).
 * Only needed when the dcache is writeback (XCHAL_DCACHE_IS_WRITEBACK);
 * a writethrough cache never holds data memory lacks.
 *
 * Only DMA_TO_DEVICE and DMA_BIDIRECTIONAL need maintenance here;
 * DMA_FROM_DEVICE is a no-op and DMA_NONE is a caller bug.
 */
void arch_sync_dma_for_device(struct device *dev, phys_addr_t paddr,
			      size_t size, enum dma_data_direction dir)
{
	if (dir == DMA_NONE)
		BUG();

	if ((dir == DMA_TO_DEVICE || dir == DMA_BIDIRECTIONAL) &&
	    XCHAL_DCACHE_IS_WRITEBACK)
		do_cache_op(paddr, size, __flush_dcache_range);
}
/*
 * sync-for-cpu over a dma_addr_t handle: invalidate the dcache range so
 * CPU reads after a device write see memory, not stale cache lines.
 * Mirrors arch_sync_dma_for_cpu(); direction handling is identical
 * (maintenance for DMA_FROM_DEVICE/DMA_BIDIRECTIONAL, BUG on DMA_NONE).
 */
static void xtensa_sync_single_for_cpu(struct device *dev,
				       dma_addr_t dma_handle, size_t size,
				       enum dma_data_direction dir)
{
	if (dir == DMA_NONE)
		BUG();

	if (dir == DMA_FROM_DEVICE || dir == DMA_BIDIRECTIONAL)
		do_cache_op(dma_handle, size, __invalidate_dcache_range);
}
/*
 * sync-for-device over a dma_addr_t handle: write back dirty dcache
 * lines so the device reads current data. Skipped entirely on a
 * writethrough dcache (XCHAL_DCACHE_IS_WRITEBACK == 0). Mirrors
 * arch_sync_dma_for_device(); direction handling is identical
 * (maintenance for DMA_TO_DEVICE/DMA_BIDIRECTIONAL, BUG on DMA_NONE).
 */
static void xtensa_sync_single_for_device(struct device *dev,
					  dma_addr_t dma_handle, size_t size,
					  enum dma_data_direction dir)
{
	if (dir == DMA_NONE)
		BUG();

	if ((dir == DMA_TO_DEVICE || dir == DMA_BIDIRECTIONAL) &&
	    XCHAL_DCACHE_IS_WRITEBACK)
		do_cache_op(dma_handle, size, __flush_dcache_range);
}