/*
 * Allocate zeroed DMA coherent memory; returns the kernel virtual
 * address and fills *dma_handle with the corresponding physical
 * address.
 */
void *dma_alloc_coherent(struct device *dev, size_t size,
                         dma_addr_t *dma_handle, gfp_t gfp)
{
        void *ret;

        ret = (void *)__alloc_dma_pages(get_pages(size));

        if (ret) {
                memset(ret, 0, size);
                *dma_handle = virt_to_phys(ret);
        }

        return ret;
}
/*
 * Same allocation path as dma_alloc_coherent above, but with the
 * dma_map_ops-style signature; the attrs argument is not used.
 */
static void *bfin_dma_alloc(struct device *dev, size_t size,
                            dma_addr_t *dma_handle, gfp_t gfp,
                            unsigned long attrs)
{
        void *ret;

        ret = (void *)__alloc_dma_pages(get_pages(size));

        if (ret) {
                memset(ret, 0, size);
                *dma_handle = virt_to_phys(ret);
        }

        return ret;
}
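Both Blackfin variants lean on a get_pages() helper whose definition is not shown. Below is a minimal sketch of what the call sites require — the page count covering size bytes, rounded up — with the body itself an assumption. Note that the C6x functions further down inline the same rounding expression but wrap it in get_count_order(), because their allocator takes an allocation order rather than a page count.

/*
 * Assumed helper (definition not shown in the listings above): the
 * number of PAGE_SIZE pages needed to cover @size bytes, rounded up.
 * Callers are expected to pass a non-zero size, since size == 0 would
 * underflow.
 */
static inline unsigned int get_pages(size_t size)
{
        return ((size - 1) >> PAGE_SHIFT) + 1;
}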
/*
 * Allocate DMA coherent memory space and return both the kernel
 * virtual and DMA address for that space.
 */
void *c6x_dma_alloc(struct device *dev, size_t size, dma_addr_t *handle,
                    gfp_t gfp, struct dma_attrs *attrs)
{
        u32 paddr;
        int order;

        if (!dma_size || !size)
                return NULL;

        order = get_count_order(((size - 1) >> PAGE_SHIFT) + 1);

        paddr = __alloc_dma_pages(order);

        if (handle)
                *handle = paddr;

        if (!paddr)
                return NULL;

        return phys_to_virt(paddr);
}
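c6x_dma_alloc above (and arch_dma_alloc below) depends on state that is not shown: dma_size, the size of a coherent region reserved at boot, and __alloc_dma_pages(), which hands out 2^order contiguous pages from that region and returns their physical address, with 0 signalling failure (hence the !paddr check). A plausible sketch assuming a bitmap-backed carveout follows; the names dma_base, dma_pages, dma_bitmap, and dma_lock are all assumptions.

#include <linux/bitmap.h>
#include <linux/spinlock.h>

/* Assumed module state describing a carveout reserved at boot. */
static u32 dma_base;              /* physical base of the carveout */
static unsigned int dma_pages;    /* carveout size in pages */
static unsigned long *dma_bitmap; /* one bit per page, set = in use */
static DEFINE_SPINLOCK(dma_lock);

/* Hand out 2^order contiguous pages; returns 0 on failure. */
static u32 __alloc_dma_pages(int order)
{
        unsigned long flags;
        int pos;

        spin_lock_irqsave(&dma_lock, flags);
        pos = bitmap_find_free_region(dma_bitmap, dma_pages, order);
        spin_unlock_irqrestore(&dma_lock, flags);

        if (pos < 0)
                return 0;

        return dma_base + ((u32)pos << PAGE_SHIFT);
}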
/*
 * Allocate DMA coherent memory space and return both the kernel
 * virtual and DMA address for that space.
 */
void *arch_dma_alloc(struct device *dev, size_t size, dma_addr_t *handle,
                     gfp_t gfp, unsigned long attrs)
{
        void *ret;
        u32 paddr;
        int order;

        if (!dma_size || !size)
                return NULL;

        order = get_count_order(((size - 1) >> PAGE_SHIFT) + 1);

        paddr = __alloc_dma_pages(order);

        if (handle)
                *handle = paddr;

        if (!paddr)
                return NULL;

        ret = phys_to_virt(paddr);
        memset(ret, 0, 1 << order);
        return ret;
}
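From a driver's point of view, every variant above sits behind the same dma_alloc_coherent()/dma_free_coherent() interface. A minimal usage sketch — my_setup_ring and MY_RING_BYTES are hypothetical:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>

#define MY_RING_BYTES 4096 /* hypothetical buffer size */

static int my_setup_ring(struct device *dev)
{
        dma_addr_t ring_dma;
        void *ring;

        /* ring is the CPU-visible address; ring_dma is the address
         * the device hardware is programmed with. */
        ring = dma_alloc_coherent(dev, MY_RING_BYTES, &ring_dma, GFP_KERNEL);
        if (!ring)
                return -ENOMEM;

        /* ... hand ring_dma to the device, touch the buffer via ring ... */

        dma_free_coherent(dev, MY_RING_BYTES, ring, ring_dma);
        return 0;
}

Both addresses must be kept around: the CPU address for kernel accesses and the eventual free, the DMA address for programming the device.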