/*
 * dma_direct_alloc_coherent - allocate a coherent DMA buffer for @dev.
 *
 * Two build-time variants:
 *  - CONFIG_NOT_COHERENT_CACHE: delegate to __dma_alloc_coherent() and
 *    shift the returned handle by the device's DMA offset.
 *  - otherwise: take zeroed pages from the device's NUMA node and derive
 *    the bus address from the kernel virtual address.
 *
 * Returns the kernel virtual address of the buffer and stores the bus
 * address through @dma_handle, or returns NULL on allocation failure.
 */
void *dma_direct_alloc_coherent(struct device *dev, size_t size,
				dma_addr_t *dma_handle, gfp_t flag)
{
#ifdef CONFIG_NOT_COHERENT_CACHE
	void *addr = __dma_alloc_coherent(dev, size, dma_handle, flag);

	if (!addr)
		return NULL;
	/* Translate the CPU-relative handle into the device's bus view. */
	*dma_handle += get_dma_offset(dev);
	return addr;
#else
	struct page *pg;
	void *addr;

	/* ignore region specifiers */
	flag &= ~(__GFP_HIGHMEM);

	pg = alloc_pages_node(dev_to_node(dev), flag, get_order(size));
	if (!pg)
		return NULL;

	addr = page_address(pg);
	memset(addr, 0, size);
	*dma_handle = virt_to_abs(addr) + get_dma_offset(dev);
	return addr;
#endif
}
static void *__dma_alloc_noncoherent(struct device *dev, size_t size, dma_addr_t *dma_handle, gfp_t flags, struct dma_attrs *attrs) { void *ptr, *coherent_ptr; struct page *page; size = PAGE_ALIGN(size); if (!(flags & __GFP_WAIT)) { struct page *page = NULL; void *addr = __alloc_from_pool(size, &page); if (addr) *dma_handle = phys_to_dma(dev, page_to_phys(page)); return addr; } if (IS_ENABLED(CONFIG_DMA_CMA)) { struct page *page; page = dma_alloc_from_contiguous(dev, size >> PAGE_SHIFT, get_order(size)); if (page) { *dma_handle = phys_to_dma(dev, page_to_phys(page)); ptr = page_address(page); } else if (dev_get_cma_priv_area(dev)) { pr_err("%s: failed to allocate from dma-contiguous\n", __func__); return NULL; } else { ptr = __dma_alloc_coherent( dev, size, dma_handle, flags, attrs); } } else {
page = dma_alloc_from_contiguous(dev, size >> PAGE_SHIFT, get_order(size)); if (page) { *dma_handle = phys_to_dma(dev, page_to_phys(page)); ptr = page_address(page); } else if (dev_get_cma_priv_area(dev)) { pr_err("%s: failed to allocate from dma-contiguous\n", __func__); return NULL; } else { ptr = __dma_alloc_coherent( dev, size, dma_handle, flags, attrs); } } else { ptr = __dma_alloc_coherent(dev, size, dma_handle, flags, attrs); } if (!ptr) goto no_mem; /* remove any dirty cache lines on the kernel alias */ __dma_flush_range(ptr, ptr + size); /* create a coherent mapping */ page = virt_to_page(ptr); coherent_ptr = dma_common_contiguous_remap(page, size, VM_USERMAP, __get_dma_pgprot(attrs, __pgprot(PROT_NORMAL_NC), false), NULL); if (!coherent_ptr)