/*
 * fmem_map_virtual_area() - map the reserved fmem physical region into the
 * kernel virtual area that was set aside for it.
 * @cacheability: memory-type index handed to get_mem_type() (e.g. MT_DEVICE).
 *
 * On success, records and returns the kernel virtual address of the mapping
 * in fmem_data.virt.  On failure returns an ERR_PTR() value: -EINVAL for an
 * unrecognized memory type, or the error from ioremap_pages().
 */
void *fmem_map_virtual_area(int cacheability)
{
	const struct mem_type *mtype = get_mem_type(cacheability);
	unsigned long vaddr = (unsigned long) fmem_data.area->addr;
	int rc;

	if (!mtype)
		return ERR_PTR(-EINVAL);

	rc = ioremap_pages(vaddr, fmem_data.phys, fmem_data.size, mtype);
	if (rc)
		return ERR_PTR(rc);

	fmem_data.virt = fmem_data.area->addr;
	return fmem_data.virt;
}
/*
 * ion_map_fmem_buffer() - map an ION buffer that lives inside the fmem
 * region into the kernel, at the matching offset within @virt_base.
 * @buffer:    ION buffer; buffer->priv_phys must lie at or above @phys_base.
 * @phys_base: physical base address of the fmem region.
 * @virt_base: kernel virtual base the fmem region is mapped at.
 * @flags:     ION flags; ION_IS_CACHED() selects a cached memory type.
 *
 * Returns the buffer's kernel virtual address, or NULL on failure.
 */
void *ion_map_fmem_buffer(struct ion_buffer *buffer, unsigned long phys_base,
			  void *virt_base, unsigned long flags)
{
	const struct mem_type *type;
	unsigned long start;
	int ret;

	/*
	 * Validate before doing the offset arithmetic: the unsigned
	 * subtraction below would silently wrap if priv_phys < phys_base.
	 */
	if (phys_base > buffer->priv_phys)
		return NULL;

	type = ION_IS_CACHED(flags) ? get_mem_type(MT_DEVICE_CACHED) :
				      get_mem_type(MT_DEVICE);
	/*
	 * get_mem_type() can return NULL; don't hand a NULL type to
	 * ioremap_pages() (fmem_map_virtual_area() performs the same check).
	 */
	if (!type)
		return NULL;

	start = (unsigned long)virt_base + (buffer->priv_phys - phys_base);

	ret = ioremap_pages(start, buffer->priv_phys, buffer->size, type);
	if (ret)
		return NULL;

	return (void *)start;
}