/*
 * Tear down a ringbuffer: release the kernel vaddr mapping and drop the
 * GEM object reference (if a BO was ever attached), then free the
 * ringbuffer struct itself.
 *
 * NOTE(review): no NULL check on @ring — callers are assumed to pass a
 * valid pointer; confirm against call sites.
 */
void msm_ringbuffer_destroy(struct msm_ringbuffer *ring)
{
	if (ring->bo) {
		/* Unmap before dropping the last reference. */
		msm_gem_put_vaddr(ring->bo);
		drm_gem_object_unreference_unlocked(ring->bo);
	}

	kfree(ring);
}
/*
 * Release all resources held by an Adreno GPU instance: the memptrs
 * buffer object (vaddr mapping, iova mapping, and the GEM reference),
 * the PM4/PFP firmware blobs, and finally the base MSM GPU state.
 *
 * The memptrs/memptrs_iova guards mirror how far initialization got, so
 * this is safe to call on a partially-initialized instance.
 */
void adreno_gpu_cleanup(struct adreno_gpu *gpu)
{
	if (gpu->memptrs_bo) {
		/* Undo mappings only if they were actually established. */
		if (gpu->memptrs)
			msm_gem_put_vaddr(gpu->memptrs_bo);

		if (gpu->memptrs_iova)
			msm_gem_put_iova(gpu->memptrs_bo, gpu->base.id);

		drm_gem_object_unreference_unlocked(gpu->memptrs_bo);
	}

	/* release_firmware() tolerates NULL, so no guards are needed. */
	release_firmware(gpu->pm4);
	release_firmware(gpu->pfp);

	msm_gpu_cleanup(&gpu->base);
}
/*
 * Free everything an Adreno GPU instance owns: the memptrs BO (its
 * kernel mapping, its iova in the GPU address space, and the GEM
 * reference), both firmware images, and the base MSM GPU state.
 *
 * Guards on memptrs/memptrs_iova make this safe for partially
 * initialized instances.
 */
void adreno_gpu_cleanup(struct adreno_gpu *adreno_gpu)
{
	struct msm_gpu *base = &adreno_gpu->base;
	struct drm_gem_object *bo = adreno_gpu->memptrs_bo;

	if (bo) {
		if (adreno_gpu->memptrs)
			msm_gem_put_vaddr(bo);

		if (adreno_gpu->memptrs_iova)
			msm_gem_put_iova(bo, base->aspace);

		drm_gem_object_unreference_unlocked(bo);
	}

	/* NULL-safe: release_firmware() handles a missing blob. */
	release_firmware(adreno_gpu->pm4);
	release_firmware(adreno_gpu->pfp);

	msm_gpu_cleanup(base);
}
void adreno_gpu_cleanup(struct adreno_gpu *adreno_gpu) { struct msm_gpu *gpu = &adreno_gpu->base; if (adreno_gpu->memptrs_bo) { if (adreno_gpu->memptrs) msm_gem_put_vaddr(adreno_gpu->memptrs_bo); if (adreno_gpu->memptrs_iova) msm_gem_put_iova(adreno_gpu->memptrs_bo, gpu->id); drm_gem_object_unreference_unlocked(adreno_gpu->memptrs_bo); } release_firmware(adreno_gpu->pm4); release_firmware(adreno_gpu->pfp); msm_gpu_cleanup(gpu); if (gpu->aspace) { gpu->aspace->mmu->funcs->detach(gpu->aspace->mmu, iommu_ports, ARRAY_SIZE(iommu_ports)); msm_gem_address_space_put(gpu->aspace); } }
/*
 * PRIME .vunmap callback: drop the kernel virtual mapping of @obj.
 *
 * @vaddr is part of the drm_gem_object_funcs interface but is unused
 * here — the msm GEM layer tracks its own vaddr internally.
 */
void msm_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr)
{
	msm_gem_put_vaddr(obj);
}