/**
 * Unprotects memory if heap is secure heap. Also ensures that we are in
 * the correct FMEM state if this heap is a reusable heap.
 * Must be called with heap->lock locked.
 */
static void ion_cp_unprotect(struct ion_heap *heap, int version, void *data)
{
	struct ion_cp_heap *cp_heap =
		container_of(heap, struct ion_cp_heap, heap);

	if (atomic_dec_and_test(&cp_heap->protect_cnt)) {
		int error_code = ion_cp_unprotect_mem(
			cp_heap->secure_base, cp_heap->secure_size,
			cp_heap->permission_type, version, data);
		if (error_code) {
			pr_err("Failed to un-protect memory for heap %s - "
				"error code: %d\n", heap->name, error_code);
		} else {
			cp_heap->heap_protected = HEAP_NOT_PROTECTED;
			pr_debug("Un-protected heap %s @ 0x%x\n",
				heap->name, (unsigned int) cp_heap->base);

			if (cp_heap->reusable && !cp_heap->allocated_bytes) {
				if (fmem_set_state(FMEM_T_STATE) != 0)
					pr_err("%s: unable to transition heap to T-state",
						__func__);
			}
		}
	}
	pr_debug("%s: protect count is %d\n", __func__,
		atomic_read(&cp_heap->protect_cnt));
	BUG_ON(atomic_read(&cp_heap->protect_cnt) < 0);
}
/**
 * Unprotects memory if heap is secure heap. Also ensures that we are in
 * the correct FMEM state if this heap is a reusable heap.
 * Must be called with heap->lock locked.
 */
static void ion_cp_unprotect(struct ion_heap *heap)
{
	struct ion_cp_heap *cp_heap =
		container_of(heap, struct ion_cp_heap, heap);

	if (cp_heap->heap_protected == HEAP_PROTECTED) {
		int error_code = ion_cp_unprotect_mem(
			cp_heap->secure_base, cp_heap->secure_size,
			cp_heap->permission_type);
		if (error_code) {
			pr_err("Failed to un-protect memory for heap %s - "
				"error code: %d\n", heap->name, error_code);
		} else {
			cp_heap->heap_protected = HEAP_NOT_PROTECTED;
			pr_debug("Un-protected heap %s @ 0x%x\n",
				heap->name, (unsigned int) cp_heap->base);

			if (cp_heap->reusable && !cp_heap->allocated_bytes) {
				if (fmem_set_state(FMEM_T_STATE) != 0)
					pr_err("%s: unable to transition heap to T-state",
						__func__);
			}
		}
	}
}
/* Must be protected by ion_cp_buffer lock */
static int __ion_cp_unprotect_buffer(struct ion_buffer *buffer, int version,
					void *data, int force_unsecure)
{
	struct ion_cp_buffer *buf = buffer->priv_virt;
	int ret_value = 0;

	if (force_unsecure) {
		if (!buf->is_secure || atomic_read(&buf->secure_cnt) == 0)
			return 0;

		if (atomic_read(&buf->secure_cnt) != 1) {
			WARN(1, "Forcing unsecure of buffer with outstanding secure count %d!\n",
				atomic_read(&buf->secure_cnt));
			atomic_set(&buf->secure_cnt, 1);
		}
	}

	if (atomic_dec_and_test(&buf->secure_cnt)) {
		ret_value = ion_cp_unprotect_mem(
			buf->buffer, buffer->size, 0, version, data);

		if (ret_value) {
			pr_err("Failed to unsecure buffer %p, error %d\n",
				buffer, ret_value);
			/*
			 * If the force unsecure is happening, the buffer
			 * is being destroyed. We failed to unsecure the
			 * buffer even though the memory is given back.
			 * Just die now rather than discovering later what
			 * happens when trying to use the secured memory as
			 * unsecured...
			 */
			BUG_ON(force_unsecure);
			/* Bump the count back up one to try again later */
			atomic_inc(&buf->secure_cnt);
		} else {
			buf->version = -1;
			buf->data = NULL;
		}
	}
	pr_debug("buffer %p unprotect count %d\n", buffer,
		atomic_read(&buf->secure_cnt));
	BUG_ON(atomic_read(&buf->secure_cnt) < 0);
	return ret_value;
}
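/*
 * Illustrative sketch only, not part of the driver above: the
 * decrement-and-test-with-rollback idiom used by
 * __ion_cp_unprotect_buffer(), reduced to its essentials.  The names
 * example_cnt, example_unprotect_hw() and example_unprotect() are
 * hypothetical stand-ins for buf->secure_cnt, ion_cp_unprotect_mem()
 * and the buffer unprotect path; only the caller that drops the count
 * to zero does the real work, and on failure the count is restored so
 * a later call can retry.
 */
#include <linux/atomic.h>

static atomic_t example_cnt = ATOMIC_INIT(1);	/* one outstanding secure user */

static int example_unprotect_hw(void)
{
	return 0;	/* pretend the underlying unprotect call succeeded */
}

static int example_unprotect(void)
{
	int ret = 0;

	/* Only the final reference actually unprotects the memory. */
	if (atomic_dec_and_test(&example_cnt)) {
		ret = example_unprotect_hw();
		if (ret)
			/* Failed: bump the count back up to allow a retry. */
			atomic_inc(&example_cnt);
	}
	return ret;
}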
static int __ion_cp_unprotect_buffer(struct ion_buffer *buffer, int version,
					void *data, int force_unsecure)
{
	struct ion_cp_buffer *buf = buffer->priv_virt;
	int ret_value = 0;

	if (force_unsecure) {
		if (!buf->is_secure || atomic_read(&buf->secure_cnt) == 0)
			return 0;

		if (atomic_read(&buf->secure_cnt) != 1) {
			WARN(1, "Forcing unsecure of buffer with outstanding secure count %d!\n",
				atomic_read(&buf->secure_cnt));
			atomic_set(&buf->secure_cnt, 1);
		}
	}

	if (atomic_dec_and_test(&buf->secure_cnt)) {
		ret_value = ion_cp_unprotect_mem(
			buf->buffer, buffer->size, 0, version, data);

		if (ret_value) {
			pr_err("Failed to unsecure buffer %p, error %d\n",
				buffer, ret_value);
			/*
			 * On a force unsecure the buffer is being destroyed;
			 * the memory is handed back even though unsecuring
			 * failed, so die now rather than later when the
			 * still-secured memory is used as unsecured.
			 */
			BUG_ON(force_unsecure);
			/* Bump the count back up one to try again later */
			atomic_inc(&buf->secure_cnt);
		} else {
			buf->version = -1;
			buf->data = NULL;
		}
	}
	pr_debug("buffer %p unprotect count %d\n", buffer,
		atomic_read(&buf->secure_cnt));
	BUG_ON(atomic_read(&buf->secure_cnt) < 0);
	return ret_value;
}
/**
 * Unprotects memory if heap is secure heap.
 * Must be called with heap->lock locked.
 */
static void ion_cp_unprotect(struct ion_heap *heap)
{
	struct ion_cp_heap *cp_heap =
		container_of(heap, struct ion_cp_heap, heap);

	if (atomic_dec_and_test(&cp_heap->protect_cnt)) {
		int error_code = ion_cp_unprotect_mem(
			cp_heap->secure_base, cp_heap->secure_size,
			cp_heap->permission_type);
		if (error_code) {
			pr_err("Failed to un-protect memory for heap %s - "
				"error code: %d\n", heap->name, error_code);
		} else {
			cp_heap->heap_protected = HEAP_NOT_PROTECTED;
			pr_debug("Un-protected heap %s @ 0x%x\n",
				heap->name, (unsigned int) cp_heap->base);
		}
	}
	BUG_ON(atomic_read(&cp_heap->protect_cnt) < 0);
}
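/*
 * Illustrative sketch only, not part of the driver above: the calling
 * contract implied by "Must be called with heap->lock locked".  The
 * names example_heap_lock, example_protect_cnt, example_unprotect()
 * and example_free_secure_allocation() are hypothetical; the point is
 * that the caller holds the heap lock around the unprotect call, and
 * only the call that drops the protect count to zero tears protection
 * down.
 */
#include <linux/mutex.h>
#include <linux/printk.h>

static DEFINE_MUTEX(example_heap_lock);		/* stands in for heap->lock */
static int example_protect_cnt = 1;		/* one outstanding secure user */

static void example_unprotect(void)		/* caller must hold example_heap_lock */
{
	if (--example_protect_cnt == 0)
		pr_debug("last user gone: un-protect the heap memory here\n");
}

static void example_free_secure_allocation(void)
{
	mutex_lock(&example_heap_lock);		/* lock taken by the caller ... */
	example_unprotect();			/* ... before the unprotect call */
	mutex_unlock(&example_heap_lock);
}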