/**
 * Protects memory if the heap is an unsecured heap. Also ensures that we are in
 * the correct FMEM state if this heap is a reusable heap.
 * Must be called with heap->lock locked.
 */
static int ion_cp_protect(struct ion_heap *heap)
{
	struct ion_cp_heap *cp_heap =
		container_of(heap, struct ion_cp_heap, heap);
	int ret_value = 0;

	if (cp_heap->heap_protected == HEAP_NOT_PROTECTED) {
		/* Make sure we are in C state when the heap is protected. */
		if (cp_heap->reusable && !cp_heap->allocated_bytes) {
			ret_value = fmem_set_state(FMEM_C_STATE);
			if (ret_value)
				goto out;
		}

		ret_value = ion_cp_protect_mem(cp_heap->secure_base,
				cp_heap->secure_size, cp_heap->permission_type);
		if (ret_value) {
			pr_err("Failed to protect memory for heap %s - "
				"error code: %d\n", heap->name, ret_value);

			if (cp_heap->reusable && !cp_heap->allocated_bytes) {
				if (fmem_set_state(FMEM_T_STATE) != 0)
					pr_err("%s: unable to transition heap to T-state\n",
						__func__);
			}
		} else {
			cp_heap->heap_protected = HEAP_PROTECTED;
			pr_debug("Protected heap %s @ 0x%lx\n",
				heap->name, cp_heap->base);
		}
	}
out:
	return ret_value;
}
Example #2
/* Must be protected by ion_cp_buffer lock */
static int __ion_cp_protect_buffer(struct ion_buffer *buffer, int version,
					void *data, int flags)
{
	struct ion_cp_buffer *buf = buffer->priv_virt;
	int ret_value = 0;

	if (atomic_inc_return(&buf->secure_cnt) == 1) {
		ret_value = ion_cp_protect_mem(buf->buffer,
				buffer->size, 0,
				version, data);

		if (ret_value) {
			pr_err("Failed to secure buffer %p, error %d\n",
				buffer, ret_value);
			atomic_dec(&buf->secure_cnt);
		} else {
			pr_debug("Protected buffer %p from %pa (size %x)\n",
				buffer, &buf->buffer,
				buffer->size);
			buf->want_delayed_unsecure |=
				flags & ION_UNSECURE_DELAYED ? 1 : 0;
			buf->data = data;
			buf->version = version;
		}
	}
	pr_debug("buffer %p protect count %d\n", buffer,
		atomic_read(&buf->secure_cnt));
	BUG_ON(atomic_read(&buf->secure_cnt) < 0);
	return ret_value;
}
/**
 * Protects memory if the heap is an unsecured heap. Also ensures that we are in
 * the correct FMEM state if this heap is a reusable heap.
 * Must be called with heap->lock locked.
 */
static int ion_cp_protect(struct ion_heap *heap, int version, void *data)
{
	struct ion_cp_heap *cp_heap =
		container_of(heap, struct ion_cp_heap, heap);
	int ret_value = 0;

	if (atomic_inc_return(&cp_heap->protect_cnt) == 1) {
		/* Make sure we are in C state when the heap is protected. */
		if (cp_heap->reusable && !cp_heap->allocated_bytes) {
			ret_value = fmem_set_state(FMEM_C_STATE);
			if (ret_value)
				goto out;
		}

		ret_value = ion_cp_protect_mem(cp_heap->secure_base,
				cp_heap->secure_size, cp_heap->permission_type,
				version, data);
		if (ret_value) {
			pr_err("Failed to protect memory for heap %s - "
				"error code: %d\n", heap->name, ret_value);

			if (cp_heap->reusable && !cp_heap->allocated_bytes) {
				if (fmem_set_state(FMEM_T_STATE) != 0)
					pr_err("%s: unable to transition heap to T-state\n",
						__func__);
			}
			atomic_dec(&cp_heap->protect_cnt);
		} else {
			cp_heap->heap_protected = HEAP_PROTECTED;
			pr_debug("Protected heap %s @ 0x%lx\n",
				heap->name, cp_heap->base);
		}
	}
out:
	pr_debug("%s: protect count is %d\n", __func__,
		atomic_read(&cp_heap->protect_cnt));
	BUG_ON(atomic_read(&cp_heap->protect_cnt) < 0);
	return ret_value;
}
static int ion_cp_protect(struct ion_heap *heap)
{
	struct ion_cp_heap *cp_heap =
		container_of(heap, struct ion_cp_heap, heap);
	int ret_value = 0;

	if (atomic_inc_return(&cp_heap->protect_cnt) == 1) {
		ret_value = ion_cp_protect_mem(cp_heap->secure_base,
				cp_heap->secure_size, cp_heap->permission_type);
		if (ret_value) {
			pr_err("Failed to protect memory for heap %s - "
				"error code: %d\n", heap->name, ret_value);
			atomic_dec(&cp_heap->protect_cnt);
		} else {
			cp_heap->heap_protected = HEAP_PROTECTED;
			pr_debug("Protected heap %s @ 0x%x\n",
				heap->name, (unsigned int) cp_heap->base);
		}
	}
	pr_debug("%s: protect count is %d\n", __func__,
		atomic_read(&cp_heap->protect_cnt));
	BUG_ON(atomic_read(&cp_heap->protect_cnt) < 0);
	return ret_value;
}