Example #1
0
void *ddl_pmem_alloc(struct ddl_buf_addr *addr, size_t sz, u32 alignment)
{
	u32 alloc_size, offset = 0, flags = 0;
	u32 index = 0;
	struct ddl_context *ddl_context;
	struct msm_mapped_buffer *mapped_buffer = NULL;
	DBG_PMEM("\n%s() IN: Requested alloc size(%u)", __func__, (u32)sz);
	if (!addr) {
		DDL_MSG_ERROR("\n%s() Invalid Parameters", __func__);
		goto bail_out;
	}
	ddl_context = ddl_get_context();
	alloc_size = (sz + alignment);
	addr->alloced_phys_addr = (phys_addr_t)
	allocate_contiguous_memory_nomap(alloc_size,
		res_trk_get_mem_type(), SZ_4K);
	if (!addr->alloced_phys_addr) {
		DDL_MSG_ERROR("%s() : acm alloc failed (%d)\n", __func__,
			alloc_size);
		goto bail_out;
	}
	flags = MSM_SUBSYSTEM_MAP_IOVA | MSM_SUBSYSTEM_MAP_KADDR;
	if (alignment == DDL_KILO_BYTE(128))
		index = 1;
	else if (alignment > SZ_4K)
		flags |= MSM_SUBSYSTEM_ALIGN_IOVA_8K;

	addr->mapped_buffer =
	msm_subsystem_map_buffer((unsigned long)addr->alloced_phys_addr,
	alloc_size, flags, &vidc_mmu_subsystem[index],
	sizeof(vidc_mmu_subsystem[index])/sizeof(unsigned int));
	if (IS_ERR(addr->mapped_buffer)) {
		pr_err(" %s() buffer map failed", __func__);
		goto free_acm_alloc;
	}
	mapped_buffer = addr->mapped_buffer;
	if (!mapped_buffer->vaddr || !mapped_buffer->iova[0]) {
		pr_err("%s() map buffers failed\n", __func__);
		goto free_map_buffers;
	}
	addr->physical_base_addr = (u8 *)mapped_buffer->iova[0];
	addr->virtual_base_addr = mapped_buffer->vaddr;
	addr->align_physical_addr = (u8 *) DDL_ALIGN((u32)
		addr->physical_base_addr, alignment);
	offset = (u32)(addr->align_physical_addr -
			addr->physical_base_addr);
	addr->align_virtual_addr = addr->virtual_base_addr + offset;
	addr->buffer_size = sz;
	return addr->virtual_base_addr;

free_map_buffers:
	msm_subsystem_unmap_buffer(addr->mapped_buffer);
	addr->mapped_buffer = NULL;
free_acm_alloc:
	free_contiguous_memory_by_paddr(
		(unsigned long)addr->alloced_phys_addr);
	addr->alloced_phys_addr = (phys_addr_t)NULL;
bail_out:
	return NULL;
}
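All of the allocators in these examples over-allocate by the requested alignment and then derive aligned physical and virtual pointers from one shared offset. Below is a minimal standalone sketch of that arithmetic; ALIGN_UP is an assumed stand-in for the driver's DDL_ALIGN() macro (valid for power-of-two alignments) and is not taken from this file.

/*
 * Sketch only: over-allocate by `alignment`, round the physical base up,
 * and apply the same offset to the kernel virtual mapping.
 */
#include <stddef.h>
#include <stdint.h>

#define ALIGN_UP(val, align)	(((val) + (align) - 1) & ~((uintptr_t)(align) - 1))

struct aligned_buf {
	uintptr_t phys_base;	/* start of the sz + alignment allocation */
	uint8_t *virt_base;	/* kernel mapping of the same allocation */
	uintptr_t phys_aligned;
	uint8_t *virt_aligned;
	size_t size;
};

static void derive_aligned_pointers(struct aligned_buf *b, size_t sz,
				    uintptr_t alignment)
{
	size_t offset;

	/* The backing allocation is sz + alignment bytes, so rounding up by
	 * at most (alignment - 1) bytes still leaves sz usable bytes. */
	b->phys_aligned = ALIGN_UP(b->phys_base, alignment);
	offset = b->phys_aligned - b->phys_base;
	b->virt_aligned = b->virt_base + offset;
	b->size = sz;
}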
static void get_dec_op_done_crop(u32 output_order,
	struct vidc_1080p_dec_disp_info *dec_disp_info,
	struct vcd_frame_rect *crop_data,
	struct vcd_property_frame_size *op_frame_sz,
	struct vcd_property_frame_size *frame_sz,
	struct ddl_buf_addr *shared_mem)
{
	u32 crop_exists =
		(output_order == VCD_DEC_ORDER_DECODE) ?
		dec_disp_info->dec_crop_exists :
		dec_disp_info->disp_crop_exists;
	crop_data->left = 0;
	crop_data->top = 0;
	crop_data->right = dec_disp_info->img_size_x;
	crop_data->bottom = dec_disp_info->img_size_y;
	op_frame_sz->width = dec_disp_info->img_size_x;
	op_frame_sz->height = dec_disp_info->img_size_y;
	ddl_calculate_stride(op_frame_sz, false);
	op_frame_sz->stride = DDL_ALIGN(op_frame_sz->width,
				DDL_TILE_ALIGN_WIDTH);
	op_frame_sz->scan_lines = DDL_ALIGN(op_frame_sz->height,
					DDL_TILE_ALIGN_HEIGHT);
	DDL_MSG_LOW("%s img_size_x = %u img_size_y = %u\n",
				__func__, dec_disp_info->img_size_x,
				dec_disp_info->img_size_y);
	if (crop_exists) {
		if (output_order == VCD_DEC_ORDER_DECODE)
			vidc_sm_get_dec_order_crop_info(shared_mem,
				&dec_disp_info->crop_left_offset,
				&dec_disp_info->crop_right_offset,
				&dec_disp_info->crop_top_offset,
				&dec_disp_info->crop_bottom_offset);
		else
			vidc_sm_get_crop_info(shared_mem,
				&dec_disp_info->crop_left_offset,
				&dec_disp_info->crop_right_offset,
				&dec_disp_info->crop_top_offset,
				&dec_disp_info->crop_bottom_offset);
		crop_data->left = dec_disp_info->crop_left_offset;
		crop_data->top = dec_disp_info->crop_top_offset;
		crop_data->right -= dec_disp_info->crop_right_offset;
		crop_data->bottom -= dec_disp_info->crop_bottom_offset;
		op_frame_sz->width = crop_data->right - crop_data->left;
		op_frame_sz->height = crop_data->bottom - crop_data->top;
	}
}
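The crop offsets returned by the shared-memory helpers are insets from each image edge; the display size is what remains after subtracting them. A short illustrative sketch of that arithmetic follows (the helper and the example values are hypothetical).

/* Hypothetical example: a 1920x1088 coded picture with an 8-line bottom
 * inset yields a 1920x1080 display rectangle. */
struct crop_rect { unsigned int left, top, right, bottom; };

static void apply_crop_insets(struct crop_rect *r,
			      unsigned int img_x, unsigned int img_y,
			      unsigned int left_off, unsigned int right_off,
			      unsigned int top_off, unsigned int bottom_off)
{
	r->left = left_off;
	r->top = top_off;
	r->right = img_x - right_off;	/* offsets are insets from each edge */
	r->bottom = img_y - bottom_off;
	/* Displayed size: (r->right - r->left) x (r->bottom - r->top). */
}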
Example #3
0
static void *res_trk_pmem_map
	(struct ddl_buf_addr *addr, size_t sz, u32 alignment)
{
	u32 offset = 0, flags = 0;
	u32 index = 0;
	struct ddl_context *ddl_context;
	struct msm_mapped_buffer *mapped_buffer = NULL;
	ddl_context = ddl_get_context();
	if (!addr->alloced_phys_addr) {
		pr_err(" %s() alloced addres NULL", __func__);
		goto bail_out;
	}
	flags = MSM_SUBSYSTEM_MAP_IOVA | MSM_SUBSYSTEM_MAP_KADDR;
	if (alignment == DDL_KILO_BYTE(128))
		index = 1;
	else if (alignment > SZ_4K)
		flags |= MSM_SUBSYSTEM_ALIGN_IOVA_8K;

	addr->mapped_buffer =
	msm_subsystem_map_buffer((unsigned long)addr->alloced_phys_addr,
	sz, flags, &restrk_mmu_subsystem[index],
	sizeof(restrk_mmu_subsystem[index])/sizeof(unsigned int));
	if (IS_ERR(addr->mapped_buffer)) {
		pr_err(" %s() buffer map failed", __func__);
		goto bail_out;
	}
	mapped_buffer = addr->mapped_buffer;
	if (!mapped_buffer->vaddr || !mapped_buffer->iova[0]) {
		pr_err("%s() map buffers failed\n", __func__);
		goto bail_out;
	}
	addr->physical_base_addr = (u8 *)mapped_buffer->iova[0];
	addr->virtual_base_addr = mapped_buffer->vaddr;
	addr->align_physical_addr = (u8 *) DDL_ALIGN((u32)
		addr->physical_base_addr, alignment);
	offset = (u32)(addr->align_physical_addr -
			addr->physical_base_addr);
	addr->align_virtual_addr = addr->virtual_base_addr + offset;
	addr->buffer_size = sz;
	return addr->virtual_base_addr;
bail_out:
	return NULL;
}
void *ddl_pmem_alloc(struct ddl_buf_addr *addr, size_t sz, u32 alignment)
{
	u32 alloc_size, offset = 0;
	DBG_PMEM("\n%s() IN: Requested alloc size(%u)", __func__, (u32)sz);
	alloc_size = (sz + alignment);
	addr->physical_base_addr = (u8 *) pmem_kalloc(alloc_size,
		PMEM_MEMTYPE_SMI | PMEM_ALIGNMENT_4K);
	if (!addr->physical_base_addr) {
		DDL_MSG_ERROR("%s() : pmem alloc failed (%d)\n", __func__,
			alloc_size);
		return NULL;
	}
	DDL_MSG_LOW("%s() : pmem alloc physical base addr/sz 0x%x / %d\n",\
		__func__, (u32)addr->physical_base_addr, alloc_size);
	addr->virtual_base_addr = (u8 *)ioremap((unsigned long)
		addr->physical_base_addr, alloc_size);
	if (!addr->virtual_base_addr) {
		DDL_MSG_ERROR("%s() : ioremap failed, virtual(%x)\n", __func__,
			(u32)addr->virtual_base_addr);
		return NULL;
	}
	DDL_MSG_LOW("%s() : pmem alloc virtual base addr/sz 0x%x / %d\n",\
		__func__, (u32)addr->virtual_base_addr, alloc_size);
	addr->align_physical_addr = (u8 *) DDL_ALIGN((u32)
		addr->physical_base_addr, alignment);
	offset = (u32)(addr->align_physical_addr -
			addr->physical_base_addr);
	addr->align_virtual_addr = addr->virtual_base_addr + offset;
	addr->buffer_size = sz;
	DDL_MSG_LOW("\n%s() : alig_phy_addr(%p) alig_vir_addr(%p)",
		__func__, addr->align_physical_addr, addr->align_virtual_addr);
	DBG_PMEM("\n%s() OUT: phy_addr(%p) vir_addr(%p) size(%u)",
		__func__, addr->physical_base_addr, addr->virtual_base_addr,
		addr->buffer_size);
	return addr->virtual_base_addr;
}
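A hedged sketch of how a caller might use either ddl_pmem_alloc() variant; the size, the alignment value, and the caller itself are assumptions for illustration and are not taken from this file.

/* Hypothetical caller: allocate, check the return value, and then use only
 * the aligned pointers; the *_base_addr fields are kept solely so the
 * driver's release path can unmap and free the buffer later. */
static int example_get_dma_buffer(struct ddl_buf_addr *buf, size_t sz)
{
	if (!ddl_pmem_alloc(buf, sz, DDL_KILO_BYTE(128)))
		return -ENOMEM;
	/* Program the hardware with buf->align_physical_addr and access the
	 * memory through buf->align_virtual_addr. */
	return 0;
}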
void *ddl_pmem_alloc(struct ddl_buf_addr *addr, size_t sz, u32 alignment)
{
	u32 alloc_size, offset = 0, flags = 0;
	u32 index = 0;
	struct ddl_context *ddl_context;
	struct msm_mapped_buffer *mapped_buffer = NULL;
	int rc = -EINVAL;
	ion_phys_addr_t phyaddr = 0;
	size_t len = 0;
	DBG_PMEM("\n%s() IN: Requested alloc size(%u)", __func__, (u32)sz);
	if (!addr) {
		DDL_MSG_ERROR("\n%s() Invalid Parameters", __func__);
		goto bail_out;
	}
	ddl_context = ddl_get_context();
	alloc_size = (sz + alignment);
	if (res_trk_get_enable_ion()) {
		if (!ddl_context->video_ion_client)
			ddl_context->video_ion_client =
				res_trk_get_ion_client();
		if (!ddl_context->video_ion_client) {
			DDL_MSG_ERROR("%s() :DDL ION Client Invalid handle\n",
						 __func__);
			goto bail_out;
		}
		addr->alloc_handle = ion_alloc(
		ddl_context->video_ion_client, alloc_size, SZ_4K,
			(1<<res_trk_get_mem_type()));
		if (IS_ERR_OR_NULL(addr->alloc_handle)) {
			DDL_MSG_ERROR("%s() :DDL ION alloc failed\n",
						 __func__);
			goto bail_out;
		}
		rc = ion_phys(ddl_context->video_ion_client,
				addr->alloc_handle, &phyaddr,
				 &len);
		if (rc || !phyaddr) {
			DDL_MSG_ERROR("%s():DDL ION client physical failed\n",
						 __func__);
			goto free_acm_ion_alloc;
		}
		addr->alloced_phys_addr = phyaddr;
	} else {
		addr->alloced_phys_addr = (phys_addr_t)
		allocate_contiguous_memory_nomap(alloc_size,
			res_trk_get_mem_type(), SZ_4K);
		if (!addr->alloced_phys_addr) {
			DDL_MSG_ERROR("%s() : acm alloc failed (%d)\n",
					 __func__, alloc_size);
			goto bail_out;
		}
	}
	flags = MSM_SUBSYSTEM_MAP_IOVA | MSM_SUBSYSTEM_MAP_KADDR;
	if (alignment == DDL_KILO_BYTE(128))
		index = 1;
	else if (alignment > SZ_4K)
		flags |= MSM_SUBSYSTEM_ALIGN_IOVA_8K;

	addr->mapped_buffer =
	msm_subsystem_map_buffer((unsigned long)addr->alloced_phys_addr,
	alloc_size, flags, &vidc_mmu_subsystem[index],
	sizeof(vidc_mmu_subsystem[index])/sizeof(unsigned int));
	if (IS_ERR(addr->mapped_buffer)) {
		pr_err(" %s() buffer map failed", __func__);
		goto free_acm_ion_alloc;
	}
	mapped_buffer = addr->mapped_buffer;
	if (!mapped_buffer->vaddr || !mapped_buffer->iova[0]) {
		pr_err("%s() map buffers failed\n", __func__);
		goto free_map_buffers;
	}
	addr->physical_base_addr = (u8 *)mapped_buffer->iova[0];
	addr->virtual_base_addr = mapped_buffer->vaddr;
	addr->align_physical_addr = (u8 *) DDL_ALIGN((u32)
		addr->physical_base_addr, alignment);
	offset = (u32)(addr->align_physical_addr -
			addr->physical_base_addr);
	addr->align_virtual_addr = addr->virtual_base_addr + offset;
	addr->buffer_size = sz;
	return addr->virtual_base_addr;

free_map_buffers:
	msm_subsystem_unmap_buffer(addr->mapped_buffer);
	addr->mapped_buffer = NULL;
free_acm_ion_alloc:
	if (ddl_context->video_ion_client) {
		if (addr->alloc_handle) {
			ion_free(ddl_context->video_ion_client,
				addr->alloc_handle);
			addr->alloc_handle = NULL;
		}
	} else {
		free_contiguous_memory_by_paddr(
			(unsigned long)addr->alloced_phys_addr);
		addr->alloced_phys_addr = (phys_addr_t)NULL;
	}
bail_out:
	return NULL;
}
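Each failure label above releases only what was acquired before the jump, in reverse order of acquisition. A generic sketch of that goto-unwind idiom, with hypothetical acquire/release helpers standing in for the ION, contiguous-memory and subsystem-map calls:

/* Hypothetical helpers standing in for the real acquire/release calls. */
void *acquire_first(void);
void *acquire_second(void *first);
void release_first(void *first);

/* Illustrative only: two-stage acquisition with reverse-order unwind. */
static void *acquire_both(void)
{
	void *first, *second;

	first = acquire_first();
	if (!first)
		goto bail_out;
	second = acquire_second(first);
	if (!second)
		goto undo_first;
	return second;

undo_first:
	release_first(first);	/* undo only what actually succeeded */
bail_out:
	return NULL;
}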
static void *res_trk_pmem_map
	(struct ddl_buf_addr *addr, size_t sz, u32 alignment)
{
	u32 offset = 0, flags = 0;
	u32 index = 0;
	struct ddl_context *ddl_context;
	struct msm_mapped_buffer *mapped_buffer = NULL;
	int ret = 0;
	unsigned long iova = 0;
	unsigned long buffer_size  = 0;
	unsigned long *kernel_vaddr = NULL;

	ddl_context = ddl_get_context();
	if (res_trk_get_enable_ion() && addr->alloc_handle) {
		kernel_vaddr = (unsigned long *) ion_map_kernel(
					ddl_context->video_ion_client,
					addr->alloc_handle, UNCACHED);
		if (IS_ERR_OR_NULL(kernel_vaddr)) {
			DDL_MSG_ERROR("%s():DDL ION client map failed\n",
						 __func__);
			goto ion_bail_out;
		}
		addr->virtual_base_addr = (u8 *) kernel_vaddr;
		ret = ion_map_iommu(ddl_context->video_ion_client,
				addr->alloc_handle,
				VIDEO_DOMAIN,
				VIDEO_FIRMWARE_POOL,
				SZ_4K,
				0,
				&iova,
				&buffer_size,
				UNCACHED, 0);
		if (ret || !iova) {
			DDL_MSG_ERROR(
			"%s():DDL ION client iommu map failed, ret = %d iova = 0x%lx\n",
			__func__, ret, iova);
			goto ion_unmap_bail_out;
		}
		addr->mapped_buffer = NULL;
		addr->physical_base_addr = (u8 *)iova;
		addr->align_physical_addr = (u8 *) DDL_ALIGN((u32)
		addr->physical_base_addr, alignment);
		offset = (u32)(addr->align_physical_addr -
				addr->physical_base_addr);
		addr->align_virtual_addr = addr->virtual_base_addr + offset;
		addr->buffer_size = buffer_size;
	} else {
		if (!res_trk_check_for_sec_session()) {
			if (!addr->alloced_phys_addr) {
				pr_err(" %s() alloced addres NULL", __func__);
				goto bail_out;
			}
			flags = MSM_SUBSYSTEM_MAP_IOVA |
				MSM_SUBSYSTEM_MAP_KADDR;
			if (alignment == DDL_KILO_BYTE(128))
				index = 1;
			else if (alignment > SZ_4K)
				flags |= MSM_SUBSYSTEM_ALIGN_IOVA_8K;
			addr->mapped_buffer =
			msm_subsystem_map_buffer(
			(unsigned long)addr->alloced_phys_addr,
			sz, flags, &restrk_mmu_subsystem[index],
			sizeof(restrk_mmu_subsystem[index])/
				sizeof(unsigned int));
			if (IS_ERR(addr->mapped_buffer)) {
				pr_err(" %s() buffer map failed", __func__);
				goto bail_out;
			}
			mapped_buffer = addr->mapped_buffer;
			if (!mapped_buffer->vaddr || !mapped_buffer->iova[0]) {
				pr_err("%s() map buffers failed\n", __func__);
				goto bail_out;
			}
			addr->physical_base_addr =
				 (u8 *)mapped_buffer->iova[0];
			addr->virtual_base_addr =
					mapped_buffer->vaddr;
		} else {
			addr->physical_base_addr =
				(u8 *) addr->alloced_phys_addr;
			addr->virtual_base_addr =
				(u8 *)addr->alloced_phys_addr;
		}
		addr->align_physical_addr = (u8 *) DDL_ALIGN((u32)
		addr->physical_base_addr, alignment);
		offset = (u32)(addr->align_physical_addr -
				addr->physical_base_addr);
		addr->align_virtual_addr = addr->virtual_base_addr + offset;
		addr->buffer_size = sz;
	}
	return addr->virtual_base_addr;
bail_out:
	if (!IS_ERR_OR_NULL(addr->mapped_buffer))
		msm_subsystem_unmap_buffer(addr->mapped_buffer);
	return NULL;
ion_unmap_bail_out:
	if (!IS_ERR_OR_NULL(addr->alloc_handle)) {
		ion_unmap_kernel(resource_context.
			res_ion_client,	addr->alloc_handle);
	}
ion_bail_out:
	return NULL;
}
static u32 ddl_decoder_seq_done_callback(struct ddl_context *ddl_context,
	struct ddl_client_context *ddl)
{
	struct ddl_decoder_data *decoder = &ddl->codec_data.decoder;
	struct vidc_1080p_seq_hdr_info seq_hdr_info;
	u32 process_further = true;
	struct ddl_profile_info_type disp_profile_info;
/*HTC_START Fix Klocwork issue*/
	seq_hdr_info.profile = 0;
	seq_hdr_info.level = 0;
/*HTC_END*/
	DDL_MSG_MED("ddl_decoder_seq_done_callback");
	if (!ddl->decoding ||
		!DDLCLIENT_STATE_IS(ddl, DDL_CLIENT_WAIT_FOR_INITCODECDONE)) {
		DDL_MSG_ERROR("STATE-CRITICAL-HDDONE");
		ddl_client_fatal_cb(ddl);
	} else {
		if (vidc_msg_timing)
			ddl_calc_core_proc_time(__func__, DEC_OP_TIME);
		ddl->cmd_state = DDL_CMD_INVALID;
		DDL_MSG_LOW("ddl_state_transition: %s ~~>"
			"DDL_CLIENT_WAIT_FOR_DPB",
			ddl_get_state_string(ddl->client_state));
		ddl->client_state = DDL_CLIENT_WAIT_FOR_DPB;
		DDL_MSG_LOW("HEADER_DONE");
		vidc_1080p_get_decode_seq_start_result(&seq_hdr_info);
		parse_hdr_size_data(ddl, &seq_hdr_info);
		if (!seq_hdr_info.img_size_x || !seq_hdr_info.img_size_y) {
			DDL_MSG_ERROR("FATAL:ZeroImageSize");
			ddl_client_fatal_cb(ddl);
			return process_further;
		}
		vidc_sm_get_profile_info(&ddl->shared_mem
			[ddl->command_channel], &disp_profile_info);
		disp_profile_info.pic_profile = seq_hdr_info.profile;
		disp_profile_info.pic_level = seq_hdr_info.level;
		ddl_get_dec_profile_level(decoder, seq_hdr_info.profile,
			seq_hdr_info.level);
		switch (decoder->codec.codec) {
		case VCD_CODEC_H264:
			if (decoder->profile.profile == VCD_PROFILE_H264_HIGH ||
				decoder->profile.profile ==
				VCD_PROFILE_UNKNOWN) {
				if ((disp_profile_info.chroma_format_idc >
					VIDC_1080P_IDCFORMAT_420) ||
					(disp_profile_info.bit_depth_luma_minus8
					 || disp_profile_info.
					bit_depth_chroma_minus8)) {
					DDL_MSG_ERROR("Unsupported H.264 "
						"feature: IDC "
						"format : %d, Bitdepth: %d",
						disp_profile_info.
						chroma_format_idc,
						(disp_profile_info.
						 bit_depth_luma_minus8
						 ||	disp_profile_info.
					bit_depth_chroma_minus8));
					ddl_client_fatal_cb(ddl);
					return process_further;
				}
			}
			break;
		case VCD_CODEC_MPEG4:
		case VCD_CODEC_DIVX_4:
		case VCD_CODEC_DIVX_5:
		case VCD_CODEC_DIVX_6:
		case VCD_CODEC_XVID:
			if (seq_hdr_info.data_partition)
				if ((seq_hdr_info.img_size_x *
				seq_hdr_info.img_size_y) > (720 * 576)) {
					DDL_MSG_ERROR("Unsupported DP clip");
					ddl_client_fatal_cb(ddl);
					return process_further;
				}
			break;
		default:
			break;
		}
		ddl_calculate_stride(&decoder->frame_size,
			!decoder->progressive_only);
		decoder->frame_size.scan_lines =
		DDL_ALIGN(decoder->frame_size.height, DDL_TILE_ALIGN_HEIGHT);
		decoder->frame_size.stride =
		DDL_ALIGN(decoder->frame_size.width, DDL_TILE_ALIGN_WIDTH);
		parse_hdr_crop_data(ddl, &seq_hdr_info);
		if (decoder->codec.codec == VCD_CODEC_H264 &&
			seq_hdr_info.level > VIDC_1080P_H264_LEVEL4) {
			DDL_MSG_ERROR("WARNING: H264MaxLevelExceeded : %d",
				seq_hdr_info.level);
		}
		ddl_set_default_decoder_buffer_req(decoder, false);
		if (decoder->header_in_start) {
			if (!(decoder->cont_mode) ||
				(decoder->min_dpb_num >
				 decoder->client_output_buf_req.min_count) ||
				(decoder->actual_output_buf_req.sz >
				 decoder->client_output_buf_req.sz)) {
				decoder->client_frame_size =
					decoder->frame_size;
				decoder->client_output_buf_req =
					decoder->actual_output_buf_req;
				decoder->client_input_buf_req =
					decoder->actual_input_buf_req;
			}
			ddl_context->ddl_callback(VCD_EVT_RESP_START,
				VCD_S_SUCCESS, NULL, 0, (u32 *) ddl,
				ddl->client_data);
			ddl_release_command_channel(ddl_context,
				ddl->command_channel);
		} else {
			u32 seq_hdr_only_frame = false;
			u32 need_reconfig = false;
			struct vcd_frame_data *input_vcd_frm =
				&ddl->input_frame.vcd_frm;
			need_reconfig = ddl_check_reconfig(ddl);
			DDL_MSG_HIGH("%s : need_reconfig = %u\n", __func__,
				 need_reconfig);
			if (input_vcd_frm->flags &
				  VCD_FRAME_FLAG_EOS) {
					need_reconfig = false;
			}
			if (((input_vcd_frm->flags &
				VCD_FRAME_FLAG_CODECCONFIG) &&
				(!(input_vcd_frm->flags &
				VCD_FRAME_FLAG_SYNCFRAME))) ||
				input_vcd_frm->data_len <=
				seq_hdr_info.dec_frm_size) {
				seq_hdr_only_frame = true;
				input_vcd_frm->offset +=
					seq_hdr_info.dec_frm_size;
				input_vcd_frm->data_len = 0;
				input_vcd_frm->flags |=
					VCD_FRAME_FLAG_CODECCONFIG;
				ddl->input_frame.frm_trans_end =
					!need_reconfig;
				ddl_context->ddl_callback(
					VCD_EVT_RESP_INPUT_DONE,
					VCD_S_SUCCESS, &ddl->input_frame,
					sizeof(struct ddl_frame_data_tag),
					(u32 *) ddl, ddl->client_data);
			} else {
				if (decoder->codec.codec ==
					VCD_CODEC_VC1_RCV) {
					vidc_sm_set_start_byte_number(
						&ddl->shared_mem
						[ddl->command_channel],
						seq_hdr_info.dec_frm_size);
				}
			}
			if (need_reconfig) {
				struct ddl_frame_data_tag *payload =
					&ddl->input_frame;
				u32 payload_size =
					sizeof(struct ddl_frame_data_tag);
				decoder->client_frame_size =
					decoder->frame_size;
				decoder->client_output_buf_req =
					decoder->actual_output_buf_req;
				decoder->client_input_buf_req =
					decoder->actual_input_buf_req;
				if (seq_hdr_only_frame) {
					payload = NULL;
					payload_size = 0;
				}
				DDL_MSG_HIGH("%s : sending port reconfig\n",
					 __func__);
				ddl_context->ddl_callback(
					VCD_EVT_IND_OUTPUT_RECONFIG,
					VCD_S_SUCCESS, payload,
					payload_size, (u32 *) ddl,
					ddl->client_data);
			}
			if (!need_reconfig && !seq_hdr_only_frame) {
				if (!ddl_vidc_decode_set_buffers(ddl))
					process_further = false;
				else {
					DDL_MSG_ERROR("ddl_vidc_decode_set_"
						"buffers failed");
					ddl_client_fatal_cb(ddl);
				}
			} else
				ddl_release_command_channel(ddl_context,
					ddl->command_channel);
		}
	}
	return process_further;
}
Example #8
0
void *ddl_pmem_alloc(struct ddl_buf_addr *addr, size_t sz, u32 alignment)
{
	u32 alloc_size, offset = 0;
	u32 index = 0;
	struct ddl_context *ddl_context;
	struct msm_mapped_buffer *mapped_buffer = NULL;
	unsigned long iova = 0;
	unsigned long buffer_size = 0;
	unsigned long *kernel_vaddr = NULL;
	unsigned long ionflag = 0;
	unsigned long flags = 0;
	int ret = 0;
	ion_phys_addr_t phyaddr = 0;
	size_t len = 0;
	int rc = 0;
	DBG_PMEM("\n%s() IN: Requested alloc size(%u)", __func__, (u32)sz);
	if (!addr) {
		DDL_MSG_ERROR("\n%s() Invalid Parameters", __func__);
		goto bail_out;
	}
	ddl_context = ddl_get_context();
	res_trk_set_mem_type(addr->mem_type);
	alloc_size = (sz + alignment);
	if (res_trk_get_enable_ion()) {
		if (!ddl_context->video_ion_client)
			ddl_context->video_ion_client =
				res_trk_get_ion_client();
		if (!ddl_context->video_ion_client) {
			DDL_MSG_ERROR("%s() :DDL ION Client Invalid handle\n",
						 __func__);
			goto bail_out;
		}
		alloc_size = (alloc_size+4095) & ~4095;
		addr->alloc_handle = ion_alloc(
		ddl_context->video_ion_client, alloc_size, SZ_4K,
			res_trk_get_mem_type());
		if (IS_ERR_OR_NULL(addr->alloc_handle)) {
			DDL_MSG_ERROR("%s() :DDL ION alloc failed\n",
						 __func__);
			goto bail_out;
		}
		if (res_trk_check_for_sec_session() ||
			addr->mem_type == DDL_FW_MEM)
			ionflag = UNCACHED;
		else
			ionflag = CACHED;
		kernel_vaddr = (unsigned long *) ion_map_kernel(
					ddl_context->video_ion_client,
					addr->alloc_handle, ionflag);
		if (IS_ERR_OR_NULL(kernel_vaddr)) {
				DDL_MSG_ERROR("%s() :DDL ION map failed\n",
							 __func__);
				goto free_ion_alloc;
		}
		addr->virtual_base_addr = (u8 *) kernel_vaddr;
		if (res_trk_check_for_sec_session()) {
			rc = ion_phys(ddl_context->video_ion_client,
				addr->alloc_handle, &phyaddr,
					&len);
			if (rc || !phyaddr) {
				DDL_MSG_ERROR(
				"%s():DDL ION client physical failed\n",
				__func__);
				goto unmap_ion_alloc;
			}
			addr->alloced_phys_addr = phyaddr;
		} else {
			ret = ion_map_iommu(ddl_context->video_ion_client,
					addr->alloc_handle,
					VIDEO_DOMAIN,
					VIDEO_MAIN_POOL,
					SZ_4K,
					0,
					&iova,
					&buffer_size,
					UNCACHED, 0);
			if (ret || !iova) {
				DDL_MSG_ERROR(
				"%s():DDL ION ion map iommu failed, ret = %d iova = 0x%lx\n",
					__func__, ret, iova);
				goto unmap_ion_alloc;
			}
			addr->alloced_phys_addr = (phys_addr_t) iova;
		}
		if (!addr->alloced_phys_addr) {
			DDL_MSG_ERROR("%s():DDL ION client physical failed\n",
						__func__);
			goto unmap_ion_alloc;
		}
		addr->mapped_buffer = NULL;
		addr->physical_base_addr = (u8 *) addr->alloced_phys_addr;
		addr->align_physical_addr = (u8 *) DDL_ALIGN((u32)
			addr->physical_base_addr, alignment);
		offset = (u32)(addr->align_physical_addr -
				addr->physical_base_addr);
		addr->align_virtual_addr = addr->virtual_base_addr + offset;
		addr->buffer_size = alloc_size;
	} else {
		addr->alloced_phys_addr = (phys_addr_t)
		allocate_contiguous_memory_nomap(alloc_size,
			res_trk_get_mem_type(), SZ_4K);
		if (!addr->alloced_phys_addr) {
			DDL_MSG_ERROR("%s() : acm alloc failed (%d)\n",
					 __func__, alloc_size);
			goto bail_out;
		}
		flags = MSM_SUBSYSTEM_MAP_IOVA | MSM_SUBSYSTEM_MAP_KADDR;
		if (alignment == DDL_KILO_BYTE(128))
			index = 1;
		else if (alignment > SZ_4K)
			flags |= MSM_SUBSYSTEM_ALIGN_IOVA_8K;

		addr->mapped_buffer =
		msm_subsystem_map_buffer((unsigned long)addr->alloced_phys_addr,
			alloc_size, flags, &vidc_mmu_subsystem[index],
			sizeof(vidc_mmu_subsystem[index])/sizeof(unsigned int));
		if (IS_ERR(addr->mapped_buffer)) {
			pr_err(" %s() buffer map failed", __func__);
			goto free_acm_alloc;
		}
		mapped_buffer = addr->mapped_buffer;
		if (!mapped_buffer->vaddr || !mapped_buffer->iova[0]) {
			pr_err("%s() map buffers failed\n", __func__);
			goto free_map_buffers;
		}
		addr->physical_base_addr = (u8 *)mapped_buffer->iova[0];
		addr->virtual_base_addr = mapped_buffer->vaddr;
		addr->align_physical_addr = (u8 *) DDL_ALIGN((u32)
			addr->physical_base_addr, alignment);
		offset = (u32)(addr->align_physical_addr -
				addr->physical_base_addr);
		addr->align_virtual_addr = addr->virtual_base_addr + offset;
		addr->buffer_size = sz;
	}
	return addr->virtual_base_addr;
free_map_buffers:
	msm_subsystem_unmap_buffer(addr->mapped_buffer);
	addr->mapped_buffer = NULL;
free_acm_alloc:
	free_contiguous_memory_by_paddr(
		(unsigned long)addr->alloced_phys_addr);
	addr->alloced_phys_addr = (phys_addr_t)NULL;
	return NULL;
unmap_ion_alloc:
	ion_unmap_kernel(ddl_context->video_ion_client,
		addr->alloc_handle);
	addr->virtual_base_addr = NULL;
	addr->alloced_phys_addr = (phys_addr_t)NULL;
free_ion_alloc:
	ion_free(ddl_context->video_ion_client,
		addr->alloc_handle);
	addr->alloc_handle = NULL;
bail_out:
	return NULL;
}
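The expression (alloc_size + 4095) & ~4095 used before ion_alloc() rounds the request up to the 4 KiB page size. A one-line sketch of the same computation (the kernel's PAGE_ALIGN() macro expresses this portably, but this file does not rely on it):

/* Sketch: round a byte count up to the next 4 KiB boundary. */
static unsigned long round_up_to_4k(unsigned long size)
{
	return (size + 4095UL) & ~4095UL;
}
/* e.g. round_up_to_4k(4097) == 8192, round_up_to_4k(4096) == 4096 */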
void *ddl_pmem_alloc(struct ddl_buf_addr *addr, size_t sz, u32 alignment)
{
	u32 alloc_size, offset = 0;
	struct ddl_context *ddl_context;
	unsigned long iova = 0;
	unsigned long buffer_size = 0;
	unsigned long *kernel_vaddr = NULL;
	int ret = 0;
	ion_phys_addr_t phyaddr = 0;
	size_t len = 0;
	int rc = 0;
	DBG_PMEM("\n%s() IN: Requested alloc size(%u)", __func__, (u32)sz);
	if (!addr) {
		DDL_MSG_ERROR("\n%s() Invalid Parameters", __func__);
		goto bail_out;
	}
	ddl_context = ddl_get_context();
	res_trk_set_mem_type(addr->mem_type);
	alloc_size = (sz + alignment);
	if (res_trk_get_enable_ion()) {
		if (!ddl_context->video_ion_client)
			ddl_context->video_ion_client =
				res_trk_get_ion_client();
		if (!ddl_context->video_ion_client) {
			DDL_MSG_ERROR("%s() :DDL ION Client Invalid handle\n",
						 __func__);
			goto bail_out;
		}
		alloc_size = (alloc_size+4095) & ~4095;
		addr->alloc_handle = ion_alloc(
		ddl_context->video_ion_client, alloc_size, SZ_4K,
			res_trk_get_mem_type(), res_trk_get_ion_flags());
		if (IS_ERR_OR_NULL(addr->alloc_handle)) {
			DDL_MSG_ERROR("%s() :DDL ION alloc failed\n",
						 __func__);
			goto bail_out;
		}
		kernel_vaddr = (unsigned long *) ion_map_kernel(
					ddl_context->video_ion_client,
					addr->alloc_handle);
		if (IS_ERR_OR_NULL(kernel_vaddr)) {
				DDL_MSG_ERROR("%s() :DDL ION map failed\n",
							 __func__);
				goto free_ion_alloc;
		}
		addr->virtual_base_addr = (u8 *) kernel_vaddr;
		if (res_trk_check_for_sec_session()) {
			rc = ion_phys(ddl_context->video_ion_client,
				addr->alloc_handle, &phyaddr,
					&len);
			if (rc || !phyaddr) {
				DDL_MSG_ERROR(
				"%s():DDL ION client physical failed\n",
				__func__);
				goto unmap_ion_alloc;
			}
			addr->alloced_phys_addr = phyaddr;
		} else {
			ret = ion_map_iommu(ddl_context->video_ion_client,
					addr->alloc_handle,
					VIDEO_DOMAIN,
					VIDEO_MAIN_POOL,
					SZ_4K,
					0,
					&iova,
					&buffer_size,
					0, 0);
			if (ret || !iova) {
				DDL_MSG_ERROR(
				"%s():DDL ION ion map iommu failed, ret = %d iova = 0x%lx\n",
					__func__, ret, iova);
				goto unmap_ion_alloc;
			}
			addr->alloced_phys_addr = (phys_addr_t) iova;
		}
		if (!addr->alloced_phys_addr) {
			DDL_MSG_ERROR("%s():DDL ION client physical failed\n",
						__func__);
			goto unmap_ion_alloc;
		}
		addr->mapped_buffer = NULL;
		addr->physical_base_addr = (u8 *) addr->alloced_phys_addr;
		addr->align_physical_addr = (u8 *) DDL_ALIGN((u32)
			addr->physical_base_addr, alignment);
		offset = (u32)(addr->align_physical_addr -
				addr->physical_base_addr);
		addr->align_virtual_addr = addr->virtual_base_addr + offset;
		addr->buffer_size = alloc_size;
	} else {
		pr_err("ION must be enabled.");
		goto bail_out;
	}
	return addr->virtual_base_addr;
unmap_ion_alloc:
	ion_unmap_kernel(ddl_context->video_ion_client,
		addr->alloc_handle);
	addr->virtual_base_addr = NULL;
	addr->alloced_phys_addr = (phys_addr_t)NULL;
free_ion_alloc:
	ion_free(ddl_context->video_ion_client,
		addr->alloc_handle);
	addr->alloc_handle = NULL;
bail_out:
	return NULL;
}
static u32 ddl_decoder_seq_done_callback(struct ddl_context *ddl_context,
	struct ddl_client_context *ddl)
{
	struct ddl_decoder_data *decoder = &ddl->codec_data.decoder;
	struct vidc_1080p_seq_hdr_info seq_hdr_info;
	u32 process_further = true;

	DDL_MSG_MED("ddl_decoder_seq_done_callback");
	if (!ddl->decoding ||
		!DDLCLIENT_STATE_IS(ddl, DDL_CLIENT_WAIT_FOR_INITCODECDONE)) {
		DDL_MSG_ERROR("STATE-CRITICAL-HDDONE");
		ddl_client_fatal_cb(ddl);
	} else {
		ddl->cmd_state = DDL_CMD_INVALID;
		DDL_MSG_LOW("ddl_state_transition: %s ~~>"
			"DDL_CLIENT_WAIT_FOR_DPB",
			ddl_get_state_string(ddl->client_state));
		ddl->client_state = DDL_CLIENT_WAIT_FOR_DPB;
		DDL_MSG_LOW("HEADER_DONE");
		vidc_1080p_get_decode_seq_start_result(&seq_hdr_info);
		parse_hdr_size_data(ddl, &seq_hdr_info);
		if (!seq_hdr_info.img_size_x || !seq_hdr_info.img_size_y) {
			DDL_MSG_ERROR("FATAL:ZeroImageSize");
			ddl_client_fatal_cb(ddl);
			return process_further;
		}
		vidc_sm_get_profile_info(&ddl->shared_mem
			[ddl->command_channel],
			&seq_hdr_info.profile, &seq_hdr_info.level);
		ddl_get_dec_profile_level(decoder, seq_hdr_info.profile,
			seq_hdr_info.level);
		ddl_calculate_stride(&decoder->frame_size,
			!decoder->progressive_only);
		decoder->frame_size.scan_lines =
		DDL_ALIGN(decoder->frame_size.height, DDL_TILE_ALIGN_HEIGHT);
		decoder->frame_size.stride =
		DDL_ALIGN(decoder->frame_size.width, DDL_TILE_ALIGN_WIDTH);
		parse_hdr_crop_data(ddl, &seq_hdr_info);
		if (decoder->codec.codec == VCD_CODEC_H264 &&
			seq_hdr_info.level > VIDC_1080P_H264_LEVEL4) {
			DDL_MSG_ERROR("WARNING: H264MaxLevelExceeded : %d",
				seq_hdr_info.level);
		}
		ddl_set_default_decoder_buffer_req(decoder, false);
		if (decoder->header_in_start) {
			decoder->client_frame_size = decoder->frame_size;
			decoder->client_output_buf_req =
				decoder->actual_output_buf_req;
			if ((decoder->frame_size.width *
				decoder->frame_size.height) >=
				 VCD_DDL_WVGA_BUF_SIZE) {
				if ((decoder->actual_output_buf_req.
					actual_count + 2) < 10)
					decoder->client_output_buf_req.
						actual_count = 10;
				else
					decoder->client_output_buf_req.
						actual_count += 2;
			} else
				decoder->client_output_buf_req.actual_count =
					decoder->actual_output_buf_req.
					actual_count + 5;
			decoder->client_input_buf_req =
				decoder->actual_input_buf_req;
			ddl_context->ddl_callback(VCD_EVT_RESP_START,
				VCD_S_SUCCESS, NULL, 0, (u32 *) ddl,
				ddl->client_data);
			ddl_release_command_channel(ddl_context,
				ddl->command_channel);
		} else {
			u32 seq_hdr_only_frame = false;
			u32 need_reconfig = true;
			struct vcd_frame_data *input_vcd_frm =
				&ddl->input_frame.vcd_frm;

			if ((input_vcd_frm->flags &
				  VCD_FRAME_FLAG_EOS) ||
				((decoder->frame_size.width ==
				decoder->client_frame_size.width) &&
				(decoder->frame_size.height ==
				decoder->client_frame_size.height) &&
				(decoder->actual_output_buf_req.sz <=
				decoder->client_output_buf_req.sz) &&
				(decoder->actual_output_buf_req.actual_count ==
				 decoder->client_output_buf_req.actual_count) &&
				(decoder->frame_size.scan_lines ==
				decoder->client_frame_size.scan_lines) &&
				(decoder->frame_size.stride ==
				 decoder->client_frame_size.stride)))
					need_reconfig = false;
			if (((input_vcd_frm->flags &
				VCD_FRAME_FLAG_CODECCONFIG) &&
				(!(input_vcd_frm->flags &
				VCD_FRAME_FLAG_SYNCFRAME))) ||
				input_vcd_frm->data_len <=
				seq_hdr_info.dec_frm_size) {
				seq_hdr_only_frame = true;
				input_vcd_frm->offset +=
					seq_hdr_info.dec_frm_size;
				input_vcd_frm->data_len = 0;
				input_vcd_frm->flags |=
					VCD_FRAME_FLAG_CODECCONFIG;
				ddl->input_frame.frm_trans_end =
					!need_reconfig;
				ddl_context->ddl_callback(
					VCD_EVT_RESP_INPUT_DONE,
					VCD_S_SUCCESS, &ddl->input_frame,
					sizeof(struct ddl_frame_data_tag),
					(u32 *) ddl, ddl->client_data);
			} else {
				if (decoder->codec.codec ==
					VCD_CODEC_VC1_RCV) {
					vidc_sm_set_start_byte_number(
						&ddl->shared_mem
						[ddl->command_channel],
						seq_hdr_info.dec_frm_size);
				}
			}
			if (need_reconfig) {
				struct ddl_frame_data_tag *payload =
					&ddl->input_frame;
				u32 payload_size =
					sizeof(struct ddl_frame_data_tag);

				decoder->client_frame_size =
					decoder->frame_size;
				decoder->client_output_buf_req =
					decoder->actual_output_buf_req;
				decoder->client_input_buf_req =
					decoder->actual_input_buf_req;
				if (seq_hdr_only_frame) {
					payload = NULL;
					payload_size = 0;
				}
				ddl_context->ddl_callback(
					VCD_EVT_IND_OUTPUT_RECONFIG,
					VCD_S_SUCCESS, payload,
					payload_size, (u32 *) ddl,
					ddl->client_data);
			}
			if (!need_reconfig && !seq_hdr_only_frame) {
				if (!ddl_vidc_decode_set_buffers(ddl))
					process_further = false;
				else {
					DDL_MSG_ERROR("ddl_vidc_decode_set_"
						"buffers failed");
					ddl_client_fatal_cb(ddl);
				}
			} else
				ddl_release_command_channel(ddl_context,
					ddl->command_channel);
		}
	}
	return process_further;
}
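For reference, the port-reconfig decision in this final callback can be read as a single predicate: no reconfig on EOS, and none when the parsed frame geometry and buffer requirements still fit what the client negotiated. The helper below is an illustrative restatement of that condition, not part of the driver:

/* Illustrative only: mirrors the need_reconfig condition above. */
static u32 needs_port_reconfig(const struct ddl_decoder_data *decoder,
			       u32 input_frame_flags)
{
	if (input_frame_flags & VCD_FRAME_FLAG_EOS)
		return false;
	return !((decoder->frame_size.width ==
			decoder->client_frame_size.width) &&
		(decoder->frame_size.height ==
			decoder->client_frame_size.height) &&
		(decoder->actual_output_buf_req.sz <=
			decoder->client_output_buf_req.sz) &&
		(decoder->actual_output_buf_req.actual_count ==
			decoder->client_output_buf_req.actual_count) &&
		(decoder->frame_size.scan_lines ==
			decoder->client_frame_size.scan_lines) &&
		(decoder->frame_size.stride ==
			decoder->client_frame_size.stride));
}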