Example No. 1
static int sun4i_pcm_prepare(struct snd_pcm_substream *substream)
{
	struct sun4i_runtime_data *prtd = substream->runtime->private_data;
	struct dma_hw_conf codec_dma_conf;
	int ret = 0;
 
	if (!prtd->params)
		return 0;
	
	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK){
		codec_dma_conf.drqsrc_type  = DRQ_TYPE_SDRAM;
		codec_dma_conf.drqdst_type  = DRQ_TYPE_IIS;
		codec_dma_conf.xfer_type    = DMAXFER_D_BHALF_S_BHALF;
		codec_dma_conf.address_type = DMAADDRT_D_FIX_S_INC;
		codec_dma_conf.dir          = SW_DMA_WDEV;
		codec_dma_conf.reload       = 0;
		codec_dma_conf.hf_irq       = SW_DMA_IRQ_FULL;
		codec_dma_conf.from         = prtd->dma_start;
		codec_dma_conf.to           = prtd->params->dma_addr;
		ret = sw_dma_config(prtd->params->channel, &codec_dma_conf);
	}
   
	/* flush the DMA channel */
	sw_dma_ctrl(prtd->params->channel, SW_DMAOP_FLUSH);
	prtd->dma_loaded = 0;
	prtd->dma_pos = prtd->dma_start;

	/* enqueue dma buffers */
	sun4i_pcm_enqueue(substream);

	return ret;	
}
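
A note on how the prepared channel is typically kicked off: the prepare routine above only configures and flushes the channel, so the actual start comes from the trigger path. A minimal sketch reusing the sw_dma_ctrl() operations that appear elsewhere in this listing; the surrounding SNDRV_PCM_TRIGGER handling is omitted and the wrapper itself is illustrative only:

/*
 * Minimal sketch: start the channel configured by sun4i_pcm_prepare().
 * Reuses the sw_dma_ctrl() operations seen in this listing; the trigger
 * plumbing around it is an assumption for illustration.
 */
static int sun4i_pcm_trigger_start_sketch(struct sun4i_runtime_data *prtd)
{
	if (!prtd->params)
		return -EINVAL;

	return sw_dma_ctrl(prtd->params->channel, SW_DMAOP_START);
}
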
static int sun7i_pcm_prepare(struct snd_pcm_substream *substream)
{
	struct sun7i_runtime_data *prtd = substream->runtime->private_data;
	dma_config_t spdif_dma_conf;
	int ret = 0;

	if (!prtd->params)
		return 0;
		
	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK){
		//spdif_dma_conf.drqsrc_type  = DRQ_TYPE_SDRAM;
		//spdif_dma_conf.drqdst_type  = DRQ_TYPE_SPDIF;
		/* config para */
		memset(&spdif_dma_conf, 0, sizeof(spdif_dma_conf));
		spdif_dma_conf.xfer_type.src_data_width = DATA_WIDTH_16BIT;
		spdif_dma_conf.xfer_type.src_bst_len = DATA_BRST_4;
		spdif_dma_conf.xfer_type.dst_data_width = DATA_WIDTH_16BIT;
		spdif_dma_conf.xfer_type.dst_bst_len = DATA_BRST_4;
		spdif_dma_conf.address_type.src_addr_mode = NDMA_ADDR_INCREMENT;
		spdif_dma_conf.address_type.dst_addr_mode = NDMA_ADDR_NOCHANGE;
		if (substream->runtime->channels== 4) {
			printk("%s, line:%d\n", __func__, __LINE__);
			spdif_dma_conf.bconti_mode = true;
			spdif_dma_conf.irq_spt = CHAN_IRQ_FD|CHAN_IRQ_HD;
			strcpy(substream->pcm->card->id, "sndspdifraw");
			printk("%s, line:%d, substream->pcm->card->id:%s\n", __func__, __LINE__, substream->pcm->card->id);
		} else {
			printk("%s, line:%d\n", __func__, __LINE__);
			spdif_dma_conf.bconti_mode = false;
			spdif_dma_conf.irq_spt = CHAN_IRQ_FD;
			strcpy(substream->pcm->card->id, "sndspdif");
		}
		spdif_dma_conf.src_drq_type = N_SRC_SDRAM;
		spdif_dma_conf.dst_drq_type = N_DST_SPDIF_TX;//DRQDST_SPDIFTX;

		if (0 != sw_dma_config(prtd->dma_hdl, &spdif_dma_conf)) {
			printk("err:%s,line:%d\n", __func__, __LINE__);
			return -EINVAL;
		}
	} else {
		return -EINVAL;
	}
	/* flush the DMA channel */
	//sw_dma_ctrl(prtd->params->channel, SW_DMAOP_FLUSH);
	prtd->dma_loaded = 0;
	//prtd->dma_pos = prtd->dma_start;

	/* enqueue dma buffers */
	sun7i_pcm_enqueue(substream);

	return ret;	
}
static int sun7i_pcm_prepare(struct snd_pcm_substream *substream)
{
	struct sun7i_runtime_data *prtd = substream->runtime->private_data;
	dma_config_t codec_dma_conf;
	int ret = 0;
	//printk("pcm:::%s,line:%d\n", __func__, __LINE__);
	if (!prtd->params)
		return 0;
		
	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		memset(&codec_dma_conf, 0, sizeof(codec_dma_conf));
		codec_dma_conf.xfer_type.src_data_width		= DATA_WIDTH_32BIT;
		codec_dma_conf.xfer_type.src_bst_len		= DATA_BRST_4;
		codec_dma_conf.xfer_type.dst_data_width		= DATA_WIDTH_32BIT;
		codec_dma_conf.xfer_type.dst_bst_len		= DATA_BRST_4;
		codec_dma_conf.address_type.src_addr_mode	= DDMA_ADDR_LINEAR;
		codec_dma_conf.address_type.dst_addr_mode	= DDMA_ADDR_IO;
		codec_dma_conf.src_drq_type	= D_SRC_SDRAM;
		codec_dma_conf.dst_drq_type	= D_DST_HDMI_AUD;
		codec_dma_conf.bconti_mode	= false;
		codec_dma_conf.irq_spt		= CHAN_IRQ_FD;
		if (0 != sw_dma_config(prtd->dma_hdl, &codec_dma_conf)) {
			printk("err:%s,line:%d\n", __func__, __LINE__);
			return -EINVAL;
		}
		/*	dma_para_t para;
			para.src_blk_sz		= 0;
			para.src_wait_cyc	= 0;
			para.dst_blk_sz		= 0;
			para.dst_wait_cyc	= 0;	*/
		{
			u32 tmp = 0x1F071F07;

			if (0 != sw_dma_ctl(prtd->dma_hdl, DMA_OP_SET_PARA_REG, &tmp))
				return -EINVAL;
		}
	} else {
		return -EINVAL;
	}
	/* flush the DMA channel */
	/*sw_dma_ctrl(prtd->params->channel, SW_DMAOP_FLUSH);*/
	prtd->dma_loaded = 0;
	/*prtd->dma_pos = prtd->dma_start;*/
	/* enqueue dma buffers */
	sun7i_pcm_enqueue(substream);

	return ret;	
}
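
The constant 0x1F071F07 written through DMA_OP_SET_PARA_REG above, and reused as cmbk in Example No. 4 below, presumably packs the block-size and wait-cycle fields hinted at by the commented-out dma_para_t block. A hypothetical packing helper, assuming four byte-wide fields; the exact bit layout is an assumption and is not confirmed by this code:

/*
 * Hypothetical sketch only: build the DDMA parameter word from the fields
 * suggested by the commented-out dma_para_t block above. The byte-wide
 * field layout is an assumption, not taken from this code or a manual.
 */
static inline u32 ddma_para_pack_sketch(u8 src_blk_sz, u8 src_wait_cyc,
					u8 dst_blk_sz, u8 dst_wait_cyc)
{
	return ((u32)src_blk_sz << 24) | ((u32)src_wait_cyc << 16) |
	       ((u32)dst_blk_sz << 8) | (u32)dst_wait_cyc;
}

/* Under that assumed layout, 0x1F071F07 would mean block size 0x1F and
 * 0x07 wait cycles on both the source and the destination side. */
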
Example No. 4
static int sun4i_pcm_prepare(struct snd_pcm_substream *substream)
{
	struct sun4i_runtime_data *prtd = substream->runtime->private_data;
	struct dma_hw_conf codec_dma_conf;
	int ret = 0;
		
//	codec_dma_conf = kmalloc(sizeof(struct dma_hw_conf), GFP_KERNEL);
//	if (!codec_dma_conf)   
//	{
//	   ret =  - ENOMEM;
//	   return ret;
//	}
	if (!prtd->params)
		return 0;
		
	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		codec_dma_conf.drqsrc_type  = DRQ_TYPE_SDRAM;
		codec_dma_conf.drqdst_type  = DRQ_TYPE_HDMIAUDIO;
		codec_dma_conf.xfer_type    = DMAXFER_D_BWORD_S_BWORD;
		codec_dma_conf.address_type = DMAADDRT_D_IO_S_LN;
		codec_dma_conf.dir          = SW_DMA_WDEV;
		codec_dma_conf.reload       = 0;
		codec_dma_conf.hf_irq       = SW_DMA_IRQ_FULL;
		codec_dma_conf.from         = prtd->dma_start;
		codec_dma_conf.to           = prtd->params->dma_addr;
		codec_dma_conf.cmbk         = 0x1F071F07;
		ret = sw_dma_config(prtd->params->channel, &codec_dma_conf);
	}
   
	/* flush the DMA channel */
	sw_dma_ctrl(prtd->params->channel, SW_DMAOP_FLUSH);
	prtd->dma_loaded = 0;
	prtd->dma_pos = prtd->dma_start;

	/* enqueue dma buffers */
	sun4i_pcm_enqueue(substream);

	return ret;	
}
Example No. 5
/**
 * __dtc_chain_conti_mode - dma test case for DTC_CHAIN_CONTI_MOD
 *
 * Returns 0 if success, the err line number if failed.
 */
u32 __dtc_chain_conti_mode(void)
{
	u32 	uret = 0;
	void 	*src_vaddr = NULL, *dst_vaddr = NULL;
	u32 	usrc_paddr = 0, udst_paddr = 0;
	dm_hdl_t dma_hdl = (dm_hdl_t)NULL;
	struct dma_cb_t done_cb;
	struct dma_op_cb_t op_cb;
	struct dma_config_t dma_config;

	pr_info("%s enter\n", __func__);

	/* prepare the buffer and data */
	src_vaddr = dma_alloc_coherent(NULL, DTC_TOTAL_LEN, (dma_addr_t *)&usrc_paddr, GFP_KERNEL);
	if(NULL == src_vaddr) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: src_vaddr 0x%08x, usrc_paddr 0x%08x\n", __func__, (u32)src_vaddr, usrc_paddr);
	dst_vaddr = dma_alloc_coherent(NULL, DTC_TOTAL_LEN, (dma_addr_t *)&udst_paddr, GFP_KERNEL);
	if(NULL == dst_vaddr) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: dst_vaddr 0x%08x, udst_paddr 0x%08x\n", __func__, (u32)dst_vaddr, udst_paddr);

	get_random_bytes(src_vaddr, DTC_TOTAL_LEN);
	memset(dst_vaddr, 0x54, DTC_TOTAL_LEN);

	atomic_set(&g_acur_cnt, 0);
	g_src_addr = usrc_paddr;
	g_dst_addr = udst_paddr;

	dma_hdl = sw_dma_request("m2m_dma", DMA_WORK_MODE_CHAIN);
	if(NULL == dma_hdl) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_request success, dma_hdl 0x%08x\n", __func__, (u32)dma_hdl);

	/* set callback */
	memset(&done_cb, 0, sizeof(done_cb));
	memset(&op_cb, 0, sizeof(op_cb));
	done_cb.func = __cb_qd_conti_mode;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_QD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set queuedone_cb success\n", __func__);
	done_cb.func = __cb_fd_conti_mode;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_FD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set fulldone_cb success\n", __func__);
	done_cb.func = __cb_hd_conti_mode;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_HD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set halfdone_cb success\n", __func__);
	op_cb.func = __cb_op_conti_mode;
	op_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_OP_CB, (void *)&op_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set op_cb success\n", __func__);

	memset(&dma_config, 0, sizeof(dma_config));
	dma_config.xfer_type 	= DMAXFER_D_BWORD_S_BWORD;
	dma_config.address_type = DMAADDRT_D_LN_S_LN;
	dma_config.para 	= 0;
	dma_config.irq_spt 	= CHAN_IRQ_HD | CHAN_IRQ_FD | CHAN_IRQ_QD;
	dma_config.src_addr 	= usrc_paddr;
	dma_config.dst_addr 	= udst_paddr;
	//dma_config.byte_cnt 	= DTC_ONE_LEN;
	dma_config.byte_cnt 	= DTC_TOTAL_LEN; /* only enqueue one buf */
	dma_config.bconti_mode 	= true;
	dma_config.src_drq_type 	= DRQSRC_SDRAM;
	dma_config.dst_drq_type 	= DRQDST_SDRAM;
	if(0 != sw_dma_config(dma_hdl, &dma_config, ENQUE_PHASE_NORMAL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_config success\n", __func__);
	sw_dma_dump_chan(dma_hdl);

	/* start dma */
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_START, NULL)) {
		uret = __LINE__;
		goto end;
	}
	/* let callback go on enqueueing... */
	msleep(8000);

	/* check if data ok */
	if(0 == memcmp(src_vaddr, dst_vaddr, DTC_TOTAL_LEN))
		pr_info("%s: data check ok!\n", __func__);
	else {
		pr_err("%s: data check err!\n", __func__);
		uret = __LINE__; /* return err */
		goto end;
	}

	/* stop and free dma channel */
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_STOP, NULL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_stop success\n", __func__);
	if(0 != sw_dma_release(dma_hdl)) {
		uret = __LINE__;
		goto end;
	}
	dma_hdl = (dm_hdl_t)NULL;
	pr_info("%s: sw_dma_release success\n", __func__);

end:
	if(0 != uret)
		pr_err("%s err, line %d!\n", __func__, uret);
	else
		pr_info("%s, success!\n", __func__);

	if((dm_hdl_t)NULL != dma_hdl) {
		pr_err("%s, stop and release dma handle now!\n", __func__);
		if(0 != sw_dma_ctl(dma_hdl, DMA_OP_STOP, NULL))
			pr_err("%s err, line %d!\n", __func__, __LINE__);
		if(0 != sw_dma_release(dma_hdl))
			pr_err("%s err, line %d!\n", __func__, __LINE__);
	}

	if(NULL != src_vaddr)
		dma_free_coherent(NULL, DTC_TOTAL_LEN, src_vaddr, usrc_paddr);
	if(NULL != dst_vaddr)
		dma_free_coherent(NULL, DTC_TOTAL_LEN, dst_vaddr, udst_paddr);

	pr_info("%s, end!\n", __func__);
	return uret;
}
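
Since the test returns 0 on success or the failing line number otherwise, a caller only has to check that value. A minimal sketch, assuming the case is run from a test module's init path; the module wrapper is illustrative and not part of the original test driver:

/*
 * Minimal sketch: run the continuous-mode chain test from a module init.
 * The module wrapper is an assumption for illustration; only the return
 * convention (0 or failing line number) comes from the test above.
 */
static int __init dma_conti_test_init(void)
{
	u32 err_line = __dtc_chain_conti_mode();

	if (err_line != 0)
		pr_err("DTC_CHAIN_CONTI_MOD failed at line %u\n", err_line);
	return 0;
}
module_init(dma_conti_test_init);
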
Example No. 6
/**
 * __dtc_many_enq - dma test case for DTC_1TM2M_MANY_ENQ
 *
 * Returns 0 if success, the err line number if failed.
 */
u32 __dtc_many_enq(void)
{
	u32 	uret = 0;
	void 	*src_vaddr = NULL, *dst_vaddr = NULL;
	u32 	usrc_paddr = 0, udst_paddr = 0;
	dm_hdl_t dma_hdl = (dm_hdl_t)NULL;
	struct dma_cb_t done_cb;
	struct dma_op_cb_t op_cb;
	struct dma_config_t dma_config;

	pr_info("%s enter\n", __func__);

	src_vaddr = dma_alloc_coherent(NULL, DTC_TOTAL_LEN, (dma_addr_t *)&usrc_paddr, GFP_KERNEL);
	if(NULL == src_vaddr) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: src_vaddr 0x%08x, usrc_paddr 0x%08x\n", __func__, (u32)src_vaddr, usrc_paddr);
	dst_vaddr = dma_alloc_coherent(NULL, DTC_TOTAL_LEN, (dma_addr_t *)&udst_paddr, GFP_KERNEL);
	if(NULL == dst_vaddr) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: dst_vaddr 0x%08x, udst_paddr 0x%08x\n", __func__, (u32)dst_vaddr, udst_paddr);

	get_random_bytes(src_vaddr, DTC_TOTAL_LEN);
	memset(dst_vaddr, 0x54, DTC_TOTAL_LEN);

	atomic_set(&g_acur_cnt, 0);
	g_src_addr = usrc_paddr;
	g_dst_addr = udst_paddr;

	dma_hdl = sw_dma_request("m2m_dma", DMA_WORK_MODE_CHAIN);
	if(NULL == dma_hdl) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_request success, dma_hdl 0x%08x\n", __func__, (u32)dma_hdl);

	/* set callback */
	memset(&done_cb, 0, sizeof(done_cb));
	memset(&op_cb, 0, sizeof(op_cb));
	done_cb.func = __cb_qd_many_enq;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_QD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set queuedone_cb success\n", __func__);
	done_cb.func = __cb_fd_many_enq;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_FD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set fulldone_cb success\n", __func__);
	done_cb.func = __cb_hd_many_enq;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_HD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set halfdone_cb success\n", __func__);
	op_cb.func = __cb_op_many_enq;
	op_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_OP_CB, (void *)&op_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set op_cb success\n", __func__);

	memset(&dma_config, 0, sizeof(dma_config));
	dma_config.xfer_type = DMAXFER_D_BWORD_S_BWORD;
	dma_config.address_type = DMAADDRT_D_LN_S_LN;
	dma_config.para = 0;
	dma_config.irq_spt = CHAN_IRQ_HD | CHAN_IRQ_FD | CHAN_IRQ_QD;
	dma_config.src_addr = usrc_paddr;
	dma_config.dst_addr = udst_paddr;
	dma_config.byte_cnt = DTC_ONE_LEN;
	dma_config.bconti_mode = false;
	dma_config.src_drq_type = DRQSRC_SDRAM;
	dma_config.dst_drq_type = DRQDST_SDRAM;
	if(0 != sw_dma_config(dma_hdl, &dma_config, ENQUE_PHASE_NORMAL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_config success\n", __func__);
	sw_dma_dump_chan(dma_hdl);

	/* start dma */
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_START, NULL)) {
		uret = __LINE__;
		goto end;
	}

	/* normal enqueue and callback enqueue simultaneously */
	{
		u32 	ucur_cnt = 0, ucur_saddr = 0, ucur_daddr = 0;
		u32	uloop_cnt = DTC_TOTAL_LEN / DTC_ONE_LEN;

		while((ucur_cnt = atomic_add_return(1, &g_acur_cnt)) < uloop_cnt) {
			pr_info("%s, line %d, ucur_cnt %d\n", __func__, __LINE__, ucur_cnt);
			ucur_saddr = g_src_addr + ucur_cnt * DTC_ONE_LEN;
			ucur_daddr = g_dst_addr + ucur_cnt * DTC_ONE_LEN;
			if(0 != sw_dma_enqueue(dma_hdl, ucur_saddr, ucur_daddr, DTC_ONE_LEN, ENQUE_PHASE_NORMAL))
				printk("%s err, line %d\n", __func__, __LINE__);

			/* in order to make cb/normal enqueue happen simultaneously */
			msleep(0);
		}
	}
	pr_info("%s, line %d\n", __func__, __LINE__);

	__waitdone_many_enq();

	if(0 == memcmp(src_vaddr, dst_vaddr, DTC_TOTAL_LEN))
		pr_info("%s: data check ok!\n", __func__);
	else {
		pr_err("%s: data check err!\n", __func__);
		uret = __LINE__; /* return err */
		goto end;
	}

	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_STOP, NULL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_stop success\n", __func__);
	if(0 != sw_dma_release(dma_hdl)) {
		uret = __LINE__;
		goto end;
	}
	dma_hdl = (dm_hdl_t)NULL;
	pr_info("%s: sw_dma_release success\n", __func__);

end:
	if(0 != uret)
		pr_err("%s err, line %d!\n", __func__, uret);
	else
		pr_info("%s, success!\n", __func__);

	if((dm_hdl_t)NULL != dma_hdl) {
		pr_err("%s, stop and release dma handle now!\n", __func__);
		if(0 != sw_dma_ctl(dma_hdl, DMA_OP_STOP, NULL))
			pr_err("%s err, line %d!\n", __func__, __LINE__);
		if(0 != sw_dma_release(dma_hdl))
			pr_err("%s err, line %d!\n", __func__, __LINE__);
	}

	if(NULL != src_vaddr)
		dma_free_coherent(NULL, DTC_TOTAL_LEN, src_vaddr, usrc_paddr);
	if(NULL != dst_vaddr)
		dma_free_coherent(NULL, DTC_TOTAL_LEN, dst_vaddr, udst_paddr);

	pr_info("%s end!\n", __func__);
	return uret;
}
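
__waitdone_many_enq() is referenced above but not shown in this listing. A hypothetical sketch of such a wait helper, assuming a g_adma_done-style atomic flag (as used in Example No. 7) that the queue-done callback sets; the flag name and timeout are illustrative:

/*
 * Hypothetical sketch of a wait helper in the spirit of __waitdone_many_enq():
 * poll an atomic "done" flag that the queue-done callback is assumed to set.
 * The g_adma_done flag is borrowed from Example No. 7; the timeout value is
 * illustrative.
 */
static int waitdone_many_enq_sketch(void)
{
	u32 timeout_ms = 10000;

	while (timeout_ms--) {
		if (atomic_read(&g_adma_done) == 1)
			return 0;
		msleep(1);
	}
	return -ETIMEDOUT; /* dma did not signal completion in time */
}
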
Example No. 7
u32 __dtc_stopcmd(void)
{
	u32 	uret = 0;
	u32 	i = 0;
	void 	*src_vaddr = NULL, *dst_vaddr = NULL;
	u32 	usrc_paddr = 0, udst_paddr = 0;
	dm_hdl_t dma_hdl = (dm_hdl_t)NULL;
	struct dma_cb_t done_cb;
	struct dma_op_cb_t op_cb;
	struct dma_config_t dma_config;

	pr_info("%s enter\n", __func__);

	src_vaddr = dma_alloc_coherent(NULL, DTC_TOTAL_LEN, (dma_addr_t *)&usrc_paddr, GFP_KERNEL);
	if(NULL == src_vaddr) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: src_vaddr 0x%08x, usrc_paddr 0x%08x\n", __func__, (u32)src_vaddr, usrc_paddr);
	dst_vaddr = dma_alloc_coherent(NULL, DTC_TOTAL_LEN, (dma_addr_t *)&udst_paddr, GFP_KERNEL);
	if(NULL == dst_vaddr) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: dst_vaddr 0x%08x, udst_paddr 0x%08x\n", __func__, (u32)dst_vaddr, udst_paddr);

	get_random_bytes(src_vaddr, DTC_TOTAL_LEN);
	memset(dst_vaddr, 0x54, DTC_TOTAL_LEN);

	atomic_set(&g_acur_cnt, 0);
	g_src_addr = usrc_paddr;
	g_dst_addr = udst_paddr;

	dma_hdl = sw_dma_request("case_stp_dma", DMA_WORK_MODE_CHAIN);
	//dma_hdl = sw_dma_request("case_stp_dma", DMA_WORK_MODE_SINGLE);
	if(NULL == dma_hdl) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_request success, dma_hdl 0x%08x\n", __func__, (u32)dma_hdl);

	/* set callback */
	memset(&done_cb, 0, sizeof(done_cb));
	memset(&op_cb, 0, sizeof(op_cb));
	done_cb.func = __cb_qd_stopcmd;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_QD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set queuedone_cb success\n", __func__);
	done_cb.func = __cb_fd_stopcmd;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_FD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set fulldone_cb success\n", __func__);
	done_cb.func = __cb_hd_stopcmd;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_HD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set halfdone_cb success\n", __func__);
	op_cb.func = __cb_op_stopcmd;
	op_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_OP_CB, (void *)&op_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set op_cb success\n", __func__);

	memset(&dma_config, 0, sizeof(dma_config));
	dma_config.src_drq_type = DRQSRC_SDRAM;
	dma_config.dst_drq_type = DRQDST_SDRAM;
	dma_config.bconti_mode = false; /* must be 0, otherwise irq will come again and again */
	dma_config.xfer_type = DMAXFER_D_BWORD_S_BWORD;
	dma_config.address_type = DMAADDRT_D_LN_S_LN; /* change with dma type */
	dma_config.irq_spt = CHAN_IRQ_HD | CHAN_IRQ_FD | CHAN_IRQ_QD;
	dma_config.src_addr = usrc_paddr;
	dma_config.dst_addr = udst_paddr;
	dma_config.byte_cnt = DTC_ONE_LEN;
	dma_config.para = 0; /* to check here */
	if(0 != sw_dma_config(dma_hdl, &dma_config, ENQUE_PHASE_NORMAL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_config success\n", __func__);

	atomic_set(&g_adma_done, 0);
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_START, NULL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_start success\n", __func__);

	/* callback enqueue and normal enqueue simultaneously */
	i = 0;
	while(i++ < 100) {
		u32	ucur_saddr = 0, ucur_daddr = 0;
		u32	uindex = 0;
		u32	j;

		get_random_bytes(&uindex, sizeof(uindex));
		uindex %= (DTC_TOTAL_LEN / DTC_ONE_LEN);
		ucur_saddr = g_src_addr + uindex * DTC_ONE_LEN;
		ucur_daddr = g_dst_addr + uindex * DTC_ONE_LEN;

		/* enqueue the same buffer seven times back to back */
		for (j = 0; j < 7; j++) {
			if (0 != sw_dma_enqueue(dma_hdl, ucur_saddr, ucur_daddr,
					DTC_ONE_LEN, ENQUE_PHASE_NORMAL)) {
				uret = __LINE__;
				goto end;
			}
		}
	}

#if 0
	if(0 != __waitdone_stopcmd()) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: __waitdone_stopcmd sucess\n", __func__);
#endif
	sw_dma_dump_chan(dma_hdl);

	/* stop and release dma channel */
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_STOP, NULL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_stop success\n", __func__);
	if(0 != sw_dma_release(dma_hdl)) {
		uret = __LINE__;
		goto end;
	}
	dma_hdl = (dm_hdl_t)NULL;
	pr_info("%s: sw_dma_release success\n", __func__);

	/* check if data ok */
	if(0 == memcmp(src_vaddr, dst_vaddr, DTC_TOTAL_LEN))
		pr_info("%s: data check ok!\n", __func__);
	else {
		pr_err("%s: data check err!\n", __func__);
		//uret = __LINE__; /* we do not need the data to match, we just test the stop cmd */
	}

end:
	if(0 != uret)
		pr_err("%s err, line %d!\n", __func__, uret);
	else
		pr_info("%s success!\n", __func__);

	if((dm_hdl_t)NULL != dma_hdl) {
		if(0 != sw_dma_ctl(dma_hdl, DMA_OP_STOP, NULL))
			pr_err("%s err, line %d!\n", __func__, __LINE__);
		if(0 != sw_dma_release(dma_hdl))
			pr_err("%s err, line %d!\n", __func__, __LINE__);
	}

	if(NULL != src_vaddr)
		dma_free_coherent(NULL, DTC_TOTAL_LEN, src_vaddr, usrc_paddr);
	if(NULL != dst_vaddr)
		dma_free_coherent(NULL, DTC_TOTAL_LEN, dst_vaddr, udst_paddr);

	return uret;
}
Example No. 8
int wemac_dma_config_start(__u8 rw, void * buff_addr, __u32 len)
{
	int ret;
	if(rw == 0){
		struct dma_hw_conf emac_hwconf = {
			.xfer_type = DMAXFER_D_SWORD_S_SWORD,
			.hf_irq = SW_DMA_IRQ_FULL,
			.cmbk = 0x03030303,
			.dir = SW_DMA_RDEV,
			.from = 0x01C0B04C,
			.address_type = DMAADDRT_D_LN_S_IO,
			.drqsrc_type = DRQ_TYPE_EMAC
		};

		ret = sw_dma_setflags(ch_rx, SW_DMAF_AUTOSTART);
		if(ret!=0)
			return ret;
		ret = sw_dma_config(ch_rx, &emac_hwconf);
		if(ret!=0)
			return ret;
		ret = emacrx_DMAEqueueBuf(ch_rx, buff_addr, len);
		if(ret!=0)
			return ret;
		ret = sw_dma_ctrl(ch_rx, SW_DMAOP_START);
	} else {
		struct dma_hw_conf emac_hwconf = {
			.xfer_type = DMAXFER_D_SWORD_S_SWORD,
			.hf_irq = SW_DMA_IRQ_FULL,
			.cmbk = 0x03030303,
			.dir = SW_DMA_WDEV,
			.to = 0x01C0B024,
			.address_type = DMAADDRT_D_IO_S_LN,
			.drqdst_type = DRQ_TYPE_EMAC
		};

		ret = sw_dma_setflags(ch_tx, SW_DMAF_AUTOSTART);
		if(ret!=0)
			return ret;
		ret = sw_dma_config(ch_tx, &emac_hwconf);
		if(ret!=0)
			return ret;
		ret = emactx_DMAEqueueBuf(ch_tx, buff_addr, len);
		if(ret!=0)
			return ret;
		ret = sw_dma_ctrl(ch_tx, SW_DMAOP_START);
	}
	return 0;
}

__s32 emacrx_WaitDmaFinish(void)
{
	unsigned long flags;
	int i;

	while (1) {
		local_irq_save(flags);
		if (emacrx_dma_completed_flag) {
			local_irq_restore(flags);
			break;
		}
		local_irq_restore(flags);
		/* crude busy-wait delay before polling the flag again,
		 * done with interrupts enabled so the DMA IRQ can set it */
		for (i = 0; i < 1000; i++)
			;
	}

	return 0;
}
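
A minimal usage sketch for the receive direction (rw == 0 above), assuming the caller owns a DMA-safe buffer; the wrapper function itself is illustrative and not part of the original driver:

/*
 * Minimal usage sketch for the receive direction. The wrapper is an
 * assumption for illustration; it only combines the two functions shown
 * in this example.
 */
static int wemac_rx_via_dma_sketch(void *rx_buf, u32 len)
{
	int ret;

	ret = wemac_dma_config_start(0 /* rx */, rx_buf, len);
	if (ret != 0)
		return ret;

	/* block until the full-transfer interrupt path sets the flag */
	return emacrx_WaitDmaFinish();
}
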
Example No. 9
u32 __dtc_case_enq_aftdone(void)
{
	u32 	uret = 0;
	u32 	i = 0;
	void 	*src_vaddr = NULL, *dst_vaddr = NULL;
	u32 	src_paddr = 0, dst_paddr = 0;
	dm_hdl_t dma_hdl = (dm_hdl_t)NULL;
	struct dma_cb_t done_cb;
	struct dma_op_cb_t op_cb;
	struct dma_config_t dma_config;

	pr_info("%s enter\n", __func__);

	/* prepare the buffer and data */
	src_vaddr = dma_alloc_coherent(NULL, DTC_TOTAL_LEN, (dma_addr_t *)&src_paddr, GFP_KERNEL);
	if(NULL == src_vaddr) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: src_vaddr 0x%08x, src_paddr 0x%08x\n", __func__, (u32)src_vaddr, src_paddr);
	dst_vaddr = dma_alloc_coherent(NULL, DTC_TOTAL_LEN, (dma_addr_t *)&dst_paddr, GFP_KERNEL);
	if(NULL == dst_vaddr) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: dst_vaddr 0x%08x, dst_paddr 0x%08x\n", __func__, (u32)dst_vaddr, dst_paddr);

	/* init src buffer */
	get_random_bytes(src_vaddr, DTC_TOTAL_LEN);
	memset(dst_vaddr, 0x54, DTC_TOTAL_LEN);

	/* init loop para */
	atomic_set(&g_acur_cnt, 0);
	g_src_addr = src_paddr;
	g_dst_addr = dst_paddr;

	/* request dma channel */
	dma_hdl = sw_dma_request("m2m_dma", DMA_WORK_MODE_CHAIN);
	if(NULL == dma_hdl) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_request success, dma_hdl 0x%08x\n", __func__, (u32)dma_hdl);

	/* set callback */
	memset(&done_cb, 0, sizeof(done_cb));
	memset(&op_cb, 0, sizeof(op_cb));
	done_cb.func = __cb_qd_case_enq_aftdone;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_QD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set queuedone_cb success\n", __func__);
	done_cb.func = __cb_fd_case_enq_aftdone;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_FD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set fulldone_cb success\n", __func__);
	done_cb.func = __cb_hd_case_enq_aftdone;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_HD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set halfdone_cb success\n", __func__);
	op_cb.func = __cb_op_case_enq_aftdone;
	op_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_OP_CB, (void *)&op_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set op_cb success\n", __func__);

	/* enqueue buffer */
	memset(&dma_config, 0, sizeof(dma_config));
	dma_config.src_drq_type = DRQSRC_SDRAM;
	dma_config.dst_drq_type = DRQDST_SDRAM;
	//dma_config.conti_mode = 1;
	dma_config.bconti_mode 	= false; /* must be 0, otherwise irq will come again and again */
	dma_config.xfer_type 	= DMAXFER_D_BWORD_S_BWORD;
	dma_config.address_type = DMAADDRT_D_LN_S_LN; /* change with dma type */
	dma_config.irq_spt 	= CHAN_IRQ_HD | CHAN_IRQ_FD | CHAN_IRQ_QD;
	dma_config.src_addr 	= src_paddr;
	dma_config.dst_addr 	= dst_paddr;
	dma_config.byte_cnt 	= DTC_ONE_LEN;
	dma_config.para 	= 0;
	if(0 != sw_dma_config(dma_hdl, &dma_config, ENQUE_PHASE_NORMAL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_config success\n", __func__);
	sw_dma_dump_chan(dma_hdl);

	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_START, NULL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_start success\n", __func__);

	if(0 != __waitdone_case_enq_aftdone()) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: __waitdone_case_enq_aftdone sucess\n", __func__);

	/* after done, app and fd_cb enqueue simultaneously */
	i = 0;
	while(i++ < 30) {
		u32 ucur_saddr = 0, ucur_daddr = 0;

		pr_info("%s: i %d\n", __func__, i);
		ucur_saddr = g_src_addr + 0 * DTC_ONE_LEN;
		ucur_daddr = g_dst_addr + 0 * DTC_ONE_LEN;
		if(0 != sw_dma_enqueue(dma_hdl, ucur_saddr, ucur_daddr, DTC_ONE_LEN, ENQUE_PHASE_NORMAL)) {
			uret = __LINE__;
			goto end;
		}
		msleep(1);
	}
	msleep(2000);

	if(0 == memcmp(src_vaddr, dst_vaddr, DTC_TOTAL_LEN))
		pr_info("%s: data check ok!\n", __func__);
	else {
		pr_err("%s: data check err!\n", __func__);
		uret = __LINE__; /* return err */
		goto end;
	}

	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_STOP, NULL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_stop success\n", __func__);
	if(0 != sw_dma_release(dma_hdl)) {
		uret = __LINE__;
		goto end;
	}
	dma_hdl = (dm_hdl_t)NULL;
	pr_info("%s: sw_dma_release success\n", __func__);

end:
	/* print err line */
	if(0 != uret)
		pr_err("%s err, line %d!\n", __func__, uret);
	else
		pr_info("%s success!\n", __func__);

	if((dm_hdl_t)NULL != dma_hdl) {
		if(0 != sw_dma_ctl(dma_hdl, DMA_OP_STOP, NULL)) {
			pr_err("%s err, line %d!\n", __func__, __LINE__);
		}
		if(0 != sw_dma_release(dma_hdl)) {
			pr_err("%s err, line %d!\n", __func__, __LINE__);
		}
	}

	if(NULL != src_vaddr)
		dma_free_coherent(NULL, DTC_TOTAL_LEN, src_vaddr, src_paddr);
	if(NULL != dst_vaddr)
		dma_free_coherent(NULL, DTC_TOTAL_LEN, dst_vaddr, dst_paddr);

	return uret;
}
Example No. 10
/**
 * __dtc_chain_mode - dma test case for chain mode
 *
 * Returns 0 if success, the err line number if failed.
 */
u32 __dtc_chain_mode(void)
{
	u32 	uret = 0;
	void 	*src_vaddr = NULL, *dst_vaddr = NULL;
	u32 	src_paddr = 0, dst_paddr = 0;
	dm_hdl_t dma_hdl = (dm_hdl_t)NULL;
	struct dma_cb_t done_cb;
	struct dma_op_cb_t op_cb;
	struct dma_config_t dma_config;

	pr_info("%s enter\n", __func__);

	/* prepare the buffer and data */
	src_vaddr = dma_alloc_coherent(NULL, DTC_TOTAL_LEN, (dma_addr_t *)&src_paddr, GFP_KERNEL);
	if(NULL == src_vaddr) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: src_vaddr 0x%08x, src_paddr 0x%08x\n", __func__, (u32)src_vaddr, src_paddr);
	dst_vaddr = dma_alloc_coherent(NULL, DTC_TOTAL_LEN, (dma_addr_t *)&dst_paddr, GFP_KERNEL);
	if(NULL == dst_vaddr) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: dst_vaddr 0x%08x, dst_paddr 0x%08x\n", __func__, (u32)dst_vaddr, dst_paddr);

	/* init src buffer */
	get_random_bytes(src_vaddr, DTC_TOTAL_LEN);
	memset(dst_vaddr, 0x54, DTC_TOTAL_LEN);

	/* init loop para */
	atomic_set(&g_acur_cnt, 0);
	g_src_addr = src_paddr;
	g_dst_addr = dst_paddr;

	/* request dma channel */
	dma_hdl = sw_dma_request("m2m_dma", DMA_WORK_MODE_CHAIN);
	if(NULL == dma_hdl) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_request success, dma_hdl 0x%08x\n", __func__, (u32)dma_hdl);

	/* set queue done callback */
	memset(&done_cb, 0, sizeof(done_cb));
	memset(&op_cb, 0, sizeof(op_cb));
	done_cb.func = __cb_qd_chain;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_QD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set queuedone_cb success\n", __func__);
	/* set full done callback */
	done_cb.func = __cb_fd_chain;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_FD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set fulldone_cb success\n", __func__);
	/* set half done callback */
	done_cb.func = __cb_hd_chain;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_HD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set halfdone_cb success\n", __func__);
	/* set operation done callback */
	op_cb.func = __cb_op_chain;
	op_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_OP_CB, (void *)&op_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set op_cb success\n", __func__);

	/* set config para */
	memset(&dma_config, 0, sizeof(dma_config));
	dma_config.xfer_type 	= DMAXFER_D_BWORD_S_BWORD;
	dma_config.address_type = DMAADDRT_D_LN_S_LN;
	dma_config.para 	= 0;
	dma_config.irq_spt 	= CHAN_IRQ_HD | CHAN_IRQ_FD | CHAN_IRQ_QD;
	dma_config.src_addr 	= src_paddr;
	dma_config.dst_addr 	= dst_paddr;
	dma_config.byte_cnt 	= DTC_ONE_LEN;
	//dma_config.conti_mode = 1;
	dma_config.bconti_mode = false;
	dma_config.src_drq_type = DRQSRC_SDRAM;
	dma_config.dst_drq_type = DRQDST_SDRAM;
	/* enqueue buffer */
	if(0 != sw_dma_config(dma_hdl, &dma_config, ENQUE_PHASE_NORMAL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_config success\n", __func__);
	/* dump chain */
	sw_dma_dump_chan(dma_hdl);

	/* start dma */
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_START, NULL)) {
		uret = __LINE__;
		goto end;
	}

	/* enqueue the other buffers, with callback enqueue running simultaneously */
	{
		u32 	ucur_cnt = 0, ucur_saddr = 0, ucur_daddr = 0;
		u32	uloop_cnt = DTC_TOTAL_LEN / DTC_ONE_LEN;
		while((ucur_cnt = atomic_add_return(1, &g_acur_cnt)) < uloop_cnt) {
			ucur_saddr = g_src_addr + ucur_cnt * DTC_ONE_LEN;
			ucur_daddr = g_dst_addr + ucur_cnt * DTC_ONE_LEN;
			if(0 != sw_dma_enqueue(dma_hdl, ucur_saddr, ucur_daddr, DTC_ONE_LEN, ENQUE_PHASE_NORMAL))
				printk("%s err, line %d\n", __func__, __LINE__);
		}
	}
	pr_info("%s, line %d\n", __func__, __LINE__);

	/* wait dma done */
	if(0 != __waitdone_chain()) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: __waitdone_chain sucess\n", __func__);

	/*
	 * NOTE: must sleep here, because when __waitdone_chain returns, buffer
	 * enqueueing is complete but the data transfer might not be, 2012-11-14
	 */
	msleep(1000);

	/* check if data ok */
	if(0 == memcmp(src_vaddr, dst_vaddr, DTC_TOTAL_LEN))
		pr_info("%s: data check ok!\n", __func__);
	else {
		pr_err("%s: data check err!\n", __func__);
		uret = __LINE__; /* return err */
		goto end;
	}

	/* stop and release dma channel */
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_STOP, NULL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_stop success\n", __func__);
	if(0 != sw_dma_release(dma_hdl)) {
		uret = __LINE__;
		goto end;
	}
	dma_hdl = (dm_hdl_t)NULL;
	pr_info("%s: sw_dma_release success\n", __func__);

end:
	if(0 != uret)
		pr_err("%s err, line %d!\n", __func__, uret); /* print err line */
	else
		pr_info("%s, success!\n", __func__);

	/* stop and free dma channel, if need */
	if((dm_hdl_t)NULL != dma_hdl) {
		pr_err("%s, stop and release dma handle now!\n", __func__);
		if(0 != sw_dma_ctl(dma_hdl, DMA_OP_STOP, NULL))
			pr_err("%s err, line %d!\n", __func__, __LINE__);
		if(0 != sw_dma_release(dma_hdl))
			pr_err("%s err, line %d!\n", __func__, __LINE__);
	}
	pr_err("%s, line %d!\n", __func__, __LINE__);

	/* free dma memory */
	if(NULL != src_vaddr)
		dma_free_coherent(NULL, DTC_TOTAL_LEN, src_vaddr, src_paddr);
	if(NULL != dst_vaddr)
		dma_free_coherent(NULL, DTC_TOTAL_LEN, dst_vaddr, dst_paddr);

	pr_err("%s, end!\n", __func__);
	return uret;
}
static int sun7i_pcm_prepare(struct snd_pcm_substream *substream)
{
	dma_config_t codec_play_dma_conf;
	dma_config_t codec_capture_dma_conf;
	int play_ret = 0, capture_ret = 0;
	struct sun7i_playback_runtime_data *play_prtd = NULL;
	struct sun7i_capture_runtime_data *capture_prtd = NULL;

	/*pr_info("sun7i_i2sdma.c::substream->stream%d)\n",substream->stream);*/
	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		play_prtd = substream->runtime->private_data;
		if (!play_prtd->params)
			return 0;
		/*pr_info("play:sun7i_i2sdma.c::func:%s(line:%d)\n",__func__,__LINE__);*/

		memset(&codec_play_dma_conf, 0, sizeof(codec_play_dma_conf));
		codec_play_dma_conf.xfer_type.src_data_width	= DATA_WIDTH_16BIT;
		codec_play_dma_conf.xfer_type.src_bst_len	= DATA_BRST_1;
		codec_play_dma_conf.xfer_type.dst_data_width	= DATA_WIDTH_16BIT;
		codec_play_dma_conf.xfer_type.dst_bst_len	= DATA_BRST_1;
		codec_play_dma_conf.address_type.src_addr_mode	= NDMA_ADDR_INCREMENT;
		codec_play_dma_conf.address_type.dst_addr_mode	= NDMA_ADDR_NOCHANGE;
		codec_play_dma_conf.src_drq_type	= N_SRC_SDRAM;
		codec_play_dma_conf.dst_drq_type	= N_DST_IIS0_TX;
		codec_play_dma_conf.bconti_mode		= false;
		codec_play_dma_conf.irq_spt		= CHAN_IRQ_FD;
		if (0 != (play_ret = sw_dma_config(play_prtd->dma_hdl, &codec_play_dma_conf))) {
			printk("err:%s,line:%d\n", __func__, __LINE__);
			return -EINVAL;
		}

		/* flush the DMA channel */
		play_prtd->dma_loaded = 0;
		play_prtd->dma_pos = play_prtd->dma_start;

		/* enqueue dma buffers */
		sun7i_pcm_enqueue(substream);

		return play_ret;
	} else {
		capture_prtd = substream->runtime->private_data;
		if (!capture_prtd->params)
			return 0;
		/*pr_info("CAPTUR:sun7i_i2sdma.c::func:%s(line:%d)\n",__func__,__LINE__);*/

		memset(&codec_capture_dma_conf, 0, sizeof(codec_capture_dma_conf));
		codec_capture_dma_conf.xfer_type.src_data_width	= DATA_WIDTH_16BIT;
		codec_capture_dma_conf.xfer_type.src_bst_len	= DATA_BRST_1;
		codec_capture_dma_conf.xfer_type.dst_data_width	= DATA_WIDTH_16BIT;
		codec_capture_dma_conf.xfer_type.dst_bst_len	= DATA_BRST_1;
		codec_capture_dma_conf.address_type.src_addr_mode	= NDMA_ADDR_NOCHANGE;
		codec_capture_dma_conf.address_type.dst_addr_mode	= NDMA_ADDR_INCREMENT;
		codec_capture_dma_conf.src_drq_type	= N_SRC_IIS0_RX;
		codec_capture_dma_conf.dst_drq_type	= N_DST_SDRAM;
		codec_capture_dma_conf.bconti_mode	= false;
		codec_capture_dma_conf.irq_spt		= CHAN_IRQ_FD;

		//capture_ret = sw_dma_config(capture_dma_config->dma_hdl, &capture_dma_config);
		if (0 != (capture_ret = sw_dma_config(capture_prtd->dma_hdl, &codec_capture_dma_conf))) {
			printk("err:%s,line:%d\n", __func__, __LINE__);
			return -EINVAL;
		}

		/* flush the DMA channel */
		capture_prtd->dma_loaded = 0;
		capture_prtd->dma_pos = capture_prtd->dma_start;

		/* enqueue dma buffers */
		sun7i_pcm_enqueue(substream);

		return capture_ret;
	}
}
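
sun7i_pcm_enqueue() is called by every prepare routine above but is not shown in this listing. A hypothetical sketch of a period-based playback enqueue, reusing the five-argument sw_dma_enqueue() seen in the test cases above; the dma_end and dma_period fields, the params->dma_addr destination, and the two-periods-in-flight limit are assumptions for illustration:

/*
 * Hypothetical sketch of a period-based sun7i_pcm_enqueue() for the playback
 * path, reusing the five-argument sw_dma_enqueue() seen in the test cases
 * above. The dma_end/dma_period fields and the two-periods-in-flight limit
 * are assumptions for illustration, not taken from the original driver.
 */
static void sun7i_pcm_enqueue_sketch(struct sun7i_playback_runtime_data *prtd)
{
	dma_addr_t pos = prtd->dma_pos;

	while (prtd->dma_loaded < 2 && pos + prtd->dma_period <= prtd->dma_end) {
		if (0 != sw_dma_enqueue(prtd->dma_hdl, pos,
				prtd->params->dma_addr,
				prtd->dma_period, ENQUE_PHASE_NORMAL))
			break;
		prtd->dma_loaded++;
		pos += prtd->dma_period;
	}
	prtd->dma_pos = pos;
}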