static int sun7i_pcm_hw_free(struct snd_pcm_substream *substream)
{
	struct sun7i_runtime_data *prtd = substream->runtime->private_data;

	snd_pcm_set_runtime_buffer(substream, NULL);

	if (prtd->params) {
		/* stop play dma transfer */
		if (0 != sw_dma_ctl(prtd->dma_hdl, DMA_OP_STOP, NULL)) {
			return -EINVAL;
		}
		/* release play dma handle */
		if (0 != sw_dma_release(prtd->dma_hdl)) {
			return -EINVAL;
		}
		prtd->dma_hdl = (dma_hdl_t)NULL;
		prtd->params = NULL;
	}

	return 0;
}
static int sun7i_pcm_trigger(struct snd_pcm_substream *substream, int cmd)
{
	struct sun7i_runtime_data *prtd = substream->runtime->private_data;
	int ret = 0;
	spin_lock(&prtd->lock);

	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		printk("%s, line:%d\n", __func__, __LINE__);
		/* start dma transfer */
		if (0 != sw_dma_ctl(prtd->dma_hdl, DMA_OP_START, NULL)) {
			printk("%s err, dma start err\n", __func__);
			ret = -EINVAL; /* do not return here: prtd->lock is still held */
		}
		break;
		
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_STOP:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		printk("%s, line:%d\n", __func__, __LINE__);
		/*
		* stop play dma transfer
		*/
		if (0 != sw_dma_ctl(prtd->dma_hdl, DMA_OP_STOP, NULL)) {
			printk("%s err, dma stop err\n", __FUNCTION__);
			return -EINVAL;
		}
		strcpy(substream->pcm->card->id, "sndspdif");
		printk("%s, line:%d\n", __func__, __LINE__);
		break;

	default:
		ret = -EINVAL;
		break;
	}

	spin_unlock(&prtd->lock);
	return ret;
}
static int sun7i_pcm_hw_params(struct snd_pcm_substream *substream,
	struct snd_pcm_hw_params *params)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct sun7i_runtime_data *prtd = runtime->private_data;
	struct snd_soc_pcm_runtime *rtd = substream->private_data;
	unsigned long totbytes = params_buffer_bytes(params);
	struct sun7i_dma_params *dma =
				snd_soc_dai_get_dma_data(rtd->cpu_dai, substream);

	if (!dma)
		return 0;

	if (prtd->params == NULL) {
		prtd->params = dma;
		/*
		 * request the audio dma handle (we don't care about the channel!)
		 */
		prtd->dma_hdl = sw_dma_request(prtd->params->name, CHAN_DEDICATE);
		if (NULL == prtd->dma_hdl) {
			printk(KERN_ERR "failed to request spdif dma handle\n");
			return -EINVAL;
		}
	}

	/*
	 * set callback
	 */
	memset(&prtd->play_done_cb, 0, sizeof(prtd->play_done_cb));
	prtd->play_done_cb.func = sun7i_audio_buffdone;
	prtd->play_done_cb.parg = substream;
	/*use the full buffer callback, maybe we should use the half buffer callback?*/
	if (0 != sw_dma_ctl(prtd->dma_hdl, DMA_OP_SET_FD_CB, (void *)&(prtd->play_done_cb))) {
		printk(KERN_ERR "failed to set dma buffer done!!!\n");
		sw_dma_release(prtd->dma_hdl);
		return -EINVAL;
	}
		
	snd_pcm_set_runtime_buffer(substream, &substream->dma_buffer);

	runtime->dma_bytes = totbytes;

	spin_lock_irq(&prtd->lock);
	prtd->dma_loaded = 0;
	prtd->dma_limit = runtime->hw.periods_min;
	prtd->dma_period = params_period_bytes(params);
	prtd->dma_start = runtime->dma_addr;
	prtd->dma_pos = prtd->dma_start;
	prtd->dma_end = prtd->dma_start + totbytes;
	spin_unlock_irq(&prtd->lock);
	return 0;
}
static int sun7i_pcm_prepare(struct snd_pcm_substream *substream)
{
	struct sun7i_runtime_data *prtd = substream->runtime->private_data;
	dma_config_t codec_dma_conf;
	int ret = 0;
	//printk("pcm:::%s,line:%d\n", __func__, __LINE__);
	if (!prtd->params)
		return 0;
		
	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		memset(&codec_dma_conf, 0, sizeof(codec_dma_conf));
		codec_dma_conf.xfer_type.src_data_width		= DATA_WIDTH_32BIT;
		codec_dma_conf.xfer_type.src_bst_len		= DATA_BRST_4;
		codec_dma_conf.xfer_type.dst_data_width		= DATA_WIDTH_32BIT;
		codec_dma_conf.xfer_type.dst_bst_len		= DATA_BRST_4;
		codec_dma_conf.address_type.src_addr_mode	= DDMA_ADDR_LINEAR;
		codec_dma_conf.address_type.dst_addr_mode	= DDMA_ADDR_IO;
		codec_dma_conf.src_drq_type	= D_SRC_SDRAM;
		codec_dma_conf.dst_drq_type	= D_DST_HDMI_AUD;
		codec_dma_conf.bconti_mode	= false;
		codec_dma_conf.irq_spt		= CHAN_IRQ_FD;
		if (0 != sw_dma_config(prtd->dma_hdl, &codec_dma_conf)) {
			printk("err:%s,line:%d\n", __func__, __LINE__);
			return -EINVAL;
		}
		/*	dma_para_t para;
			para.src_blk_sz		= 0;
			para.src_wait_cyc	= 0;
			para.dst_blk_sz		= 0;
			para.dst_wait_cyc	= 0;*/
		/* raw value for the dedicated-DMA para register, presumably packing the
		 * block-size/wait-cycle fields sketched in the comment above */
		u32 tmp = 0x1F071F07;
		if (0 != sw_dma_ctl(prtd->dma_hdl, DMA_OP_SET_PARA_REG, &tmp)) {
			return -EINVAL;
		}
	} else {
		return -EINVAL;
	}
		
	/* flush the DMA channel */
	/*sw_dma_ctrl(prtd->params->channel, SW_DMAOP_FLUSH);*/
	prtd->dma_loaded = 0;
	/*prtd->dma_pos = prtd->dma_start;*/
	/* enqueue dma buffers */
	sun7i_pcm_enqueue(substream);

	return ret;	
}
Example #5
/**
 * __dtc_chain_conti_mode - dma test case for DTC_CHAIN_CONTI_MOD
 *
 * Returns 0 on success, or the err line number on failure.
 */
u32 __dtc_chain_conti_mode(void)
{
	u32 	uret = 0;
	void 	*src_vaddr = NULL, *dst_vaddr = NULL;
	u32 	usrc_paddr = 0, udst_paddr = 0;
	dm_hdl_t dma_hdl = (dm_hdl_t)NULL;
	struct dma_cb_t done_cb;
	struct dma_op_cb_t op_cb;
	struct dma_config_t dma_config;

	pr_info("%s enter\n", __func__);

	/* prepare the buffer and data */
	src_vaddr = dma_alloc_coherent(NULL, DTC_TOTAL_LEN, (dma_addr_t *)&usrc_paddr, GFP_KERNEL);
	if(NULL == src_vaddr) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: src_vaddr 0x%08x, usrc_paddr 0x%08x\n", __func__, (u32)src_vaddr, usrc_paddr);
	dst_vaddr = dma_alloc_coherent(NULL, DTC_TOTAL_LEN, (dma_addr_t *)&udst_paddr, GFP_KERNEL);
	if(NULL == dst_vaddr) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: dst_vaddr 0x%08x, udst_paddr 0x%08x\n", __func__, (u32)dst_vaddr, udst_paddr);

	get_random_bytes(src_vaddr, DTC_TOTAL_LEN);
	memset(dst_vaddr, 0x54, DTC_TOTAL_LEN);

	atomic_set(&g_acur_cnt, 0);
	g_src_addr = usrc_paddr;
	g_dst_addr = udst_paddr;

	dma_hdl = sw_dma_request("m2m_dma", DMA_WORK_MODE_CHAIN);
	if(NULL == dma_hdl) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_request success, dma_hdl 0x%08x\n", __func__, (u32)dma_hdl);

	/* set callback */
	memset(&done_cb, 0, sizeof(done_cb));
	memset(&op_cb, 0, sizeof(op_cb));
	done_cb.func = __cb_qd_conti_mode;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_QD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set queuedone_cb success\n", __func__);
	done_cb.func = __cb_fd_conti_mode;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_FD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set fulldone_cb success\n", __func__);
	done_cb.func = __cb_hd_conti_mode;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_HD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set halfdone_cb success\n", __func__);
	op_cb.func = __cb_op_conti_mode;
	op_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_OP_CB, (void *)&op_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set op_cb success\n", __func__);

	memset(&dma_config, 0, sizeof(dma_config));
	dma_config.xfer_type 	= DMAXFER_D_BWORD_S_BWORD;
	dma_config.address_type = DMAADDRT_D_LN_S_LN;
	dma_config.para 	= 0;
	dma_config.irq_spt 	= CHAN_IRQ_HD | CHAN_IRQ_FD | CHAN_IRQ_QD;
	dma_config.src_addr 	= usrc_paddr;
	dma_config.dst_addr 	= udst_paddr;
	//dma_config.byte_cnt 	= DTC_ONE_LEN;
	dma_config.byte_cnt 	= DTC_TOTAL_LEN; /* only enqueue one buf */
	dma_config.bconti_mode 	= true;
	dma_config.src_drq_type 	= DRQSRC_SDRAM;
	dma_config.dst_drq_type 	= DRQDST_SDRAM;
	if(0 != sw_dma_config(dma_hdl, &dma_config, ENQUE_PHASE_NORMAL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_config success\n", __func__);
	sw_dma_dump_chan(dma_hdl);

	/* start dma */
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_START, NULL)) {
		uret = __LINE__;
		goto end;
	}
	/* let callback go on enqueueing... */
	msleep(8000);

	/* check if data ok */
	if(0 == memcmp(src_vaddr, dst_vaddr, DTC_TOTAL_LEN))
		pr_info("%s: data check ok!\n", __func__);
	else {
		pr_err("%s: data check err!\n", __func__);
		uret = __LINE__; /* return err */
		goto end;
	}

	/* stop and free dma channel */
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_STOP, NULL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_stop success\n", __func__);
	if(0 != sw_dma_release(dma_hdl)) {
		uret = __LINE__;
		goto end;
	}
	dma_hdl = (dm_hdl_t)NULL;
	pr_info("%s: sw_dma_release success\n", __func__);

end:
	if(0 != uret)
		pr_err("%s err, line %d!\n", __func__, uret);
	else
		pr_info("%s, success!\n", __func__);

	if((dm_hdl_t)NULL != dma_hdl) {
		pr_err("%s, stop and release dma handle now!\n", __func__);
		if(0 != sw_dma_ctl(dma_hdl, DMA_OP_STOP, NULL))
			pr_err("%s err, line %d!\n", __func__, __LINE__);
		if(0 != sw_dma_release(dma_hdl))
			pr_err("%s err, line %d!\n", __func__, __LINE__);
	}

	if(NULL != src_vaddr)
		dma_free_coherent(NULL, DTC_TOTAL_LEN, src_vaddr, usrc_paddr);
	if(NULL != dst_vaddr)
		dma_free_coherent(NULL, DTC_TOTAL_LEN, dst_vaddr, udst_paddr);

	pr_info("%s, end!\n", __func__);
	return uret;
}
Example #6
/**
 * __dtc_many_enq - dma test case for DTC_1TM2M_MANY_ENQ
 *
 * Returns 0 on success, or the err line number on failure.
 */
u32 __dtc_many_enq(void)
{
	u32 	uret = 0;
	void 	*src_vaddr = NULL, *dst_vaddr = NULL;
	u32 	usrc_paddr = 0, udst_paddr = 0;
	dm_hdl_t dma_hdl = (dm_hdl_t)NULL;
	struct dma_cb_t done_cb;
	struct dma_op_cb_t op_cb;
	struct dma_config_t dma_config;

	pr_info("%s enter\n", __func__);

	src_vaddr = dma_alloc_coherent(NULL, DTC_TOTAL_LEN, (dma_addr_t *)&usrc_paddr, GFP_KERNEL);
	if(NULL == src_vaddr) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: src_vaddr 0x%08x, usrc_paddr 0x%08x\n", __func__, (u32)src_vaddr, usrc_paddr);
	dst_vaddr = dma_alloc_coherent(NULL, DTC_TOTAL_LEN, (dma_addr_t *)&udst_paddr, GFP_KERNEL);
	if(NULL == dst_vaddr) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: dst_vaddr 0x%08x, udst_paddr 0x%08x\n", __func__, (u32)dst_vaddr, udst_paddr);

	get_random_bytes(src_vaddr, DTC_TOTAL_LEN);
	memset(dst_vaddr, 0x54, DTC_TOTAL_LEN);

	atomic_set(&g_acur_cnt, 0);
	g_src_addr = usrc_paddr;
	g_dst_addr = udst_paddr;

	dma_hdl = sw_dma_request("m2m_dma", DMA_WORK_MODE_CHAIN);
	if(NULL == dma_hdl) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_request success, dma_hdl 0x%08x\n", __func__, (u32)dma_hdl);

	/* set callback */
	memset(&done_cb, 0, sizeof(done_cb));
	memset(&op_cb, 0, sizeof(op_cb));
	done_cb.func = __cb_qd_many_enq;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_QD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set queuedone_cb success\n", __func__);
	done_cb.func = __cb_fd_many_enq;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_FD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set fulldone_cb success\n", __func__);
	done_cb.func = __cb_hd_many_enq;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_HD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set halfdone_cb success\n", __func__);
	op_cb.func = __cb_op_many_enq;
	op_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_OP_CB, (void *)&op_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set op_cb success\n", __func__);

	memset(&dma_config, 0, sizeof(dma_config));
	dma_config.xfer_type = DMAXFER_D_BWORD_S_BWORD;
	dma_config.address_type = DMAADDRT_D_LN_S_LN;
	dma_config.para = 0;
	dma_config.irq_spt = CHAN_IRQ_HD | CHAN_IRQ_FD | CHAN_IRQ_QD;
	dma_config.src_addr = usrc_paddr;
	dma_config.dst_addr = udst_paddr;
	dma_config.byte_cnt = DTC_ONE_LEN;
	dma_config.bconti_mode = false;
	dma_config.src_drq_type = DRQSRC_SDRAM;
	dma_config.dst_drq_type = DRQDST_SDRAM;
	if(0 != sw_dma_config(dma_hdl, &dma_config, ENQUE_PHASE_NORMAL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_config success\n", __func__);
	sw_dma_dump_chan(dma_hdl);

	/* start dma */
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_START, NULL)) {
		uret = __LINE__;
		goto end;
	}

	/* normal enqueue and callback enqueue simultaneously */
	{
		u32 	ucur_cnt = 0, ucur_saddr = 0, ucur_daddr = 0;
		u32	uloop_cnt = DTC_TOTAL_LEN / DTC_ONE_LEN;

		while((ucur_cnt = atomic_add_return(1, &g_acur_cnt)) < uloop_cnt) {
			pr_info("%s, line %d, ucur_cnt %d\n", __func__, __LINE__, ucur_cnt);
			ucur_saddr = g_src_addr + ucur_cnt * DTC_ONE_LEN;
			ucur_daddr = g_dst_addr + ucur_cnt * DTC_ONE_LEN;
			if(0 != sw_dma_enqueue(dma_hdl, ucur_saddr, ucur_daddr, DTC_ONE_LEN, ENQUE_PHASE_NORMAL))
				printk("%s err, line %d\n", __func__, __LINE__);

			/* so that callback and normal enqueues happen simultaneously */
			msleep(0);
		}
	}
	pr_info("%s, line %d\n", __func__, __LINE__);

	__waitdone_many_enq();

	if(0 == memcmp(src_vaddr, dst_vaddr, DTC_TOTAL_LEN))
		pr_info("%s: data check ok!\n", __func__);
	else {
		pr_err("%s: data check err!\n", __func__);
		uret = __LINE__; /* return err */
		goto end;
	}

	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_STOP, NULL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_stop success\n", __func__);
	if(0 != sw_dma_release(dma_hdl)) {
		uret = __LINE__;
		goto end;
	}
	dma_hdl = (dm_hdl_t)NULL;
	pr_info("%s: sw_dma_release success\n", __func__);

end:
	if(0 != uret)
		pr_err("%s err, line %d!\n", __func__, uret);
	else
		pr_info("%s, success!\n", __func__);

	if((dm_hdl_t)NULL != dma_hdl) {
		pr_err("%s, stop and release dma handle now!\n", __func__);
		if(0 != sw_dma_ctl(dma_hdl, DMA_OP_STOP, NULL))
			pr_err("%s err, line %d!\n", __func__, __LINE__);
		if(0 != sw_dma_release(dma_hdl))
			pr_err("%s err, line %d!\n", __func__, __LINE__);
	}

	if(NULL != src_vaddr)
		dma_free_coherent(NULL, DTC_TOTAL_LEN, src_vaddr, usrc_paddr);
	if(NULL != dst_vaddr)
		dma_free_coherent(NULL, DTC_TOTAL_LEN, dst_vaddr, udst_paddr);

	pr_info("%s end!\n", __func__);
	return uret;
}
Example #7
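/**
 * __dtc_stopcmd - dma test case for the stop command while buffers are still queued
 *
 * Returns 0 on success, or the err line number on failure.
 */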
u32 __dtc_stopcmd(void)
{
	u32 	uret = 0;
	u32 	i = 0;
	void 	*src_vaddr = NULL, *dst_vaddr = NULL;
	u32 	usrc_paddr = 0, udst_paddr = 0;
	dm_hdl_t dma_hdl = (dm_hdl_t)NULL;
	struct dma_cb_t done_cb;
	struct dma_op_cb_t op_cb;
	struct dma_config_t dma_config;

	pr_info("%s enter\n", __func__);

	src_vaddr = dma_alloc_coherent(NULL, DTC_TOTAL_LEN, (dma_addr_t *)&usrc_paddr, GFP_KERNEL);
	if(NULL == src_vaddr) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: src_vaddr 0x%08x, usrc_paddr 0x%08x\n", __func__, (u32)src_vaddr, usrc_paddr);
	dst_vaddr = dma_alloc_coherent(NULL, DTC_TOTAL_LEN, (dma_addr_t *)&udst_paddr, GFP_KERNEL);
	if(NULL == dst_vaddr) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: dst_vaddr 0x%08x, udst_paddr 0x%08x\n", __func__, (u32)dst_vaddr, udst_paddr);

	get_random_bytes(src_vaddr, DTC_TOTAL_LEN);
	memset(dst_vaddr, 0x54, DTC_TOTAL_LEN);

	atomic_set(&g_acur_cnt, 0);
	g_src_addr = usrc_paddr;
	g_dst_addr = udst_paddr;

	dma_hdl = sw_dma_request("case_stp_dma", DMA_WORK_MODE_CHAIN);
	//dma_hdl = sw_dma_request("case_stp_dma", DMA_WORK_MODE_SINGLE);
	if(NULL == dma_hdl) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_request success, dma_hdl 0x%08x\n", __func__, (u32)dma_hdl);

	/* set callback */
	memset(&done_cb, 0, sizeof(done_cb));
	memset(&op_cb, 0, sizeof(op_cb));
	done_cb.func = __cb_qd_stopcmd;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_QD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set queuedone_cb success\n", __func__);
	done_cb.func = __cb_fd_stopcmd;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_FD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set fulldone_cb success\n", __func__);
	done_cb.func = __cb_hd_stopcmd;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_HD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set halfdone_cb success\n", __func__);
	op_cb.func = __cb_op_stopcmd;
	op_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_OP_CB, (void *)&op_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set op_cb success\n", __func__);

	memset(&dma_config, 0, sizeof(dma_config));
	dma_config.src_drq_type = DRQSRC_SDRAM;
	dma_config.dst_drq_type = DRQDST_SDRAM;
	dma_config.bconti_mode = false; /* must be 0, otherwise irq will come again and again */
	dma_config.xfer_type = DMAXFER_D_BWORD_S_BWORD;
	dma_config.address_type = DMAADDRT_D_LN_S_LN; /* change with dma type */
	dma_config.irq_spt = CHAN_IRQ_HD | CHAN_IRQ_FD | CHAN_IRQ_QD;
	dma_config.src_addr = usrc_paddr;
	dma_config.dst_addr = udst_paddr;
	dma_config.byte_cnt = DTC_ONE_LEN;
	dma_config.para = 0; /* to check here */
	if(0 != sw_dma_config(dma_hdl, &dma_config, ENQUE_PHASE_NORMAL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_config success\n", __func__);

	atomic_set(&g_adma_done, 0);
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_START, NULL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_start success\n", __func__);

	/* callback enqueue and normal enqueue simultaneously */
	i = 0;
	while(i++ < 100) {
		u32 	ucur_saddr = 0, ucur_daddr = 0;
		u32	uindex = 0;

		get_random_bytes(&uindex, sizeof(uindex));
		uindex  %= (DTC_TOTAL_LEN / DTC_ONE_LEN);
		ucur_saddr = g_src_addr + uindex * DTC_ONE_LEN;
		ucur_daddr = g_dst_addr + uindex * DTC_ONE_LEN;
		if(0 != sw_dma_enqueue(dma_hdl, ucur_saddr, ucur_daddr, DTC_ONE_LEN, ENQUE_PHASE_NORMAL)) {
			uret = __LINE__;
			goto end;
		}
		if(0 != sw_dma_enqueue(dma_hdl, ucur_saddr, ucur_daddr, DTC_ONE_LEN, ENQUE_PHASE_NORMAL)) {
			uret = __LINE__;
			goto end;
		}
		if(0 != sw_dma_enqueue(dma_hdl, ucur_saddr, ucur_daddr, DTC_ONE_LEN, ENQUE_PHASE_NORMAL)) {
			uret = __LINE__;
			goto end;
		}
		if(0 != sw_dma_enqueue(dma_hdl, ucur_saddr, ucur_daddr, DTC_ONE_LEN, ENQUE_PHASE_NORMAL)) {
			uret = __LINE__;
			goto end;
		}
		if(0 != sw_dma_enqueue(dma_hdl, ucur_saddr, ucur_daddr, DTC_ONE_LEN, ENQUE_PHASE_NORMAL)) {
			uret = __LINE__;
			goto end;
		}
		if(0 != sw_dma_enqueue(dma_hdl, ucur_saddr, ucur_daddr, DTC_ONE_LEN, ENQUE_PHASE_NORMAL)) {
			uret = __LINE__;
			goto end;
		}
		if(0 != sw_dma_enqueue(dma_hdl, ucur_saddr, ucur_daddr, DTC_ONE_LEN, ENQUE_PHASE_NORMAL)) {
			uret = __LINE__;
			goto end;
		}
	}

#if 0
	if(0 != __waitdone_stopcmd()) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: __waitdone_stopcmd sucess\n", __func__);
#endif
	sw_dma_dump_chan(dma_hdl);

	/* stop and release dma channel */
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_STOP, NULL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_stop success\n", __func__);
	if(0 != sw_dma_release(dma_hdl)) {
		uret = __LINE__;
		goto end;
	}
	dma_hdl = (dm_hdl_t)NULL;
	pr_info("%s: sw_dma_release success\n", __func__);

	/* check if data ok */
	if(0 == memcmp(src_vaddr, dst_vaddr, DTC_TOTAL_LEN))
		pr_info("%s: data check ok!\n", __func__);
	else {
		pr_err("%s: data check err!\n", __func__);
		//uret = __LINE__; /* we don't need the data to match, this case only tests the stop cmd */
	}

end:
	if(0 != uret)
		pr_err("%s err, line %d!\n", __func__, uret);
	else
		pr_info("%s success!\n", __func__);

	if((dm_hdl_t)NULL != dma_hdl) {
		if(0 != sw_dma_ctl(dma_hdl, DMA_OP_STOP, NULL))
			pr_err("%s err, line %d!\n", __func__, __LINE__);
		if(0 != sw_dma_release(dma_hdl))
			pr_err("%s err, line %d!\n", __func__, __LINE__);
	}

	if(NULL != src_vaddr)
		dma_free_coherent(NULL, DTC_TOTAL_LEN, src_vaddr, usrc_paddr);
	if(NULL != dst_vaddr)
		dma_free_coherent(NULL, DTC_TOTAL_LEN, dst_vaddr, udst_paddr);

	return uret;
}
Example #8
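/**
 * __dtc_case_enq_aftdone - dma test case for enqueueing again after the queue is done
 *
 * Returns 0 on success, or the err line number on failure.
 */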
u32 __dtc_case_enq_aftdone(void)
{
	u32 	uret = 0;
	u32 	i = 0;
	void 	*src_vaddr = NULL, *dst_vaddr = NULL;
	u32 	src_paddr = 0, dst_paddr = 0;
	dm_hdl_t dma_hdl = (dm_hdl_t)NULL;
	struct dma_cb_t done_cb;
	struct dma_op_cb_t op_cb;
	struct dma_config_t dma_config;

	pr_info("%s enter\n", __func__);

	/* prepare the buffer and data */
	src_vaddr = dma_alloc_coherent(NULL, DTC_TOTAL_LEN, (dma_addr_t *)&src_paddr, GFP_KERNEL);
	if(NULL == src_vaddr) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: src_vaddr 0x%08x, src_paddr 0x%08x\n", __func__, (u32)src_vaddr, src_paddr);
	dst_vaddr = dma_alloc_coherent(NULL, DTC_TOTAL_LEN, (dma_addr_t *)&dst_paddr, GFP_KERNEL);
	if(NULL == dst_vaddr) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: dst_vaddr 0x%08x, dst_paddr 0x%08x\n", __func__, (u32)dst_vaddr, dst_paddr);

	/* init src buffer */
	get_random_bytes(src_vaddr, DTC_TOTAL_LEN);
	memset(dst_vaddr, 0x54, DTC_TOTAL_LEN);

	/* init loop para */
	atomic_set(&g_acur_cnt, 0);
	g_src_addr = src_paddr;
	g_dst_addr = dst_paddr;

	/* request dma channel */
	dma_hdl = sw_dma_request("m2m_dma", DMA_WORK_MODE_CHAIN);
	if(NULL == dma_hdl) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_request success, dma_hdl 0x%08x\n", __func__, (u32)dma_hdl);

	/* set callback */
	memset(&done_cb, 0, sizeof(done_cb));
	memset(&op_cb, 0, sizeof(op_cb));
	done_cb.func = __cb_qd_case_enq_aftdone;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_QD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set queuedone_cb success\n", __func__);
	done_cb.func = __cb_fd_case_enq_aftdone;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_FD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set fulldone_cb success\n", __func__);
	done_cb.func = __cb_hd_case_enq_aftdone;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_HD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set halfdone_cb success\n", __func__);
	op_cb.func = __cb_op_case_enq_aftdone;
	op_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_OP_CB, (void *)&op_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set op_cb success\n", __func__);

	/* enqueue buffer */
	memset(&dma_config, 0, sizeof(dma_config));
	dma_config.src_drq_type = DRQSRC_SDRAM;
	dma_config.dst_drq_type = DRQDST_SDRAM;
	//dma_config.conti_mode = 1;
	dma_config.bconti_mode 	= false; /* must be 0, otherwise irq will come again and again */
	dma_config.xfer_type 	= DMAXFER_D_BWORD_S_BWORD;
	dma_config.address_type = DMAADDRT_D_LN_S_LN; /* change with dma type */
	dma_config.irq_spt 	= CHAN_IRQ_HD | CHAN_IRQ_FD | CHAN_IRQ_QD;
	dma_config.src_addr 	= src_paddr;
	dma_config.dst_addr 	= dst_paddr;
	dma_config.byte_cnt 	= DTC_ONE_LEN;
	dma_config.para 	= 0;
	if(0 != sw_dma_config(dma_hdl, &dma_config, ENQUE_PHASE_NORMAL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_config success\n", __func__);
	sw_dma_dump_chan(dma_hdl);

	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_START, NULL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_start success\n", __func__);

	if(0 != __waitdone_case_enq_aftdone()) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: __waitdone_case_enq_aftdone sucess\n", __func__);

	/* after done, app and fd_cb enqueue simultaneously */
	i = 0;
	while(i++ < 30) {
		u32 ucur_saddr = 0, ucur_daddr = 0;

		pr_info("%s: i %d\n", __func__, i);
		ucur_saddr = g_src_addr + 0 * DTC_ONE_LEN;
		ucur_daddr = g_dst_addr + 0 * DTC_ONE_LEN;
		if(0 != sw_dma_enqueue(dma_hdl, ucur_saddr, ucur_daddr, DTC_ONE_LEN, ENQUE_PHASE_NORMAL)) {
			uret = __LINE__;
			goto end;
		}
		msleep(1);
	}
	msleep(2000);

	if(0 == memcmp(src_vaddr, dst_vaddr, DTC_TOTAL_LEN))
		pr_info("%s: data check ok!\n", __func__);
	else {
		pr_err("%s: data check err!\n", __func__);
		uret = __LINE__; /* return err */
		goto end;
	}

	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_STOP, NULL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_stop success\n", __func__);
	if(0 != sw_dma_release(dma_hdl)) {
		uret = __LINE__;
		goto end;
	}
	dma_hdl = (dm_hdl_t)NULL;
	pr_info("%s: sw_dma_release success\n", __func__);

end:
	/* print err line */
	if(0 != uret)
		pr_err("%s err, line %d!\n", __func__, uret);
	else
		pr_info("%s success!\n", __func__);

	if((dm_hdl_t)NULL != dma_hdl) {
		if(0 != sw_dma_ctl(dma_hdl, DMA_OP_STOP, NULL)) {
			pr_err("%s err, line %d!\n", __func__, __LINE__);
		}
		if(0 != sw_dma_release(dma_hdl)) {
			pr_err("%s err, line %d!\n", __func__, __LINE__);
		}
	}

	if(NULL != src_vaddr)
		dma_free_coherent(NULL, DTC_TOTAL_LEN, src_vaddr, src_paddr);
	if(NULL != dst_vaddr)
		dma_free_coherent(NULL, DTC_TOTAL_LEN, dst_vaddr, dst_paddr);

	return uret;
}
Example #9
/**
 * __dtc_chain_mode - dma test case for chain mode
 *
 * Returns 0 on success, or the err line number on failure.
 */
u32 __dtc_chain_mode(void)
{
	u32 	uret = 0;
	void 	*src_vaddr = NULL, *dst_vaddr = NULL;
	u32 	src_paddr = 0, dst_paddr = 0;
	dm_hdl_t dma_hdl = (dm_hdl_t)NULL;
	struct dma_cb_t done_cb;
	struct dma_op_cb_t op_cb;
	struct dma_config_t dma_config;

	pr_info("%s enter\n", __func__);

	/* prepare the buffer and data */
	src_vaddr = dma_alloc_coherent(NULL, DTC_TOTAL_LEN, (dma_addr_t *)&src_paddr, GFP_KERNEL);
	if(NULL == src_vaddr) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: src_vaddr 0x%08x, src_paddr 0x%08x\n", __func__, (u32)src_vaddr, src_paddr);
	dst_vaddr = dma_alloc_coherent(NULL, DTC_TOTAL_LEN, (dma_addr_t *)&dst_paddr, GFP_KERNEL);
	if(NULL == dst_vaddr) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: dst_vaddr 0x%08x, dst_paddr 0x%08x\n", __func__, (u32)dst_vaddr, dst_paddr);

	/* init src buffer */
	get_random_bytes(src_vaddr, DTC_TOTAL_LEN);
	memset(dst_vaddr, 0x54, DTC_TOTAL_LEN);

	/* init loop para */
	atomic_set(&g_acur_cnt, 0);
	g_src_addr = src_paddr;
	g_dst_addr = dst_paddr;

	/* request dma channel */
	dma_hdl = sw_dma_request("m2m_dma", DMA_WORK_MODE_CHAIN);
	if(NULL == dma_hdl) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_request success, dma_hdl 0x%08x\n", __func__, (u32)dma_hdl);

	/* set queue done callback */
	memset(&done_cb, 0, sizeof(done_cb));
	memset(&op_cb, 0, sizeof(op_cb));
	done_cb.func = __cb_qd_chain;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_QD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set queuedone_cb success\n", __func__);
	/* set full done callback */
	done_cb.func = __cb_fd_chain;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_FD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set fulldone_cb success\n", __func__);
	/* set half done callback */
	done_cb.func = __cb_hd_chain;
	done_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_HD_CB, (void *)&done_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set halfdone_cb success\n", __func__);
	/* set operation done callback */
	op_cb.func = __cb_op_chain;
	op_cb.parg = NULL;
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_SET_OP_CB, (void *)&op_cb)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: set op_cb success\n", __func__);

	/* set config para */
	memset(&dma_config, 0, sizeof(dma_config));
	dma_config.xfer_type 	= DMAXFER_D_BWORD_S_BWORD;
	dma_config.address_type = DMAADDRT_D_LN_S_LN;
	dma_config.para 	= 0;
	dma_config.irq_spt 	= CHAN_IRQ_HD | CHAN_IRQ_FD | CHAN_IRQ_QD;
	dma_config.src_addr 	= src_paddr;
	dma_config.dst_addr 	= dst_paddr;
	dma_config.byte_cnt 	= DTC_ONE_LEN;
	//dma_config.conti_mode = 1;
	dma_config.bconti_mode = false;
	dma_config.src_drq_type = DRQSRC_SDRAM;
	dma_config.dst_drq_type = DRQDST_SDRAM;
	/* enqueue buffer */
	if(0 != sw_dma_config(dma_hdl, &dma_config, ENQUE_PHASE_NORMAL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_config success\n", __func__);
	/* dump chain */
	sw_dma_dump_chan(dma_hdl);

	/* start dma */
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_START, NULL)) {
		uret = __LINE__;
		goto end;
	}

	/* enqueue the other buffers, with callback enqueue happening simultaneously */
	{
		u32 	ucur_cnt = 0, ucur_saddr = 0, ucur_daddr = 0;
		u32	uloop_cnt = DTC_TOTAL_LEN / DTC_ONE_LEN;
		while((ucur_cnt = atomic_add_return(1, &g_acur_cnt)) < uloop_cnt) {
			ucur_saddr = g_src_addr + ucur_cnt * DTC_ONE_LEN;
			ucur_daddr = g_dst_addr + ucur_cnt * DTC_ONE_LEN;
			if(0 != sw_dma_enqueue(dma_hdl, ucur_saddr, ucur_daddr, DTC_ONE_LEN, ENQUE_PHASE_NORMAL))
				printk("%s err, line %d\n", __func__, __LINE__);
		}
	}
	pr_info("%s, line %d\n", __func__, __LINE__);

	/* wait dma done */
	if(0 != __waitdone_chain()) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: __waitdone_chain sucess\n", __func__);

	/*
	 * NOTE: must sleep here, because when __waitdone_chain returns the buffer
	 * enqueueing is complete but the data transfer might not be, 2012-11-14
	 */
	msleep(1000);

	/* check if data ok */
	if(0 == memcmp(src_vaddr, dst_vaddr, DTC_TOTAL_LEN))
		pr_info("%s: data check ok!\n", __func__);
	else {
		pr_err("%s: data check err!\n", __func__);
		uret = __LINE__; /* return err */
		goto end;
	}

	/* stop and release dma channel */
	if(0 != sw_dma_ctl(dma_hdl, DMA_OP_STOP, NULL)) {
		uret = __LINE__;
		goto end;
	}
	pr_info("%s: sw_dma_stop success\n", __func__);
	if(0 != sw_dma_release(dma_hdl)) {
		uret = __LINE__;
		goto end;
	}
	dma_hdl = (dm_hdl_t)NULL;
	pr_info("%s: sw_dma_release success\n", __func__);

end:
	if(0 != uret)
		pr_err("%s err, line %d!\n", __func__, uret); /* print err line */
	else
		pr_info("%s, success!\n", __func__);

	/* stop and free dma channel, if need */
	if((dm_hdl_t)NULL != dma_hdl) {
		pr_err("%s, stop and release dma handle now!\n", __func__);
		if(0 != sw_dma_ctl(dma_hdl, DMA_OP_STOP, NULL))
			pr_err("%s err, line %d!\n", __func__, __LINE__);
		if(0 != sw_dma_release(dma_hdl))
			pr_err("%s err, line %d!\n", __func__, __LINE__);
	}
	pr_err("%s, line %d!\n", __func__, __LINE__);

	/* free dma memory */
	if(NULL != src_vaddr)
		dma_free_coherent(NULL, DTC_TOTAL_LEN, src_vaddr, src_paddr);
	if(NULL != dst_vaddr)
		dma_free_coherent(NULL, DTC_TOTAL_LEN, dst_vaddr, dst_paddr);

	pr_err("%s, end!\n", __func__);
	return uret;
}
static int sun7i_pcm_trigger(struct snd_pcm_substream *substream, int cmd)
{
	int ret = 0;
	/*dump_stack();*/
	struct sun7i_playback_runtime_data *play_prtd = NULL;
	struct sun7i_capture_runtime_data *capture_prtd = NULL;
	/*printk("[IIS] func:%s(line:%d),substream->stream == SNDRV_PCM_STREAM_PLAYBACK::%d\n",
							__func__,__LINE__,substream->stream == SNDRV_PCM_STREAM_PLAYBACK);*/
	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		play_prtd = substream->runtime->private_data;
		spin_lock(&play_prtd->lock);
		/*printk("[IIS] func:%s(line:%d),cmd::%d\n",__func__,__LINE__,cmd);*/
		switch (cmd) {
		case SNDRV_PCM_TRIGGER_START:
		case SNDRV_PCM_TRIGGER_RESUME:
		case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
			//printk("play dma trigge start:sun7i-i2sdma.c::func:%s(line:%d)\n",__func__,__LINE__);
			//printk("[IIS] 0x01c22400+0x24 = %#x, line= %d\n", readl(0xf1c22400+0x24), __LINE__);
			
			/* start dma transfer */
			if (0 != sw_dma_ctl(play_prtd->dma_hdl, DMA_OP_START, NULL)) {
				printk("%s err, dma start err\n", __func__);
				ret = -EINVAL; /* do not return here: play_prtd->lock is still held */
			}
			break;
		case SNDRV_PCM_TRIGGER_SUSPEND:
		case SNDRV_PCM_TRIGGER_STOP:
		case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
	        //pr_info("play dma stop:sun7i_i2sdma.c::func:%s(line:%d)\n",__func__,__LINE__);
	        //printk("[IIS] 0x01c22400+0x24 = %#x, line= %d\n", readl(0xf1c22400+0x24), __LINE__);
			
			/* stop dma transfer */
			if (0 != sw_dma_ctl(play_prtd->dma_hdl, DMA_OP_STOP, NULL)) {
				printk("%s err, dma stop err\n", __func__);
				ret = -EINVAL; /* do not return here: play_prtd->lock is still held */
			}
			break;
		default:
			ret = -EINVAL;
			break;
		}
		spin_unlock(&play_prtd->lock);
	} else {
		capture_prtd = substream->runtime->private_data;
		spin_lock(&capture_prtd->lock);

		switch (cmd) {
		case SNDRV_PCM_TRIGGER_START:
		case SNDRV_PCM_TRIGGER_RESUME:
		case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
			
			//pr_info("CAPTUR dma start:sun7i_i2sdma.c::func:%s(line:%d)\n",__func__,__LINE__);
			/* start dma transfer */
			if (0 != sw_dma_ctl(capture_prtd->dma_hdl, DMA_OP_START, NULL)) {
				printk("%s err, dma start err\n", __func__);
				ret = -EINVAL; /* do not return here: capture_prtd->lock is still held */
			}
			break;
		case SNDRV_PCM_TRIGGER_SUSPEND:
		case SNDRV_PCM_TRIGGER_STOP:
		case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
	       // pr_info("CAPTUR dma stop:sun7i_i2sdma.c::func:%s(line:%d)\n",__func__,__LINE__);
	        //printk("[IIS] 0x01c22400+0x24 = %#x, line= %d\n", readl(0xf1c22400+0x24), __LINE__);
		
			/* stop dma transfer */
			if (0 != sw_dma_ctl(capture_prtd->dma_hdl, DMA_OP_STOP, NULL)) {
				printk("%s err, dma stop err\n", __func__);
				ret = -EINVAL; /* do not return here: capture_prtd->lock is still held */
			}
			break;
		default:
			ret = -EINVAL;
			break;
		}
		spin_unlock(&capture_prtd->lock);
	}
	return ret;
}
static int sun7i_pcm_hw_free(struct snd_pcm_substream *substream)
{
	struct sun7i_playback_runtime_data *play_prtd = NULL;
	struct sun7i_capture_runtime_data *capture_prtd = NULL;
	
	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
 		play_prtd = substream->runtime->private_data;
		/* TODO - do we need to ensure the DMA is flushed? */
	
		snd_pcm_set_runtime_buffer(substream, NULL);

		if (play_prtd->params) {
			/* stop play dma transfer */
			printk("play DMA_OP_STOP:sun7i-i2sdma.c::func:%s(line:%d)\n", __func__, __LINE__);
			if (0 != sw_dma_ctl(play_prtd->dma_hdl, DMA_OP_STOP, NULL)) {
				return -EINVAL;
			}
			/* release play dma handle */
			if (0 != sw_dma_release(play_prtd->dma_hdl)) {
				return -EINVAL;
			}
			play_prtd->dma_hdl = (dma_hdl_t)NULL;
			play_prtd->params = NULL;
		}
	} else if (substream->stream == SNDRV_PCM_STREAM_CAPTURE) {
		capture_prtd = substream->runtime->private_data;
		/* TODO - do we need to ensure the DMA is flushed? */

		snd_pcm_set_runtime_buffer(substream, NULL);

		if (capture_prtd->params) {
			/* stop capture dma transfer */
			printk("CAPTURE DMA_OP_STOP:sun7i-i2sdma.c::func:%s(line:%d)\n", __func__, __LINE__);
			if (0 != sw_dma_ctl(capture_prtd->dma_hdl, DMA_OP_STOP, NULL)) {
				return -EINVAL;
			}
			/* release capture dma handle */
			if (0 != sw_dma_release(capture_prtd->dma_hdl)) {
				return -EINVAL;
			}
			capture_prtd->dma_hdl = (dma_hdl_t)NULL;
			capture_prtd->params = NULL;
		}
	} else {
		return -EINVAL;
	}

	return 0;
}
static int sun7i_pcm_hw_params(struct snd_pcm_substream *substream,
	struct snd_pcm_hw_params *params)
{	
	/*pr_info("sun7i_i2sdma.c::func:%s(line:%d)\n",__func__,__LINE__);*/
    struct snd_pcm_runtime *play_runtime = NULL, *capture_runtime = NULL;
    struct sun7i_playback_runtime_data *play_prtd = NULL;
    struct sun7i_capture_runtime_data *capture_prtd = NULL;
    struct snd_soc_pcm_runtime *play_rtd = NULL;
    struct snd_soc_pcm_runtime *capture_rtd = NULL;
    struct sun7i_dma_params *play_dma = NULL;
    struct sun7i_dma_params *capture_dma = NULL;
    unsigned long play_totbytes = 0, capture_totbytes = 0;

    if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		pr_info("play:sun7i_i2sdma.c::func:%s(line:%d)\n",__func__,__LINE__);
		play_runtime = substream->runtime;
		play_prtd = play_runtime ->private_data;
		play_rtd = substream->private_data;
		play_totbytes = params_buffer_bytes(params);
		play_dma = snd_soc_dai_get_dma_data(play_rtd->cpu_dai, substream);
		
		if (!play_dma) {
			return 0;
		}

		if (play_prtd->params == NULL) {
			play_prtd->params = play_dma;
			/*
			 * request the audio dma handle (we don't care about the channel!)
			 */
			play_prtd->dma_hdl = sw_dma_request(play_prtd->params->name, CHAN_NORAML);
			if (NULL == play_prtd->dma_hdl) {
				printk(KERN_ERR "failed to request dma handle\n");
				return -EINVAL;
			}
		}

		/*
		 * set callback
		 */
		memset(&play_prtd->play_done_cb, 0, sizeof(play_prtd->play_done_cb));
		play_prtd->play_done_cb.func = sun7i_audio_play_buffdone;
		play_prtd->play_done_cb.parg = substream;
		/* use the full buffer callback, maybe we should use the half buffer callback? */
		if (0 != sw_dma_ctl(play_prtd->dma_hdl, DMA_OP_SET_FD_CB, (void *)&(play_prtd->play_done_cb))) {
			printk(KERN_ERR "failed to set dma buffer done!!!\n");
			sw_dma_release(play_prtd->dma_hdl);
			return -EINVAL;
		}
		snd_pcm_set_runtime_buffer(substream, &substream->dma_buffer);
		play_runtime->dma_bytes = play_totbytes;

		spin_lock_irq(&play_prtd->lock);
		play_prtd->dma_loaded = 0;
		play_prtd->dma_limit = play_runtime->hw.periods_min;
		play_prtd->dma_period = params_period_bytes(params);
		play_prtd->dma_start = play_runtime->dma_addr;
		play_prtd->dma_pos = play_prtd->dma_start;
		play_prtd->dma_end = play_prtd->dma_start + play_totbytes;
		spin_unlock_irq(&play_prtd->lock);
    } else if (substream->stream == SNDRV_PCM_STREAM_CAPTURE) {
    	pr_info("CAPTUR:sun7i_i2sdma.c::func:%s(line:%d)\n",__func__,__LINE__);
		capture_runtime = substream->runtime;
		capture_prtd = capture_runtime ->private_data;
		capture_rtd = substream->private_data;
		capture_totbytes = params_buffer_bytes(params);
		capture_dma = snd_soc_dai_get_dma_data(capture_rtd->cpu_dai, substream);
		
		if (!capture_dma) {
			return 0;
		}

		if (capture_prtd->params == NULL) {
			capture_prtd->params = capture_dma;
			/*
			 * request the audio dma handle (we don't care about the channel!)
			 */
			capture_prtd->dma_hdl = sw_dma_request(capture_prtd->params->name, CHAN_NORAML);
			if (NULL == capture_prtd->dma_hdl) {
				printk(KERN_ERR "failed to request dma handle\n");
				return -EINVAL;
			}
		}

		/*
		 * set callback
		 */
		memset(&capture_prtd->play_done_cb, 0, sizeof(capture_prtd->play_done_cb));
		capture_prtd->play_done_cb.func = sun7i_audio_capture_buffdone;
		capture_prtd->play_done_cb.parg = substream;
		/* use the full buffer callback, maybe we should use the half buffer callback? */
		if (0 != sw_dma_ctl(capture_prtd->dma_hdl, DMA_OP_SET_FD_CB, (void *)&(capture_prtd->play_done_cb))) {
			printk(KERN_ERR "failed to set dma buffer done!!!\n");
			sw_dma_release(capture_prtd->dma_hdl);
			return -EINVAL;
		}
		snd_pcm_set_runtime_buffer(substream, &substream->dma_buffer);
		capture_runtime->dma_bytes = capture_totbytes;

		spin_lock_irq(&capture_prtd->lock);
		capture_prtd->dma_loaded = 0;
		capture_prtd->dma_limit = capture_runtime->hw.periods_min;
		capture_prtd->dma_period = params_period_bytes(params);
		capture_prtd->dma_start = capture_runtime->dma_addr;
		capture_prtd->dma_pos = capture_prtd->dma_start;
		capture_prtd->dma_end = capture_prtd->dma_start + capture_totbytes;
		spin_unlock_irq(&capture_prtd->lock);
    } else {
		return -EINVAL;
    }

	return 0;
}