Example #1
int lpc32xx_dma_start_pflow_xfer(int ch,
				void *src,
				void *dst,
				int enable)
{
	u32 tmp;

	if ((!VALID_CHANNEL(ch)) || (dma_ctrl.dma_channels[ch].name == NULL))
		return -EINVAL;

	/* When starting a DMA transfer where the peripheral is the flow
	   controller, DMA must be previously disabled */
	tmp = __raw_readl(DMACH_CONFIG_CH(DMAIOBASE, ch));
	if (tmp & DMAC_CHAN_ENABLE)
		return -EBUSY;

	__dma_regs_lock();
	__raw_writel((u32) src, DMACH_SRC_ADDR(DMAIOBASE, ch));
	__raw_writel((u32) dst, DMACH_DEST_ADDR(DMAIOBASE, ch));
	__raw_writel(0, DMACH_LLI(DMAIOBASE, ch));
	__raw_writel(dma_ctrl.dma_channels[ch].control, DMACH_CONTROL(DMAIOBASE, ch));

	tmp = dma_ctrl.dma_channels[ch].config |
		dma_ctrl.dma_channels[ch].config_int_mask;
	if (enable != 0)
		tmp |= DMAC_CHAN_ENABLE;
	__raw_writel(tmp, DMACH_CONFIG_CH(DMAIOBASE, ch));

	__dma_regs_unlock();

	return 0;
}
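A minimal caller sketch, not part of the driver above: it assumes the channel was already allocated and its control/config words were programmed elsewhere; sd_write_start, src_phys and fifo_phys are illustrative placeholders.

/* Illustrative sketch only: channel is assumed to be allocated and
 * configured elsewhere; src_phys and fifo_phys are placeholder names. */
static int sd_write_start(int ch, dma_addr_t src_phys, u32 fifo_phys)
{
	int ret;

	/* The peripheral acts as flow controller; request immediate enable */
	ret = lpc32xx_dma_start_pflow_xfer(ch, (void *) src_phys,
					   (void *) fifo_phys, 1);
	if (ret == -EBUSY)
		pr_warn("DMA channel %d is still active\n", ch);

	return ret;
}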
Example #2
int dma_prog_channel(unsigned int chn, dma_setup_t *dma_setup)
{
	/* Reject unallocated channels and setups flagged by dma_valid_config() */
	if ((chn >= DMA_MAX_CHANNELS) || !dma_channels[chn].name ||
	    dma_valid_config(dma_setup))
		return -EINVAL;

	/* Program the source, destination, length and configuration registers */
	DMACH_SRC_ADDR(chn) = dma_setup->src_address;
	DMACH_DST_ADDR(chn) = dma_setup->dest_address;
	DMACH_LEN(chn) = dma_setup->trans_length;
	DMACH_CFG(chn) = dma_setup->cfg;

	return 0;
}
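For context, a hedged caller sketch: the dma_setup_t field names come from dma_prog_channel() above, while start_mem_to_dev, chn, src_phys, dst_phys, len and cfg_word are placeholders, and the u32 field types are an assumption.

/* Hedged caller sketch: field names match dma_prog_channel() above;
 * the parameter names and u32 types are assumptions. */
static int start_mem_to_dev(unsigned int chn, u32 src_phys, u32 dst_phys,
			    u32 len, u32 cfg_word)
{
	dma_setup_t setup = {
		.src_address = src_phys,
		.dest_address = dst_phys,
		.trans_length = len,
		.cfg = cfg_word,
	};

	/* Returns -EINVAL if the channel is unallocated or the setup is rejected */
	return dma_prog_channel(chn, &setup);
}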
Example #3
int dma_prog_sg_channel(int chn, u32 dma_sg_list)
{
	u32 dma_config;

	if (chn >= DMA_MAX_CHANNELS)
		return -EINVAL;

	/* Enable the companion channel mechanism, paired with channel chn - 1 */
	dma_config = DMA_CFG_CMP_CH_EN | DMA_CFG_CMP_CH_NR(chn - 1);

	lpc313x_dma_lock();
	/* This channel feeds the SG descriptor list into the companion
	   channel's alternate register set (transfer length of 0x4) */
	DMACH_SRC_ADDR(chn) = dma_sg_list;
	DMACH_DST_ADDR(chn) = DMACH_ALT_PHYS(chn - 1);
	DMACH_LEN(chn) = 0x4;
	DMACH_CFG(chn) = dma_config;
	lpc313x_dma_unlock();

	return 0;
}
Example #4
static void lpc313x_check_dmall(unsigned long data)
{
	struct snd_pcm_substream *substream = (struct snd_pcm_substream *) data;
	struct snd_pcm_runtime *rtd = substream->runtime;
	struct lpc313x_dma_data *prtd = rtd->private_data;

	/* Determine buffer position from current DMA position. We don't need
	   the exact address, just the last finished period */
	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		mod_timer(&prtd->timer[0], jiffies + MINTICKINC);
		prtd->dma_cur = (dma_addr_t) DMACH_SRC_ADDR(prtd->dmach - 1);
	} else {
		mod_timer(&prtd->timer[1], jiffies + MINTICKINC);
		prtd->dma_cur = (dma_addr_t) DMACH_DST_ADDR(prtd->dmach - 1);
	}

	/* Tell audio system more buffer space is available */
	snd_pcm_period_elapsed(substream);
}
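The callback re-arms itself with mod_timer(); below is a hedged sketch of how it might be armed the first time using the older setup_timer() API, assuming prtd and substream come from the PCM open/trigger paths.

/* Illustrative only: arm the playback position timer once; the callback
 * then re-arms itself via mod_timer() until the stream is stopped. */
setup_timer(&prtd->timer[0], lpc313x_check_dmall,
	    (unsigned long) substream);
prtd->timer[0].expires = jiffies + MINTICKINC;
add_timer(&prtd->timer[0]);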
Example #5
int lpc32xx_dma_start_xfer(int ch, u32 config)
{
	struct dma_list_ctrl *plhead;

	if ((!VALID_CHANNEL(ch)) || (dma_ctrl.dma_channels[ch].name == NULL) ||
		(dma_ctrl.dma_channels[ch].list_vstart == 0))
		return -1;

	/* Load the first link-list entry into the channel registers and
	   program the channel with the caller-supplied config word */
	plhead = dma_ctrl.dma_channels[ch].list_head;
	__dma_regs_lock();
	__raw_writel(plhead->dmall.src, DMACH_SRC_ADDR(DMAIOBASE, ch));
	__raw_writel(plhead->dmall.dest, DMACH_DEST_ADDR(DMAIOBASE, ch));
	__raw_writel(plhead->dmall.next_lli, DMACH_LLI(DMAIOBASE, ch));
	__raw_writel(plhead->dmall.ctrl, DMACH_CONTROL(DMAIOBASE, ch));
	__raw_writel(config, DMACH_CONFIG_CH(DMAIOBASE, ch));
	__dma_regs_unlock();

	return 0;
}
Example #6
int dma_current_state(unsigned int chn,
		      unsigned int *psrc,
		      unsigned int *pdst,
		      unsigned int *plen,
		      unsigned int *pcfg,
		      unsigned int *pena,
		      unsigned int *pcnt)
{
	if (chn >= DMA_MAX_CHANNELS || !dma_channels[chn].name)
		return -EINVAL;

	/* Snapshot the channel's current register state */
	*psrc = DMACH_SRC_ADDR(chn);
	*pdst = DMACH_DST_ADDR(chn);
	*plen = DMACH_LEN(chn);
	*pcfg = DMACH_CFG(chn);
	*pena = DMACH_EN(chn);
	*pcnt = DMACH_TCNT(chn);

	return 0;
}
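A small illustrative caller, not part of the driver: snapshot and log the raw channel state (chn is a placeholder; the values are exactly what the register macros above return).

/* Illustrative only: dump the raw channel state for debugging */
unsigned int src, dst, len, cfg, ena, cnt;

if (dma_current_state(chn, &src, &dst, &len, &cfg, &ena, &cnt) == 0)
	pr_debug("dma ch%u: src=%08x dst=%08x len=%u cfg=%08x en=%u cnt=%u\n",
		 chn, src, dst, len, cfg, ena, cnt);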
Example #7
u32 lpc32xx_dma_queue_llist_entry(int ch,
				  void *src,
				  void *dst,
				  int size)
{
	struct dma_list_ctrl *plhead;
	u32 ctrl, cfg;

	if ((!VALID_CHANNEL(ch)) || (dma_ctrl.dma_channels[ch].name == NULL) ||
		(dma_ctrl.dma_channels[ch].list_vstart == 0))
		return 0;

	/* Exit if all the buffers are used */
	if (dma_ctrl.dma_channels[ch].free_entries == 0) {
		return 0;
	}

	/* Next available DMA link descriptor */
	plhead = dma_ctrl.dma_channels[ch].list_head;

	/* Adjust size to number of transfers (vs bytes) */
	size = size / dma_ctrl.dma_channels[ch].dmacfg->dst_size;

	/* Setup control and config words */
	ctrl = dma_ctrl.dma_channels[ch].control | size;
	cfg = dma_ctrl.dma_channels[ch].config | DMAC_CHAN_ENABLE |
		dma_ctrl.dma_channels[ch].config_int_mask;

	/* Populate DMA linked data structure */
	plhead->dmall.src = (u32) src;
	plhead->dmall.dest = (u32) dst;
	plhead->dmall.next_lli = 0;
	plhead->dmall.ctrl = ctrl;

	__dma_regs_lock();

	/* Append this link to the end of the previous link */
	plhead->prev_list_addr->dmall.next_lli = lpc32xx_dma_llist_v_to_p(ch, (u32) plhead);

	/* Decrement available buffers */
	dma_ctrl.dma_channels[ch].free_entries--;

	/* If the DMA channel is idle, then the buffer needs to be placed directly into
	   the DMA registers */
	if ((__raw_readl(DMACH_CONFIG_CH(DMAIOBASE, ch)) & DMAC_CHAN_ENABLE) == 0) {
		/* DMA is disabled, so move the current buffer into the
		   channel registers and start transfer */
		__raw_writel((u32) src, DMACH_SRC_ADDR(DMAIOBASE, ch));
		__raw_writel((u32) dst, DMACH_DEST_ADDR(DMAIOBASE, ch));
		__raw_writel(0, DMACH_LLI(DMAIOBASE, ch));
		__raw_writel(ctrl, DMACH_CONTROL(DMAIOBASE, ch));
		__raw_writel(cfg, DMACH_CONFIG_CH(DMAIOBASE, ch));
	} else if (__raw_readl(DMACH_LLI(DMAIOBASE, ch)) == 0) {
		/* Update current entry to next entry */
		__raw_writel(dma_ctrl.dma_channels[ch].list_tail->next_list_phy,
			DMACH_LLI(DMAIOBASE, ch));

		/*
		 * The channel may have stopped before the new entry was linked
		 * in, in which case the hardware never picked it up; load the
		 * descriptor directly into the channel registers and restart.
		 */
		if ((__raw_readl(DMACH_CONFIG_CH(DMAIOBASE, ch)) & DMAC_CHAN_ENABLE) == 0) {
			__raw_writel((u32) src, DMACH_SRC_ADDR(DMAIOBASE, ch));
			__raw_writel((u32) dst, DMACH_DEST_ADDR(DMAIOBASE, ch));
			__raw_writel(0, DMACH_LLI(DMAIOBASE, ch));
			__raw_writel(ctrl, DMACH_CONTROL(DMAIOBASE, ch));
			__raw_writel(cfg, DMACH_CONFIG_CH(DMAIOBASE, ch));
		}
	}

	/* Process next link on next call */
	dma_ctrl.dma_channels[ch].list_head = plhead->next_list_addr;

	__dma_regs_unlock();

	return (u32) plhead;
}
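A hedged usage sketch: queue two consecutive periods of a buffer toward a peripheral FIFO. buf_phys, fifo_phys and period_bytes are placeholders; as in the function above, a return value of 0 means the channel was invalid or no free link-list entries remained.

/* Illustrative only: queue two periods back to back */
u32 entry;
int i;

for (i = 0; i < 2; i++) {
	entry = lpc32xx_dma_queue_llist_entry(ch,
			(void *) (buf_phys + i * period_bytes),
			(void *) fifo_phys, period_bytes);
	if (entry == 0)
		break;	/* invalid channel or no free list entries */
}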