/*
 * dma_prog_sg_channel - program a scatter-gather DMA channel pair
 * @chn:         SG list-fetch channel number; its companion data
 *               channel is addressed as (chn - 1) — see NOTE below
 * @dma_sg_list: physical address of the first SG list descriptor
 *
 * Programs channel @chn to copy one 4-word SG descriptor into the
 * companion channel's alternate register set, with companion-channel
 * chaining enabled so the list is walked automatically.
 *
 * Returns 0 on success, -EINVAL for an out-of-range channel number.
 */
int dma_prog_sg_channel(int chn, u32 dma_sg_list)
{
	u32 dma_config;

	/* Reject negative as well as too-large channel numbers; the
	 * original check (chn >= DMA_MAX_CHANNELS only) let negative
	 * values through to the register-indexing macros below. */
	if ((chn < 0) || (chn >= DMA_MAX_CHANNELS))
		return -EINVAL;

	/* NOTE(review): (chn - 1) assumes SG channels are allocated in
	 * pairs with the companion data channel directly below @chn, so
	 * @chn is expected to be >= 1 here — confirm against the SG
	 * channel allocator before tightening the bound. */
	dma_config = DMA_CFG_CMP_CH_EN | DMA_CFG_CMP_CH_NR(chn - 1);

	lpc313x_dma_lock();
	DMACH_SRC_ADDR(chn) = dma_sg_list;             /* SG list base */
	DMACH_DST_ADDR(chn) = DMACH_ALT_PHYS(chn - 1); /* companion ALT regs */
	DMACH_LEN(chn) = 0x4;                          /* one 4-word descriptor */
	DMACH_CFG(chn) = dma_config;
	lpc313x_dma_unlock();

	return 0;
}
/*
 * lpc313x_pcm_prepare - ALSA "prepare" callback for this PCM platform
 * driver: lazily claims a DMA channel for the substream (and, in
 * linked-list mode, a coherent buffer for the SG descriptor list) the
 * first time the stream is prepared. Subsequent prepares with a channel
 * already held (dmach != -1) are no-ops.
 *
 * Returns 0 on success, a negative errno on channel or memory
 * allocation failure.
 */
static int lpc313x_pcm_prepare(struct snd_pcm_substream *substream)
{
	struct lpc313x_dma_data *prtd = substream->runtime->private_data;

	/* Setup DMA channel */
	if (prtd->dmach == -1) {
		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
#if defined (CONFIG_SND_USE_DMA_LINKLIST)
			/* Linked-list mode: request a scatter-gather
			 * channel and enable companion-channel chaining
			 * keyed to the channel we were given. */
			prtd->dmach = dma_request_sg_channel("I2STX",
				lpc313x_pcm_dma_irq, substream, 0);
			prtd->dma_cfg_base = DMA_CFG_TX_WORD |
				DMA_CFG_RD_SLV_NR(0) |
				DMA_CFG_CMP_CH_EN |
				DMA_CFG_WR_SLV_NR(TX_DMA_CHCFG) |
				DMA_CFG_CMP_CH_NR(prtd->dmach);
#else
			/* Circular-buffer mode: single channel wrapping
			 * over the PCM buffer. */
			prtd->dmach = dma_request_channel("I2STX",
				lpc313x_pcm_dma_irq, substream);
			prtd->dma_cfg_base = DMA_CFG_TX_WORD |
				DMA_CFG_RD_SLV_NR(0) |
				DMA_CFG_CIRC_BUF |
				DMA_CFG_WR_SLV_NR(TX_DMA_CHCFG);
#endif
		} else {
			/* Capture: mirror of the playback setup with the
			 * read/write slave roles swapped.
			 * NOTE(review): DMA_CFG_TX_WORD is used here too —
			 * presumably it just selects word-width transfers
			 * rather than direction; confirm against the
			 * DMA_CFG_* register definitions. */
#if defined (CONFIG_SND_USE_DMA_LINKLIST)
			prtd->dmach = dma_request_sg_channel("I2SRX",
				lpc313x_pcm_dma_irq, substream, 0);
			prtd->dma_cfg_base = DMA_CFG_TX_WORD |
				DMA_CFG_WR_SLV_NR(0) |
				DMA_CFG_CMP_CH_EN |
				DMA_CFG_RD_SLV_NR(RX_DMA_CHCFG) |
				DMA_CFG_CMP_CH_NR(prtd->dmach);
#else
			prtd->dmach = dma_request_channel("I2SRX",
				lpc313x_pcm_dma_irq, substream);
			prtd->dma_cfg_base = DMA_CFG_TX_WORD |
				DMA_CFG_WR_SLV_NR(0) |
				DMA_CFG_CIRC_BUF |
				DMA_CFG_RD_SLV_NR(RX_DMA_CHCFG);
#endif
		}

		/* Channel request failed: the negative value doubles as
		 * the errno returned to the ALSA core. (dma_cfg_base was
		 * computed from it above, but is never used on this
		 * error path.) */
		if (prtd->dmach < 0) {
			pr_err("Error allocating DMA channel\n");
			return prtd->dmach;
		}

#if defined (CONFIG_SND_USE_DMA_LINKLIST)
		/* Allocate space for a DMA linked list */
		prtd->p_sg_cpu = (dma_sg_ll_t *) dma_alloc_coherent(
			NULL, DMA_LIST_SIZE,
			(dma_addr_t *) &prtd->p_sg_dma, GFP_KERNEL);
		if (prtd->p_sg_cpu == NULL) {
			/* Roll back the channel claim so a retry starts
			 * from a clean state. */
			dma_release_sg_channel(prtd->dmach);
			prtd->dmach = -1;
			return -ENOMEM;
		}
#endif
	}

	return 0;
}