Example #1
/* Extension for testing DMA transfers to the FIOPIN register */
int32_t dma_start_m2pin(int32_t ch,
                        void *src,
                        void *dest,
                        DMAC_LL_T *plli,
                        int32_t trans)
{
	int32_t sts = _ERROR;

	/* Verify that the selected channel has been allocated */
	if (dmadrv_dat.alloc_ch[ch] == TRUE)
	{
		/* Setup source and destination and clear LLI */
		dmadrv_dat.pdma->dma_chan[ch].src_addr = (uint32_t) src;
		dmadrv_dat.pdma->dma_chan[ch].dest_addr = (uint32_t) dest;
		dmadrv_dat.pdma->dma_chan[ch].lli = (uint32_t) plli;

		/* Use the linked list control word if available */
		if (plli != NULL)
		{
			dmadrv_dat.pdma->dma_chan[ch].control = plli->next_ctrl;
		}
		else
		{
			/* Setup channel configuration */
			dmadrv_dat.pdma->dma_chan[ch].control =
			    (DMAC_CHAN_INT_TC_EN |
			    DMAC_CHAN_SRC_AUTOINC | DMAC_CHAN_DEST_WIDTH_8 |
			    DMAC_CHAN_SRC_WIDTH_8 | DMAC_CHAN_DEST_BURST_1 |
			    DMAC_CHAN_SRC_BURST_1 |
			    DMAC_CHAN_TRANSFER_SIZE(trans));
		}

		/* Start channel transfer */
		dmadrv_dat.pdma->dma_chan[ch].config_ch =
			(DMAC_CHAN_FLOW_D_P2M | DMAC_CHAN_ENABLE | DMAC_SRC_PERIP(9));

		sts = _NO_ERROR;
	}

	return sts;
}
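A minimal calling sketch follows. The channel number, buffer, and the FIOPIN_REG register macro are hypothetical; the sketch assumes channel 0 was already allocated through the driver's channel-allocation call (not shown in this example).

/* Hypothetical usage of dma_start_m2pin(): stream a byte pattern from
   memory to a GPIO pin register */
static uint8_t pattern[64];

void fiopin_dma_test(void)
{
	int32_t ch = 0;	/* assumed to be an already-allocated channel */

	/* A NULL LLI selects the default 8-bit, burst-1 control word */
	if (dma_start_m2pin(ch, pattern, (void *) &FIOPIN_REG,
	                    NULL, sizeof(pattern)) == _NO_ERROR)
	{
		/* Transfer started; completion is signaled through the
		   channel's terminal count interrupt */
	}
}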
Example #2
/*
 * DMA read/write transfers with ECC support
 */
static int lpc32xx_dma_xfer(struct mtd_info *mtd, uint8_t *buf,
	int eccsubpages, int read)
{
	struct nand_chip *chip = mtd->priv;
	struct lpc32xx_nand_host *host = chip->priv;
	uint32_t config, tmpreg;
	dma_addr_t buf_phy;
	int i, timeout, dma_mapped = 0, status = 0;

	/* Map DMA buffer */
	if (likely((void *) buf < high_memory)) {
		buf_phy = dma_map_single(mtd->dev.parent, buf, mtd->writesize,
			read ? DMA_FROM_DEVICE : DMA_TO_DEVICE);
		if (unlikely(dma_mapping_error(mtd->dev.parent, buf_phy))) {
			dev_err(mtd->dev.parent,
				"Unable to map DMA buffer\n");
			dma_mapped = 0;
		} else {
			dma_mapped = 1;
		}
	}

	/* If a buffer can't be mapped, use the local buffer */
	if (!dma_mapped) {
		buf_phy = host->data_buf_dma;
		if (!read)
			memcpy(host->data_buf, buf, mtd->writesize);
	}

	if (read)
		config = DMAC_CHAN_ITC | DMAC_CHAN_IE | DMAC_CHAN_FLOW_D_P2M |
			DMAC_DEST_PERIP(0) |
			DMAC_SRC_PERIP(DMA_PERID_NAND1) | DMAC_CHAN_ENABLE;
	else
		config = DMAC_CHAN_ITC | DMAC_CHAN_IE | DMAC_CHAN_FLOW_D_M2P |
			DMAC_DEST_PERIP(DMA_PERID_NAND1) |
			DMAC_SRC_PERIP(0) | DMAC_CHAN_ENABLE;

	/* DMA mode with ECC enabled */
	tmpreg = __raw_readl(SLC_CFG(host->io_base));
	__raw_writel(SLCCFG_ECC_EN | SLCCFG_DMA_ECC | tmpreg,
		SLC_CFG(host->io_base));

	/* Clear initial ECC */
	__raw_writel(SLCCTRL_ECC_CLEAR, SLC_CTRL(host->io_base));

	/* Prepare DMA descriptors */
	lpc32xx_nand_dma_configure(mtd, buf_phy, chip->ecc.steps, read);

	/* Setup DMA direction and burst mode */
	if (read)
		__raw_writel(__raw_readl(SLC_CFG(host->io_base)) |
			SLCCFG_DMA_DIR, SLC_CFG(host->io_base));
	else
		__raw_writel(__raw_readl(SLC_CFG(host->io_base)) &
			~SLCCFG_DMA_DIR, SLC_CFG(host->io_base));
	__raw_writel(__raw_readl(SLC_CFG(host->io_base)) | SLCCFG_DMA_BURST,
		SLC_CFG(host->io_base));

	/* Transfer size is data area only */
	__raw_writel(mtd->writesize, SLC_TC(host->io_base));

	/* Start transfer in the NAND controller */
	__raw_writel(__raw_readl(SLC_CTRL(host->io_base)) | SLCCTRL_DMA_START,
		SLC_CTRL(host->io_base));

	/* Start DMA to process NAND controller DMA FIFO */
	host->dmapending = 0;
	lpc32xx_dma_start_xfer(host->dmach, config);

	/*
	 * On some systems, the DMA transfer will be very fast, so there is no
	 * point in waiting for the transfer to complete using the interrupt
	 * method. It's best to just poll the transfer here to prevent several
	 * costly context changes. This is especially true for systems that
	 * use small page devices or NAND devices with very fast access.
	 */
	if (host->ncfg->polled_completion) {
		timeout = LPC32XX_DMA_SIMPLE_TIMEOUT;
		while ((timeout > 0) && lpc32xx_dma_is_active(host->dmach))
			timeout--;
		if (timeout == 0) {
			dev_err(mtd->dev.parent,
				"DMA transfer timeout error\n");
			status = -EIO;

			/* Switch to non-polled mode */
			host->ncfg->polled_completion = false;
		}
	}

	if (!host->ncfg->polled_completion) {
		/* Wait till DMA transfer is done or timeout occurs */
		wait_event_timeout(host->dma_waitq, host->dmapending,
			msecs_to_jiffies(LPC32XX_DMA_WAIT_TIMEOUT_MS));
		if (host->dma_xfer_status != 0) {
			dev_err(mtd->dev.parent, "DMA transfer error\n");
			status = -EIO;
		}
	}

	/*
	 * The DMA is finished, but the NAND controller may still have
	 * buffered data. Wait until all the data is sent.
	 */
	timeout = LPC32XX_DMA_SIMPLE_TIMEOUT;
	while ((__raw_readl(SLC_STAT(host->io_base)) & SLCSTAT_DMA_FIFO)
		&& (timeout > 0))
		timeout--;
	if (timeout == 0) {
		dev_err(mtd->dev.parent, "FIFO held data too long\n");
		status = -EIO;
	}

	/* Read last calculated ECC value */
	if (read) {
		host->ecc_buf[chip->ecc.steps - 1] =
			__raw_readl(SLC_ECC(host->io_base));
	} else {
		for (i = 0; i < LPC32XX_DMA_ECC_REP_READ; i++)
			host->ecc_buf[chip->ecc.steps - 1] =
				__raw_readl(SLC_ECC(host->io_base));
	}

	/*
	 * For reads, get the OOB data. For writes, the data will be
	 * written later.
	 */
	if (read)
		chip->read_buf(mtd, chip->oob_poi, mtd->oobsize);

	/* Flush DMA link list */
	lpc32xx_dma_flush_llist(host->dmach);

	if (__raw_readl(SLC_STAT(host->io_base)) & SLCSTAT_DMA_FIFO ||
		__raw_readl(SLC_TC(host->io_base))) {
		/* Something is left in the FIFO, something is wrong */
		dev_err(mtd->dev.parent, "DMA FIFO failure\n");
		status = -EIO;
	}

	if (dma_mapped)
		dma_unmap_single(mtd->dev.parent, buf_phy, mtd->writesize,
			read ? DMA_FROM_DEVICE : DMA_TO_DEVICE);
	else if (read)
		memcpy(buf, host->data_buf, mtd->writesize);

	/* Stop DMA & HW ECC */
	__raw_writel(__raw_readl(SLC_CTRL(host->io_base)) &
		~SLCCTRL_DMA_START, SLC_CTRL(host->io_base));
	__raw_writel(tmpreg, SLC_CFG(host->io_base));

	return status;
}
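The interrupt-mode wait above depends on a DMA completion callback that sets host->dmapending and wakes host->dma_waitq. That callback is not part of this excerpt; the sketch below shows the minimal bookkeeping it would need. The callback signature and the DMA_ERR_INT cause flag are hypothetical; only the host fields are taken from the code above.

/* Sketch of the assumed DMA completion callback */
static void lpc32xx_nand_dma_irq(int channel, int cause, void *data)
{
	struct lpc32xx_nand_host *host = data;

	/* Record error vs. normal terminal count for the waiter */
	host->dma_xfer_status = (cause & DMA_ERR_INT) ? 1 : 0;

	/* Wake up lpc32xx_dma_xfer(), which waits on dma_waitq */
	host->dmapending = 1;
	wake_up(&host->dma_waitq);
}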
Example #3
static int mmc_dma_setup(struct mmci_platform_data *plat)
{
	u32 llptrrx, llptrtx;
	int ret = 0;

	/*
	 * There is a quirk with the LPC32XX and SD burst DMA. DMA sg
	 * transfers where DMA is the flow controller will not transfer
	 * the last few bytes to or from the SD card controller and
	 * memory. For RX, the last few bytes in the SD transfer can be
	 * forced out with a software DMA burst request. For TX, this
	 * can't be done, so sg TX transfers are not supported. If more
	 * than 1 sg segment is passed in a TX data request, a temporary
	 * bounce buffer is used instead: it receives a contiguous copy
	 * of the TX data and is handed to the DMA controller in place
	 * of the sg list (see the bounce-copy sketch after this
	 * function).
	 */

	if (plat->dma_tx_size) {
		/* Use pre-allocated memory for the DMA Tx buffer */
		dmac_drvdat.dma_handle_tx = (dma_addr_t)plat->dma_tx_v_base;
		dmac_drvdat.dma_v_base = plat->dma_tx_v_base;
		dmac_drvdat.preallocated_tx_buf = 1;
	} else {
		/* Allocate a chunk of memory for the DMA TX buffers */
		dmac_drvdat.dma_v_base = dma_alloc_coherent(dmac_drvdat.dev,
			DMA_BUFF_SIZE, &dmac_drvdat.dma_handle_tx, GFP_KERNEL);
		dmac_drvdat.preallocated_tx_buf = 0;
	}

	if (dmac_drvdat.dma_v_base == NULL) {
		dev_err(dmac_drvdat.dev, "error getting DMA region\n");
		ret = -ENOMEM;
		goto dma_no_tx_buff;
	}
	dev_info(dmac_drvdat.dev, "DMA buffer: phy:%p, virt:%p\n",
		(void *) dmac_drvdat.dma_handle_tx,
		dmac_drvdat.dma_v_base);

	/* Setup TX DMA channel */
	dmac_drvdat.dmacfgtx.ch = DMA_CH_SDCARD_TX;
	dmac_drvdat.dmacfgtx.tc_inten = 0;
	dmac_drvdat.dmacfgtx.err_inten = 0;
	dmac_drvdat.dmacfgtx.src_size = 4;
	dmac_drvdat.dmacfgtx.src_inc = 1;
	dmac_drvdat.dmacfgtx.src_bsize = DMAC_CHAN_SRC_BURST_8;
	dmac_drvdat.dmacfgtx.src_prph = DMAC_SRC_PERIP(DMA_PERID_SDCARD);
	dmac_drvdat.dmacfgtx.dst_size = 4;
	dmac_drvdat.dmacfgtx.dst_inc = 0;
	dmac_drvdat.dmacfgtx.dst_bsize = DMAC_CHAN_DEST_BURST_8;
	dmac_drvdat.dmacfgtx.dst_prph = DMAC_DEST_PERIP(DMA_PERID_SDCARD);
	dmac_drvdat.dmacfgtx.flowctrl = DMAC_CHAN_FLOW_P_M2P;
	if (lpc178x_dma_ch_get(
		&dmac_drvdat.dmacfgtx, "dma_sd_tx", NULL, NULL) < 0) {
		dev_err(dmac_drvdat.dev,
			"Error setting up SD card TX DMA channel\n");
		ret = -ENODEV;
		goto dma_no_txch;
	}

	/* Allocate a linked list for DMA support */
	llptrtx = lpc178x_dma_alloc_llist(
		dmac_drvdat.dmacfgtx.ch, NR_SG * 2);
	if (llptrtx == 0) {
		dev_err(dmac_drvdat.dev,
			"Error allocating list buffer (MMC TX)\n");
		ret = -ENOMEM;
		goto dma_no_txlist;
	}

	/* Setup RX DMA channel */
	dmac_drvdat.dmacfgrx.ch = DMA_CH_SDCARD_RX;
	dmac_drvdat.dmacfgrx.tc_inten = 0;
	dmac_drvdat.dmacfgrx.err_inten = 0;
	dmac_drvdat.dmacfgrx.src_size = 4;
	dmac_drvdat.dmacfgrx.src_inc = 0;
	dmac_drvdat.dmacfgrx.src_bsize = DMAC_CHAN_SRC_BURST_8;
	dmac_drvdat.dmacfgrx.src_prph = DMAC_SRC_PERIP(DMA_PERID_SDCARD);
	dmac_drvdat.dmacfgrx.dst_size = 4;
	dmac_drvdat.dmacfgrx.dst_inc = 1;
	dmac_drvdat.dmacfgrx.dst_bsize = DMAC_CHAN_DEST_BURST_8;
	dmac_drvdat.dmacfgrx.dst_prph = DMAC_DEST_PERIP(DMA_PERID_SDCARD);
	dmac_drvdat.dmacfgrx.flowctrl = DMAC_CHAN_FLOW_D_P2M;
	if (lpc178x_dma_ch_get(
		&dmac_drvdat.dmacfgrx, "dma_sd_rx", NULL, NULL) < 0) {
		dev_err(dmac_drvdat.dev,
			"Error setting up SD card RX DMA channel\n");
		ret = -ENODEV;
		goto dma_no_rxch;
	}

	/* Allocate a linked list for DMA support */
	llptrrx = lpc178x_dma_alloc_llist(
		dmac_drvdat.dmacfgrx.ch, NR_SG * 2);
	if (llptrrx == 0) {
		dev_err(dmac_drvdat.dev,
			"Error allocating list buffer (MMC RX)\n");
		ret = -ENOMEM;
		goto dma_no_rxlist;
	}

	return 0;

dma_no_rxlist:
	lpc178x_dma_ch_put(dmac_drvdat.dmacfgrx.ch);
	dmac_drvdat.dmacfgrx.ch = -1;
dma_no_rxch:
	lpc178x_dma_dealloc_llist(dmac_drvdat.dmacfgtx.ch);
dma_no_txlist:
	lpc178x_dma_ch_put(dmac_drvdat.dmacfgtx.ch);
	dmac_drvdat.dmacfgtx.ch = -1;
dma_no_txch:
	if (!dmac_drvdat.preallocated_tx_buf) {
		dma_free_coherent(dmac_drvdat.dev, DMA_BUFF_SIZE,
			dmac_drvdat.dma_v_base,
			dmac_drvdat.dma_handle_tx);
	}
dma_no_tx_buff:
	return ret;
}
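The TX bounce-buffer handling described in the quirk comment is not part of this function; a sketch of the copy step is shown below. The helper name and its caller are hypothetical, while for_each_sg() and sg_virt() are the standard kernel scatterlist accessors (linux/scatterlist.h).

/* Hypothetical helper: flatten a multi-segment TX request into the
   contiguous DMA buffer set up by mmc_dma_setup() */
static unsigned int mmc_dma_tx_bounce(struct scatterlist *sgl, int nents)
{
	struct scatterlist *sg;
	unsigned int copied = 0;
	int i;

	/* Copy each segment back-to-back into the bounce buffer */
	for_each_sg(sgl, sg, nents, i) {
		memcpy(dmac_drvdat.dma_v_base + copied, sg_virt(sg),
			sg->length);
		copied += sg->length;
	}

	/* DMA then sources from dmac_drvdat.dma_handle_tx instead of
	   the original sg list */
	return copied;
}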
/***********************************************************************
 *
 * Function: i2s_dma_init_dev
 *
 * Purpose: Initialize DMA for I2S
 *
 * Processing:
 *     See function
 *
 * Parameters:
 *     devid: Pointer to an I2S_CFG_T structure, cast to INT_32
 *     p_i2s_dma_prms: DMA parameters (channel, direction, memory
 *                     address, and transfer size)
 *
 * Outputs: None
 *
 * Returns: TRUE if the channel was set up, FALSE on a bad I2S channel
 *
 **********************************************************************/
INT_32 i2s_dma_init_dev(INT_32 devid, I2S_DMA_PRMS_T *p_i2s_dma_prms)
{
  INT_32 dest_width, src_width;
  INT_32 i2s_ww, i2sch;
  INT_32 dmach, dir, mem, sz;

  I2S_CFG_T *pi2s = (I2S_CFG_T *) devid;
  i2sch = pi2s->i2snum;
  i2s_ww = pi2s->i2s_w_sz;

  dmach = p_i2s_dma_prms->dmach;
  dir   = p_i2s_dma_prms->dir;
  mem   = p_i2s_dma_prms->mem;
  sz    = p_i2s_dma_prms->sz;

  /* Clear the terminal count status for the selected DMA channel */
  DMAC->int_tc_clear = _BIT(dmach);

  /* Set the DMA source and destination word width based on the I2S
     word width setting */
  if (i2s_ww == I2S_WW8)
  {
    dest_width = DMAC_CHAN_DEST_WIDTH_8;
    src_width = DMAC_CHAN_SRC_WIDTH_8;
  }
  else if (i2s_ww == I2S_WW16)
  {
    dest_width = DMAC_CHAN_DEST_WIDTH_16;
    src_width = DMAC_CHAN_SRC_WIDTH_16;
  }
  else
  {
    dest_width = DMAC_CHAN_DEST_WIDTH_32;
    src_width = DMAC_CHAN_SRC_WIDTH_32;
  }

  /* Setup DMA for I2S Channel 0, DEST uses AHB1, SRC uses AHB0 */
  if (i2sch == I2S_CH0)
  {
    /* DMA is the flow controller, memory to peripheral */
    if (dir == DMAC_CHAN_FLOW_D_M2P)
    {
      DMAC->dma_chan[dmach].src_addr = mem;
      DMAC->dma_chan[dmach].dest_addr = (UNS_32) &I2S0->i2s_tx_fifo;
      DMAC->dma_chan[dmach].control = DMAC_CHAN_TRANSFER_SIZE(sz)
                                      | DMAC_CHAN_SRC_BURST_4
                                      | DMAC_CHAN_DEST_BURST_4
                                      | DMAC_CHAN_DEST_AHB1
                                      | DMAC_CHAN_SRC_AUTOINC
                                      | DMAC_CHAN_INT_TC_EN
                                      | src_width
                                      | dest_width;

      DMAC->dma_chan[dmach].config_ch |= DMAC_CHAN_ENABLE
                              | DMAC_DEST_PERIP(DMA_PERID_I2S0_DMA0)
                              | DMAC_CHAN_FLOW_D_M2P | DMAC_CHAN_IE
                              | DMAC_CHAN_ITC;
    }
    /* Peripheral is the flow controller, memory to peripheral */
    else if (dir == DMAC_CHAN_FLOW_P_M2P)
    {
      DMAC->dma_chan[dmach].src_addr = mem;
      DMAC->dma_chan[dmach].dest_addr = (UNS_32) &I2S0->i2s_tx_fifo;
      DMAC->dma_chan[dmach].control = DMAC_CHAN_SRC_BURST_4
                                      | DMAC_CHAN_DEST_BURST_4
                                      | DMAC_CHAN_DEST_AHB1
                                      | DMAC_CHAN_SRC_AUTOINC
                                      | DMAC_CHAN_INT_TC_EN
                                      | src_width
                                      | dest_width;

      DMAC->dma_chan[dmach].config_ch |= DMAC_CHAN_ENABLE
                              | DMAC_DEST_PERIP(DMA_PERID_I2S0_DMA0)
                              | DMAC_CHAN_FLOW_P_M2P | DMAC_CHAN_IE
                              | DMAC_CHAN_ITC;
    }
    /* DMA is the flow controller, peripheral to memory */
    else if (dir == DMAC_CHAN_FLOW_D_P2M)
    {
      DMAC->dma_chan[dmach].src_addr = (UNS_32) &I2S0->i2s_rx_fifo;
      DMAC->dma_chan[dmach].dest_addr = mem;
      DMAC->dma_chan[dmach].control = DMAC_CHAN_TRANSFER_SIZE(sz)
                                      | DMAC_CHAN_SRC_BURST_4
                                      | DMAC_CHAN_DEST_BURST_4
                                      | DMAC_CHAN_SRC_AHB1
                                      | DMAC_CHAN_DEST_AUTOINC
                                      | DMAC_CHAN_INT_TC_EN
                                      | src_width
                                      | dest_width;

      DMAC->dma_chan[dmach].config_ch |= DMAC_CHAN_ENABLE
                              | DMAC_SRC_PERIP(DMA_PERID_I2S0_DMA1)
                              | DMAC_CHAN_FLOW_D_P2M | DMAC_CHAN_IE
                              | DMAC_CHAN_ITC;
    }
    /* Peripheral is the flow controller, peripheral to memory */
    else if (dir == DMAC_CHAN_FLOW_P_P2M)
    {
      DMAC->dma_chan[dmach].src_addr = (UNS_32) &I2S0->i2s_rx_fifo;
      DMAC->dma_chan[dmach].dest_addr = mem;
      DMAC->dma_chan[dmach].control = DMAC_CHAN_SRC_BURST_4
                                      | DMAC_CHAN_DEST_BURST_4
                                      | DMAC_CHAN_SRC_AHB1
                                      | DMAC_CHAN_DEST_AUTOINC
                                      | DMAC_CHAN_INT_TC_EN
                                      | src_width
                                      | dest_width;

      DMAC->dma_chan[dmach].config_ch |= DMAC_CHAN_ENABLE
                              | DMAC_SRC_PERIP(DMA_PERID_I2S0_DMA1)
                              | DMAC_CHAN_FLOW_P_P2M | DMAC_CHAN_IE
                              | DMAC_CHAN_ITC;
    }
  }
  /* Setup DMA for I2S Channel 1 */
  else if (i2sch == I2S_CH1)
  {
    /* DMA is the flow controller, memory to peripheral */
    if (dir == DMAC_CHAN_FLOW_D_M2P)
    {
      DMAC->dma_chan[dmach].src_addr = mem;
      DMAC->dma_chan[dmach].dest_addr = (UNS_32) &I2S1->i2s_tx_fifo;
      DMAC->dma_chan[dmach].control = DMAC_CHAN_TRANSFER_SIZE(sz)
                                      | DMAC_CHAN_SRC_BURST_4
                                      | DMAC_CHAN_DEST_BURST_4
                                      | DMAC_CHAN_DEST_AHB1
                                      | DMAC_CHAN_SRC_AUTOINC
                                      | DMAC_CHAN_INT_TC_EN
                                      | src_width
                                      | dest_width;

      DMAC->dma_chan[dmach].config_ch |= DMAC_CHAN_ENABLE
                              | DMAC_DEST_PERIP(DMA_PERID_I2S1_DMA0)
                              | DMAC_CHAN_FLOW_D_M2P | DMAC_CHAN_IE
                              | DMAC_CHAN_ITC;
    }
    /* Peripheral is the flow controller, memory to peripheral */
    else if (dir == DMAC_CHAN_FLOW_P_M2P)
    {
      DMAC->dma_chan[dmach].src_addr = mem;
      DMAC->dma_chan[dmach].dest_addr = (UNS_32) &I2S1->i2s_tx_fifo;
      DMAC->dma_chan[dmach].control = DMAC_CHAN_SRC_BURST_4
                                      | DMAC_CHAN_DEST_BURST_4
                                      | DMAC_CHAN_DEST_AHB1
                                      | DMAC_CHAN_SRC_AUTOINC
                                      | DMAC_CHAN_INT_TC_EN
                                      | src_width
                                      | dest_width;

      DMAC->dma_chan[dmach].config_ch |= DMAC_CHAN_ENABLE
                              | DMAC_DEST_PERIP(DMA_PERID_I2S1_DMA0)
                              | DMAC_CHAN_FLOW_P_M2P | DMAC_CHAN_IE
                              | DMAC_CHAN_ITC;
    }
    /* DMA is the flow controller, peripheral to memory */
    else if (dir == DMAC_CHAN_FLOW_D_P2M)
    {
      DMAC->dma_chan[dmach].src_addr = (UNS_32) &I2S1->i2s_rx_fifo;
      DMAC->dma_chan[dmach].dest_addr = mem;
      DMAC->dma_chan[dmach].control = DMAC_CHAN_TRANSFER_SIZE(sz)
                                      | DMAC_CHAN_SRC_BURST_4
                                      | DMAC_CHAN_DEST_BURST_4
                                      | DMAC_CHAN_SRC_AHB1
                                      | DMAC_CHAN_DEST_AUTOINC
                                      | DMAC_CHAN_INT_TC_EN
                                      | src_width
                                      | dest_width;

      DMAC->dma_chan[dmach].config_ch |= DMAC_CHAN_ENABLE
                              | DMAC_SRC_PERIP(DMA_PERID_I2S1_DMA1)
                              | DMAC_CHAN_FLOW_D_P2M
                              | DMAC_CHAN_IE
                              | DMAC_CHAN_ITC;
    }
    /* Peripheral is the flow controller, peripheral to memory */
    else if (dir == DMAC_CHAN_FLOW_P_P2M)
    {
      DMAC->dma_chan[dmach].src_addr = (UNS_32) &I2S1->i2s_rx_fifo;
      DMAC->dma_chan[dmach].dest_addr = mem;
      DMAC->dma_chan[dmach].control = DMAC_CHAN_SRC_BURST_4
                                      | DMAC_CHAN_DEST_BURST_4
                                      | DMAC_CHAN_SRC_AHB1
                                      | DMAC_CHAN_DEST_AUTOINC
                                      | DMAC_CHAN_INT_TC_EN
                                      | src_width
                                      | dest_width;

      DMAC->dma_chan[dmach].config_ch |= DMAC_CHAN_ENABLE
                              | DMAC_SRC_PERIP(DMA_PERID_I2S1_DMA1)
                              | DMAC_CHAN_FLOW_P_P2M
                              | DMAC_CHAN_IE
                              | DMAC_CHAN_ITC;
    }
  }

  else
  {
    return (FALSE);
  }

  return (TRUE);
}
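A hypothetical call into i2s_dma_init_dev() is sketched below for the playback (memory-to-peripheral) case with DMA as flow controller. The wrapper name, channel number, and transfer size are illustrative values only; the types and fields come from the function above.

/* Example setup: play audio_buf out of I2S channel 0 via DMA */
INT_32 i2s_dma_play(I2S_CFG_T *pi2s, UNS_32 *audio_buf)
{
  I2S_DMA_PRMS_T dmaprms;

  dmaprms.dmach = 0;                    /* DMA channel to program */
  dmaprms.dir   = DMAC_CHAN_FLOW_D_M2P; /* DMA is the flow controller */
  dmaprms.mem   = (INT_32) audio_buf;   /* memory-side address */
  dmaprms.sz    = 256;                  /* transfer size in words */

  /* The device ID is the I2S configuration pointer cast to INT_32 */
  return i2s_dma_init_dev((INT_32) pi2s, &dmaprms);
}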
static int lpc3xxx_pcm_prepare(struct snd_pcm_substream *substream)
{
	struct lpc3xxx_dma_data *prtd = substream->runtime->private_data;

	/* Setup DMA channel */
	if (prtd->dmach == -1) {
		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
			prtd->dmach = DMA_CH_I2S_TX;
			prtd->dmacfg.ch = DMA_CH_I2S_TX;
			prtd->dmacfg.tc_inten = 1;
			prtd->dmacfg.err_inten = 1;
			prtd->dmacfg.src_size = 4;
			prtd->dmacfg.src_inc = 1;
#ifdef CONFIG_ARM_LPC32XX
			prtd->dmacfg.src_ahb1 = 1;
#endif
			prtd->dmacfg.src_bsize = DMAC_CHAN_SRC_BURST_4;
			prtd->dmacfg.src_prph = 0;
			prtd->dmacfg.dst_size = 4;
			prtd->dmacfg.dst_inc = 0;
			prtd->dmacfg.dst_bsize = DMAC_CHAN_DEST_BURST_4;
#ifdef CONFIG_ARM_LPC32XX
			prtd->dmacfg.dst_ahb1 = 0;
#endif

#if defined(CONFIG_SND_LPC32XX_USEI2S1)
			prtd->dmacfg.dst_prph = DMAC_DEST_PERIP(DMA_PERID_I2S1_DMA1);
#else
			prtd->dmacfg.dst_prph = DMAC_DEST_PERIP(DMA_PERID_I2S0_DMA1);
#endif
			prtd->dmacfg.flowctrl = DMAC_CHAN_FLOW_D_M2P;
			if (lpc32xx_dma_ch_get(&prtd->dmacfg, "dma_i2s_tx",
				&lpc3xxx_pcm_dma_irq, substream) < 0) {
				pr_debug(KERN_ERR "Error setting up I2S TX DMA channel\n");
				return -ENODEV;
			}

			/* Allocate a linked list for audio buffers */
			prtd->llptr = lpc32xx_dma_alloc_llist(prtd->dmach, NUMLINKS);
			if (prtd->llptr == 0) {
				lpc32xx_dma_ch_put(prtd->dmach);
				prtd->dmach = -1;
				pr_debug(KERN_ERR "Error allocating list buffer (I2S TX)\n");
				return -ENOMEM;
			}
		} else {
			prtd->dmach = DMA_CH_I2S_RX;
			prtd->dmacfg.ch = DMA_CH_I2S_RX;
			prtd->dmacfg.tc_inten = 1;
			prtd->dmacfg.err_inten = 1;
			prtd->dmacfg.src_size = 4;
			prtd->dmacfg.src_inc = 0;
#ifdef CONFIG_ARM_LPC32XX
			prtd->dmacfg.src_ahb1 = 1;
#endif
			prtd->dmacfg.src_bsize = DMAC_CHAN_SRC_BURST_4;
#if defined(CONFIG_SND_LPC32XX_USEI2S1)
			prtd->dmacfg.src_prph = DMAC_SRC_PERIP(DMA_PERID_I2S1_DMA0);
#else
			prtd->dmacfg.src_prph = DMAC_SRC_PERIP(DMA_PERID_I2S0_DMA0);
#endif
			prtd->dmacfg.dst_size = 4;
			prtd->dmacfg.dst_inc = 1;
#ifdef CONFIG_ARM_LPC32XX
			prtd->dmacfg.dst_ahb1 = 0;
#endif
			prtd->dmacfg.dst_bsize = DMAC_CHAN_DEST_BURST_4;
			prtd->dmacfg.dst_prph = 0;
			prtd->dmacfg.flowctrl = DMAC_CHAN_FLOW_D_P2M;
			if (lpc32xx_dma_ch_get(&prtd->dmacfg, "dma_i2s_rx",
				&lpc3xxx_pcm_dma_irq, substream) < 0) {
				pr_debug(KERN_ERR "Error setting up I2S RX DMA channel\n");
				return -ENODEV;
			}

			/* Allocate a linked list for audio buffers */
			prtd->llptr = lpc32xx_dma_alloc_llist(prtd->dmach, NUMLINKS);
			if (prtd->llptr == 0) {
				lpc32xx_dma_ch_put(prtd->dmach);
				prtd->dmach = -1;
				pr_debug(KERN_ERR "Error allocating list buffer (I2S RX)\n");
				return -ENOMEM;
			}
		}
	}

	return 0;
}
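The channel and linked list acquired in lpc3xxx_pcm_prepare() need a matching teardown. A sketch follows, using the same lpc32xx_dma_ch_put() call as the error paths above; lpc32xx_dma_dealloc_llist() is assumed to mirror the lpc178x_dma_dealloc_llist() helper shown earlier, and placing the cleanup in the hw_free callback is an assumption of this sketch.

static int lpc3xxx_pcm_hw_free(struct snd_pcm_substream *substream)
{
	struct lpc3xxx_dma_data *prtd = substream->runtime->private_data;

	if (prtd->dmach != -1) {
		/* Release the DMA linked list, then the channel */
		lpc32xx_dma_dealloc_llist(prtd->dmach);
		lpc32xx_dma_ch_put(prtd->dmach);
		prtd->dmach = -1;
	}

	return 0;
}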