Example #1
/*
 * Get a DMA channel and allocate memory for the DMA descriptors.
 * Prepare the DMA descriptor linked list.
 */
static int lpc32xx_nand_dma_setup(struct lpc32xx_nand_host *host,
	int num_entries)
{
	int ret = 0;

	host->dmach = DMA_CH_SLCNAND;
	host->dmacfg.ch = DMA_CH_SLCNAND;

	/*
	 * All the DMA configuration parameters will
	 * be overwritten in lpc32xx_nand_dma_configure().
	 */
	host->dmacfg.tc_inten = 1;
	host->dmacfg.err_inten = 1;
	host->dmacfg.src_size = 4;
	host->dmacfg.src_inc = 1;
	host->dmacfg.src_ahb1 = 1;
	host->dmacfg.src_bsize = DMAC_CHAN_SRC_BURST_4;
	host->dmacfg.src_prph = 0;
	host->dmacfg.dst_size = 4;
	host->dmacfg.dst_inc = 0;
	host->dmacfg.dst_bsize = DMAC_CHAN_DEST_BURST_4;
	host->dmacfg.dst_ahb1 = 0;
	host->dmacfg.dst_prph = DMAC_DEST_PERIP(DMA_PERID_NAND1);
	host->dmacfg.flowctrl = DMAC_CHAN_FLOW_D_M2P;
	if (lpc32xx_dma_ch_get(&host->dmacfg, LPC32XX_MODNAME,
		&lpc3xxx_nand_dma_irq, host) < 0) {
		dev_err(host->mtd.dev.parent,
			"Error setting up SLC NAND DMA channel\n");
		ret = -ENODEV;
		goto dma_ch_err;
	}

	/*
	 * Allocate a linked list of DMA descriptors
	 */
	host->llptr = lpc32xx_dma_alloc_llist(host->dmach, num_entries);
	if (host->llptr == 0) {
		dev_err(host->mtd.dev.parent,
			"Error allocating list buffer for SLC NAND\n");
		ret = -ENOMEM;
		goto dma_alloc_err;
	}

	return ret;

dma_alloc_err:
	/* Release the channel exactly once, on the error path only */
	lpc32xx_dma_ch_put(host->dmach);
	host->dmach = -1;
dma_ch_err:
	return ret;
}
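The matching teardown is not shown in this example. A minimal sketch follows, releasing resources in the reverse order of setup; lpc32xx_dma_dealloc_llist() is an assumption, named by analogy with the lpc178x_dma_dealloc_llist() call in Example #4.

/* Sketch only: undo lpc32xx_nand_dma_setup(). lpc32xx_dma_dealloc_llist()
 * is an assumed API, named by analogy with lpc178x_dma_dealloc_llist()
 * in Example #4. */
static void lpc32xx_nand_dma_teardown(struct lpc32xx_nand_host *host)
{
	if (host->dmach < 0)
		return;				/* nothing was set up */
	lpc32xx_dma_dealloc_llist(host->dmach);	/* free descriptor list */
	lpc32xx_dma_ch_put(host->dmach);	/* release the DMA channel */
	host->dmach = -1;			/* mark channel as released */
}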
Example #2
/*
 * DMA read/write transfers with ECC support
 */
static int lpc32xx_dma_xfer(struct mtd_info *mtd, uint8_t *buf,
	int eccsubpages, int read)
{
	struct nand_chip *chip = mtd->priv;
	struct lpc32xx_nand_host *host = chip->priv;
	uint32_t config, tmpreg;
	dma_addr_t buf_phy;
	int i, timeout, dma_mapped = 0, status = 0;

	/* Map DMA buffer */
	if (likely((void *) buf < high_memory)) {
		buf_phy = dma_map_single(mtd->dev.parent, buf, mtd->writesize,
			read ? DMA_FROM_DEVICE : DMA_TO_DEVICE);
		if (unlikely(dma_mapping_error(mtd->dev.parent, buf_phy))) {
			dev_err(mtd->dev.parent,
				"Unable to map DMA buffer\n");
			dma_mapped = 0;
		} else
			dma_mapped = 1;
	}

	/* If a buffer can't be mapped, use the local buffer */
	if (!dma_mapped) {
		buf_phy = host->data_buf_dma;
		if (!read)
			memcpy(host->data_buf, buf, mtd->writesize);
	}

	if (read)
		config = DMAC_CHAN_ITC | DMAC_CHAN_IE | DMAC_CHAN_FLOW_D_P2M |
			DMAC_DEST_PERIP(0) |
			DMAC_SRC_PERIP(DMA_PERID_NAND1) | DMAC_CHAN_ENABLE;
	else
		config = DMAC_CHAN_ITC | DMAC_CHAN_IE | DMAC_CHAN_FLOW_D_M2P |
			DMAC_DEST_PERIP(DMA_PERID_NAND1) |
			DMAC_SRC_PERIP(0) | DMAC_CHAN_ENABLE;

	/* DMA mode with ECC enabled */
	tmpreg = __raw_readl(SLC_CFG(host->io_base));
	__raw_writel(SLCCFG_ECC_EN | SLCCFG_DMA_ECC | tmpreg,
		SLC_CFG(host->io_base));

	/* Clear initial ECC */
	__raw_writel(SLCCTRL_ECC_CLEAR, SLC_CTRL(host->io_base));

	/* Prepare DMA descriptors */
	lpc32xx_nand_dma_configure(mtd, buf_phy, chip->ecc.steps, read);

	/* Setup DMA direction and burst mode */
	if (read)
		__raw_writel(__raw_readl(SLC_CFG(host->io_base)) |
			SLCCFG_DMA_DIR, SLC_CFG(host->io_base));
	else
		__raw_writel(__raw_readl(SLC_CFG(host->io_base)) &
			~SLCCFG_DMA_DIR, SLC_CFG(host->io_base));
	__raw_writel(__raw_readl(SLC_CFG(host->io_base)) | SLCCFG_DMA_BURST,
		SLC_CFG(host->io_base));

	/* Transfer size is data area only */
	__raw_writel(mtd->writesize, SLC_TC(host->io_base));

	/* Start transfer in the NAND controller */
	__raw_writel(__raw_readl(SLC_CTRL(host->io_base)) | SLCCTRL_DMA_START,
		SLC_CTRL(host->io_base));

	/* Start DMA to process NAND controller DMA FIFO */
	host->dmapending = 0;
	lpc32xx_dma_start_xfer(host->dmach, config);

	/*
	 * On some systems, the DMA transfer will be very fast, so there is
	 * no point in waiting for completion via the interrupt method. It's
	 * best to just poll the transfer here to avoid several costly
	 * context switches. This is especially true for systems that use
	 * small page devices or NAND devices with very fast access.
	 */
	if (host->ncfg->polled_completion) {
		timeout = LPC32XX_DMA_SIMPLE_TIMEOUT;
		while ((timeout > 0) && lpc32xx_dma_is_active(host->dmach))
			timeout--;
		if (timeout == 0) {
			dev_err(mtd->dev.parent,
				"DMA transfer timeout error\n");
			status = -EIO;

			/* Switch to non-polled mode */
			host->ncfg->polled_completion = false;
		}
	}

	if (!host->ncfg->polled_completion) {
		/* Wait till DMA transfer is done or timeout occurs */
		wait_event_timeout(host->dma_waitq, host->dmapending,
			msecs_to_jiffies(LPC32XX_DMA_WAIT_TIMEOUT_MS));
		if (host->dma_xfer_status != 0) {
			dev_err(mtd->dev.parent, "DMA transfer error\n");
			status = -EIO;
		}
	}

	/*
	 * The DMA is finished, but the NAND controller may still have
	 * buffered data. Wait until all the data is sent.
	 */
	timeout = LPC32XX_DMA_SIMPLE_TIMEOUT;
	while ((__raw_readl(SLC_STAT(host->io_base)) & SLCSTAT_DMA_FIFO)
		&& (timeout > 0))
		timeout--;
	if (timeout == 0) {
		dev_err(mtd->dev.parent, "FIFO held data too long\n");
		status = -EIO;
	}

	/* Read last calculated ECC value */
	if (read)
		host->ecc_buf[chip->ecc.steps - 1] =
			__raw_readl(SLC_ECC(host->io_base));
	else {
		for (i = 0; i < LPC32XX_DMA_ECC_REP_READ; i++)
			host->ecc_buf[chip->ecc.steps - 1] =
				__raw_readl(SLC_ECC(host->io_base));
	}

	/*
	 * For reads, fetch the OOB data now. For writes, the OOB data is
	 * written later.
	 */
	if (read)
		chip->read_buf(mtd, chip->oob_poi, mtd->oobsize);

	/* Flush DMA link list */
	lpc32xx_dma_flush_llist(host->dmach);

	if (__raw_readl(SLC_STAT(host->io_base)) & SLCSTAT_DMA_FIFO ||
		__raw_readl(SLC_TC(host->io_base))) {
		/* Something is left in the FIFO, something is wrong */
		dev_err(mtd->dev.parent, "DMA FIFO failure\n");
		status = -EIO;
	}

	if (dma_mapped)
		dma_unmap_single(mtd->dev.parent, buf_phy, mtd->writesize,
			read ? DMA_FROM_DEVICE : DMA_TO_DEVICE);
	else if (read)
		memcpy(buf, host->data_buf, mtd->writesize);

	/* Stop DMA & HW ECC */
	__raw_writel(__raw_readl(SLC_CTRL(host->io_base)) &
		~SLCCTRL_DMA_START, SLC_CTRL(host->io_base));
	__raw_writel(tmpreg, SLC_CFG(host->io_base));

	return status;
}
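As a side note, the repeated read-modify-write sequences on SLC_CFG above could be factored into two small helpers. A suggested refactor sketch, not part of the driver, using only the __raw_readl()/__raw_writel() accessors already present in this example:

/* Suggested refactor sketch: set or clear bits in an SLC register with
 * a single read-modify-write. */
static inline void slc_set_bits(void __iomem *reg, u32 bits)
{
	__raw_writel(__raw_readl(reg) | bits, reg);
}

static inline void slc_clear_bits(void __iomem *reg, u32 bits)
{
	__raw_writel(__raw_readl(reg) & ~bits, reg);
}

With these, the DMA direction setup reduces to slc_set_bits(SLC_CFG(host->io_base), SLCCFG_DMA_DIR) for reads and slc_clear_bits(SLC_CFG(host->io_base), SLCCFG_DMA_DIR) for writes.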
Example #3
/***********************************************************************
 *
 * Function: i2s_dma_init_dev
 *
 * Purpose: Initialize DMA for I2S
 *
 * Processing:
 *     Selects the DMA source/destination word width from the I2S word
 *     size, then programs the channel's source address, destination
 *     address, control word, and configuration for the requested
 *     transfer direction and flow controller.
 *
 * Parameters:
 *     devid          : Pointer to the I2S device configuration
 *                      (I2S_CFG_T)
 *     p_i2s_dma_prms : DMA parameters (channel, direction, memory
 *                      address, and transfer size)
 *
 * Outputs: None
 *
 * Returns: TRUE on success, FALSE if the I2S channel is invalid
 *
 **********************************************************************/
INT_32 i2s_dma_init_dev(INT_32 devid, I2S_DMA_PRMS_T *p_i2s_dma_prms)
{
  /* DMA width control flags, selected below from the I2S word size */
  INT_32 DMAC_CHAN_DEST_WIDTH;
  INT_32 DMAC_CHAN_SRC_WIDTH;
  INT_32 i2s_ww, i2sch;
  INT_32 dmach, dir, mem, sz;

  I2S_CFG_T *pi2s = (I2S_CFG_T *) devid;

  i2sch = pi2s->i2snum;
  i2s_ww = pi2s->i2s_w_sz;

  dmach = p_i2s_dma_prms->dmach;
  dir   = p_i2s_dma_prms->dir;
  mem   = p_i2s_dma_prms->mem;
  sz    = p_i2s_dma_prms->sz;

  /* Clear TC status for the selected DMA channel. The clear register is
     write-only and takes a channel bit mask, so (assuming the usual NXP
     _SBF(shift, value) definition) write _SBF(dmach, 1) rather than
     OR-ing in the raw channel number */
  DMAC->int_tc_clear = _SBF(dmach, 1);

  /* Set the DMA source and destination word width based on the I2S
     word width setting */
  if (i2s_ww == I2S_WW8)
  {
    DMAC_CHAN_DEST_WIDTH = DMAC_CHAN_DEST_WIDTH_8;
    DMAC_CHAN_SRC_WIDTH = DMAC_CHAN_SRC_WIDTH_8;
  }
  else if (i2s_ww == I2S_WW16)
  {
    DMAC_CHAN_DEST_WIDTH = DMAC_CHAN_DEST_WIDTH_16;
    DMAC_CHAN_SRC_WIDTH = DMAC_CHAN_SRC_WIDTH_16;
  }
  else
  {
    DMAC_CHAN_DEST_WIDTH = DMAC_CHAN_DEST_WIDTH_32;
    DMAC_CHAN_SRC_WIDTH = DMAC_CHAN_SRC_WIDTH_32;
  }

  /* Setup DMA for I2S Channel 0, DEST uses AHB1, SRC uses AHB0 */
  if (i2sch == I2S_CH0)
  {
    /* dma is flow controller */
    if (dir == DMAC_CHAN_FLOW_D_M2P)
    {
      DMAC->dma_chan[dmach].src_addr = mem;
      DMAC->dma_chan[dmach].dest_addr = (UNS_32) &I2S0->i2s_tx_fifo;
      DMAC->dma_chan[dmach].control = DMAC_CHAN_TRANSFER_SIZE(sz) 
                                      | DMAC_CHAN_SRC_BURST_4
                                      | DMAC_CHAN_DEST_BURST_4 
                                      | DMAC_CHAN_DEST_AHB1
                                      | DMAC_CHAN_SRC_AUTOINC 
                                      | DMAC_CHAN_INT_TC_EN
                                      | DMAC_CHAN_SRC_WIDTH 
                                      | DMAC_CHAN_DEST_WIDTH;

      DMAC->dma_chan[dmach].config_ch |= DMAC_CHAN_ENABLE 
                              | DMAC_DEST_PERIP(DMA_PERID_I2S0_DMA0)
                              | DMAC_CHAN_FLOW_D_M2P | DMAC_CHAN_IE 
                              | DMAC_CHAN_ITC;

    }
    /* peripheral is flow controller */
    else if (dir == DMAC_CHAN_FLOW_P_M2P)
    {
      DMAC->dma_chan[dmach].src_addr = mem;
      DMAC->dma_chan[dmach].dest_addr = (UNS_32) &I2S0->i2s_tx_fifo;
      DMAC->dma_chan[dmach].control =  DMAC_CHAN_SRC_BURST_4
                                       | DMAC_CHAN_DEST_BURST_4 
                                       | DMAC_CHAN_DEST_AHB1
                                       | DMAC_CHAN_SRC_AUTOINC 
                                       | DMAC_CHAN_INT_TC_EN
                                       | DMAC_CHAN_SRC_WIDTH 
                                       | DMAC_CHAN_DEST_WIDTH;

      DMAC->dma_chan[dmach].config_ch |= DMAC_CHAN_ENABLE 
                              | DMAC_DEST_PERIP(DMA_PERID_I2S0_DMA0)
                              | DMAC_CHAN_FLOW_P_M2P | DMAC_CHAN_IE 
                              | DMAC_CHAN_ITC;

    }
    /* dma is flow controller */
    else if (dir == DMAC_CHAN_FLOW_D_P2M)
    {
      DMAC->dma_chan[dmach].src_addr = (UNS_32) &I2S0->i2s_rx_fifo;
      DMAC->dma_chan[dmach].dest_addr = mem;
      DMAC->dma_chan[dmach].control = DMAC_CHAN_TRANSFER_SIZE(sz) 
                                      | DMAC_CHAN_SRC_BURST_4
                                      | DMAC_CHAN_DEST_BURST_4 
                                      | DMAC_CHAN_SRC_AHB1
                                      | DMAC_CHAN_DEST_AUTOINC 
                                      | DMAC_CHAN_INT_TC_EN
                                      | DMAC_CHAN_SRC_WIDTH 
                                      | DMAC_CHAN_DEST_WIDTH;

      DMAC->dma_chan[dmach].config_ch |= DMAC_CHAN_ENABLE 
                              | DMAC_SRC_PERIP(DMA_PERID_I2S0_DMA1)
                              | DMAC_CHAN_FLOW_D_P2M | DMAC_CHAN_IE 
                              | DMAC_CHAN_ITC;
    }
    /* peripheral is flow controller */
    else if (dir == DMAC_CHAN_FLOW_P_P2M)
    {
      DMAC->dma_chan[dmach].src_addr = (UNS_32) &I2S0->i2s_rx_fifo;
      DMAC->dma_chan[dmach].dest_addr = mem;
      DMAC->dma_chan[dmach].control = DMAC_CHAN_SRC_BURST_4
                                      | DMAC_CHAN_DEST_BURST_4 
                                      | DMAC_CHAN_SRC_AHB1
                                      | DMAC_CHAN_DEST_AUTOINC 
                                      | DMAC_CHAN_INT_TC_EN
                                      | DMAC_CHAN_SRC_WIDTH 
                                      | DMAC_CHAN_DEST_WIDTH;

      DMAC->dma_chan[dmach].config_ch |= DMAC_CHAN_ENABLE 
                              | DMAC_SRC_PERIP(DMA_PERID_I2S0_DMA1)
                              | DMAC_CHAN_FLOW_P_P2M | DMAC_CHAN_IE 
                              | DMAC_CHAN_ITC;
    }
  }
  /* Setup DMA for I2S Channel 1 */
  else if (i2sch == I2S_CH1)
  {
    /* dma is flow controller */
    if (dir == DMAC_CHAN_FLOW_D_M2P)
    {
      DMAC->dma_chan[dmach].src_addr = mem;
      DMAC->dma_chan[dmach].dest_addr = (UNS_32) &I2S1->i2s_tx_fifo;
      DMAC->dma_chan[dmach].control = DMAC_CHAN_TRANSFER_SIZE(sz) 
                                      | DMAC_CHAN_SRC_BURST_4
                                      | DMAC_CHAN_DEST_BURST_4 
                                      | DMAC_CHAN_DEST_AHB1
                                      | DMAC_CHAN_SRC_AUTOINC 
                                      | DMAC_CHAN_INT_TC_EN
                                      | DMAC_CHAN_SRC_WIDTH 
                                      | DMAC_CHAN_DEST_WIDTH;

      DMAC->dma_chan[dmach].config_ch |= DMAC_CHAN_ENABLE 
                              | DMAC_DEST_PERIP(DMA_PERID_I2S1_DMA0)
                              | DMAC_CHAN_FLOW_D_M2P | DMAC_CHAN_IE 
                              | DMAC_CHAN_ITC;
    }
    /* peripheral is flow controller */
    else if (dir == DMAC_CHAN_FLOW_P_M2P)
    {
      DMAC->dma_chan[dmach].src_addr = mem;
      DMAC->dma_chan[dmach].dest_addr = (UNS_32) &I2S1->i2s_tx_fifo;
      DMAC->dma_chan[dmach].control = DMAC_CHAN_SRC_BURST_4
                                      | DMAC_CHAN_DEST_BURST_4 
                                      | DMAC_CHAN_DEST_AHB1
                                      | DMAC_CHAN_SRC_AUTOINC 
                                      | DMAC_CHAN_INT_TC_EN
                                      | DMAC_CHAN_SRC_WIDTH 
                                      | DMAC_CHAN_DEST_WIDTH;

      DMAC->dma_chan[dmach].config_ch |= DMAC_CHAN_ENABLE 
                              | DMAC_DEST_PERIP(DMA_PERID_I2S1_DMA0)
                              | DMAC_CHAN_FLOW_P_M2P | DMAC_CHAN_IE 
                              | DMAC_CHAN_ITC;
    }
    /* dma is flow controller */
    else if (dir == DMAC_CHAN_FLOW_D_P2M)
    {
      DMAC->dma_chan[dmach].src_addr = (UNS_32) &I2S1->i2s_rx_fifo;
      DMAC->dma_chan[dmach].dest_addr = mem;
      DMAC->dma_chan[dmach].control = DMAC_CHAN_TRANSFER_SIZE(sz) 
                                      | DMAC_CHAN_SRC_BURST_4
                                      | DMAC_CHAN_DEST_BURST_4 
                                      | DMAC_CHAN_SRC_AHB1
                                      | DMAC_CHAN_DEST_AUTOINC 
                                      | DMAC_CHAN_INT_TC_EN
                                      | DMAC_CHAN_SRC_WIDTH 
                                      | DMAC_CHAN_DEST_WIDTH;

      DMAC->dma_chan[dmach].config_ch |= DMAC_CHAN_ENABLE 
                              | DMAC_SRC_PERIP(DMA_PERID_I2S1_DMA1)
                              | DMAC_CHAN_FLOW_D_P2M 
                              | DMAC_CHAN_IE
                              | DMAC_CHAN_ITC;
    }
    /* peripheral is flow controller */
    else if (dir == DMAC_CHAN_FLOW_P_P2M)
    {
      DMAC->dma_chan[dmach].src_addr = (UNS_32) &I2S1->i2s_rx_fifo;
      DMAC->dma_chan[dmach].dest_addr = mem;
      DMAC->dma_chan[dmach].control = DMAC_CHAN_SRC_BURST_4
                                      | DMAC_CHAN_DEST_BURST_4 
                                      | DMAC_CHAN_SRC_AHB1
                                      | DMAC_CHAN_DEST_AUTOINC 
                                      | DMAC_CHAN_INT_TC_EN
                                      | DMAC_CHAN_SRC_WIDTH 
                                      | DMAC_CHAN_DEST_WIDTH;

      DMAC->dma_chan[dmach].config_ch |= DMAC_CHAN_ENABLE 
                              | DMAC_SRC_PERIP(DMA_PERID_I2S1_DMA1)
                              | DMAC_CHAN_FLOW_P_P2M 
                              | DMAC_CHAN_IE 
                              | DMAC_CHAN_ITC;
    }
  }
  else
  {
    return (FALSE);
  }

  return (TRUE);
}
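A hedged usage sketch follows: filling in I2S_DMA_PRMS_T for a playback (memory-to-peripheral) transfer with the DMA as flow controller. The field names are taken from the accesses inside i2s_dma_init_dev(); the channel number and buffer are hypothetical placeholders.

/* Sketch only: start an I2S0 playback transfer. The DMA channel number
 * and the buffer are placeholders, not values from the original code. */
static INT_32 i2s0_start_playback(I2S_CFG_T *pi2s, UNS_32 *buf,
                                  INT_32 nwords)
{
  I2S_DMA_PRMS_T prms;

  prms.dmach = 0;                     /* placeholder DMA channel */
  prms.dir   = DMAC_CHAN_FLOW_D_M2P;  /* DMA is the flow controller */
  prms.mem   = (INT_32) buf;          /* source buffer in memory */
  prms.sz    = nwords;                /* transfer size in words */

  return i2s_dma_init_dev((INT_32) pi2s, &prms);
}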
Example #4
static int mmc_dma_setup(struct mmci_platform_data *plat)
{
	u32 llptrrx, llptrtx;
	int ret = 0;

	/*
	 * There is a quirk with the LPC32XX and SD burst DMA. DMA sg
	 * transfers where DMA is the flow controller will not transfer
	 * the last few bytes to or from the SD card controller and
	 * memory. For RX, the last few bytes in the SD transfer can be
	 * forced out with a software DMA burst request. For TX, this
	 * can't be done, so sg transfers cannot be supported for TX.
	 * Instead, a temporary bounce buffer is used if more than one sg
	 * segment is passed in the data request. The bounce buffer gets a
	 * contiguous copy of the TX data and is used in its place. (A
	 * sketch of the resulting TX policy follows this function.)
	 */

	if (plat->dma_tx_size) {
		/* Use pre-allocated memory for the DMA Tx buffer */
		dmac_drvdat.dma_handle_tx = (dma_addr_t)plat->dma_tx_v_base;
		dmac_drvdat.dma_v_base = plat->dma_tx_v_base;
		dmac_drvdat.preallocated_tx_buf = 1;
	} else {
		/* Allocate a chunk of memory for the DMA TX buffers */
		dmac_drvdat.dma_v_base = dma_alloc_coherent(dmac_drvdat.dev,
			DMA_BUFF_SIZE, &dmac_drvdat.dma_handle_tx, GFP_KERNEL);
		dmac_drvdat.preallocated_tx_buf = 0;
	}

	if (dmac_drvdat.dma_v_base == NULL) {
		dev_err(dmac_drvdat.dev, "error getting DMA region\n");
		ret = -ENOMEM;
		goto dma_no_tx_buff;
	}
	dev_info(dmac_drvdat.dev, "DMA buffer: phy:%p, virt:%p\n",
		(void *) dmac_drvdat.dma_handle_tx,
		dmac_drvdat.dma_v_base);

	/* Setup TX DMA channel */
	dmac_drvdat.dmacfgtx.ch = DMA_CH_SDCARD_TX;
	dmac_drvdat.dmacfgtx.tc_inten = 0;
	dmac_drvdat.dmacfgtx.err_inten = 0;
	dmac_drvdat.dmacfgtx.src_size = 4;
	dmac_drvdat.dmacfgtx.src_inc = 1;
	dmac_drvdat.dmacfgtx.src_bsize = DMAC_CHAN_SRC_BURST_8;
	dmac_drvdat.dmacfgtx.src_prph = DMAC_SRC_PERIP(DMA_PERID_SDCARD);
	dmac_drvdat.dmacfgtx.dst_size = 4;
	dmac_drvdat.dmacfgtx.dst_inc = 0;
	dmac_drvdat.dmacfgtx.dst_bsize = DMAC_CHAN_DEST_BURST_8;
	dmac_drvdat.dmacfgtx.dst_prph = DMAC_DEST_PERIP(DMA_PERID_SDCARD);
	dmac_drvdat.dmacfgtx.flowctrl = DMAC_CHAN_FLOW_P_M2P;
	if (lpc178x_dma_ch_get(&dmac_drvdat.dmacfgtx,
			       "dma_sd_tx", NULL, NULL) < 0) {
		dev_err(dmac_drvdat.dev,
			"Error setting up SD card TX DMA channel\n");
		ret = -ENODEV;
		goto dma_no_txch;
	}

	/* Allocate a linked list for DMA support */
	llptrtx = lpc178x_dma_alloc_llist(
		dmac_drvdat.dmacfgtx.ch, NR_SG * 2);
	if (llptrtx == 0) {
		dev_err(dmac_drvdat.dev,
			"Error allocating list buffer (MMC TX)\n");
		ret = -ENOMEM;
		goto dma_no_txlist;
	}

	/* Setup RX DMA channel */
	dmac_drvdat.dmacfgrx.ch = DMA_CH_SDCARD_RX;
	dmac_drvdat.dmacfgrx.tc_inten = 0;
	dmac_drvdat.dmacfgrx.err_inten = 0;
	dmac_drvdat.dmacfgrx.src_size = 4;
	dmac_drvdat.dmacfgrx.src_inc = 0;
	dmac_drvdat.dmacfgrx.src_bsize = DMAC_CHAN_SRC_BURST_8;
	dmac_drvdat.dmacfgrx.src_prph = DMAC_SRC_PERIP(DMA_PERID_SDCARD);
	dmac_drvdat.dmacfgrx.dst_size = 4;
	dmac_drvdat.dmacfgrx.dst_inc = 1;
	dmac_drvdat.dmacfgrx.dst_bsize = DMAC_CHAN_DEST_BURST_8;
	dmac_drvdat.dmacfgrx.dst_prph = DMAC_DEST_PERIP(DMA_PERID_SDCARD);
	dmac_drvdat.dmacfgrx.flowctrl = DMAC_CHAN_FLOW_D_P2M;
	if (lpc178x_dma_ch_get(&dmac_drvdat.dmacfgrx,
			       "dma_sd_rx", NULL, NULL) < 0) {
		dev_err(dmac_drvdat.dev,
			"Error setting up SD card RX DMA channel\n");
		ret = -ENODEV;
		goto dma_no_rxch;
	}

	/* Allocate a linked list for DMA support */
	llptrrx = lpc178x_dma_alloc_llist(
		dmac_drvdat.dmacfgrx.ch, NR_SG * 2);
	if (llptrrx == 0) {
		dev_err(dmac_drvdat.dev,
			"Error allocating list buffer (MMC RX)\n");
		ret = -ENOMEM;
		goto dma_no_rxlist;
	}

	return 0;

dma_no_rxlist:
	lpc178x_dma_ch_put(dmac_drvdat.dmacfgrx.ch);
	dmac_drvdat.dmacfgrx.ch = -1;
dma_no_rxch:
	lpc178x_dma_dealloc_llist(dmac_drvdat.dmacfgtx.ch);
dma_no_txlist:
	lpc178x_dma_ch_put(dmac_drvdat.dmacfgtx.ch);
	dmac_drvdat.dmacfgtx.ch = -1;
dma_no_txch:
	if (!dmac_drvdat.preallocated_tx_buf) {
		dma_free_coherent(dmac_drvdat.dev, DMA_BUFF_SIZE,
			dmac_drvdat.dma_v_base,
			dmac_drvdat.dma_handle_tx);
	}
dma_no_tx_buff:
	return ret;
}
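The TX policy described in the quirk comment might reduce to a single test at request time. A sketch, assuming the usual struct mmc_data layout (the sg_len field); this helper is illustrative and not part of the example:

/* Sketch: per the quirk comment in mmc_dma_setup(), a single contiguous
 * sg segment can be DMA'd directly for TX; multiple segments must be
 * copied into the contiguous bounce buffer first. */
static int mmc_tx_needs_bounce(struct mmc_data *data)
{
	return data->sg_len > 1;	/* assumed field name */
}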
Example #5
/***********************************************************************
 *
 * Function: sd0_cmd_interrupt
 *
 * Purpose: Default SD card command interrupt handler
 *
 * Processing:
 *     This function is called when an SD0 card interrupt is generated.
 *     It saves the command status and the response if it is valid.
 *     The command state machine is stopped, and any pending command
 *     interrupts are cleared and masked. If a user-defined command
 *     callback exists, it is called before this function exits.
 *
 * Parameters: None
 *
 * Outputs: None
 *
 * Returns: Nothing
 *
 * Notes: The user callback function is called in interrupt context.
 *
 **********************************************************************/
static void sd0_cmd_interrupt(void)
{
  int idx;
  UNS_32 cmd;

  /* Save status */
  sdcarddat.dctrl.resp.cmd_status = sdcarddat.regptr->sd_status;

  /* Save response */
  sdcarddat.dctrl.resp.cmd_resp [0] = sdcarddat.regptr->sd_respcmd;
  for (idx = 0; idx < 4; idx++)
  {
    sdcarddat.dctrl.resp.cmd_resp [idx + 1] =
      sdcarddat.regptr->sd_resp [idx];
  }

  /* Stop command state machine */
  sdcarddat.regptr->sd_cmd &= ~SD_CPST_EN;

  /* Clear pending command statuses */
  sdcarddat.regptr->sd_clear = (SD_CMD_CRC_FAIL | SD_CMD_TIMEOUT |
    SD_CMD_RESP_RECEIVED | SD_CMD_SENT);
  sdcarddat.regptr->sd_mask0 = 0;

  if (sdcarddat.dctrl.xferdat.dataop == SD_DATAOP_WRITE)
  {
    /* Start data state machine */
    sdcarddat.regptr->sd_dctrl |= SD_DATATRANSFER_EN;

    /* Clear FIFO conditions */
    sdcarddat.regptr->sd_clear = (SD_FIFO_TXDATA_HEMPTY |
      SD_DATABLK_END | SD_FIFO_TXDATA_UFLOW | SD_DATA_TIMEOUT |
                                  SD_DATA_CRC_FAIL);

    if (sdcarddat.dmact.dma_enabled == TRUE)
    {
      /* Install the DMA transmit interrupt handler */
      int_install_irq_handler(IRQ_SD1, (PFV) sd1_dmatx_interrupt);

      /* Interrupt when the data transfer ends, the transmit FIFO
         underflows, a data timeout occurs, or a data CRC error is
         detected */
      sdcarddat.regptr->sd_mask1 = (SD_DATA_END |
        SD_FIFO_TXDATA_UFLOW | SD_DATA_TIMEOUT | SD_DATA_CRC_FAIL);
      sdcarddat.regptr->sd_dctrl |= SD_DMA_EN;

      /* Setup DMA transfer */
      if (sdcarddat.dctrl.xferdat.buff != NULL)
      {
        /* Setup source, destination, and transfer control */
        sdcarddat.dmact.pdmaregs->dma_chan [sdcarddat.dmact.dmach].
          src_addr = (UNS_32) sdcarddat.dctrl.xferdat.buff;
        sdcarddat.dmact.pdmaregs->dma_chan [sdcarddat.dmact.dmach].
          lli = 0;
        sdcarddat.dmact.pdmaregs->dma_chan [sdcarddat.dmact.dmach].
          dest_addr = (UNS_32) &SDCARD->sd_fifo;
        sdcarddat.dmact.pdmaregs->dma_chan [sdcarddat.dmact.dmach].
          control = (DMAC_CHAN_SRC_AUTOINC |
            DMAC_CHAN_SRC_AHB1 | DMAC_CHAN_DEST_WIDTH_32 |
            DMAC_CHAN_SRC_WIDTH_32 | DMAC_CHAN_DEST_BURST_8 |
            DMAC_CHAN_SRC_BURST_8);
      }

      /* Set DMA initial control */
      sdcarddat.dmact.pdmaregs->sync |= DMA_PER_SDCARD;

      /* Setup DMA config and start DMA controller */
      sdcarddat.dmact.pdmaregs->dma_chan [sdcarddat.dmact.dmach].
        config_ch = (DMAC_CHAN_IE | DMAC_CHAN_FLOW_P_M2P |
          DMAC_DEST_PERIP(DMA_PERID_SDCARD) | DMAC_CHAN_ENABLE);
    }
    else
    {
      /* No DMA; use FIFO (interrupt-driven) mode */
      /* Interrupt when the transmit FIFO is half empty, a data block
         has been transmitted, the transmit FIFO underflows, a data
         timeout occurs, or a data CRC error is detected */
      sdcarddat.regptr->sd_mask1 = (SD_FIFO_TXDATA_HEMPTY |
        SD_DATABLK_END | SD_FIFO_TXDATA_UFLOW | SD_DATA_TIMEOUT |
        SD_DATA_CRC_FAIL);

      /* Make sure DMA is disabled for FIFO mode */
      sdcarddat.regptr->sd_dctrl &= ~SD_DMA_EN;

      /* Fill the transmit FIFO, but never push more than remains */
      while ((sdcarddat.dctrl.tosend > 0) &&
             ((sdcarddat.regptr->sd_status &
               SD_FIFO_TXDATA_FULL) == 0))
      {
        sdcarddat.regptr->sd_fifo [0] =
          *sdcarddat.dctrl.xferdat.buff;
        sdcarddat.dctrl.xferdat.buff++;
        sdcarddat.dctrl.tosend -= 1;
      }
    }
  }

  /* If a pending command is being used to stop this transfer,
     then set it up now */
  if (sdcarddat.dctrl.xferdat.usependcmd == TRUE)
  {
    sdcarddat.dctrl.xferdat.usependcmd = FALSE;
    cmd = prep_cmd(&sdcarddat.dctrl.xferdat.pendcmd);
    sdcarddat.regptr->sd_cmd = (cmd | SD_CMDPEND_WAIT);
  else
  {
    /* Disable SDMMC interrupt for now */
    int_disable(IRQ_SD0);

    /* Call command callback function if it exists */
    if (sdcarddat.sd0_cb != NULL)
    {
      sdcarddat.sd0_cb();
    }
  }
}
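How this handler might be wired up: a sketch using int_install_irq_handler() as seen in the example, with int_enable() assumed as the counterpart of the int_disable() call above.

/* Sketch only: install the default command handler and an optional user
 * callback. int_enable() is an assumed counterpart of int_disable(). */
static void sd0_setup_cmd_irq(void (*user_cb)(void))
{
  sdcarddat.sd0_cb = user_cb;                      /* may be NULL */
  int_install_irq_handler(IRQ_SD0, (PFV) sd0_cmd_interrupt);
  int_enable(IRQ_SD0);                             /* assumed API */
}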
Example #6
static int lpc3xxx_pcm_prepare(struct snd_pcm_substream *substream)
{
	struct lpc3xxx_dma_data *prtd = substream->runtime->private_data;

	/* Setup DMA channel */
	if (prtd->dmach == -1) {
		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
			prtd->dmach = DMA_CH_I2S_TX;
			prtd->dmacfg.ch = DMA_CH_I2S_TX;
			prtd->dmacfg.tc_inten = 1;
			prtd->dmacfg.err_inten = 1;
			prtd->dmacfg.src_size = 4;
			prtd->dmacfg.src_inc = 1;
#ifdef CONFIG_ARM_LPC32XX
			prtd->dmacfg.src_ahb1 = 1;
#endif
			prtd->dmacfg.src_bsize = DMAC_CHAN_SRC_BURST_4;
			prtd->dmacfg.src_prph = 0;
			prtd->dmacfg.dst_size = 4;
			prtd->dmacfg.dst_inc = 0;
			prtd->dmacfg.dst_bsize = DMAC_CHAN_DEST_BURST_4;
#ifdef CONFIG_ARM_LPC32XX
			prtd->dmacfg.dst_ahb1 = 0;
#endif

#if defined(CONFIG_SND_LPC32XX_USEI2S1)
			prtd->dmacfg.dst_prph = DMAC_DEST_PERIP(DMA_PERID_I2S1_DMA1);
#else
			prtd->dmacfg.dst_prph = DMAC_DEST_PERIP(DMA_PERID_I2S0_DMA1);
#endif
			prtd->dmacfg.flowctrl = DMAC_CHAN_FLOW_D_M2P;
			if (lpc32xx_dma_ch_get(&prtd->dmacfg, "dma_i2s_tx",
				&lpc3xxx_pcm_dma_irq, substream) < 0) {
				pr_debug(KERN_ERR "Error setting up I2S TX DMA channel\n");
				return -ENODEV;
			}

			/* Allocate a linked list for audio buffers */
			prtd->llptr = lpc32xx_dma_alloc_llist(prtd->dmach, NUMLINKS);
			if (prtd->llptr == 0) {
				lpc32xx_dma_ch_put(prtd->dmach);
				prtd->dmach = -1;
				pr_debug(KERN_ERR "Error allocating list buffer (I2S TX)\n");
				return -ENOMEM;
			}
		} else {
			prtd->dmach = DMA_CH_I2S_RX;
			prtd->dmacfg.ch = DMA_CH_I2S_RX;
			prtd->dmacfg.tc_inten = 1;
			prtd->dmacfg.err_inten = 1;
			prtd->dmacfg.src_size = 4;
			prtd->dmacfg.src_inc = 0;
#ifdef CONFIG_ARM_LPC32XX
			prtd->dmacfg.src_ahb1 = 1;
#endif
			prtd->dmacfg.src_bsize = DMAC_CHAN_SRC_BURST_4;
#if defined(CONFIG_SND_LPC32XX_USEI2S1)
			prtd->dmacfg.src_prph = DMAC_SRC_PERIP(DMA_PERID_I2S1_DMA0);
#else
			prtd->dmacfg.src_prph = DMAC_SRC_PERIP(DMA_PERID_I2S0_DMA0);
#endif
			prtd->dmacfg.dst_size = 4;
			prtd->dmacfg.dst_inc = 1;
#ifdef CONFIG_ARM_LPC32XX
			prtd->dmacfg.dst_ahb1 = 0;
#endif
			prtd->dmacfg.dst_bsize = DMAC_CHAN_DEST_BURST_4;
			prtd->dmacfg.dst_prph = 0;
			prtd->dmacfg.flowctrl = DMAC_CHAN_FLOW_D_P2M;
			if (lpc32xx_dma_ch_get(&prtd->dmacfg, "dma_i2s_rx",
				&lpc3xxx_pcm_dma_irq, substream) < 0) {
				pr_debug(KERN_ERR "Error setting up I2S RX DMA channel\n");
				return -ENODEV;
			}

			/* Allocate a linked list for audio buffers */
			prtd->llptr = lpc32xx_dma_alloc_llist(prtd->dmach, NUMLINKS);
			if (prtd->llptr == 0) {
				lpc32xx_dma_ch_put(prtd->dmach);
				prtd->dmach = -1;
				pr_debug(KERN_ERR "Error allocating list buffer (I2S RX)\n");
				return -ENOMEM;
			}
		}
	}

	return 0;
}
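The prepare() hook above allocates the channel and descriptor list lazily but never releases them. A sketch of the matching teardown, mirroring the error-path cleanup; lpc32xx_dma_dealloc_llist() is assumed by analogy with Example #4, and the hw_free hook name is illustrative.

/* Sketch only: release what lpc3xxx_pcm_prepare() allocated.
 * lpc32xx_dma_dealloc_llist() is an assumed API, named by analogy with
 * lpc178x_dma_dealloc_llist() in Example #4. */
static int lpc3xxx_pcm_hw_free(struct snd_pcm_substream *substream)
{
	struct lpc3xxx_dma_data *prtd = substream->runtime->private_data;

	if (prtd->dmach != -1) {
		lpc32xx_dma_dealloc_llist(prtd->dmach);
		lpc32xx_dma_ch_put(prtd->dmach);
		prtd->dmach = -1;
	}

	return 0;
}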