Example No. 1: a10dmac_write_ctl() writes a value to the channel control register, selecting the NDMA or DDMA register according to the channel type.
static void
a10dmac_write_ctl(struct a10dmac_channel *ch, uint32_t val)
{
	if (ch->ch_type == CH_NDMA) {
		DMACH_WRITE(ch, AWIN_NDMA_CTL_REG, val);
	} else {
		DMACH_WRITE(ch, AWIN_DDMA_CTL_REG, val);
	}
}
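
Example No. 3 below calls a10dmac_read_ctl(), which is not shown in these listings. A minimal counterpart sketch, assuming a DMACH_READ macro that mirrors DMACH_WRITE over the same per-channel register window, would be:

static uint32_t
a10dmac_read_ctl(struct a10dmac_channel *ch)
{
	/* Read back the control register that matches the channel type. */
	if (ch->ch_type == CH_NDMA) {
		return (DMACH_READ(ch, AWIN_NDMA_CTL_REG));
	} else {
		return (DMACH_READ(ch, AWIN_DDMA_CTL_REG));
	}
}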
Example No. 2: a10dmac_attach() allocates bus resources, enables the DMA controller clock, masks and clears all interrupts, resets every normal (NDMA) and dedicated (DDMA) channel, and installs the interrupt handler.
static int
a10dmac_attach(device_t dev)
{
	struct a10dmac_softc *sc;
	unsigned int index;
	int error;

	sc = device_get_softc(dev);

	if (bus_alloc_resources(dev, a10dmac_spec, sc->sc_res)) {
		device_printf(dev, "cannot allocate resources for device\n");
		return (ENXIO);
	}

	mtx_init(&sc->sc_mtx, "a10 dmac", NULL, MTX_SPIN);

	/* Activate DMA controller clock */
	a10_clk_dmac_activate();

	/* Disable all interrupts and clear pending status */
	DMA_WRITE(sc, AWIN_DMA_IRQ_EN_REG, 0);
	DMA_WRITE(sc, AWIN_DMA_IRQ_PEND_STA_REG, ~0);

	/* Initialize channels */
	for (index = 0; index < NDMA_CHANNELS; index++) {
		sc->sc_ndma_channels[index].ch_sc = sc;
		sc->sc_ndma_channels[index].ch_index = index;
		sc->sc_ndma_channels[index].ch_type = CH_NDMA;
		sc->sc_ndma_channels[index].ch_callback = NULL;
		sc->sc_ndma_channels[index].ch_callbackarg = NULL;
		sc->sc_ndma_channels[index].ch_regoff = AWIN_NDMA_REG(index);
		DMACH_WRITE(&sc->sc_ndma_channels[index], AWIN_NDMA_CTL_REG, 0);
	}
	for (index = 0; index < DDMA_CHANNELS; index++) {
		sc->sc_ddma_channels[index].ch_sc = sc;
		sc->sc_ddma_channels[index].ch_index = index;
		sc->sc_ddma_channels[index].ch_type = CH_DDMA;
		sc->sc_ddma_channels[index].ch_callback = NULL;
		sc->sc_ddma_channels[index].ch_callbackarg = NULL;
		sc->sc_ddma_channels[index].ch_regoff = AWIN_DDMA_REG(index);
		DMACH_WRITE(&sc->sc_ddma_channels[index], AWIN_DDMA_CTL_REG, 0);
	}

	error = bus_setup_intr(dev, sc->sc_res[1], INTR_MPSAFE | INTR_TYPE_MISC,
	    NULL, a10dmac_intr, sc, &sc->sc_ih);
	if (error != 0) {
		device_printf(dev, "could not setup interrupt handler\n");
		bus_release_resources(dev, a10dmac_spec, sc->sc_res);
		mtx_destroy(&sc->sc_mtx);
		return (ENXIO);
	}

	return (0);
}
Example No. 3: a10dmac_transfer() programs the source address, destination address, and byte count for a channel and sets the LOADING bit to start the transfer, returning EBUSY if the channel is already loaded.
static int
a10dmac_transfer(device_t dev, void *priv, bus_addr_t src, bus_addr_t dst,
    size_t nbytes)
{
	struct a10dmac_channel *ch = priv;
	uint32_t cfg;

	cfg = a10dmac_read_ctl(ch);
	if (ch->ch_type == CH_NDMA) {
		if (cfg & AWIN_NDMA_CTL_DMA_LOADING)
			return (EBUSY);

		DMACH_WRITE(ch, AWIN_NDMA_SRC_ADDR_REG, src);
		DMACH_WRITE(ch, AWIN_NDMA_DEST_ADDR_REG, dst);
		DMACH_WRITE(ch, AWIN_NDMA_BC_REG, nbytes);

		cfg |= AWIN_NDMA_CTL_DMA_LOADING;
		a10dmac_write_ctl(ch, cfg);
	} else {
		if (cfg & AWIN_DDMA_CTL_DMA_LOADING)
			return (EBUSY);

		DMACH_WRITE(ch, AWIN_DDMA_SRC_START_ADDR_REG, src);
		DMACH_WRITE(ch, AWIN_DDMA_DEST_START_ADDR_REG, dst);
		DMACH_WRITE(ch, AWIN_DDMA_BC_REG, nbytes);

		cfg |= AWIN_DDMA_CTL_DMA_LOADING;
		a10dmac_write_ctl(ch, cfg);
	}

	return (0);
}
Example No. 4: a10dmac_set_config() translates a struct sunxi_dma_config into the channel control word (data widths, burst lengths, DRQ types, address modes) and, for dedicated channels, also programs the parameter register with block sizes and wait cycles.
static int
a10dmac_set_config(device_t dev, void *priv, const struct sunxi_dma_config *cfg)
{
	struct a10dmac_channel *ch = priv;
	uint32_t val;
	unsigned int dst_dw, dst_bl, dst_bs, dst_wc, dst_am;
	unsigned int src_dw, src_bl, src_bs, src_wc, src_am;

	switch (cfg->dst_width) {
	case 8:
		dst_dw = AWIN_DMA_CTL_DATA_WIDTH_8;
		break;
	case 16:
		dst_dw = AWIN_DMA_CTL_DATA_WIDTH_16;
		break;
	case 32:
		dst_dw = AWIN_DMA_CTL_DATA_WIDTH_32;
		break;
	default:
		return (EINVAL);
	}
	switch (cfg->dst_burst_len) {
	case 1:
		dst_bl = AWIN_DMA_CTL_BURST_LEN_1;
		break;
	case 4:
		dst_bl = AWIN_DMA_CTL_BURST_LEN_4;
		break;
	case 8:
		dst_bl = AWIN_DMA_CTL_BURST_LEN_8;
		break;
	default:
		return (EINVAL);
	}
	switch (cfg->src_width) {
	case 8:
		src_dw = AWIN_DMA_CTL_DATA_WIDTH_8;
		break;
	case 16:
		src_dw = AWIN_DMA_CTL_DATA_WIDTH_16;
		break;
	case 32:
		src_dw = AWIN_DMA_CTL_DATA_WIDTH_32;
		break;
	default:
		return (EINVAL);
	}
	switch (cfg->src_burst_len) {
	case 1:
		src_bl = AWIN_DMA_CTL_BURST_LEN_1;
		break;
	case 4:
		src_bl = AWIN_DMA_CTL_BURST_LEN_4;
		break;
	case 8:
		src_bl = AWIN_DMA_CTL_BURST_LEN_8;
		break;
	default:
		return (EINVAL);
	}

	val = (dst_dw << AWIN_DMA_CTL_DST_DATA_WIDTH_SHIFT) |
	      (dst_bl << AWIN_DMA_CTL_DST_BURST_LEN_SHIFT) |
	      (cfg->dst_drqtype << AWIN_DMA_CTL_DST_DRQ_TYPE_SHIFT) |
	      (src_dw << AWIN_DMA_CTL_SRC_DATA_WIDTH_SHIFT) |
	      (src_bl << AWIN_DMA_CTL_SRC_BURST_LEN_SHIFT) |
	      (cfg->src_drqtype << AWIN_DMA_CTL_SRC_DRQ_TYPE_SHIFT);

	if (ch->ch_type == CH_NDMA) {
		if (cfg->dst_noincr)
			val |= AWIN_NDMA_CTL_DST_ADDR_NOINCR;
		if (cfg->src_noincr)
			val |= AWIN_NDMA_CTL_SRC_ADDR_NOINCR;

		DMACH_WRITE(ch, AWIN_NDMA_CTL_REG, val);
	} else {
		dst_am = cfg->dst_noincr ? AWIN_DDMA_CTL_DMA_ADDR_IO :
		    AWIN_DDMA_CTL_DMA_ADDR_LINEAR;
		src_am = cfg->src_noincr ? AWIN_DDMA_CTL_DMA_ADDR_IO :
		    AWIN_DDMA_CTL_DMA_ADDR_LINEAR;

		val |= (dst_am << AWIN_DDMA_CTL_DST_ADDR_MODE_SHIFT);
		val |= (src_am << AWIN_DDMA_CTL_SRC_ADDR_MODE_SHIFT);

		DMACH_WRITE(ch, AWIN_DDMA_CTL_REG, val);

		dst_bs = cfg->dst_blksize - 1;
		dst_wc = cfg->dst_wait_cyc - 1;
		src_bs = cfg->src_blksize - 1;
		src_wc = cfg->src_wait_cyc - 1;

		DMACH_WRITE(ch, AWIN_DDMA_PARA_REG,
		    (dst_bs << AWIN_DDMA_PARA_DST_DATA_BLK_SIZ_SHIFT) |
		    (dst_wc << AWIN_DDMA_PARA_DST_WAIT_CYC_SHIFT) |
		    (src_bs << AWIN_DDMA_PARA_SRC_DATA_BLK_SIZ_SHIFT) |
		    (src_wc << AWIN_DDMA_PARA_SRC_WAIT_CYC_SHIFT));
	}

	return (0);
}
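
Putting Examples No. 3 and No. 4 together, a caller first describes the transfer with a struct sunxi_dma_config and then starts it. The sketch below is hypothetical and not part of the driver: it assumes a normal (NDMA) channel pointer obtained from the driver's channel-allocation path, and the DRQ type values are placeholders for the SoC-specific constants.

static int
example_start_copy(device_t dev, void *dmachan, bus_addr_t src, bus_addr_t dst,
    size_t nbytes)
{
	struct sunxi_dma_config cfg = {
		.dst_width = 32,	/* 32-bit accesses on both sides */
		.src_width = 32,
		.dst_burst_len = 4,	/* 4-beat bursts */
		.src_burst_len = 4,
		.dst_drqtype = 0,	/* placeholder: device DRQ type */
		.src_drqtype = 0,	/* placeholder: memory DRQ type */
		.dst_noincr = true,	/* device FIFO: fixed address */
		.src_noincr = false,	/* memory: incrementing address */
	};
	int error;

	/* Program the control word for the channel (Example No. 4). */
	error = a10dmac_set_config(dev, dmachan, &cfg);
	if (error != 0)
		return (error);

	/* Start the copy; EBUSY means a previous request is still loaded. */
	return (a10dmac_transfer(dev, dmachan, src, dst, nbytes));
}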