/*
 * Release a DMA channel previously obtained from a10dmac_alloc().
 * Stops any in-flight transfer by clearing the channel's LOADING bit,
 * masks and acknowledges its end-of-transfer interrupt, and detaches
 * the completion callback.  All register updates are done under the
 * controller spin mutex.
 */
static void
a10dmac_free(device_t dev, void *priv)
{
	struct a10dmac_channel *chan = priv;
	struct a10dmac_softc *sc = chan->ch_sc;
	uint32_t endbit, ctl, enabled;

	mtx_lock_spin(&sc->sc_mtx);

	enabled = DMA_READ(sc, AWIN_DMA_IRQ_EN_REG);
	ctl = a10dmac_read_ctl(chan);
	if (chan->ch_type == CH_NDMA) {
		endbit = AWIN_DMA_IRQ_NDMA_END(chan->ch_index);
		ctl &= ~AWIN_NDMA_CTL_DMA_LOADING;
	} else {
		endbit = AWIN_DMA_IRQ_DDMA_END(chan->ch_index);
		ctl &= ~AWIN_DDMA_CTL_DMA_LOADING;
	}

	/* Stop the channel, then mask and ack its end interrupt. */
	a10dmac_write_ctl(chan, ctl);
	DMA_WRITE(sc, AWIN_DMA_IRQ_EN_REG, enabled & ~endbit);
	DMA_WRITE(sc, AWIN_DMA_IRQ_PEND_STA_REG, endbit);

	chan->ch_callback = NULL;
	chan->ch_callbackarg = NULL;

	mtx_unlock_spin(&sc->sc_mtx);
}
/*
 * Attach the DMA controller: allocate bus resources, enable the
 * controller clock, quiesce all interrupts, reset every normal and
 * dedicated channel descriptor, and hook up the interrupt handler.
 * Returns 0 on success or ENXIO on failure.
 */
static int
a10dmac_attach(device_t dev)
{
	struct a10dmac_softc *sc = device_get_softc(dev);
	unsigned int i;
	int error;

	if (bus_alloc_resources(dev, a10dmac_spec, sc->sc_res) != 0) {
		device_printf(dev, "cannot allocate resources for device\n");
		return (ENXIO);
	}

	mtx_init(&sc->sc_mtx, "a10 dmac", NULL, MTX_SPIN);

	/* Activate DMA controller clock */
	a10_clk_dmac_activate();

	/* Disable all interrupts and clear pending status */
	DMA_WRITE(sc, AWIN_DMA_IRQ_EN_REG, 0);
	DMA_WRITE(sc, AWIN_DMA_IRQ_PEND_STA_REG, ~0);

	/* Initialize normal DMA channels */
	for (i = 0; i < NDMA_CHANNELS; i++) {
		struct a10dmac_channel *ch = &sc->sc_ndma_channels[i];

		ch->ch_sc = sc;
		ch->ch_index = i;
		ch->ch_type = CH_NDMA;
		ch->ch_callback = NULL;
		ch->ch_callbackarg = NULL;
		ch->ch_regoff = AWIN_NDMA_REG(i);
		DMACH_WRITE(ch, AWIN_NDMA_CTL_REG, 0);
	}

	/* Initialize dedicated DMA channels */
	for (i = 0; i < DDMA_CHANNELS; i++) {
		struct a10dmac_channel *ch = &sc->sc_ddma_channels[i];

		ch->ch_sc = sc;
		ch->ch_index = i;
		ch->ch_type = CH_DDMA;
		ch->ch_callback = NULL;
		ch->ch_callbackarg = NULL;
		ch->ch_regoff = AWIN_DDMA_REG(i);
		DMACH_WRITE(ch, AWIN_DDMA_CTL_REG, 0);
	}

	error = bus_setup_intr(dev, sc->sc_res[1], INTR_MPSAFE | INTR_TYPE_MISC,
	    NULL, a10dmac_intr, sc, &sc->sc_ih);
	if (error != 0) {
		device_printf(dev, "could not setup interrupt handler\n");
		bus_release_resources(dev, a10dmac_spec, sc->sc_res);
		mtx_destroy(&sc->sc_mtx);
		return (ENXIO);
	}

	return (0);
}
/*
 * Controller interrupt handler.  Reads the pending-status register,
 * acknowledges everything that was pending, then dispatches the
 * registered callback for each channel whose end-of-transfer bit was
 * set.  Channels with no callback registered are silently skipped.
 */
static void
a10dmac_intr(void *priv)
{
	struct a10dmac_softc *sc = priv;
	uint32_t sta, bit, mask;
	uint8_t index;

	/* Snapshot and ack all pending interrupt status up front. */
	sta = DMA_READ(sc, AWIN_DMA_IRQ_PEND_STA_REG);
	DMA_WRITE(sc, AWIN_DMA_IRQ_PEND_STA_REG, sta);

	/* Walk the set end-transfer bits, lowest first (ffs is 1-based). */
	while ((bit = ffs(sta & AWIN_DMA_IRQ_END_MASK)) != 0) {
		mask = (1U << (bit - 1));
		sta &= ~mask;
		/*
		 * Map status bit to channel number. The status register is
		 * encoded with two bits of status per channel (lowest bit
		 * is half transfer pending, highest bit is end transfer
		 * pending). The 8 normal DMA channel status are in the lower
		 * 16 bits and the 8 dedicated DMA channel status are in
		 * the upper 16 bits. The output is a channel number from 0-7.
		 */
		index = ((bit - 1) / 2) & 7;
		if (mask & AWIN_DMA_IRQ_NDMA) {
			/* Normal DMA channel (lower 16 status bits). */
			if (sc->sc_ndma_channels[index].ch_callback == NULL)
				continue;
			sc->sc_ndma_channels[index].ch_callback(
			    sc->sc_ndma_channels[index].ch_callbackarg);
		} else {
			/* Dedicated DMA channel (upper 16 status bits). */
			if (sc->sc_ddma_channels[index].ch_callback == NULL)
				continue;
			sc->sc_ddma_channels[index].ch_callback(
			    sc->sc_ddma_channels[index].ch_callbackarg);
		}
	}
}
/*
 * Allocate a free DMA channel of the requested kind (dedicated or
 * normal), register its completion callback, and unmask its
 * end-of-transfer interrupt.  Returns an opaque channel handle for
 * use with a10dmac_free(), or NULL if every channel of that kind is
 * already in use.  A channel is considered busy iff its callback
 * pointer is non-NULL.
 */
static void *
a10dmac_alloc(device_t dev, bool dedicated, void (*cb)(void *), void *cbarg)
{
	struct a10dmac_softc *sc = device_get_softc(dev);
	struct a10dmac_channel *pool;
	struct a10dmac_channel *ch = NULL;
	uint32_t irqen;
	uint8_t nchan, i;

	if (dedicated) {
		pool = sc->sc_ddma_channels;
		nchan = DDMA_CHANNELS;
	} else {
		pool = sc->sc_ndma_channels;
		nchan = NDMA_CHANNELS;
	}

	mtx_lock_spin(&sc->sc_mtx);
	for (i = 0; i < nchan; i++) {
		if (pool[i].ch_callback != NULL)
			continue;

		/* Claim the channel and enable its end interrupt. */
		ch = &pool[i];
		ch->ch_callback = cb;
		ch->ch_callbackarg = cbarg;
		irqen = DMA_READ(sc, AWIN_DMA_IRQ_EN_REG);
		if (ch->ch_type == CH_NDMA)
			irqen |= AWIN_DMA_IRQ_NDMA_END(i);
		else
			irqen |= AWIN_DMA_IRQ_DDMA_END(i);
		DMA_WRITE(sc, AWIN_DMA_IRQ_EN_REG, irqen);
		break;
	}
	mtx_unlock_spin(&sc->sc_mtx);

	return (ch);
}