void
bcm_dmac_free(struct bcm_dmac_channel *ch)
{
	struct bcm_dmac_softc *sc = ch->ch_sc;
	uint32_t val;

	bcm_dmac_halt(ch);

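	/*
	 * Abort the channel's current control block and reset it so it
	 * is left in a clean state for the next user.
	 */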
	val = DMAC_READ(sc, DMAC_CS(ch->ch_index));
	val |= DMAC_CS_RESET;
	val |= DMAC_CS_ABORT;
	val &= ~DMAC_CS_ACTIVE;
	DMAC_WRITE(sc, DMAC_CS(ch->ch_index), val);

	mutex_enter(&sc->sc_lock);
	intr_disestablish(ch->ch_ih);
	ch->ch_ih = NULL;
	ch->ch_callback = NULL;
	ch->ch_callbackarg = NULL;
	mutex_exit(&sc->sc_lock);
}
Example #2
static int
bcmemmc_xfer_data_dma(struct sdhc_softc *sdhc_sc, struct sdmmc_command *cmd)
{
	struct bcmemmc_softc * const sc = device_private(sdhc_sc->sc_dev);
	kmutex_t *plock = sdhc_host_lock(sc->sc_hosts[0]);
	size_t seg;
	int error;

	KASSERT(mutex_owned(plock));

	for (seg = 0; seg < cmd->c_dmamap->dm_nsegs; seg++) {
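		/*
		 * Build one control block per scatter/gather segment,
		 * using DREQ peripheral 11 (e.MMC) to pace the transfer.
		 */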
		sc->sc_cblk[seg].cb_ti =
		    __SHIFTIN(11, DMAC_TI_PERMAP); /* e.MMC */
		sc->sc_cblk[seg].cb_txfr_len =
		    cmd->c_dmamap->dm_segs[seg].ds_len;
		/*
		 * All transfers are assumed to be multiples of 32-bits.
		 */
		KASSERTMSG((sc->sc_cblk[seg].cb_txfr_len & 0x3) == 0,
		    "seg %zu len %d", seg, sc->sc_cblk[seg].cb_txfr_len);
		if (ISSET(cmd->c_flags, SCF_CMD_READ)) {
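			/*
			 * Card-to-host: read from the EMMC data register,
			 * paced by the source DREQ, and walk the
			 * destination address through this segment.
			 */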
			sc->sc_cblk[seg].cb_ti |= DMAC_TI_DEST_INC;
			/*
			 * Use 128-bit mode if transfer is a multiple of
			 * 16-bytes.
			 */
			if ((sc->sc_cblk[seg].cb_txfr_len & 0xf) == 0)
				sc->sc_cblk[seg].cb_ti |= DMAC_TI_DEST_WIDTH;
			sc->sc_cblk[seg].cb_ti |= DMAC_TI_SRC_DREQ;
			sc->sc_cblk[seg].cb_source_ad =
			    BCM2835_PERIPHERALS_TO_BUS(sc->sc_physaddr +
			    SDHC_DATA);
			sc->sc_cblk[seg].cb_dest_ad =
			    cmd->c_dmamap->dm_segs[seg].ds_addr;
		} else {
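			/*
			 * Host-to-card: walk the source address through
			 * this segment, pace writes to the EMMC data
			 * register with the destination DREQ, and wait
			 * for each write response before continuing.
			 */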
			sc->sc_cblk[seg].cb_ti |= DMAC_TI_SRC_INC;
			/*
			 * Use 128-bit mode if transfer is a multiple of
			 * 16-bytes.
			 */
			if ((sc->sc_cblk[seg].cb_txfr_len & 0xf) == 0)
				sc->sc_cblk[seg].cb_ti |= DMAC_TI_SRC_WIDTH;
			sc->sc_cblk[seg].cb_ti |= DMAC_TI_DEST_DREQ;
			sc->sc_cblk[seg].cb_ti |= DMAC_TI_WAIT_RESP;
			sc->sc_cblk[seg].cb_source_ad =
			    cmd->c_dmamap->dm_segs[seg].ds_addr;
			sc->sc_cblk[seg].cb_dest_ad =
			    BCM2835_PERIPHERALS_TO_BUS(sc->sc_physaddr +
			    SDHC_DATA);
		}
		sc->sc_cblk[seg].cb_stride = 0;
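		/*
		 * The last control block terminates the chain and raises
		 * the completion interrupt; the others link to the next
		 * block in the array.
		 */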
		if (seg == cmd->c_dmamap->dm_nsegs - 1) {
			sc->sc_cblk[seg].cb_ti |= DMAC_TI_INTEN;
			sc->sc_cblk[seg].cb_nextconbk = 0;
		} else {
			sc->sc_cblk[seg].cb_nextconbk =
			    sc->sc_dmamap->dm_segs[0].ds_addr +
			    sizeof(struct bcm_dmac_conblk) * (seg+1);
		}
		sc->sc_cblk[seg].cb_padding[0] = 0;
		sc->sc_cblk[seg].cb_padding[1] = 0;
	}

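	/*
	 * Flush the control blocks to memory before the DMA engine
	 * fetches them.
	 */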
	bus_dmamap_sync(sc->sc.sc_dmat, sc->sc_dmamap, 0,
	    sc->sc_dmamap->dm_mapsize, BUS_DMASYNC_PREWRITE);

	error = 0;

	KASSERT(sc->sc_state == EMMC_DMA_STATE_IDLE);
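	/*
	 * Mark the transfer in progress, point the channel at the first
	 * control block, and start it.
	 */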
	sc->sc_state = EMMC_DMA_STATE_BUSY;
	bcm_dmac_set_conblk_addr(sc->sc_dmac,
	    sc->sc_dmamap->dm_segs[0].ds_addr);
	error = bcm_dmac_transfer(sc->sc_dmac);
	if (error)
		return error;

	while (sc->sc_state == EMMC_DMA_STATE_BUSY) {
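		/*
		 * Sleep until the DMA completion handler marks the
		 * transfer idle, or give up after ten seconds and halt
		 * the channel.
		 */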
		error = cv_timedwait(&sc->sc_cv, plock, hz * 10);
		if (error == EWOULDBLOCK) {
			device_printf(sc->sc.sc_dev, "transfer timeout!\n");
			bcm_dmac_halt(sc->sc_dmac);
			sc->sc_state = EMMC_DMA_STATE_IDLE;
			error = ETIMEDOUT;
			break;
		}
	}

	bus_dmamap_sync(sc->sc.sc_dmat, sc->sc_dmamap, 0,
	    sc->sc_dmamap->dm_mapsize, BUS_DMASYNC_POSTWRITE);

	return error;
}