/** * md_mic_dma_request_chan * @owner: DMA channel owner: MIC or Host * * Return - The DMA channel handle or NULL if failed * * Note: Allocating a Host owned channel is not allowed currently */ struct md_mic_dma_chan *md_mic_dma_request_chan(struct mic_dma_device *dma_dev, enum md_mic_dma_chan_owner owner) { struct md_mic_dma_chan *tmp = NULL; int i; for (i = 0; i < MAX_NUM_DMA_CHAN; i++) { if (CHAN_AVAILABLE == atomic_cmpxchg(&(dma_dev->chan_info[i].in_use), CHAN_AVAILABLE, CHAN_INUSE)) { tmp = &dma_dev->chan_info[i]; tmp->owner = owner; tmp->ch_num = i; /* * Setting endianness by default to MIC_LITTLE_ENDIAN * in case the AES channel is used for clear transfers * This is a don't care for clear transfers. */ tmp->endianness = MIC_LITTLE_ENDIAN; #ifdef _MIC_SCIF_ md_mic_dma_chan_init_attr(dma_dev, tmp); #endif break; } } return tmp; }
/*
 * dma_resume - restore DMA state after wake-up from a low power state.
 * @dma_handle: Handle for a DMA driver context.
 *
 * Performs the following tasks before the device transitions
 * from a low power state to active state:
 * 1) As a test, reset the value in DMA configuration register.
 * 2) Reset the next_write_index for each DMA descriptor ring to 0
 *    since the DMA channel will be reset shortly.
 * 3) Reinitialize the DMA MD layer for each channel.
 *
 * Return: none
 * Notes: Invoked only on MIC.
 */
void dma_resume(mic_dma_handle_t dma_handle)
{
	struct mic_dma_ctx_t *dma_ctx = (struct mic_dma_ctx_t *)dma_handle;
	struct mic_dma_device *dma_dev = &dma_ctx->dma_dev;
	struct dma_channel *ch;
	int i;

	/* TODO: Remove test write to SBOX_DCR */
	mic_sbox_write_mmio(dma_dev->mm_sbox, SBOX_DCR, 0);

	for (i = 0; i < MAX_NUM_DMA_CHAN; i++) {
		ch = &dma_ctx->dma_channels[i];
		ch->next_write_index = 0;
		md_mic_dma_chan_init_attr(dma_dev, ch->chan);
		md_mic_dma_chan_setup(dma_ctx, ch);
	}
}