/* Queue up DMA descriptors and buffers for UART RX */ static void dmaRXQueue(void) { int i; /* Linked list of descriptors that map to the 3 receive buffers */ for (i = 0; i < UARTRXDESC; i++) { /* Setup next descriptor */ if (i == (UARTRXDESC - 1)) { /* Wrap descriptors */ dmaRXDesc[i].next = DMA_ADDR(&dmaRXDesc[0]); } else { dmaRXDesc[i].next = DMA_ADDR(&dmaRXDesc[i + 1]); } /* Create a descriptor for the data */ dmaRXDesc[i].source = DMA_ADDR(&LPC_USART0->RXDATA) + 0; /* Byte aligned */ dmaRXDesc[i].dest = DMA_ADDR(&dmaRXBuffs[i][0] + UARTRXBUFFSIZE - 1); /* Setup transfer configuration */ dmaRXDesc[i].xfercfg = DMA_XFERCFG_CFGVALID | DMA_XFERCFG_SETINTA | DMA_XFERCFG_WIDTH_8 | DMA_XFERCFG_SRCINC_0 | DMA_XFERCFG_DSTINC_1 | DMA_XFERCFG_RELOAD | DMA_XFERCFG_XFERCOUNT(UARTRXBUFFSIZE); } /* Setup transfer descriptor and validate it */ Chip_DMA_SetupTranChannel(LPC_DMA, DMAREQ_USART0_RX, &dmaRXDesc[0]); /* Setup data transfer */ Chip_DMA_SetupChannelTransfer(LPC_DMA, DMAREQ_USART0_RX, dmaRXDesc[0].xfercfg); Chip_DMA_SetValidChannel(LPC_DMA, DMAREQ_USART0_RX); Chip_DMA_SWTriggerChannel(LPC_DMA, DMAREQ_USART0_RX); }
/* Setup and start a DMA transfer */ static setupI2CDMAXfer(void *buff, uint8_t bytes, bool tx) { /* Enable DMA for I2C controller */ I2C_SENSOR_BUS->MSTCTL = I2C_MSTCTL_MSTDMA; /* Master to slave */ if (tx) { dmaI2CMDesc.source = DMA_ADDR(buff) + bytes - 1; dmaI2CMDesc.dest = DMA_ADDR(&I2C_SENSOR_BUS->MSTDAT); dmaI2CMDesc.next = DMA_ADDR(0); dmaI2CMDesc.xfercfg = DMA_XFERCFG_CFGVALID | DMA_XFERCFG_SETINTA | DMA_XFERCFG_SWTRIG | DMA_XFERCFG_WIDTH_8 | DMA_XFERCFG_SRCINC_1 | DMA_XFERCFG_DSTINC_0 | DMA_XFERCFG_XFERCOUNT(bytes); } else { dmaI2CMDesc.source = DMA_ADDR(&I2C_SENSOR_BUS->MSTDAT); dmaI2CMDesc.dest = DMA_ADDR(buff) + bytes - 1; dmaI2CMDesc.next = DMA_ADDR(0); dmaI2CMDesc.xfercfg = DMA_XFERCFG_CFGVALID | DMA_XFERCFG_SETINTA | DMA_XFERCFG_SWTRIG | DMA_XFERCFG_WIDTH_8 | DMA_XFERCFG_SRCINC_0 | DMA_XFERCFG_DSTINC_1 | DMA_XFERCFG_XFERCOUNT(bytes); } /* Setup transfer descriptor and validate it */ Chip_DMA_SetupTranChannel(LPC_DMA, I2C_SENSOR_BUS_DMAID, &dmaI2CMDesc); /* Setup data transfer */ Chip_DMA_SetupChannelTransfer(LPC_DMA, I2C_SENSOR_BUS_DMAID, dmaI2CMDesc.xfercfg); Chip_DMA_SetValidChannel(LPC_DMA, I2C_SENSOR_BUS_DMAID); }
/**
 * @brief	DMA Interrupt Handler
 *
 * Services both UART0 DMA channels:
 *  - TX: on completion, consumes one descriptor (countTXDescUsed--) and, if
 *    more queued buffers remain, re-programs the channel with the next
 *    descriptor. On error, resets and re-initializes the TX channel.
 *  - RX: on completion, flags uartRxAvail for the foreground code. On error,
 *    resets the channel and rebuilds the RX descriptor ring.
 *
 * Shares countTXDescUsed/nextTXDesc with dmaTXSend(), which guards its own
 * accesses by disabling this IRQ.
 *
 * @return	None
 */
void DMA_IRQHandler(void)
{
	uint32_t errors, pending;

	/* Get DMA error and interrupt-A pending channel bitmaps */
	errors = Chip_DMA_GetErrorIntChannels(LPC_DMA);
	pending = Chip_DMA_GetActiveIntAChannels(LPC_DMA);

	/* Check DMA interrupts of UART 0 TX channel */
	if ((errors | pending) & (1 << DMAREQ_USART0_TX)) {
		/* Clear DMA interrupt for the channel */
		/* NOTE(review): only the INTA flag is cleared here; presumably the
		   error flag is cleared by dmaClearChannel() on the error path —
		   verify against the Chip_DMA API, otherwise an error could
		   re-trigger this handler. */
		Chip_DMA_ClearActiveIntAChannel(LPC_DMA, DMAREQ_USART0_TX);

		/* Handle errors if needed */
		if (errors & (1 << DMAREQ_USART0_TX)) {
			/* DMA error, channel needs to be reset */
			dmaClearChannel(DMAREQ_USART0_TX);
			dmaTXSetup();
		}
		else {
			/* Descriptor is consumed */
			countTXDescUsed--;
		}

		/* Is another DMA descriptor waiting that was not chained? */
		if (countTXDescUsed > 0) {
			nextTXDesc++;

			/* Setup transfer descriptor and validate it */
			Chip_DMA_SetupTranChannel(LPC_DMA, DMAREQ_USART0_TX, &dmaTXDesc[nextTXDesc]);

			/* Setup data transfer */
			Chip_DMA_SetupChannelTransfer(LPC_DMA, DMAREQ_USART0_TX,
										  dmaTXDesc[nextTXDesc].xfercfg);
			Chip_DMA_SetValidChannel(LPC_DMA, DMAREQ_USART0_TX);
		}
	}

	/* Check DMA interrupts of UART 0 RX channel */
	if ((errors | pending) & (1 << DMAREQ_USART0_RX)) {
		/* Clear DMA interrupt for the channel */
		Chip_DMA_ClearActiveIntAChannel(LPC_DMA, DMAREQ_USART0_RX);

		/* Handle errors if needed */
		if (errors & (1 << DMAREQ_USART0_RX)) {
			/* DMA error, channel needs to be reset: re-init the channel and
			   rebuild the circular RX descriptor ring */
			dmaClearChannel(DMAREQ_USART0_RX);
			dmaRXSetup();
			dmaRXQueue();
		}
		else {
			/* One RX buffer filled; signal the foreground code */
			uartRxAvail = true;
		}
	}
}
/* Send data via the UART */ static bool dmaTXSend(uint8_t *data, int bytes) { /* Disable the DMA IRQ to prevent race conditions with shared data */ NVIC_DisableIRQ(DMA_IRQn); /* This is a limited example, limit descriptor and byte count */ if ((countTXDescUsed >= UARTTXDESC) || (bytes > 1024)) { /* Re-enable the DMA IRQ */ NVIC_EnableIRQ(DMA_IRQn); /* All DMA descriptors are used, so just exit */ return false; } else if (countTXDescUsed == 0) { /* No descriptors are currently used, so take the first one */ nextTXDesc = 0; } /* Create a descriptor for the data */ dmaTXDesc[countTXDescUsed].source = DMA_ADDR(data + bytes - 1); /* Last address here */ dmaTXDesc[countTXDescUsed].dest = DMA_ADDR(&LPC_USART0->TXDATA); /* Byte aligned */ /* If there are multiple buffers with non-contiguous addresses, they can be chained together here (it is recommended to only use the DMA_XFERCFG_SETINTA on the last chained descriptor). If another TX buffer needs to be sent, the DMA IRQ handler will re-queue and send the buffer there without using chaining. */ dmaTXDesc[countTXDescUsed].next = DMA_ADDR(0); /* Setup transfer configuration */ dmaTXDesc[countTXDescUsed].xfercfg = DMA_XFERCFG_CFGVALID | DMA_XFERCFG_SETINTA | DMA_XFERCFG_SWTRIG | DMA_XFERCFG_WIDTH_8 | DMA_XFERCFG_SRCINC_1 | DMA_XFERCFG_DSTINC_0 | DMA_XFERCFG_XFERCOUNT(bytes); /* If a transfer is currently in progress, then stop here and let the DMA handler re-queue the next transfer. Otherwise, start the transfer here. */ if (countTXDescUsed == 0) { /* Setup transfer descriptor and validate it */ Chip_DMA_SetupTranChannel(LPC_DMA, DMAREQ_USART0_TX, &dmaTXDesc[countTXDescUsed]); /* Setup data transfer */ Chip_DMA_SetupChannelTransfer(LPC_DMA, DMAREQ_USART0_TX, dmaTXDesc[countTXDescUsed].xfercfg); Chip_DMA_SetValidChannel(LPC_DMA, DMAREQ_USART0_TX); } /* Update used descriptor count */ countTXDescUsed++; /* Re-enable the DMA IRQ */ NVIC_EnableIRQ(DMA_IRQn); return true; }