/* Setup and start a DMA transfer on the I2C sensor bus.
 *
 * buff  : data buffer; source for TX, destination for RX
 * bytes : number of bytes to move (DMA transfer-count limited)
 * tx    : true = master transmit (memory -> MSTDAT),
 *         false = master receive (MSTDAT -> memory)
 *
 * NOTE: the LPC DMA engine expects the incrementing side of a descriptor
 * to hold the address of the LAST transfer unit, hence the
 * "DMA_ADDR(buff) + bytes - 1" arithmetic below.
 *
 * FIX: the function was declared with no return type ("static setupI2CDMAXfer"),
 * i.e. implicit int — invalid since C99 and it returns nothing; now 'static void'.
 */
static void setupI2CDMAXfer(void *buff, uint8_t bytes, bool tx)
{
	/* Enable DMA requests from the I2C master controller */
	I2C_SENSOR_BUS->MSTCTL = I2C_MSTCTL_MSTDMA;

	if (tx) {
		/* Master to slave: memory (incrementing) -> MSTDAT (fixed) */
		dmaI2CMDesc.source = DMA_ADDR(buff) + bytes - 1;
		dmaI2CMDesc.dest = DMA_ADDR(&I2C_SENSOR_BUS->MSTDAT);
		dmaI2CMDesc.next = DMA_ADDR(0);
		dmaI2CMDesc.xfercfg = DMA_XFERCFG_CFGVALID | DMA_XFERCFG_SETINTA |
							  DMA_XFERCFG_SWTRIG | DMA_XFERCFG_WIDTH_8 |
							  DMA_XFERCFG_SRCINC_1 | DMA_XFERCFG_DSTINC_0 |
							  DMA_XFERCFG_XFERCOUNT(bytes);
	}
	else {
		/* Slave to master: MSTDAT (fixed) -> memory (incrementing) */
		dmaI2CMDesc.source = DMA_ADDR(&I2C_SENSOR_BUS->MSTDAT);
		dmaI2CMDesc.dest = DMA_ADDR(buff) + bytes - 1;
		dmaI2CMDesc.next = DMA_ADDR(0);
		dmaI2CMDesc.xfercfg = DMA_XFERCFG_CFGVALID | DMA_XFERCFG_SETINTA |
							  DMA_XFERCFG_SWTRIG | DMA_XFERCFG_WIDTH_8 |
							  DMA_XFERCFG_SRCINC_0 | DMA_XFERCFG_DSTINC_1 |
							  DMA_XFERCFG_XFERCOUNT(bytes);
	}

	/* Setup transfer descriptor and validate it */
	Chip_DMA_SetupTranChannel(LPC_DMA, I2C_SENSOR_BUS_DMAID, &dmaI2CMDesc);
	/* Setup data transfer */
	Chip_DMA_SetupChannelTransfer(LPC_DMA, I2C_SENSOR_BUS_DMAID, dmaI2CMDesc.xfercfg);
	Chip_DMA_SetValidChannel(LPC_DMA, I2C_SENSOR_BUS_DMAID);
}
/* Queue up DMA descriptors and buffers for UART RX */ static void dmaRXQueue(void) { int i; /* Linked list of descriptors that map to the 3 receive buffers */ for (i = 0; i < UARTRXDESC; i++) { /* Setup next descriptor */ if (i == (UARTRXDESC - 1)) { /* Wrap descriptors */ dmaRXDesc[i].next = DMA_ADDR(&dmaRXDesc[0]); } else { dmaRXDesc[i].next = DMA_ADDR(&dmaRXDesc[i + 1]); } /* Create a descriptor for the data */ dmaRXDesc[i].source = DMA_ADDR(&LPC_USART0->RXDATA) + 0; /* Byte aligned */ dmaRXDesc[i].dest = DMA_ADDR(&dmaRXBuffs[i][0] + UARTRXBUFFSIZE - 1); /* Setup transfer configuration */ dmaRXDesc[i].xfercfg = DMA_XFERCFG_CFGVALID | DMA_XFERCFG_SETINTA | DMA_XFERCFG_WIDTH_8 | DMA_XFERCFG_SRCINC_0 | DMA_XFERCFG_DSTINC_1 | DMA_XFERCFG_RELOAD | DMA_XFERCFG_XFERCOUNT(UARTRXBUFFSIZE); } /* Setup transfer descriptor and validate it */ Chip_DMA_SetupTranChannel(LPC_DMA, DMAREQ_USART0_RX, &dmaRXDesc[0]); /* Setup data transfer */ Chip_DMA_SetupChannelTransfer(LPC_DMA, DMAREQ_USART0_RX, dmaRXDesc[0].xfercfg); Chip_DMA_SetValidChannel(LPC_DMA, DMAREQ_USART0_RX); Chip_DMA_SWTriggerChannel(LPC_DMA, DMAREQ_USART0_RX); }
/* Queue a buffer for transmission via the UART TX DMA channel.
 *
 * data  : bytes to send; must remain valid until the DMA completion
 *         interrupt fires — the DMA engine reads the buffer in place
 * bytes : number of bytes to send (1..1024 in this limited example)
 *
 * Returns true if the buffer was queued, false if no TX descriptor is
 * free or the arguments are invalid.
 *
 * FIX: also reject NULL data and non-positive byte counts. The original
 * only checked 'bytes > 1024'; the XFERCOUNT field encodes (count - 1),
 * so bytes == 0 would underflow and program a maximum-length transfer.
 */
static bool dmaTXSend(uint8_t *data, int bytes)
{
	/* Disable the DMA IRQ to prevent race conditions with shared data */
	NVIC_DisableIRQ(DMA_IRQn);

	/* This is a limited example, limit descriptor and byte count */
	if ((countTXDescUsed >= UARTTXDESC) || (data == NULL) || (bytes <= 0) || (bytes > 1024)) {
		/* Re-enable the DMA IRQ */
		NVIC_EnableIRQ(DMA_IRQn);

		/* All DMA descriptors are used (or bad arguments), so just exit */
		return false;
	}
	else if (countTXDescUsed == 0) {
		/* No descriptors are currently used, so take the first one */
		nextTXDesc = 0;
	}

	/* Create a descriptor for the data; source holds the LAST address,
	   as required for the incrementing side of an LPC DMA descriptor */
	dmaTXDesc[countTXDescUsed].source = DMA_ADDR(data + bytes - 1);
	dmaTXDesc[countTXDescUsed].dest = DMA_ADDR(&LPC_USART0->TXDATA);	/* Byte aligned */

	/* If there are multiple buffers with non-contiguous addresses, they can be
	   chained together here (it is recommended to only use the
	   DMA_XFERCFG_SETINTA on the last chained descriptor). If another TX buffer
	   needs to be sent, the DMA IRQ handler will re-queue and send the buffer
	   there without using chaining. */
	dmaTXDesc[countTXDescUsed].next = DMA_ADDR(0);

	/* Setup transfer configuration */
	dmaTXDesc[countTXDescUsed].xfercfg = DMA_XFERCFG_CFGVALID | DMA_XFERCFG_SETINTA |
										 DMA_XFERCFG_SWTRIG | DMA_XFERCFG_WIDTH_8 |
										 DMA_XFERCFG_SRCINC_1 | DMA_XFERCFG_DSTINC_0 |
										 DMA_XFERCFG_XFERCOUNT(bytes);

	/* If a transfer is currently in progress, then stop here and let the DMA
	   handler re-queue the next transfer. Otherwise, start the transfer here. */
	if (countTXDescUsed == 0) {
		/* Setup transfer descriptor and validate it */
		Chip_DMA_SetupTranChannel(LPC_DMA, DMAREQ_USART0_TX, &dmaTXDesc[countTXDescUsed]);

		/* Setup data transfer */
		Chip_DMA_SetupChannelTransfer(LPC_DMA, DMAREQ_USART0_TX,
									  dmaTXDesc[countTXDescUsed].xfercfg);
		Chip_DMA_SetValidChannel(LPC_DMA, DMAREQ_USART0_TX);
	}

	/* Update used descriptor count */
	countTXDescUsed++;

	/* Re-enable the DMA IRQ */
	NVIC_EnableIRQ(DMA_IRQn);

	return true;
}
/* Update only the transfer-count field of an existing DMA channel
 * transfer configuration, leaving all other configuration bits intact. */
void Chip_DMA_SetupChannelTransferSize(LPC_DMA_T *pDMA, DMA_CHID_T ch, uint32_t trans)
{
	/* XFERCOUNT occupies a 10-bit field starting at bit 16 of XFERCFG */
	const uint32_t countFieldMask = (0x3FF << 16);

	/* Clear the old count first, then merge in the new encoded count */
	Chip_DMA_ClearTranBits(pDMA, ch, countFieldMask);
	Chip_DMA_SetTranBits(pDMA, ch, DMA_XFERCFG_XFERCOUNT(trans));
}
// Magicoe OSStatus platform_spi_transfer( const platform_spi_t* spi, const platform_spi_config_t* config, const platform_spi_message_segment_t* segments, uint16_t number_of_segments ) OSStatus platform_spi_transfer( platform_spi_driver_t* driver, const platform_spi_config_t* config, const platform_spi_message_segment_t* segments, uint16_t number_of_segments ) { OSStatus err = kNoErr; uint32_t count = 0; uint32_t i; const platform_spi_message_segment_t *pSeg; uint32_t dmaXferLen; DMA_CHDESC_T *pTxDesc, *pRxDesc; LPC_SPI_T *pSPI; uint32_t dmaRxChnNdx, dmaTxChnNdx; const uint8_t *pcTx; uint8_t *pRx; require_action_quiet( ( driver != NULL ) && ( config != NULL ) && ( segments != NULL ) && ( number_of_segments != 0 ), exit, err = kParamErr); // save the driver pointer so that in DMA IRQ callback we can access its members platform_mcu_powersave_disable(); pSeg = segments; pTxDesc = (DMA_CHDESC_T *) g_pDMA->SRAMBASE + driver->peripheral->dmaTxChnNdx; pRxDesc = (DMA_CHDESC_T *) g_pDMA->SRAMBASE + driver->peripheral->dmaRxChnNdx; pSPI = driver->peripheral->port; if (pSPI == LPC_SPI0) s_pSPIDrvs[0] = driver; dmaRxChnNdx = driver->peripheral->dmaRxChnNdx , dmaTxChnNdx = driver->peripheral->dmaTxChnNdx; driver->xferErr = 0; /* Activate chip select */ platform_gpio_output_low( config->chip_select ); for ( i = 0; i < number_of_segments; i++, pSeg++ ) { // transfer one seg count = pSeg->length; if (0 == count) continue; pcTx = pSeg->tx_buffer , pRx = pSeg->rx_buffer; do { dmaXferLen = count > DMA_MAX_XFER_CNT ? 
DMA_MAX_XFER_CNT : count; count -= dmaXferLen; driver->isRxDone = driver->isTxDone = 0; #if 0 { if (pRx != 0) { pSPI->TXCTRL &= ~(1UL<<22); if (pSPI->STAT & SPI_STAT_RXRDY) pSPI->RXDAT; while (dmaXferLen--) { while (!(pSPI->STAT & SPI_STAT_TXRDY)); pSPI->TXDAT = *pcTx++; while (!(pSPI->STAT & SPI_STAT_RXRDY)); *pRx++ = (uint8_t) pSPI->RXDAT; } } else { pSPI->TXCTRL |= (1UL<<22); while (dmaXferLen--) { while (!(pSPI->STAT & SPI_STAT_TXRDY)); pSPI->TXDAT = *pcTx++; } } while (!(pSPI->STAT & SPI_STAT_TXRDY)); } #else pTxDesc->next = 0; pTxDesc->dest = DMA_ADDR(&pSPI->TXDAT); pTxDesc->source = DMA_ADDR(pcTx) + dmaXferLen - 1; pTxDesc->xfercfg = DMA_XFERCFG_CFGVALID | DMA_XFERCFG_SETINTA | DMA_XFERCFG_SWTRIG | DMA_XFERCFG_WIDTH_8 | DMA_XFERCFG_SRCINC_1 | DMA_XFERCFG_DSTINC_0 | DMA_XFERCFG_XFERCOUNT(dmaXferLen); if (pRx != 0) { pSPI->TXCTRL &= ~(1UL<<22); driver->isRx = 1; pRxDesc->next = 0; pRxDesc->source = DMA_ADDR(&pSPI->RXDAT); pRxDesc->dest = DMA_ADDR(pRx) + dmaXferLen - 1; pRxDesc->xfercfg = DMA_XFERCFG_CFGVALID | DMA_XFERCFG_SETINTA | DMA_XFERCFG_SWTRIG | DMA_XFERCFG_WIDTH_8 | DMA_XFERCFG_DSTINC_1 | DMA_XFERCFG_SRCINC_0 | DMA_XFERCFG_XFERCOUNT(dmaXferLen); // start RX DMA g_pDMA->DMACH[dmaRxChnNdx].XFERCFG = pRxDesc->xfercfg; } else { driver->isRx = 0; pSPI->TXCTRL |= (1UL<<22); } // start TX DMA g_pDMA->DMACH[dmaTxChnNdx].XFERCFG = pTxDesc->xfercfg; #ifndef NO_MICO_RTOS mico_rtos_get_semaphore(&driver->sem_xfer_done, MICO_WAIT_FOREVER); #else while(1) { if (driver->isTxDone) { if (!driver->isRx || driver->isRxDone) break; } __WFI(); } #endif #endif if (driver->xferErr) { err = kGeneralErr; break; } // >>> update read and/or write pointers pcTx += dmaXferLen; if (pRx != 0) pRx += dmaXferLen; // <<< } while (count); } platform_gpio_output_high( config->chip_select ); exit: platform_mcu_powersave_enable( ); return err; }