Example No. 1
/* Queue up DMA descriptors and buffers for UART RX */
static void dmaRXQueue(void)
{
	int i;

	/* Linked list of descriptors that map to the 3 receive buffers */
	for (i = 0; i < UARTRXDESC; i++) {
		/* Setup next descriptor */
		if (i == (UARTRXDESC - 1)) {
			/* Wrap descriptors */
			dmaRXDesc[i].next = DMA_ADDR(&dmaRXDesc[0]);
		}
		else {
			dmaRXDesc[i].next = DMA_ADDR(&dmaRXDesc[i + 1]);
		}

		/* Create a descriptor for the data */
		dmaRXDesc[i].source = DMA_ADDR(&LPC_USART0->RXDATA) + 0;	/* Byte aligned */
		dmaRXDesc[i].dest = DMA_ADDR(&dmaRXBuffs[i][0] + UARTRXBUFFSIZE - 1);

		/* Setup transfer configuration */
		dmaRXDesc[i].xfercfg = DMA_XFERCFG_CFGVALID | DMA_XFERCFG_SETINTA |
							   DMA_XFERCFG_WIDTH_8 | DMA_XFERCFG_SRCINC_0 |
							   DMA_XFERCFG_DSTINC_1 | DMA_XFERCFG_RELOAD |
							   DMA_XFERCFG_XFERCOUNT(UARTRXBUFFSIZE);
	}

	/* Setup transfer descriptor and validate it */
	Chip_DMA_SetupTranChannel(LPC_DMA, DMAREQ_USART0_RX, &dmaRXDesc[0]);

	/* Setup data transfer */
	Chip_DMA_SetupChannelTransfer(LPC_DMA, DMAREQ_USART0_RX,
								  dmaRXDesc[0].xfercfg);
	Chip_DMA_SetValidChannel(LPC_DMA, DMAREQ_USART0_RX);
	Chip_DMA_SWTriggerChannel(LPC_DMA, DMAREQ_USART0_RX);
}
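For context, a minimal sketch of how the queued receive ring might be drained from the main loop (a sketch only: readRXBuff and rcvdRXBuff are hypothetical indices that the DMA interrupt handler would advance each time a descriptor completes; they do not appear in the listing above, and memcpy() requires <string.h>):

/* Copy one completed RX buffer into dst; returns the number of bytes copied */
static int checkRxData(uint8_t *dst)
{
	int bytes = 0;

	NVIC_DisableIRQ(DMA_IRQn);	/* Guard the shared buffer indices */
	if (readRXBuff != rcvdRXBuff) {
		memcpy(dst, dmaRXBuffs[readRXBuff], UARTRXBUFFSIZE);
		bytes = UARTRXBUFFSIZE;

		/* Advance to the next buffer in the wrapped descriptor ring */
		readRXBuff = (readRXBuff + 1) % UARTRXDESC;
	}
	NVIC_EnableIRQ(DMA_IRQn);

	return bytes;
}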
Example No. 2
/* Send data via the UART */
static bool dmaTXSend(uint8_t *data, int bytes)
{
	/* Disable the DMA IRQ to prevent race conditions with shared data */
	NVIC_DisableIRQ(DMA_IRQn);

	/* This is a limited example; bound the descriptor and byte counts */
	if ((countTXDescUsed >= UARTTXDESC) || (bytes > 1024)) {
		/* Re-enable the DMA IRQ */
		NVIC_EnableIRQ(DMA_IRQn);

		/* All DMA descriptors are used, so just exit */
		return false;
	}
	else if (countTXDescUsed == 0) {
		/* No descriptors are currently used, so take the first one */
		nextTXDesc = 0;
	}

	/* Create a descriptor for the data */
	dmaTXDesc[countTXDescUsed].source = DMA_ADDR(data + bytes - 1);	/* Last address here */
	dmaTXDesc[countTXDescUsed].dest = DMA_ADDR(&LPC_USART0->TXDATA);	/* Byte aligned */

	/* If there are multiple buffers with non-contiguous addresses, they can be chained
	   together here (it is recommended to only use the DMA_XFERCFG_SETINTA on the
	   last chained descriptor). If another TX buffer needs to be sent, the DMA
	   IRQ handler will re-queue and send the buffer there without using chaining. */
	dmaTXDesc[countTXDescUsed].next = DMA_ADDR(0);

	/* Setup transfer configuration */
	dmaTXDesc[countTXDescUsed].xfercfg = DMA_XFERCFG_CFGVALID | DMA_XFERCFG_SETINTA |
										 DMA_XFERCFG_SWTRIG | DMA_XFERCFG_WIDTH_8 | DMA_XFERCFG_SRCINC_1 |
										 DMA_XFERCFG_DSTINC_0 | DMA_XFERCFG_XFERCOUNT(bytes);

	/* If a transfer is currently in progress, then stop here and let the DMA
	   handler re-queue the next transfer. Otherwise, start the transfer here. */
	if (countTXDescUsed == 0) {
		/* Setup transfer descriptor and validate it */
		Chip_DMA_SetupTranChannel(LPC_DMA, DMAREQ_USART0_TX, &dmaTXDesc[countTXDescUsed]);

		/* Setup data transfer */
		Chip_DMA_SetupChannelTransfer(LPC_DMA, DMAREQ_USART0_TX,
									  dmaTXDesc[countTXDescUsed].xfercfg);

		Chip_DMA_SetValidChannel(LPC_DMA, DMAREQ_USART0_TX);
	}

	/* Update used descriptor count */
	countTXDescUsed++;

	/* Re-enable the DMA IRQ */
	NVIC_EnableIRQ(DMA_IRQn);

	return true;
}
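The comments in dmaTXSend() rely on a DMA interrupt handler that re-queues the next pending TX descriptor. A minimal sketch of that handler, assuming the LPCOpen helpers Chip_DMA_GetActiveIntAChannels()/Chip_DMA_ClearActiveIntAChannel() and the shared countTXDescUsed/nextTXDesc counters from the listing (RX and error handling omitted):

void DMA_IRQHandler(void)
{
	uint32_t pending = Chip_DMA_GetActiveIntAChannels(LPC_DMA);

	if (pending & (1 << DMAREQ_USART0_TX)) {
		Chip_DMA_ClearActiveIntAChannel(LPC_DMA, DMAREQ_USART0_TX);

		/* Release the finished descriptor */
		countTXDescUsed--;
		if (countTXDescUsed > 0) {
			/* More buffers queued: start the next unchained descriptor */
			nextTXDesc++;
			Chip_DMA_SetupTranChannel(LPC_DMA, DMAREQ_USART0_TX, &dmaTXDesc[nextTXDesc]);
			Chip_DMA_SetupChannelTransfer(LPC_DMA, DMAREQ_USART0_TX,
										  dmaTXDesc[nextTXDesc].xfercfg);
			Chip_DMA_SetValidChannel(LPC_DMA, DMAREQ_USART0_TX);
		}
	}
}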
Example No. 3
/* Setup and start a DMA transfer */
static void setupI2CDMAXfer(void *buff, uint8_t bytes, bool tx)
{
	/* Enable DMA for I2C controller */
	I2C_SENSOR_BUS->MSTCTL = I2C_MSTCTL_MSTDMA;

	/* Master to slave */
	if (tx) {
		dmaI2CMDesc.source = DMA_ADDR(buff) + bytes - 1;
		dmaI2CMDesc.dest = DMA_ADDR(&I2C_SENSOR_BUS->MSTDAT);
		dmaI2CMDesc.next = DMA_ADDR(0);
		dmaI2CMDesc.xfercfg = DMA_XFERCFG_CFGVALID | DMA_XFERCFG_SETINTA |
						 DMA_XFERCFG_SWTRIG | DMA_XFERCFG_WIDTH_8 | DMA_XFERCFG_SRCINC_1 |
						 DMA_XFERCFG_DSTINC_0 | DMA_XFERCFG_XFERCOUNT(bytes);
	}
	else {
		dmaI2CMDesc.source = DMA_ADDR(&I2C_SENSOR_BUS->MSTDAT);
		dmaI2CMDesc.dest = DMA_ADDR(buff) + bytes - 1;
		dmaI2CMDesc.next = DMA_ADDR(0);
		dmaI2CMDesc.xfercfg = DMA_XFERCFG_CFGVALID | DMA_XFERCFG_SETINTA |
						 DMA_XFERCFG_SWTRIG | DMA_XFERCFG_WIDTH_8 | DMA_XFERCFG_SRCINC_0 |
						 DMA_XFERCFG_DSTINC_1 | DMA_XFERCFG_XFERCOUNT(bytes);
	}

	/* Setup transfer descriptor and validate it */
	Chip_DMA_SetupTranChannel(LPC_DMA, I2C_SENSOR_BUS_DMAID, &dmaI2CMDesc);

	/* Setup data transfer */
	Chip_DMA_SetupChannelTransfer(LPC_DMA, I2C_SENSOR_BUS_DMAID, dmaI2CMDesc.xfercfg);

	Chip_DMA_SetValidChannel(LPC_DMA, I2C_SENSOR_BUS_DMAID);
}
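A hedged usage sketch (sensorCmd/sensorVal and the surrounding I2C master start/stop handling are assumptions; only setupI2CDMAXfer() itself comes from the listing):

static uint8_t sensorCmd[2] = {0x0F, 0x00};	/* Hypothetical register/value pair */
static uint8_t sensorVal;

/* Queue a 2-byte master write on the sensor bus */
setupI2CDMAXfer(sensorCmd, 2, true);
/* ... wait for the I2C master/DMA completion event, then queue a 1-byte read ... */
setupI2CDMAXfer(&sensorVal, 1, false);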
Example No. 4
File: xdma.c Project: Limius/cox
//! This function dynamically allocates a channel according to the DMA requests.
//! The \ref ulDMASrcRequest and \ref ulDMADestRequest can be:
//! - \ref DMA_REQUEST_ADC0_RX
//! - \ref DMA_REQUEST_TIM2_CH3
//! - \ref DMA_REQUEST_TIM4_CH1
//! - \ref DMA_REQUEST_UART3_TX
//! - \ref DMA_REQUEST_TIM1_CH1
//! - \ref DMA_REQUEST_TIM2_UP
//! - \ref DMA_REQUEST_TIM3_CH3
//! - \ref DMA_REQUEST_SPI1_RX
//! - \ref DMA_REQUEST_UART3_RX
//! - \ref DMA_REQUEST_TIM1_CH2
//! - \ref DMA_REQUEST_TIM3_CH4
//! - \ref DMA_REQUEST_TIM3_UP
//! - others: see \ref STM32F1xx_DMA_Request_Connections
//! .
//!
//! \note ulDMASrcRequest can only be an XX_RX or TIMx_CHx request;
//! ulDMADestRequest can only be an XX_TX, TIMx_TRIG, TIMx_COM, or TIMx_UP request.
//!
//! \return Returns the dynamically assigned channel ID.
//! The channel ID can be:
//! - DMA1_CHANNEL_1
//! - DMA1_CHANNEL_2
//! - others: see \ref STM32F1xx_DMA_Channel_IDs
//! .
//
//*****************************************************************************
unsigned long 
DMAChannelDynamicAssign(unsigned long ulDMASrcRequest,    
                         unsigned long ulDMADestRequest)
{
    unsigned long ulChannelID;
    //
    // Check the arguments.
    //
    xASSERT((ulDMASrcRequest == DMA_REQUEST_ADC0_RX) || 
           (ulDMASrcRequest == DMA_REQUEST_TIM2_CH3) || 
           (ulDMASrcRequest == DMA_REQUEST_TIM4_CH1) || 
           (ulDMASrcRequest == xDMA_REQUEST_MEM) || 
           (ulDMASrcRequest == DMA_REQUEST_TIM1_CH1)  ||     
           (ulDMASrcRequest == DMA_REQUEST_TIM3_CH3)  ||
           (ulDMASrcRequest == DMA_REQUEST_SPI1_RX)  ||     
           (ulDMASrcRequest == DMA_REQUEST_TIM3_CH4)  ||
           (ulDMASrcRequest == DMA_REQUEST_TIM1_CH4)  ||
           (ulDMASrcRequest == DMA_REQUEST_TIM4_CH2)  ||
           (ulDMASrcRequest == DMA_REQUEST_SPI2_RX)  ||
           (ulDMASrcRequest == DMA_REQUEST_I2S2_RX)  ||
           (ulDMASrcRequest == DMA_REQUEST_TIM2_CH1)  ||
           (ulDMASrcRequest == DMA_REQUEST_TIM4_CH3)  ||
           (ulDMASrcRequest == DMA_REQUEST_I2C2_RX)  ||
           (ulDMASrcRequest == DMA_REQUEST_TIM1_CH3)  ||
           (ulDMASrcRequest == DMA_REQUEST_TIM3_CH1)  ||
           (ulDMASrcRequest == DMA_REQUEST_TIM2_CH2)  ||
           (ulDMASrcRequest == DMA_REQUEST_TIM2_CH4)  ||
           (ulDMASrcRequest == DMA_REQUEST_I2C1_RX)  ||
           (ulDMASrcRequest == DMA_REQUEST_TIM5_CH4)  ||
           (ulDMASrcRequest == DMA_REQUEST_TIM8_CH3)  ||
           (ulDMASrcRequest == DMA_REQUEST_SPI3_RX)  ||
           (ulDMASrcRequest == DMA_REQUEST_I2S3_RX)  ||
           (ulDMASrcRequest == DMA_REQUEST_TIM8_CH4)  ||
           (ulDMASrcRequest == DMA_REQUEST_TIM5_CH3)  ||
           (ulDMASrcRequest == DMA_REQUEST_TIM8_CH1)  ||
           (ulDMASrcRequest == DMA_REQUEST_TIM5_CH2)  ||
           (ulDMASrcRequest == DMA_REQUEST_SDIO_RX)  ||
           (ulDMASrcRequest == DMA_REQUEST_ADC3_RX)  ||
           (ulDMASrcRequest == DMA_REQUEST_TIM8_CH2)  ||
           (ulDMASrcRequest == DMA_REQUEST_TIM5_CH1)  ||
           (ulDMASrcRequest == DMA_REQUEST_TIM1_CH2)     
           );
    xASSERT((ulDMADestRequest == DMA_REQUEST_UART3_TX) || 
           (ulDMADestRequest == DMA_REQUEST_TIM1_UP) ||  
           (ulDMADestRequest == xDMA_REQUEST_MEM) || 
           (ulDMADestRequest == DMA_REQUEST_TIM3_UP)  ||     
           (ulDMADestRequest == DMA_REQUEST_SPI1_TX)  ||
           (ulDMADestRequest == DMA_REQUEST_UART1_TX)  ||     
           (ulDMADestRequest == DMA_REQUEST_TIM1_TRIG)  ||
           (ulDMADestRequest == DMA_REQUEST_TIM1_COM)  ||
           (ulDMADestRequest == DMA_REQUEST_I2C2_TX)  ||
           (ulDMADestRequest == DMA_REQUEST_SPI2_TX)  ||
           (ulDMADestRequest == DMA_REQUEST_I2S2_TX)  ||
           (ulDMADestRequest == DMA_REQUEST_TIM3_TRIG)  ||
           (ulDMADestRequest == DMA_REQUEST_I2C1_TX)  ||
           (ulDMADestRequest == DMA_REQUEST_UART2_TX)  ||
           (ulDMADestRequest == DMA_REQUEST_TIM4_UP)  ||
           (ulDMADestRequest == DMA_REQUEST_TIM5_TRIG)  ||
           (ulDMADestRequest == DMA_REQUEST_TIM8_UP)  ||
           (ulDMADestRequest == DMA_REQUEST_TIM8_TRIG)  ||
           (ulDMADestRequest == DMA_REQUEST_TIM8_COM)  ||
           (ulDMADestRequest == DMA_REQUEST_TIM5_UP)  ||
           (ulDMADestRequest == DMA_REQUEST_SPI3_TX)  ||
           (ulDMADestRequest == DMA_REQUEST_I2S3_TX)  ||
           (ulDMADestRequest == DMA_REQUEST_TIM6_UP)  ||
           (ulDMADestRequest == DMA_REQUEST_DAC_CH1)  ||
           (ulDMADestRequest == DMA_REQUEST_SDIO_TX)  ||
           (ulDMADestRequest == DMA_REQUEST_TIM7_UP)  ||
           (ulDMADestRequest == DMA_REQUEST_DAC_CH2)  ||
           (ulDMADestRequest == DMA_REQUEST_UART4_TX)     
           );

    //
    // STM32F1xx DMA supports peripheral-to-peripheral (P to P) transfers
    //
    if((ulDMASrcRequest != xDMA_REQUEST_MEM) &&
       (ulDMADestRequest != xDMA_REQUEST_MEM))
    {
        if(g_psDMAChannelAssignTable[DMA_CHANNEL(ulDMASrcRequest)].bChannelAssigned ==
           xfalse)
        {
            g_psDMAChannelAssignTable[DMA_CHANNEL(ulDMASrcRequest)].bChannelAssigned =
                xtrue;
            xHWREG(DMA_ADDR(ulDMASrcRequest)) &= ~(DMA_CCR1_DIR | DMA_CCR1_MEM2MEM);
            xHWREG(DMA_ADDR(ulDMASrcRequest)) |= DMA_CCR1_DIR;
            return g_psDMAChannelAssignTable[DMA_CHANNEL(ulDMASrcRequest)].ulChannelID;
        }
        else
        {
            return xDMA_CHANNEL_NOT_EXIST;
        }
    }
    
    if((ulDMASrcRequest == xDMA_REQUEST_MEM) && (ulDMADestRequest & 0x00000100))
    {
        if(g_psDMAChannelAssignTable[DMA_CHANNEL(ulDMADestRequest)].bChannelAssigned ==
           xfalse)
        {
            g_psDMAChannelAssignTable[DMA_CHANNEL(ulDMADestRequest)].bChannelAssigned =
                xtrue;
            xHWREG(DMA_ADDR(ulDMADestRequest)) &= ~(DMA_CCR1_DIR | DMA_CCR1_MEM2MEM);
            xHWREG(DMA_ADDR(ulDMADestRequest)) |= DMA_CCR1_DIR;
            return g_psDMAChannelAssignTable[DMA_CHANNEL(ulDMADestRequest)].ulChannelID;
        }
        else
        {
            return xDMA_CHANNEL_NOT_EXIST;
        }
    }
    if((ulDMADestRequest == xDMA_REQUEST_MEM) && (ulDMASrcRequest != xDMA_REQUEST_MEM) &&
       !(ulDMASrcRequest & 0x00000100))
    {
        if(g_psDMAChannelAssignTable[DMA_CHANNEL(ulDMASrcRequest)].bChannelAssigned ==
           xfalse)
        {
            g_psDMAChannelAssignTable[DMA_CHANNEL(ulDMASrcRequest)].bChannelAssigned =
                xtrue;
            xHWREG(DMA_ADDR(ulDMASrcRequest)) &= ~(DMA_CCR1_DIR | DMA_CCR1_MEM2MEM);

            return g_psDMAChannelAssignTable[DMA_CHANNEL(ulDMASrcRequest)].ulChannelID;
        }
        else
        {
            return xDMA_CHANNEL_NOT_EXIST;
        }
    }
    
    //
    // Mem to Mem type
    //
    if((ulDMASrcRequest & xDMA_REQUEST_MEM) &&
       (ulDMADestRequest & xDMA_REQUEST_MEM))
    {
        for(ulChannelID = 0; 
            g_psDMAChannelAssignTable[ulChannelID].ulChannelID != xDMA_CHANNEL_NOT_EXIST;
            ulChannelID++)
        {
            if(g_psDMAChannelAssignTable[ulChannelID].bChannelAssigned == xfalse)
            {
                g_psDMAChannelAssignTable[ulChannelID].bChannelAssigned = xtrue;
                break;
            }
        }

        /* If the scan hit the end-of-table sentinel, no channel is free */
        if(g_psDMAChannelAssignTable[ulChannelID].ulChannelID == xDMA_CHANNEL_NOT_EXIST)
        {
            return xDMA_CHANNEL_NOT_EXIST;
        }

        xHWREG(g_psDMAChannel[ulChannelID]) &= ~(DMA_CCR1_DIR | DMA_CCR1_MEM2MEM);
        xHWREG(g_psDMAChannel[ulChannelID]) |= (DMA_CCR1_MEM2MEM);
        return g_psDMAChannelAssignTable[ulChannelID].ulChannelID;
        
    }

    //
    // When the src request is a TX type or the dest request is an RX type,
    // the assignment fails.
    //
    return xDMA_CHANNEL_NOT_EXIST;
}
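A usage sketch under the rules in the doc comment above (the request pair is an example choice; error handling is minimal):

unsigned long ulChannel;

/* Memory-to-peripheral: feed UART3 TX from a RAM buffer */
ulChannel = DMAChannelDynamicAssign(xDMA_REQUEST_MEM, DMA_REQUEST_UART3_TX);
if (ulChannel == xDMA_CHANNEL_NOT_EXIST) {
    /* No free channel could be assigned for this request pair */
}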
Example No. 5
/* Send data via the UART */
static bool dmaTXSend(uint8_t *data, int bytes)
{
	/* Disable the DMA IRQ to prevent race conditions with shared data */
	NVIC_DisableIRQ(DMA_IRQn);

	/* This is a limited example; bound the descriptor and byte counts */
	if ((countTXDescUsed >= UARTTXDESC) || (bytes > 1024)) {
		/* Re-enable the DMA IRQ */
		NVIC_EnableIRQ(DMA_IRQn);

		/* All DMA descriptors are used, so just exit */
		return false;
	}
	else if (countTXDescUsed == 0) {
		/* No descriptors are currently used, so take the first one */
		nextTXDesc = 0;
	}

	/* Create a descriptor for the data */
	dmaTXDesc[countTXDescUsed].source = (uint32_t) (data + bytes - 1);	/* Last address here */
	dmaTXDesc[countTXDescUsed].dest = (uint32_t) &LPC_USART0->TXDATA;	/* Byte aligned */

	/* If there are multiple buffers with non-contiguous addresses, they can be chained
	   together here. If another TX buffer needs to be sent, the DMA
	   IRQ handler will re-queue and send the buffer there without using chaining. */
	dmaTXDesc[countTXDescUsed].next = DMA_ADDR(0);

	/* Temporarily store length in transfer configuration */
	dmaTXDesc[countTXDescUsed].xfercfg = bytes - 1;

	/* If a transfer is currently in progress, then stop here and let the DMA
	   handler re-queue the next transfer. Otherwise, start the transfer here. */
	if (countTXDescUsed == 0) {
		/* Set up the channel configuration structure: peripheral request mode
		   for UART TX, priority 3, and the completion callback routine */
		dma_ch_cfg.event = DMA_ROM_CH_EVENT_PERIPH;
		dma_ch_cfg.hd_trigger = 0;
		dma_ch_cfg.priority = 3;
		dma_ch_cfg.cb_func = dmaTXDone;
		/* Setup Task Configuration structure, enable SW Trigger and INTA, 8 bit data width,
		   with no destination increment. The transfer length is retrieved from the descriptor */
		dma_task_cfg.ch_num = DMAREQ_USART0_TX;
		dma_task_cfg.config = DMA_ROM_TASK_CFG_SW_TRIGGER | DMA_ROM_TASK_CFG_SEL_INTA;
		dma_task_cfg.data_type = DMA_ROM_TASK_DATA_WIDTH_8 | DMA_ROM_TASK_SRC_INC_1 | DMA_ROM_TASK_DEST_INC_0;
		dma_task_cfg.data_length = dmaTXDesc[countTXDescUsed].xfercfg;
		dma_task_cfg.src = dmaTXDesc[countTXDescUsed].source;
		dma_task_cfg.dst = dmaTXDesc[countTXDescUsed].dest;
		dma_task_cfg.task_addr = (uint32_t) &dmaTXDesc[countTXDescUsed];
		/* Call DMA Init routine to start Tx transfer for newly added descriptor */
		err_code = pDMAApi->dma_init(dma_handle, &dma_ch_cfg, &dma_task_cfg);
		err_code = pDMAApi->dma_set_valid(dma_handle, DMAREQ_USART0_TX);
	}

	/* Update used descriptor count */
	countTXDescUsed++;

	/* Re-enable the DMA IRQ */
	NVIC_EnableIRQ(DMA_IRQn);

	return true;
}
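This ROM-API variant registers dmaTXDone as the channel callback instead of a raw IRQ handler. A minimal sketch of that callback follows; the (err_code, int_type) signature is an assumption about the ROM DMA driver, and the re-queue step mirrors the start-up path in dmaTXSend():

/* Hypothetical ROM-API completion callback for the UART TX channel */
static void dmaTXDone(uint32_t err_code, uint32_t int_type)
{
	/* Release the finished descriptor */
	countTXDescUsed--;
	if (countTXDescUsed > 0) {
		/* More buffers pending: advance, then re-run the dma_init()/
		   dma_set_valid() sequence from dmaTXSend() for dmaTXDesc[nextTXDesc] */
		nextTXDesc++;
	}
}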
Example No. 6
/**
 * @brief	Main UART/DMA program body
 * @return	Does not exit
 */
int main(void)
{
	int bytes, idx;
	uint8_t buff[UARTRXBUFFSIZE];

	SystemCoreClockUpdate();
	Board_Init();
	Init_UART_PinMux();
	Board_LED_Set(0, false);

#if defined(USE_INTEGER_CLOCK)
	/* Use main clock rate as base for UART baud rate divider */
	Chip_Clock_SetUARTBaseClockRate(Chip_Clock_GetMainClockRate(), false);

#else
	/* Use 128x expected UART baud rate for fractional baud mode. */
	Chip_Clock_SetUARTBaseClockRate((115200 * 128), true);
#endif
	/* Setup UART */
	Chip_UART_Init(LPC_USART0);
	Chip_UART_ConfigData(LPC_USART0, UART_CFG_DATALEN_8 | UART_CFG_PARITY_NONE | UART_CFG_STOPLEN_1);
	Chip_UART_SetBaud(LPC_USART0, 115200);
	/* Optional for low clock rates only: Chip_UART_SetBaudWithRTC32K(LPC_USART, 300); */
	Chip_UART_Enable(LPC_USART0);
	Chip_UART_TXEnable(LPC_USART0);

	/* DMA initialization - enable DMA clocking and reset DMA if needed */
	Chip_DMA_Init(LPC_DMA);

	/* Enable DMA controller and use driver provided DMA table for current descriptors */
	Chip_DMA_Enable(LPC_DMA);
	Chip_DMA_SetSRAMBase(LPC_DMA, DMA_ADDR(Chip_DMA_Table));

	/* Setup UART 0 TX DMA support */
	dmaTXSetup();

	/* Setup UART 0 RX DMA support */
	dmaRXSetup();

	/* Enable the DMA IRQ */
	NVIC_EnableIRQ(DMA_IRQn);

	/* Enqueue a bunch of strings in DMA transmit descriptors and start
	   transmit. In this use of DMA, the descriptors aren't chained, so
	   the DMA restarts the next queued descriptor in the DMA interrupt
	   handler. */
	for (idx = 0; idx < DMASENDSTRCNT; idx++) {
		sprintf(dmaSendStr[idx], "DMA send string (unlinked) #%d\r\n", idx);
		dmaTXSend((uint8_t *) dmaSendStr[idx], strlen(dmaSendStr[idx]));
	}

	/* Wait for UART TX DMA channel to go inactive */
	while (1) {
		__WFI();
		if ((Chip_DMA_GetActiveChannels(LPC_DMA) & (1 << DMAREQ_USART0_TX)) == 0) {
			break;
		}
	}

	/* Receive buffers are queued. The DMA interrupt will only trigger on a
	   full DMA buffer receive, so if the UART is idle, but the DMA is only
	   partially complete, the DMA interrupt won't fire. For UART data
	   receive where data is not continuous, a timeout method will be
	   required to flush the DMA when the DMA has pending data and no
	   data has been received on the UART within a specified timeout. */
	dmaRXQueue();

	/* Get RX data via DMA and send it out on TX via DMA */
	while (1) {
		/* Sleep until something happens */
		__WFI();

		/* Did any data come in? */
		bytes = checkRxData(buff);
		if (bytes > 0) {
			/* RX data received, send it via TX DMA */
			dmaTXSend(buff, bytes);
		}
	}

	return 1;
}
Example No. 7
// Magicoe OSStatus platform_spi_transfer( const platform_spi_t* spi, const platform_spi_config_t* config, const platform_spi_message_segment_t* segments, uint16_t number_of_segments )
OSStatus platform_spi_transfer( platform_spi_driver_t* driver, const platform_spi_config_t* config, const platform_spi_message_segment_t* segments, uint16_t number_of_segments )
{
	OSStatus err    = kNoErr;
	uint32_t count  = 0;
	uint32_t i;
	const platform_spi_message_segment_t *pSeg;
	uint32_t dmaXferLen;
	DMA_CHDESC_T *pTxDesc, *pRxDesc;
	LPC_SPI_T *pSPI;
	uint32_t dmaRxChnNdx, dmaTxChnNdx;

	const uint8_t *pcTx;
	uint8_t *pRx;

	require_action_quiet( ( driver != NULL ) && ( config != NULL ) && ( segments != NULL ) && ( number_of_segments != 0 ), exit, err = kParamErr);

	platform_mcu_powersave_disable();

	pSeg = segments;
	pTxDesc = (DMA_CHDESC_T *) g_pDMA->SRAMBASE + driver->peripheral->dmaTxChnNdx;
	pRxDesc = (DMA_CHDESC_T *) g_pDMA->SRAMBASE + driver->peripheral->dmaRxChnNdx;
	pSPI = driver->peripheral->port;
	if (pSPI == LPC_SPI0) {
		/* Save the driver pointer so the DMA IRQ callback can access its members */
		s_pSPIDrvs[0] = driver;
	}
	dmaRxChnNdx = driver->peripheral->dmaRxChnNdx;
	dmaTxChnNdx = driver->peripheral->dmaTxChnNdx;
	driver->xferErr = 0;
	/* Activate chip select */
	platform_gpio_output_low( config->chip_select );
	for ( i = 0; i < number_of_segments; i++, pSeg++ )
	{
		// transfer one seg
		count = pSeg->length;
		if (0 == count)
			continue;

		pcTx = pSeg->tx_buffer;
		pRx = pSeg->rx_buffer;

		do
		{
			dmaXferLen = count > DMA_MAX_XFER_CNT ? DMA_MAX_XFER_CNT : count;
			count -= dmaXferLen;
			driver->isRxDone = driver->isTxDone = 0;
			#if 0	/* Polled (non-DMA) fallback path, kept for reference */
			{
				if (pRx != 0)
				{
					pSPI->TXCTRL &= ~(1UL<<22);
                    if (pSPI->STAT & SPI_STAT_RXRDY)
                        pSPI->RXDAT;
					while (dmaXferLen--)
					{
						while (!(pSPI->STAT & SPI_STAT_TXRDY));
						pSPI->TXDAT = *pcTx++;
						while (!(pSPI->STAT & SPI_STAT_RXRDY));
						*pRx++ = (uint8_t) pSPI->RXDAT;
					}
				}
				else
				{
					pSPI->TXCTRL |= (1UL<<22);
					while (dmaXferLen--)
					{
						while (!(pSPI->STAT & SPI_STAT_TXRDY));
						pSPI->TXDAT = *pcTx++;
					}
				}

                while (!(pSPI->STAT & SPI_STAT_TXRDY));
			}
	        #else
				pTxDesc->next = 0;
				pTxDesc->dest = DMA_ADDR(&pSPI->TXDAT);
				pTxDesc->source = DMA_ADDR(pcTx) + dmaXferLen - 1;
				pTxDesc->xfercfg = DMA_XFERCFG_CFGVALID | DMA_XFERCFG_SETINTA |
					DMA_XFERCFG_SWTRIG | DMA_XFERCFG_WIDTH_8 | DMA_XFERCFG_SRCINC_1 |
					DMA_XFERCFG_DSTINC_0 | DMA_XFERCFG_XFERCOUNT(dmaXferLen);

				if (pRx != 0)
				{
					pSPI->TXCTRL &= ~(1UL<<22);	/* Clear RXIGNORE so received bytes are captured */
					driver->isRx = 1;
					pRxDesc->next = 0;
					pRxDesc->source = DMA_ADDR(&pSPI->RXDAT);
					pRxDesc->dest = DMA_ADDR(pRx) + dmaXferLen - 1;
					pRxDesc->xfercfg = DMA_XFERCFG_CFGVALID | DMA_XFERCFG_SETINTA |
						DMA_XFERCFG_SWTRIG | DMA_XFERCFG_WIDTH_8 | DMA_XFERCFG_DSTINC_1 |
						DMA_XFERCFG_SRCINC_0 | DMA_XFERCFG_XFERCOUNT(dmaXferLen);

					// start RX DMA
					g_pDMA->DMACH[dmaRxChnNdx].XFERCFG = pRxDesc->xfercfg;
				} else {
					driver->isRx = 0;
					pSPI->TXCTRL |= (1UL<<22);	/* Set RXIGNORE for transmit-only transfers */
				}

				// start TX DMA
				g_pDMA->DMACH[dmaTxChnNdx].XFERCFG = pTxDesc->xfercfg;

				#ifndef NO_MICO_RTOS
				mico_rtos_get_semaphore(&driver->sem_xfer_done, MICO_WAIT_FOREVER);
				#else
				while(1)
				{
					if (driver->isTxDone)
					{
						if (!driver->isRx || driver->isRxDone)
							break;
					}
					__WFI();
				}

				#endif
			#endif
			if (driver->xferErr)
			{
				err = kGeneralErr;
				break;
			}
			/* Advance the read/write pointers past the completed chunk */
			pcTx += dmaXferLen;
			if (pRx != 0)
				pRx += dmaXferLen;
		} while (count);
	}
	platform_gpio_output_high( config->chip_select );
exit:
	platform_mcu_powersave_enable( );

	return err;
}
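A hedged usage sketch for a single full-duplex segment (spiDriver and spiConfig are placeholders for instances that would come from the platform's peripheral tables; the segment field names match those referenced in the function above):

uint8_t txBuf[4] = {0x9F, 0x00, 0x00, 0x00};	/* Hypothetical command frame */
uint8_t rxBuf[4];
platform_spi_message_segment_t seg = {
	.tx_buffer = txBuf,
	.rx_buffer = rxBuf,
	.length    = sizeof(txBuf)
};

OSStatus err = platform_spi_transfer(&spiDriver, &spiConfig, &seg, 1);
if (err != kNoErr) {
	/* The transfer failed or the DMA callback flagged an error */
}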