static void prepare_for_transfer(struct device *dev) { struct spi_nrfx_data *dev_data = get_dev_data(dev); const struct spi_nrfx_config *dev_config = get_dev_config(dev); struct spi_context *ctx = &dev_data->ctx; int status; size_t buf_len = spi_context_longest_current_buf(ctx); if (buf_len > 0) { nrfx_err_t result; if (buf_len > dev_config->max_buf_len) { buf_len = dev_config->max_buf_len; } result = nrfx_spis_buffers_set( &dev_config->spis, ctx->tx_buf, spi_context_tx_buf_on(ctx) ? buf_len : 0, ctx->rx_buf, spi_context_rx_buf_on(ctx) ? buf_len : 0); if (result == NRFX_SUCCESS) { return; } /* Cannot prepare for transfer. */ status = -EIO; } else { /* Zero-length buffer provided. */ status = 0; } spi_context_complete(ctx, status); }
/*
 * Drain the DesignWare SPI controller's RX FIFO into the context's RX
 * buffer, then lower the RX FIFO threshold (RXFTLR) so the final,
 * partially-filled FIFO of the transfer still raises an interrupt.
 */
static void pull_data(struct device *dev)
{
	const struct spi_dw_config *info = dev->config->config_info;
	struct spi_dw_data *spi = dev->driver_data;

	DBG_COUNTER_INIT();

	/* RXFLR reports the number of entries in the RX FIFO; pop one
	 * frame from the data register per iteration until empty.
	 */
	while (read_rxflr(info->regs)) {
		u32_t data = read_dr(info->regs);

		DBG_COUNTER_INC();

		/* Store only if a receive buffer is active; otherwise the
		 * frame is read (to empty the FIFO) and discarded.
		 */
		if (spi_context_rx_buf_on(&spi->ctx)) {
			/* dfs = data frame size in bytes; write with
			 * UNALIGNED_PUT since rx_buf may be unaligned.
			 */
			switch (spi->dfs) {
			case 1:
				UNALIGNED_PUT(data, (u8_t *)spi->ctx.rx_buf);
				break;
			case 2:
				UNALIGNED_PUT(data, (u16_t *)spi->ctx.rx_buf);
				break;
/* NOTE(review): 4-byte frames are compiled out on ARC — presumably due
 * to an unaligned-access restriction on that architecture; confirm.
 */
#ifndef CONFIG_ARC
			case 4:
				UNALIGNED_PUT(data, (u32_t *)spi->ctx.rx_buf);
				break;
#endif
			}
		}

		/* Advance rx_buf/rx_len by one frame of dfs bytes. */
		spi_context_update_rx(&spi->ctx, spi->dfs, 1);

		/* Track TX-vs-RX FIFO imbalance (filled by the push side). */
		spi->fifo_diff--;
	}

	/* Adjust the RX FIFO interrupt threshold for the tail of the
	 * transfer, where fewer frames than the current threshold remain.
	 * NOTE(review): if tx_len were 0 in the first branch, tx_len - 1
	 * would wrap; presumably tx_len > 0 whenever rx_len has hit 0
	 * mid-transfer — confirm against the push/completion logic.
	 */
	if (!spi->ctx.rx_len && spi->ctx.tx_len < DW_SPI_FIFO_DEPTH) {
		write_rxftlr(spi->ctx.tx_len - 1, info->regs);
	} else if (read_rxftlr(info->regs) >= spi->ctx.rx_len) {
		write_rxftlr(spi->ctx.rx_len - 1, info->regs);
	}

	LOG_DBG("Pulled: %d", DBG_COUNTER_RESULT());
}