Example 1
0
/*
 * Disable an I2S channel and quiesce its hardware.
 *
 * Releases the channel's DMA resources (only if the channel was enabled),
 * clears the enabled flag, holds the channel's FIFO/sync logic in reset,
 * clears pending status and error interrupt bits, and gates the local
 * clock and interrupt via the CID control register.
 *
 * channel: index into i2s_info/i2s_reg_map arrays; the caller is expected
 *          to pass a value below I2S_NUM_CHANNELS (not re-validated here).
 */
static void i2s_disable(uint8_t channel)
{
	uint32_t reg;

	// Release DMA resources only if a transfer was set up for this channel
	if (i2s_info->en[channel])
	{
		soc_dma_stop_transfer(&(i2s_info->dma_ch[channel]));
		soc_dma_release(&(i2s_info->dma_ch[channel]));
		soc_dma_free_list(&(i2s_info->dma_cfg[channel]));
	}
	// Clear enabled flag for channel
	i2s_info->en[channel] = 0;

	// Disable channel and hold parts in reset (active-low reset bits)
	reg = MMIO_REG_VAL_FROM_BASE(SOC_I2S_BASE, i2s_reg_map[channel].ctrl);
	reg &= ~(1 << (i2s_reg_map[channel].ctrl_fifo_rst));
	reg &= ~(1 << (i2s_reg_map[channel].ctrl_sync_rst));
	MMIO_REG_VAL_FROM_BASE(SOC_I2S_BASE, i2s_reg_map[channel].ctrl) = reg;

	// Clear all interrupts (status bits plus the error bit)
	reg = MMIO_REG_VAL_FROM_BASE(SOC_I2S_BASE, i2s_reg_map[channel].stat);
	reg &= ~(i2s_reg_map[channel].stat_mask);
	reg &= ~(1 << i2s_reg_map[channel].stat_err);
	MMIO_REG_VAL_FROM_BASE(SOC_I2S_BASE, i2s_reg_map[channel].stat) = reg;

	// Disable local clock and interrupts
	reg = MMIO_REG_VAL_FROM_BASE(SOC_I2S_BASE, i2s_reg_map[channel].cid_ctrl);
	reg |= (1 << (i2s_reg_map[channel].cid_ctrl_strobe));
	reg |= (1 << (i2s_reg_map[channel].cid_ctrl_strobe_sync));
	reg &= ~(1 << (i2s_reg_map[channel].cid_ctrl_mask));
	MMIO_REG_VAL_FROM_BASE(SOC_I2S_BASE, i2s_reg_map[channel].cid_ctrl) = reg;

	// NOTE(review): a loop counting active channels (num_active) existed here
	// but its result was never used; removed as dead code. If per-SoC power
	// gating on "last channel disabled" was intended, it was never wired up.
}
Example 2
0
/*
 * Release the IRQ and DMA channel associated with a PCM substream.
 *
 * The runtime data is taken from the substream's runtime private_data when
 * available, otherwise from the global g_rtd[card][device][stream] table.
 * Safe to call when no runtime data was ever allocated (no-op in that case).
 */
static void free_dma_ch(struct snd_pcm_substream *substream)
{
	struct nx_runtime_data *rtd = NULL;
	int  stream = substream->stream;
	int device = substream->pcm->device;
	int card = substream->pcm->card->number;

	if ( substream->runtime &&
		substream->runtime->private_data)
		rtd = substream->runtime->private_data;
	else
		rtd = g_rtd[card][device][stream];

	DBGOUT("%s\n", __func__);

	// Fix: the g_rtd fallback entry may be NULL (never allocated);
	// dereferencing rtd->irq in that case would oops.
	if (!rtd)
		return;

	if (rtd->irq)
		free_irq(rtd->irq, (void*)substream);

	if (rtd->dma_tr)
		soc_dma_release(rtd->dma_tr);

	// Mark resources as released so a repeated call is harmless
	rtd->irq = 0;
	rtd->dma_tr = NULL;
}
Example 3
0
/*
 * Start a DMA-driven I2S transmit stream on I2S_CHANNEL_TX.
 *
 * buf:      source buffer of 32-bit samples to transmit.
 * len:      total length to stream, in bytes (split evenly across buffers).
 * num_bufs: 0 for a single one-shot transfer of the whole buffer;
 *           >0 for a circular stream of num_bufs equal slices of buf,
 *           with a per-block callback instead of a completion callback.
 *
 * Returns DRV_RC_OK on success, DRV_RC_FAIL on any error (channel busy,
 * unconfigured, DMA acquire/config/start failure, or list allocation
 * failure).
 */
DRIVER_API_RC soc_i2s_stream(uint32_t *buf, uint32_t len, uint32_t num_bufs)
{
	DRIVER_API_RC ret;
	uint8_t channel = I2S_CHANNEL_TX;
	uint32_t reg;
	uint32_t len_per_buf;
	int i;
	struct soc_dma_xfer_item *dma_list;

	// Check channel no in use and configured
	if (channel >= I2S_NUM_CHANNELS) 
	{
		return DRV_RC_FAIL;
	} 
	else if (i2s_info->en[channel] || !(i2s_info->cfgd[channel])) 
	{
		return DRV_RC_FAIL;
	}

	// Get a DMA channel
	ret = soc_dma_acquire(&(i2s_info->dma_ch[channel]));

	if (ret != DRV_RC_OK) 
	{
		return DRV_RC_FAIL;
	}

	// Enable the channel (sets i2s_info->en[channel]; presumably also
	// ungates the clock — body not visible here)
	i2s_enable(channel);

	// Determine the length of a single buffer
	// (num_bufs == 0 means one-shot: the whole buffer is a single transfer)
	if (num_bufs == 0) 
	{
		len_per_buf = len;
	} 
	else 
	{
		len_per_buf = len / num_bufs;
	}

	// Prep some configuration: memory-to-peripheral transfer into the
	// I2S TX data register, fixed destination, incrementing 32-bit source
	i2s_info->dma_cfg[channel].type = SOC_DMA_TYPE_MEM2PER;
	i2s_info->dma_cfg[channel].dest_interface = SOC_DMA_INTERFACE_I2S_TX;
	i2s_info->dma_cfg[channel].dest_step_count = 0;
	i2s_info->dma_cfg[channel].src_step_count = 0;

	i2s_info->dma_cfg[channel].xfer.dest.delta = SOC_DMA_DELTA_NONE;
	i2s_info->dma_cfg[channel].xfer.dest.width = SOC_DMA_WIDTH_32;
	i2s_info->dma_cfg[channel].xfer.dest.addr = (void *)(SOC_I2S_BASE + SOC_I2S_DATA_REG);
	i2s_info->dma_cfg[channel].xfer.src.delta = SOC_DMA_DELTA_INCR;
	i2s_info->dma_cfg[channel].xfer.src.width = SOC_DMA_WIDTH_32;

	// One-shot: completion callback; circular: per-block callback.
	// NOTE(review): the unused callback slot is not cleared here — if the
	// dma_cfg struct is reused across calls a stale cb_done/cb_block could
	// remain set; confirm soc_dma_config tolerates that.
	if (num_bufs == 0) 
	{
		i2s_info->dma_cfg[channel].cb_done = i2s_dma_cb_done;
		i2s_info->dma_cfg[channel].cb_done_arg = (void *)((uint32_t)channel);
	} 
	else 
	{
		i2s_info->dma_cfg[channel].cb_block = i2s_dma_cb_block;
		i2s_info->dma_cfg[channel].cb_block_arg = (void *)((uint32_t)channel);
	}

	i2s_info->dma_cfg[channel].cb_err = i2s_dma_cb_err;
	i2s_info->dma_cfg[channel].cb_err_arg = (void *)((uint32_t)channel);

	// Setup the linked list: the first item is embedded in dma_cfg,
	// subsequent items are allocated and chained onto the previous one
	for (i = 0; i < ((num_bufs == 0) ? 1 : num_bufs); i++) 
	{
		if (i == 0) 
		{
			dma_list = &(i2s_info->dma_cfg[channel].xfer);
		} 
		else 
		{
			ret = soc_dma_alloc_list_item(&dma_list, dma_list);

			if (ret != DRV_RC_OK) 
			{
				goto fail;
			}
		}

		// Each slice starts len_per_buf bytes after the previous one;
		// sizes are expressed in 32-bit words for the DMA engine
		dma_list->src.addr = (void *)(&(buf[i * (len_per_buf / sizeof(uint32_t))]));
		dma_list->size = len_per_buf / sizeof(uint32_t);
	}

	// Create a circular list if we are doing circular buffering
	// (last item points back at the embedded first item)
	if (num_bufs != 0) 
	{
		dma_list->next = &(i2s_info->dma_cfg[channel].xfer);
	}

	// Setup and start the DMA engine
	ret = soc_dma_config(&(i2s_info->dma_ch[channel]), &(i2s_info->dma_cfg[channel]));

	if (ret != DRV_RC_OK) 
	{
		goto fail;
	}

	ret = soc_dma_start_transfer(&(i2s_info->dma_ch[channel]));

	if (ret != DRV_RC_OK) 
	{
		goto fail;
	}

	// Enable the channel and let it go!
	reg = MMIO_REG_VAL_FROM_BASE(SOC_I2S_BASE, i2s_reg_map[channel].ctrl);
	reg |= (1 << (i2s_reg_map[channel].ctrl_en));
	reg |= (1 << (i2s_reg_map[channel].ctrl_sync_rst));
	MMIO_REG_VAL_FROM_BASE(SOC_I2S_BASE, i2s_reg_map[channel].ctrl) = reg;

	return DRV_RC_OK;

fail:
	// NOTE(review): i2s_disable() already calls soc_dma_release() when the
	// channel is enabled (which it is here, after i2s_enable above), so the
	// explicit release below looks like a double release — confirm that
	// soc_dma_release() is idempotent or that i2s_disable's release path
	// differs in this build.
	i2s_disable(channel);
	soc_dma_release(&(i2s_info->dma_ch[channel]));
	return DRV_RC_FAIL;
}