/***************************************************************************//**
 * @brief dac_write_custom_data
 *
 * Replicates each caller-supplied I/Q word across every TX channel pair in
 * the DAC DMA buffer, flushes the buffer out of the data cache, then programs
 * the AXI DMAC to stream it.
 *
 * @param core            - DAC core descriptor (buffer base address, channel
 *                          count, DMA transfer type).
 * @param custom_data_iq  - packed I/Q sample words to transmit.
 * @param custom_tx_count - number of words in custom_data_iq.
*******************************************************************************/
void dac_write_custom_data(dac_core *core,
			   const uint32_t *custom_data_iq,
			   uint32_t custom_tx_count)
{
	uint32_t index;
	uint32_t index_mem = 0;
	uint32_t length;
	uint32_t dmac_flags;
	uint8_t  chan;
	uint8_t  num_tx_channels = core->no_of_channels / 2;

	for (index = 0; index < custom_tx_count; index++) {
		/* send the same data on all the channels */
		for (chan = 0; chan < num_tx_channels; chan++) {
			Xil_Out32(core->dac_ddr_baseaddr +
				  index_mem * sizeof(uint32_t),
				  custom_data_iq[index]);
			index_mem++;
		}
	}
	/* make the generated buffer visible to the DMA engine */
	Xil_DCacheFlushRange(core->dac_ddr_baseaddr,
			     index_mem * sizeof(uint32_t));

	length = index_mem * sizeof(uint32_t);

	switch (core->dma_type) {
	case DMA_STREAM:
		dmac_flags = 0;
		break;
	case DMA_CYCLIC:
		dmac_flags = AXI_DMAC_FLAG_CYCLIC;
		break;
	case DMA_PLDDR_FIFO:
		dmac_flags = AXI_DMAC_FLAG_LAST;
		break;
	default:
		/* BUGFIX: dmac_flags was read uninitialized (undefined
		 * behavior) for any other dma_type value; fall back to a
		 * plain one-shot transfer. */
		dmac_flags = 0;
		break;
	}

	dac_dma_write(AXI_DMAC_REG_CTRL, 0);
	dac_dma_write(AXI_DMAC_REG_CTRL, AXI_DMAC_CTRL_ENABLE);
	dac_dma_write(AXI_DMAC_REG_FLAGS, dmac_flags);
	dac_dma_write(AXI_DMAC_REG_SRC_ADDRESS, core->dac_ddr_baseaddr);
	dac_dma_write(AXI_DMAC_REG_SRC_STRIDE, 0x0);
	/* X_LENGTH is programmed as (bytes - 1) per the AXI DMAC register map */
	dac_dma_write(AXI_DMAC_REG_X_LENGTH, length - 1);
	dac_dma_write(AXI_DMAC_REG_Y_LENGTH, 0x0);
	dac_dma_write(AXI_DMAC_REG_START_TRANSFER, 0x1);
}
/***************************************************************************//** * @brief dac_init *******************************************************************************/ void dac_init(struct ad9361_rf_phy *phy, uint8_t data_sel) { uint32_t tx_count; uint32_t index; uint32_t index_i1; uint32_t index_q1; uint32_t index_i2; uint32_t index_q2; uint32_t data_i1; uint32_t data_q1; uint32_t data_i2; uint32_t data_q2; dac_write(ADI_REG_RSTN, 0x0); dac_write(ADI_REG_RSTN, ADI_RSTN | ADI_MMCM_RSTN); dac_write(ADI_REG_RATECNTRL, ADI_RATE(3)); dds_st.dac_clk = &phy->clks[TX_SAMPL_CLK]->rate; dds_st.num_dds_channels = 8; // FIXME dac_read(ADI_REG_VERSION, &dds_st.pcore_version); dac_stop(); switch (data_sel) { case DATA_SEL_DDS: dds_default_setup(DDS_CHAN_TX1_I_F1, 90000, 1000000, 0.25); dds_default_setup(DDS_CHAN_TX1_I_F2, 90000, 1000000, 0.25); dds_default_setup(DDS_CHAN_TX1_Q_F1, 0, 1000000, 0.25); dds_default_setup(DDS_CHAN_TX1_Q_F2, 0, 1000000, 0.25); dds_default_setup(DDS_CHAN_TX2_I_F1, 90000, 1000000, 0.25); dds_default_setup(DDS_CHAN_TX2_I_F2, 90000, 1000000, 0.25); dds_default_setup(DDS_CHAN_TX2_Q_F1, 0, 1000000, 0.25); dds_default_setup(DDS_CHAN_TX2_Q_F2, 0, 1000000, 0.25); dac_write(ADI_REG_CNTRL_2, 0); dac_datasel(-1, DATA_SEL_DDS); break; case DATA_SEL_DMA: tx_count = sizeof(sine_lut) / sizeof(uint16_t); for(index = 0; index < (tx_count * 2); index += 2) { index_i1 = index; index_q1 = index + (tx_count / 2); if(index_q1 >= (tx_count * 2)) index_q1 -= (tx_count * 2); data_i1 = (sine_lut[index_i1 / 2] << 20); data_q1 = (sine_lut[index_q1 / 2] << 4); // FIXME index_i2 = index_i1; index_q2 = index_q1; if(index_i2 >= (tx_count * 2)) index_i2 -= (tx_count * 2); if(index_q2 >= (tx_count * 2)) index_q2 -= (tx_count * 2); data_i2 = (sine_lut[index_i2 / 2] << 20); data_q2 = (sine_lut[index_q2 / 2] << 4); // FIXME } dac_dma_write(AXI_DMAC_REG_CTRL, 0); dac_dma_write(AXI_DMAC_REG_CTRL, AXI_DMAC_CTRL_ENABLE); // FIXME dac_dma_write(AXI_DMAC_REG_SRC_ADDRESS, 
DAC_DDR_BASEADDR); dac_dma_write(AXI_DMAC_REG_SRC_STRIDE, 0x0); dac_dma_write(AXI_DMAC_REG_X_LENGTH, (tx_count * 8) - 1); dac_dma_write(AXI_DMAC_REG_Y_LENGTH, 0x0); dac_dma_write(AXI_DMAC_REG_START_TRANSFER, 0x1); dac_write(ADI_REG_CNTRL_2, 0); dac_datasel(-1, DATA_SEL_DMA); break; default: break; } dds_st.enable = true; dac_start_sync(0); }
/***************************************************************************//** * @brief dac_init *******************************************************************************/ void dac_init(struct ad9361_rf_phy *phy, uint8_t data_sel, uint8_t config_dma) { #ifdef DMA_UIO uint32_t tx_count; uint32_t index, index_i1, index_q1, index_i2, index_q2; uint32_t index_mem; uint32_t data_i1, data_q1, data_i2, data_q2; uint32_t length; int dev_mem_fd; uint32_t mapping_length, page_mask, page_size; void *mapping_addr, *tx_buff_virt_addr; tx_dma_uio_fd = open(TX_DMA_UIO_DEV, O_RDWR); if(tx_dma_uio_fd < 1) { printf("%s: Can't open tx_dma_uio device\n\r", __func__); return; } tx_dma_uio_addr = mmap(NULL, 4096, PROT_READ|PROT_WRITE, MAP_SHARED, tx_dma_uio_fd, 0); #endif dac_write(phy, DAC_REG_RSTN, 0x0); dac_write(phy, DAC_REG_RSTN, DAC_RSTN); dac_write(phy, DAC_REG_RATECNTRL, DAC_RATE(3)); dds_st[phy->id_no].dac_clk = &phy->clks[TX_SAMPL_CLK]->rate; dds_st[phy->id_no].rx2tx2 = phy->pdata->rx2tx2; if(dds_st[phy->id_no].rx2tx2) { dds_st[phy->id_no].num_dds_channels = 8; } else { dds_st[phy->id_no].num_dds_channels = 4; } dac_read(phy, DAC_REG_VERSION, &dds_st[phy->id_no].pcore_version); dac_write(phy, DAC_REG_CNTRL_1, 0); switch (data_sel) { case DATA_SEL_DDS: dds_default_setup(phy, DDS_CHAN_TX1_I_F1, 90000, 1000000, 250000); dds_default_setup(phy, DDS_CHAN_TX1_I_F2, 90000, 1000000, 250000); dds_default_setup(phy, DDS_CHAN_TX1_Q_F1, 0, 1000000, 250000); dds_default_setup(phy, DDS_CHAN_TX1_Q_F2, 0, 1000000, 250000); if(dds_st[phy->id_no].rx2tx2) { dds_default_setup(phy, DDS_CHAN_TX2_I_F1, 90000, 1000000, 250000); dds_default_setup(phy, DDS_CHAN_TX2_I_F2, 90000, 1000000, 250000); dds_default_setup(phy, DDS_CHAN_TX2_Q_F1, 0, 1000000, 250000); dds_default_setup(phy, DDS_CHAN_TX2_Q_F2, 0, 1000000, 250000); } dac_write(phy, DAC_REG_CNTRL_2, 0); dac_datasel(phy, -1, DATA_SEL_DDS); break; case DATA_SEL_DMA: if(config_dma) { #ifdef DMA_UIO get_file_info(TX_BUFF_MEM_SIZE, 
&tx_buff_mem_size); get_file_info(TX_BUFF_MEM_ADDR, &tx_buff_mem_addr); dev_mem_fd = open("/dev/mem", O_RDWR | O_SYNC); if(dev_mem_fd == -1) { printf("%s: Can't open /dev/mem device\n\r", __func__); return; } page_size = sysconf(_SC_PAGESIZE); mapping_length = (((tx_buff_mem_size / page_size) + 1) * page_size); page_mask = (page_size - 1); mapping_addr = mmap(NULL, mapping_length, PROT_READ | PROT_WRITE, MAP_SHARED, dev_mem_fd, (tx_buff_mem_addr & ~page_mask)); if(mapping_addr == MAP_FAILED) { printf("%s: mmap error\n\r", __func__); return; } tx_buff_virt_addr = (mapping_addr + (tx_buff_mem_addr & page_mask)); tx_count = sizeof(sine_lut) / sizeof(uint16_t); if(dds_st[phy->id_no].rx2tx2) { #ifdef FMCOMMS5 for(index = 0, index_mem = 0; index < (tx_count * 2); index += 2, index_mem += 4) #else for(index = 0, index_mem = 0; index < (tx_count * 2); index += 2, index_mem += 2) #endif { index_i1 = index; index_q1 = index + (tx_count / 2); if(index_q1 >= (tx_count * 2)) index_q1 -= (tx_count * 2); data_i1 = (sine_lut[index_i1 / 2] << 20); data_q1 = (sine_lut[index_q1 / 2] << 4); *((unsigned *) (tx_buff_virt_addr + (index_mem* 4))) = data_i1 | data_q1; index_i2 = index_i1; index_q2 = index_q1; if(index_i2 >= (tx_count * 2)) index_i2 -= (tx_count * 2); if(index_q2 >= (tx_count * 2)) index_q2 -= (tx_count * 2); data_i2 = (sine_lut[index_i2 / 2] << 20); data_q2 = (sine_lut[index_q2 / 2] << 4); *((unsigned *) (tx_buff_virt_addr + ((index_mem+ 1) * 4))) = data_i2 | data_q2; #ifdef FMCOMMS5 *((unsigned *) (tx_buff_virt_addr + ((index_mem+ 2) * 4))) = data_i1 | data_q1; *((unsigned *) (tx_buff_virt_addr + ((index_mem+ 3) * 4))) = data_i2 | data_q2; #endif } } else { for(index = 0; index < tx_count; index += 1) { index_i1 = index; index_q1 = index + (tx_count / 4); if(index_q1 >= tx_count) index_q1 -= tx_count; data_i1 = (sine_lut[index_i1 / 2] << 20); data_q1 = (sine_lut[index_q1 / 2] << 4); *((unsigned *) (tx_buff_virt_addr + (index * 4))) = data_i1 | data_q1; } } 
munmap(mapping_addr, mapping_length); close(dev_mem_fd); if(dds_st[phy->id_no].rx2tx2) { length = (tx_count * 8); } else { length = (tx_count * 4); } #ifdef FMCOMMS5 length = (tx_count * 16); #endif dac_dma_write(AXI_DMAC_REG_CTRL, 0); dac_dma_write(AXI_DMAC_REG_CTRL, AXI_DMAC_CTRL_ENABLE); dac_dma_write(AXI_DMAC_REG_SRC_ADDRESS, tx_buff_mem_addr); dac_dma_write(AXI_DMAC_REG_SRC_STRIDE, 0x0); dac_dma_write(AXI_DMAC_REG_X_LENGTH, length - 1); dac_dma_write(AXI_DMAC_REG_Y_LENGTH, 0x0); dac_dma_write(AXI_DMAC_REG_START_TRANSFER, 0x1); #endif } dac_write(phy, DAC_REG_CNTRL_2, 0); dac_datasel(phy, -1, DATA_SEL_DMA); break; default: break; } dds_st[phy->id_no].enable = true; dac_start_sync(phy, 0); }
/***************************************************************************//** * @brief dac_init *******************************************************************************/ void dac_init(struct ad9361_rf_phy *phy, uint8_t data_sel, uint8_t config_dma) { uint32_t tx_count; uint32_t index; uint32_t index_i1; uint32_t index_q1; uint32_t index_i2; uint32_t index_q2; uint32_t index_mem; uint32_t data_i1; uint32_t data_q1; uint32_t data_i2; uint32_t data_q2; uint32_t length; dac_write(phy, DAC_REG_RSTN, 0x0); dac_write(phy, DAC_REG_RSTN, DAC_RSTN | DAC_MMCM_RSTN); dds_st[phy->id_no].dac_clk = &phy->clks[TX_SAMPL_CLK]->rate; dds_st[phy->id_no].rx2tx2 = phy->pdata->rx2tx2; if(dds_st[phy->id_no].rx2tx2) { dds_st[phy->id_no].num_buf_channels = 4; dac_write(phy, DAC_REG_RATECNTRL, DAC_RATE(3)); } else { dds_st[phy->id_no].num_buf_channels = 2; dac_write(phy, DAC_REG_RATECNTRL, DAC_RATE(1)); } dac_read(phy, DAC_REG_VERSION, &dds_st[phy->id_no].pcore_version); dac_stop(phy); switch (data_sel) { case DATA_SEL_DDS: dds_default_setup(phy, DDS_CHAN_TX1_I_F1, 90000, 1000000, 250000); dds_default_setup(phy, DDS_CHAN_TX1_I_F2, 90000, 1000000, 250000); dds_default_setup(phy, DDS_CHAN_TX1_Q_F1, 0, 1000000, 250000); dds_default_setup(phy, DDS_CHAN_TX1_Q_F2, 0, 1000000, 250000); if(dds_st[phy->id_no].rx2tx2) { dds_default_setup(phy, DDS_CHAN_TX2_I_F1, 90000, 1000000, 250000); dds_default_setup(phy, DDS_CHAN_TX2_I_F2, 90000, 1000000, 250000); dds_default_setup(phy, DDS_CHAN_TX2_Q_F1, 0, 1000000, 250000); dds_default_setup(phy, DDS_CHAN_TX2_Q_F2, 0, 1000000, 250000); } dac_write(phy, DAC_REG_CNTRL_2, 0); dac_datasel(phy, -1, DATA_SEL_DDS); break; case DATA_SEL_DMA: if(config_dma) { tx_count = sizeof(sine_lut) / sizeof(uint16_t); if(dds_st[phy->id_no].rx2tx2) { #ifdef FMCOMMS5 for(index = 0, index_mem = 0; index < (tx_count * 2); index += 2, index_mem += 4) #else for(index = 0, index_mem = 0; index < (tx_count * 2); index += 2, index_mem += 2) #endif { index_i1 = index; index_q1 = 
index + (tx_count / 2); if(index_q1 >= (tx_count * 2)) index_q1 -= (tx_count * 2); data_i1 = (sine_lut[index_i1 / 2] << 20); data_q1 = (sine_lut[index_q1 / 2] << 4); Xil_Out32(DAC_DDR_BASEADDR + index_mem * 4, data_i1 | data_q1); index_i2 = index_i1; index_q2 = index_q1; if(index_i2 >= (tx_count * 2)) index_i2 -= (tx_count * 2); if(index_q2 >= (tx_count * 2)) index_q2 -= (tx_count * 2); data_i2 = (sine_lut[index_i2 / 2] << 20); data_q2 = (sine_lut[index_q2 / 2] << 4); Xil_Out32(DAC_DDR_BASEADDR + (index_mem + 1) * 4, data_i2 | data_q2); #ifdef FMCOMMS5 Xil_Out32(DAC_DDR_BASEADDR + (index_mem + 2) * 4, data_i1 | data_q1); Xil_Out32(DAC_DDR_BASEADDR + (index_mem + 3) * 4, data_i2 | data_q2); #endif } } else { for(index = 0; index < tx_count; index += 1) { index_i1 = index; index_q1 = index + (tx_count / 4); if(index_q1 >= tx_count) index_q1 -= tx_count; data_i1 = (sine_lut[index_i1] << 20); data_q1 = (sine_lut[index_q1] << 4); Xil_Out32(DAC_DDR_BASEADDR + index * 4, data_i1 | data_q1); } } Xil_DCacheFlush(); if(dds_st[phy->id_no].rx2tx2) { length = (tx_count * 8); } else { length = (tx_count * 4); } #ifdef FMCOMMS5 length = (tx_count * 16); #endif dac_dma_write(AXI_DMAC_REG_CTRL, 0); dac_dma_write(AXI_DMAC_REG_CTRL, AXI_DMAC_CTRL_ENABLE); dac_dma_write(AXI_DMAC_REG_SRC_ADDRESS, DAC_DDR_BASEADDR); dac_dma_write(AXI_DMAC_REG_SRC_STRIDE, 0x0); dac_dma_write(AXI_DMAC_REG_X_LENGTH, length - 1); dac_dma_write(AXI_DMAC_REG_Y_LENGTH, 0x0); dac_dma_write(AXI_DMAC_REG_START_TRANSFER, 0x1); } dac_write(phy, DAC_REG_CNTRL_2, 0); dac_datasel(phy, -1, DATA_SEL_DMA); break; default: break; } dds_st[phy->id_no].enable = true; dac_start_sync(phy, 0); }