static uint32_t get_io_pll_freq(void) {
    LTRACEF("IO_PLL_CTRL 0x%x\n", SLCR_REG(IO_PLL_CTRL));

    // XXX test that the pll is actually enabled
    uint32_t fdiv = BITS_SHIFT(SLCR_REG(IO_PLL_CTRL), 18, 12);

    return EXTERNAL_CLOCK_FREQ * fdiv;
}
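/* The CPU clock code below selects between the ARM, DDR, and IO PLLs, but only the IO PLL
 * reader is shown in this section. A minimal sketch of the missing readers, under the
 * assumption that they follow the same pattern as get_io_pll_freq(): FDIV in bits 18:12 of
 * the corresponding PLL_CTRL register, multiplied by the external PS_CLK: */
static uint32_t get_arm_pll_freq(void) {
    // XXX test that the pll is actually enabled
    uint32_t fdiv = BITS_SHIFT(SLCR_REG(ARM_PLL_CTRL), 18, 12);
    return EXTERNAL_CLOCK_FREQ * fdiv;
}

static uint32_t get_ddr_pll_freq(void) {
    // XXX test that the pll is actually enabled
    uint32_t fdiv = BITS_SHIFT(SLCR_REG(DDR_PLL_CTRL), 18, 12);
    return EXTERNAL_CLOCK_FREQ * fdiv;
}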
void platform_init(void) {
    uart_init();

    /* enable if we want to see some hardware boot status */
#if 0
    printf("zynq boot status:\n");
    printf("\tREBOOT_STATUS 0x%x\n", SLCR_REG(REBOOT_STATUS));
    printf("\tBOOT_MODE 0x%x\n", SLCR_REG(BOOT_MODE));

    zynq_dump_clocks();
#endif
}
void platform_early_init(void) {
    zynq_mio_init();
    zynq_pll_init();
    zynq_clk_init();
#if ZYNQ_SDRAM_INIT
    zynq_ddr_init();
#endif

    zynq_slcr_unlock();

    /* Enable all level shifters */
    SLCR_REG(LVL_SHFTR_EN) = 0xF;
    /* FPGA SW reset (not documented, but mandatory) */
    SLCR_REG(FPGA_RST_CTRL) = 0x0;

    /* zynq manual says this is mandatory for cache init */
    *REG32(SLCR_BASE + 0xa1c) = 0x020202;

    zynq_slcr_lock();

    /* early initialize the uart so we can printf */
    uart_init_early();

    /* initialize the interrupt controller */
    arm_gic_init();

    /* initialize the timer block */
    arm_cortex_a9_timer_init(CPUPRIV_BASE, zynq_get_arm_timer_freq());

    /* add the main memory arena */
#if !ZYNQ_CODE_IN_SDRAM && SDRAM_SIZE != 0
    /* In the case of running from SRAM while using SDRAM,
     * there is a discontinuity between the end of SRAM (256K) and the start of SDRAM (1MB),
     * so intentionally bump the boot-time allocator to start at the base of SDRAM.
     */
    extern uintptr_t boot_alloc_start;
    extern uintptr_t boot_alloc_end;

    boot_alloc_start = KERNEL_BASE + MB;
    boot_alloc_end = KERNEL_BASE + MB;
#endif

#if SDRAM_SIZE != 0
    pmm_add_arena(&sdram_arena);
#endif
    pmm_add_arena(&sram_arena);
}
int zynq_mio_init(void) {
    /* This DDRIOB configuration applies to both zybo and uzed, but it's possible
     * it may not work for all boards in the future. Just something to keep in mind
     * with different memory configurations.
     */
    SLCR_REG(GPIOB_CTRL) = GPIOB_CTRL_VREF_EN;

    for (size_t pin = 0; pin < countof(zynq_mio_cfg); pin++) {
        if (zynq_mio_cfg[pin] != MIO_DEFAULT) {
            SLCR_REG(MIO_PIN_00 + (pin * 4)) = zynq_mio_cfg[pin];
        }
    }

    SLCR_REG(SD0_WP_CD_SEL) = SDIO0_WP_SEL(0x37) | SDIO0_CD_SEL(0x2F);

    return 0;
}
static inline int pll_poll(uint32_t mask) {
    uint32_t iters = UINT_MAX;
    while (iters-- && !(SLCR_REG(PLL_STATUS) & mask))
        ;

    /* report success based on the status bit itself; checking the counter alone would
     * misreport a timeout as success, since iters wraps to UINT_MAX on the final decrement */
    if (SLCR_REG(PLL_STATUS) & mask) {
        return 0;
    }

    return -1;
}
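/* zynq_pll_init() and zynq_ddr_init() below poll arbitrary registers via reg_poll(), which is
 * not shown in this section. A minimal sketch, assuming it mirrors pll_poll() above but takes
 * the register address explicitly and returns -1 on timeout: */
static int reg_poll(uintptr_t addr, uint32_t mask) {
    uint32_t iters = UINT_MAX;
    while (iters-- && !(*REG32(addr) & mask))
        ;

    /* success only if the polled bit(s) actually set */
    return (*REG32(addr) & mask) ? 0 : -1;
}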
void platform_quiesce(void) {
#if ZYNQ_WITH_GEM_ETH
    gem_disable();
#endif

    platform_stop_timer();

    /* stop the 2nd cpu and hold it in reset */
    SLCR_REG(A9_CPU_RST_CTRL) |= (1<<1); // reset cpu 1
}
void platform_init(void) {
    uart_init();

    /* enable if we want to see some hardware boot status */
#if 0
    printf("zynq boot status:\n");
    printf("\tREBOOT_STATUS 0x%x\n", SLCR_REG(REBOOT_STATUS));
    printf("\tBOOT_MODE 0x%x\n", SLCR_REG(BOOT_MODE));

    zynq_dump_clocks();

    printf("zynq mio:\n");
    for (size_t i = 0; i < 54; i++) {
        printf("\t%02u: 0x%08x", i, *REG32((uintptr_t)&SLCR->MIO_PIN_00 + (i * 4)));
        if (i % 4 == 3 || i == 53) {
            putchar('\n');
        }
    }
#endif

    gem_init(GEM0_BASE, 256*1024);
}
static uint32_t get_cpu_input_freq(void) {
    LTRACEF("ARM_CLK_CTRL 0x%x\n", SLCR_REG(ARM_CLK_CTRL));

    uint32_t divisor = BITS_SHIFT(SLCR_REG(ARM_CLK_CTRL), 13, 8);
    uint32_t srcsel = BITS_SHIFT(SLCR_REG(ARM_CLK_CTRL), 5, 4);

    uint32_t srcclk;
    switch (srcsel) {
        default:
        case 0:
        case 1: // arm pll
            srcclk = get_arm_pll_freq();
            break;
        case 2: // ddr pll
            srcclk = get_ddr_pll_freq();
            break;
        case 3: // io pll
            srcclk = get_io_pll_freq();
            break;
    }

    // cpu 6x4x
    return srcclk / divisor;
}
static uint32_t get_cpu_1x_freq(void) {
    // cpu 1x is either /6 or /4 the speed of 6x4x
    return get_cpu_input_freq() / ((SLCR_REG(CLK_621_TRUE) & 1) ? 6 : 4);
}
static uint32_t get_cpu_2x_freq(void) {
    // cpu 2x is either /3 or /2 the speed of 6x4x
    return get_cpu_input_freq() / ((SLCR_REG(CLK_621_TRUE) & 1) ? 3 : 2);
}
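/* platform_early_init() passes zynq_get_arm_timer_freq() to the Cortex-A9 timer block, but the
 * function is not shown here. A sketch under the assumption that the A9 private/global timers
 * on Zynq are clocked from CPU_3x2x, i.e. half of the 6x4x CPU clock computed above: */
uint32_t zynq_get_arm_timer_freq(void) {
    return get_cpu_input_freq() / 2;
}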
void zynq_clk_init(void) {
    zynq_slcr_unlock();

    SLCR_REG(DCI_CLK_CTRL)   = zynq_clk_cfg.dci_clk;
    SLCR_REG(GEM0_CLK_CTRL)  = zynq_clk_cfg.gem0_clk;
    SLCR_REG(GEM0_RCLK_CTRL) = zynq_clk_cfg.gem0_rclk;
    SLCR_REG(GEM1_CLK_CTRL)  = zynq_clk_cfg.gem1_clk;
    SLCR_REG(GEM1_RCLK_CTRL) = zynq_clk_cfg.gem1_rclk;
    SLCR_REG(LQSPI_CLK_CTRL) = zynq_clk_cfg.lqspi_clk;
    SLCR_REG(SDIO_CLK_CTRL)  = zynq_clk_cfg.sdio_clk;
    SLCR_REG(UART_CLK_CTRL)  = zynq_clk_cfg.uart_clk;
    SLCR_REG(PCAP_CLK_CTRL)  = zynq_clk_cfg.pcap_clk;
    SLCR_REG(FPGA0_CLK_CTRL) = zynq_clk_cfg.fpga0_clk;
    SLCR_REG(FPGA1_CLK_CTRL) = zynq_clk_cfg.fpga1_clk;
    SLCR_REG(FPGA2_CLK_CTRL) = zynq_clk_cfg.fpga2_clk;
    SLCR_REG(FPGA3_CLK_CTRL) = zynq_clk_cfg.fpga3_clk;
    SLCR_REG(APER_CLK_CTRL)  = zynq_clk_cfg.aper_clk;
    SLCR_REG(CLK_621_TRUE)   = zynq_clk_cfg.clk_621_true;

    zynq_slcr_lock();
}
void platform_early_init(void) {
#if 0
    ps7_init();
#else
    /* Unlock the registers and leave them that way */
    zynq_slcr_unlock();
    zynq_mio_init();
    zynq_pll_init();
    zynq_clk_init();
#if ZYNQ_SDRAM_INIT
    zynq_ddr_init();
#endif
#endif

    /* Enable all level shifters */
    SLCR_REG(LVL_SHFTR_EN) = 0xF;
    /* FPGA SW reset (not documented, but mandatory) */
    SLCR_REG(FPGA_RST_CTRL) = 0x0;

    /* zynq manual says this is mandatory for cache init */
    *REG32(SLCR_BASE + 0xa1c) = 0x020202;

    /* early initialize the uart so we can printf */
    uart_init_early();

    /* initialize the interrupt controller */
    arm_gic_init();

    zynq_gpio_init();

    /* initialize the timer block */
    arm_cortex_a9_timer_init(CPUPRIV_BASE, zynq_get_arm_timer_freq());

    /* bump the 2nd cpu into our code space and remap the top SRAM block */
    if (KERNEL_LOAD_OFFSET != 0) {
        /* construct a trampoline to get the 2nd cpu up to the trap routine */

        /* figure out the offset of the trampoline routine in physical space from address 0 */
        extern void platform_reset(void);
        addr_t tramp = (addr_t)&platform_reset;
        tramp -= KERNEL_BASE;
        tramp += MEMBASE;

        /* stuff in a "ldr pc, [next address]" and the target address */
        uint32_t *ptr = (uint32_t *)KERNEL_BASE;

        ptr[0] = 0xe51ff004; // ldr pc, [pc, #-4]
        ptr[1] = tramp;
        arch_clean_invalidate_cache_range((addr_t)ptr, 8);
    }

    /* reset the 2nd cpu, letting it go through its reset vector (at 0x0 physical) */
    SLCR_REG(A9_CPU_RST_CTRL) |= (1<<1); // reset cpu 1
    spin(10);
    SLCR_REG(A9_CPU_RST_CTRL) &= ~(1<<1); // unreset cpu 1

    /* wait for the 2nd cpu to reset, go through the usual reset vector, and get trapped by our code */
    /* see platform/zynq/reset.S */
    extern volatile int __cpu_trapped;
    uint count = 100000;
    while (--count) {
        arch_clean_invalidate_cache_range((addr_t)&__cpu_trapped, sizeof(__cpu_trapped));

        if (__cpu_trapped != 0)
            break;
    }

    if (count == 0) {
        panic("ZYNQ: failed to trap 2nd cpu\n");
    }

    /* bounce the 4th sram region down to a lower address */
    SLCR_REG(OCM_CFG) &= ~0xf; /* all banks at low address */

    /* add the main memory arena */
#if !ZYNQ_CODE_IN_SDRAM && SDRAM_SIZE != 0
    /* In the case of running from SRAM while using SDRAM,
     * there is a discontinuity between the end of SRAM (256K) and the start of SDRAM (1MB),
     * so intentionally bump the boot-time allocator to start at the base of SDRAM.
     */
    extern uintptr_t boot_alloc_start;
    extern uintptr_t boot_alloc_end;

    boot_alloc_start = KERNEL_BASE + MB;
    boot_alloc_end = KERNEL_BASE + MB;
#endif

#if SDRAM_SIZE != 0
    pmm_add_arena(&sdram_arena);
#endif
    pmm_add_arena(&sram_arena);
}
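/* For reference, a sketch (in C, as an assumption; the function name here is illustrative) of
 * what the trap routine reached through the trampoline above is expected to do; the real
 * implementation lives in platform/zynq/reset.S. CPU 1 comes out of reset at physical 0x0,
 * executes the "ldr pc" trampoline, lands in the trap routine, flags itself as trapped, and
 * parks until the boot cpu later releases it: */
volatile int __cpu_trapped;

void secondary_cpu_trap(void) {
    __cpu_trapped = 1;            /* tell the boot cpu we made it through reset */
    for (;;) {
        __asm__ volatile("wfe");  /* park until released */
    }
}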
void zynq_ddr_init(void) {
    SLCR_REG(DDRIOB_ADDR0) = zynq_ddriob_cfg.addr0;
    SLCR_REG(DDRIOB_ADDR1) = zynq_ddriob_cfg.addr1;
    SLCR_REG(DDRIOB_DATA0) = zynq_ddriob_cfg.data0;
    SLCR_REG(DDRIOB_DATA1) = zynq_ddriob_cfg.data1;
    SLCR_REG(DDRIOB_DIFF0) = zynq_ddriob_cfg.diff0;
    SLCR_REG(DDRIOB_DIFF1) = zynq_ddriob_cfg.diff1;
    SLCR_REG(DDRIOB_CLOCK) = DDRIOB_OUTPUT_EN(0x3);

    /* These register fields are not documented in the TRM. These
     * values represent the defaults generated via the Zynq tools */
    SLCR_REG(DDRIOB_DRIVE_SLEW_ADDR)  = 0x0018C61CU;
    SLCR_REG(DDRIOB_DRIVE_SLEW_DATA)  = 0x00F9861CU;
    SLCR_REG(DDRIOB_DRIVE_SLEW_DIFF)  = 0x00F9861CU;
    SLCR_REG(DDRIOB_DRIVE_SLEW_CLOCK) = 0x00F9861CU;
    SLCR_REG(DDRIOB_DDR_CTRL) = 0x00000E60U;
    SLCR_REG(DDRIOB_DCI_CTRL) = 0x00000001U;
    SLCR_REG(DDRIOB_DCI_CTRL) |= 0x00000020U;
    SLCR_REG(DDRIOB_DCI_CTRL) |= 0x00000823U;

    /* Write address / value pairs from the target table */
    for (size_t i = 0; i < zynq_ddr_cfg_cnt; i += 2) {
        *REG32(zynq_ddr_cfg[i]) = zynq_ddr_cfg[i+1];
    }

    /* Wait for DCI done */
    reg_poll((uintptr_t)&SLCR->DDRIOB_DCI_STATUS, 0x2000);

    /* Bring ddr out of reset and wait until self refresh */
    *REG32(DDRC_CTRL) |= DDRC_CTRL_OUT_OF_RESET;
    reg_poll(DDRC_MODE_STATUS, DDRC_STS_SELF_REFRESH);

    /* Switch timer to 64k */
    *REG32(0xF8007000) = *REG32(0xF8007000) & ~0x20000000U;

    if (zynq_ddriob_cfg.ibuf_disable) {
        SLCR_REG(DDRIOB_DATA0) |= DDRIOB_IBUF_DISABLE_MODE;
        SLCR_REG(DDRIOB_DATA1) |= DDRIOB_IBUF_DISABLE_MODE;
        SLCR_REG(DDRIOB_DIFF0) |= DDRIOB_IBUF_DISABLE_MODE;
        SLCR_REG(DDRIOB_DIFF1) |= DDRIOB_IBUF_DISABLE_MODE;
    }

    if (zynq_ddriob_cfg.term_disable) {
        SLCR_REG(DDRIOB_DATA0) |= DDRIOB_TERM_DISABLE_MODE;
        SLCR_REG(DDRIOB_DATA1) |= DDRIOB_TERM_DISABLE_MODE;
        SLCR_REG(DDRIOB_DIFF0) |= DDRIOB_TERM_DISABLE_MODE;
        SLCR_REG(DDRIOB_DIFF1) |= DDRIOB_TERM_DISABLE_MODE;
    }
}
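/* A sketch of the shape of the address/value table consumed by the loop above. The real
 * zynq_ddr_cfg[] is board-specific and generated from the Zynq tools; the entries below are
 * placeholders for illustration only, not real DDR controller settings: */
static const uint32_t example_ddr_cfg[] = {
    /* register address, value to write */
    0xF8006000, 0x00000080,   /* hypothetical DDRC register write */
    0xF8006004, 0x00001081,   /* hypothetical DDRC register write */
};
static const size_t example_ddr_cfg_cnt = countof(example_ddr_cfg);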
void zynq_clk_init(void) {
    SLCR_REG(DCI_CLK_CTRL)    = zynq_clk_cfg.dci_clk;
    SLCR_REG(GEM0_CLK_CTRL)   = zynq_clk_cfg.gem0_clk;
    SLCR_REG(GEM0_RCLK_CTRL)  = zynq_clk_cfg.gem0_rclk;
    SLCR_REG(GEM1_CLK_CTRL)   = zynq_clk_cfg.gem1_clk;
    SLCR_REG(GEM1_RCLK_CTRL)  = zynq_clk_cfg.gem1_rclk;
    SLCR_REG(SMC_CLK_CTRL)    = zynq_clk_cfg.smc_clk;
    SLCR_REG(LQSPI_CLK_CTRL)  = zynq_clk_cfg.lqspi_clk;
    SLCR_REG(SDIO_CLK_CTRL)   = zynq_clk_cfg.sdio_clk;
    SLCR_REG(UART_CLK_CTRL)   = zynq_clk_cfg.uart_clk;
    SLCR_REG(SPI_CLK_CTRL)    = zynq_clk_cfg.spi_clk;
    SLCR_REG(CAN_CLK_CTRL)    = zynq_clk_cfg.can_clk;
    SLCR_REG(CAN_MIOCLK_CTRL) = zynq_clk_cfg.can_mioclk;
    SLCR_REG(USB0_CLK_CTRL)   = zynq_clk_cfg.usb0_clk;
    SLCR_REG(USB1_CLK_CTRL)   = zynq_clk_cfg.usb1_clk;
    SLCR_REG(PCAP_CLK_CTRL)   = zynq_clk_cfg.pcap_clk;
    SLCR_REG(FPGA0_CLK_CTRL)  = zynq_clk_cfg.fpga0_clk;
    SLCR_REG(FPGA1_CLK_CTRL)  = zynq_clk_cfg.fpga1_clk;
    SLCR_REG(FPGA2_CLK_CTRL)  = zynq_clk_cfg.fpga2_clk;
    SLCR_REG(FPGA3_CLK_CTRL)  = zynq_clk_cfg.fpga3_clk;
    SLCR_REG(APER_CLK_CTRL)   = zynq_clk_cfg.aper_clk;
    SLCR_REG(CLK_621_TRUE)    = zynq_clk_cfg.clk_621_true;
}
/* For each PLL we need to configure the cp / res / lock_cnt and then place the PLL in bypass
 * before doing a reset to switch to the new values. Then bypass is removed to switch back to
 * using the PLL once it's locked.
 */
int zynq_pll_init(void) {
    const zynq_pll_cfg_tree_t *cfg = &zynq_pll_cfg;

    zynq_slcr_unlock();

    SLCR_REG(ARM_PLL_CFG)  = PLL_CFG_LOCK_CNT(cfg->arm.lock_cnt) | PLL_CFG_PLL_CP(cfg->arm.cp) |
                             PLL_CFG_PLL_RES(cfg->arm.res);
    SLCR_REG(ARM_PLL_CTRL) = PLL_FDIV(cfg->arm.fdiv) | PLL_BYPASS_FORCE | PLL_RESET;
    SLCR_REG(ARM_PLL_CTRL) &= ~PLL_RESET;

    if (reg_poll((uintptr_t)&SLCR->PLL_STATUS, PLL_STATUS_ARM_PLL_LOCK) == -1) {
        return -1;
    }

    SLCR_REG(ARM_PLL_CTRL) &= ~PLL_BYPASS_FORCE;
    SLCR_REG(ARM_CLK_CTRL) = zynq_clk_cfg.arm_clk;

#if ZYNQ_SDRAM_INIT
    SLCR_REG(DDR_PLL_CFG)  = PLL_CFG_LOCK_CNT(cfg->ddr.lock_cnt) | PLL_CFG_PLL_CP(cfg->ddr.cp) |
                             PLL_CFG_PLL_RES(cfg->ddr.res);
    SLCR_REG(DDR_PLL_CTRL) = PLL_FDIV(cfg->ddr.fdiv) | PLL_BYPASS_FORCE | PLL_RESET;
    SLCR_REG(DDR_PLL_CTRL) &= ~PLL_RESET;

    if (reg_poll((uintptr_t)&SLCR->PLL_STATUS, PLL_STATUS_DDR_PLL_LOCK) == -1) {
        return -1;
    }

    SLCR_REG(DDR_PLL_CTRL) &= ~PLL_BYPASS_FORCE;
    SLCR_REG(DDR_CLK_CTRL) = zynq_clk_cfg.ddr_clk;
#endif

    SLCR_REG(IO_PLL_CFG)  = PLL_CFG_LOCK_CNT(cfg->io.lock_cnt) | PLL_CFG_PLL_CP(cfg->io.cp) |
                            PLL_CFG_PLL_RES(cfg->io.res);
    SLCR_REG(IO_PLL_CTRL) = PLL_FDIV(cfg->io.fdiv) | PLL_BYPASS_FORCE | PLL_RESET;
    SLCR_REG(IO_PLL_CTRL) &= ~PLL_RESET;

    if (reg_poll((uintptr_t)&SLCR->PLL_STATUS, PLL_STATUS_IO_PLL_LOCK) == -1) {
        return -1;
    }

    SLCR_REG(IO_PLL_CTRL) &= ~PLL_BYPASS_FORCE;

    zynq_slcr_lock();

    return 0;
}
int zynq_clk_init(void) {
    zynq_slcr_unlock();

    SLCR_REG(DCI_CLK_CTRL)   = CLK_CTRL_CLKACT1 | CLK_CTRL_DIVISOR1(52) | CLK_CTRL_DIVISOR2(2);
    SLCR_REG(GEM0_RCLK_CTRL) = CLK_CTRL_CLKACT1;
    SLCR_REG(GEM0_CLK_CTRL)  = CLK_CTRL_CLKACT1 | CLK_CTRL_DIVISOR1(8) | CLK_CTRL_DIVISOR2(1);
    SLCR_REG(LQSPI_CLK_CTRL) = CLK_CTRL_CLKACT1 | CLK_CTRL_DIVISOR1(5);
    SLCR_REG(SDIO_CLK_CTRL)  = CLK_CTRL_CLKACT1 | CLK_CTRL_DIVISOR1(20);
    SLCR_REG(UART_CLK_CTRL)  = CLK_CTRL_CLKACT2 | CLK_CTRL_DIVISOR1(20);
    SLCR_REG(PCAP_CLK_CTRL)  = CLK_CTRL_CLKACT1 | CLK_CTRL_DIVISOR1(5);
    SLCR_REG(FPGA0_CLK_CTRL) = CLK_CTRL_DIVISOR1(10) | CLK_CTRL_DIVISOR2(1);
    SLCR_REG(FPGA1_CLK_CTRL) = CLK_CTRL_SRCSEL(3) | CLK_CTRL_DIVISOR1(6) | CLK_CTRL_DIVISOR2(1);
    SLCR_REG(FPGA2_CLK_CTRL) = CLK_CTRL_SRCSEL(2) | CLK_CTRL_DIVISOR1(53) | CLK_CTRL_DIVISOR2(2);
    SLCR_REG(FPGA3_CLK_CTRL) = CLK_CTRL_DIVISOR2(1);
    SLCR_REG(CLK_621_TRUE)   = CLK_621_ENABLE;
    SLCR_REG(APER_CLK_CTRL)  = DMA_CPU_CLK_EN | USB0_CPU_CLK_EN | USB1_CPU_CLK_EN |
                               GEM0_CPU_CLK_EN | SDI0_CPU_CLK_EN | I2C0_CPU_CLK_EN |
                               I2C1_CPU_CLK_EN | UART1_CPU_CLK_EN | GPIO_CPU_CLK_EN |
                               LQSPI_CPU_CLK_EN | SMC_CPU_CLK_EN;

    zynq_slcr_lock();
    return 0;
}
int zynq_mio_init(void) {
    zynq_slcr_unlock();

    SLCR_REG(GPIOB_CTRL) = GPIOB_CTRL_VREF_EN;

    SLCR_REG(DDRIOB_ADDR0) = DDRIOB_OUTPUT_EN(0x3);
    SLCR_REG(DDRIOB_ADDR1) = DDRIOB_OUTPUT_EN(0x3);
    SLCR_REG(DDRIOB_DATA0) = DDRIOB_INP_TYPE(1) | DDRIOB_TERM_EN | DDRIOB_DCI_TYPE(0x3) | DDRIOB_OUTPUT_EN(0x3);
    SLCR_REG(DDRIOB_DATA1) = DDRIOB_INP_TYPE(1) | DDRIOB_TERM_EN | DDRIOB_DCI_TYPE(0x3) | DDRIOB_OUTPUT_EN(0x3);
    SLCR_REG(DDRIOB_DIFF0) = DDRIOB_INP_TYPE(2) | DDRIOB_TERM_EN | DDRIOB_DCI_TYPE(0x3) | DDRIOB_OUTPUT_EN(0x3);
    SLCR_REG(DDRIOB_DIFF1) = DDRIOB_INP_TYPE(2) | DDRIOB_TERM_EN | DDRIOB_DCI_TYPE(0x3) | DDRIOB_OUTPUT_EN(0x3);
    SLCR_REG(DDRIOB_CLOCK) = DDRIOB_OUTPUT_EN(0x3);

    /* These register fields are not documented in the TRM. These
     * values represent the defaults generated via the Zynq tools */
    SLCR_REG(DDRIOB_DRIVE_SLEW_ADDR)  = 0x0018C61CU;
    SLCR_REG(DDRIOB_DRIVE_SLEW_DATA)  = 0x00F9861CU;
    SLCR_REG(DDRIOB_DRIVE_SLEW_DIFF)  = 0x00F9861CU;
    SLCR_REG(DDRIOB_DRIVE_SLEW_CLOCK) = 0x00F9861CU;
    SLCR_REG(DDRIOB_DDR_CTRL) = 0x00000E60U;
    SLCR_REG(DDRIOB_DCI_CTRL) = 0x00000001U;
    SLCR_REG(DDRIOB_DCI_CTRL) |= 0x00000020U;
    SLCR_REG(DDRIOB_DCI_CTRL) |= 0x00000823U;

    /* mio pin config */
    SLCR_REG(MIO_PIN_01) = MIO_L0_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS33;
    SLCR_REG(MIO_PIN_02) = MIO_L0_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS33;
    SLCR_REG(MIO_PIN_03) = MIO_L0_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS33;
    SLCR_REG(MIO_PIN_04) = MIO_L0_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS33;
    SLCR_REG(MIO_PIN_05) = MIO_L0_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS33;
    SLCR_REG(MIO_PIN_06) = MIO_L0_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS33;
    SLCR_REG(MIO_PIN_08) = MIO_L0_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS33;

    SLCR_REG(MIO_PIN_16) = MIO_L0_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_HSTL | MIO_DISABLE_RCVR;
    SLCR_REG(MIO_PIN_17) = MIO_L0_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_HSTL | MIO_DISABLE_RCVR;
    SLCR_REG(MIO_PIN_18) = MIO_L0_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_HSTL | MIO_DISABLE_RCVR;
    SLCR_REG(MIO_PIN_19) = MIO_L0_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_HSTL | MIO_DISABLE_RCVR;
    SLCR_REG(MIO_PIN_20) = MIO_L0_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_HSTL | MIO_DISABLE_RCVR;
    SLCR_REG(MIO_PIN_21) = MIO_L0_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_HSTL | MIO_DISABLE_RCVR;

    SLCR_REG(MIO_PIN_22) = MIO_TRI_ENABLE | MIO_L0_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_HSTL;
    SLCR_REG(MIO_PIN_23) = MIO_TRI_ENABLE | MIO_L0_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_HSTL;
    SLCR_REG(MIO_PIN_24) = MIO_TRI_ENABLE | MIO_L0_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_HSTL;
    SLCR_REG(MIO_PIN_25) = MIO_TRI_ENABLE | MIO_L0_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_HSTL;
    SLCR_REG(MIO_PIN_26) = MIO_TRI_ENABLE | MIO_L0_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_HSTL;
    SLCR_REG(MIO_PIN_27) = MIO_TRI_ENABLE | MIO_L0_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_HSTL;

    SLCR_REG(MIO_PIN_28) = MIO_L1_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS18;
    SLCR_REG(MIO_PIN_29) = MIO_L1_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS18 | MIO_TRI_ENABLE;
    SLCR_REG(MIO_PIN_30) = MIO_L1_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS18;
    SLCR_REG(MIO_PIN_31) = MIO_L1_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS18 | MIO_TRI_ENABLE;
    SLCR_REG(MIO_PIN_32) = MIO_L1_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS18;
    SLCR_REG(MIO_PIN_33) = MIO_L1_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS18;
    SLCR_REG(MIO_PIN_34) = MIO_L1_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS18;
    SLCR_REG(MIO_PIN_35) = MIO_L1_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS18;
    SLCR_REG(MIO_PIN_36) = MIO_L1_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS18 | MIO_TRI_ENABLE;
    SLCR_REG(MIO_PIN_37) = MIO_L1_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS18;
    SLCR_REG(MIO_PIN_38) = MIO_L1_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS18;
    SLCR_REG(MIO_PIN_39) = MIO_L1_SEL | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS18;

    SLCR_REG(MIO_PIN_40) = MIO_L3_SEL(0x4) | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS18;
    SLCR_REG(MIO_PIN_41) = MIO_L3_SEL(0x4) | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS18;
    SLCR_REG(MIO_PIN_42) = MIO_L3_SEL(0x4) | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS18;
    SLCR_REG(MIO_PIN_43) = MIO_L3_SEL(0x4) | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS18;
    SLCR_REG(MIO_PIN_44) = MIO_L3_SEL(0x4) | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS18;
    SLCR_REG(MIO_PIN_45) = MIO_L3_SEL(0x4) | MIO_SPEED_FAST | MIO_IO_TYPE_LVCMOS18;

    SLCR_REG(MIO_PIN_47) = MIO_TRI_ENABLE | MIO_IO_TYPE_LVCMOS18;
    SLCR_REG(MIO_PIN_48) = MIO_L3_SEL(0x7) | MIO_IO_TYPE_LVCMOS18;
    SLCR_REG(MIO_PIN_49) = MIO_TRI_ENABLE | MIO_L3_SEL(0x7) | MIO_IO_TYPE_LVCMOS18;
    SLCR_REG(MIO_PIN_52) = MIO_L3_SEL(0x4) | MIO_IO_TYPE_LVCMOS18;
    SLCR_REG(MIO_PIN_53) = MIO_L3_SEL(0x4) | MIO_IO_TYPE_LVCMOS18;

    SLCR_REG(SD0_WP_CD_SEL) = SDIO0_WP_SEL(0x37) | SDIO0_CD_SEL(0x2F);

    zynq_slcr_lock();
    return 0;
}
/* For each PLL we need to configure the cp / res / lock_cnt and then place the PLL in bypass
 * before doing a reset to switch to the new values. Then bypass is removed to switch back to
 * using the PLL once it's locked.
 */
int zynq_pll_init(void) {
    const zynq_pll_cfg_tree_t *cfg = &zynq_pll_cfg;

    SLCR_REG(ARM_PLL_CFG)  = PLL_CFG_LOCK_CNT(cfg->arm.lock_cnt) | PLL_CFG_PLL_CP(cfg->arm.cp) |
                             PLL_CFG_PLL_RES(cfg->arm.res);
    SLCR_REG(ARM_PLL_CTRL) = PLL_FDIV(cfg->arm.fdiv) | PLL_BYPASS_FORCE | PLL_RESET;
    SLCR_REG(ARM_PLL_CTRL) &= ~PLL_RESET;

    if (reg_poll((uintptr_t)&SLCR->PLL_STATUS, PLL_STATUS_ARM_PLL_LOCK) == -1) {
        return -1;
    }

    SLCR_REG(ARM_PLL_CTRL) &= ~PLL_BYPASS_FORCE;
    SLCR_REG(ARM_CLK_CTRL) = zynq_clk_cfg.arm_clk;

#if ZYNQ_SDRAM_INIT
    SLCR_REG(DDR_PLL_CFG)  = PLL_CFG_LOCK_CNT(cfg->ddr.lock_cnt) | PLL_CFG_PLL_CP(cfg->ddr.cp) |
                             PLL_CFG_PLL_RES(cfg->ddr.res);
    SLCR_REG(DDR_PLL_CTRL) = PLL_FDIV(cfg->ddr.fdiv) | PLL_BYPASS_FORCE | PLL_RESET;
    SLCR_REG(DDR_PLL_CTRL) &= ~PLL_RESET;

    if (reg_poll((uintptr_t)&SLCR->PLL_STATUS, PLL_STATUS_DDR_PLL_LOCK) == -1) {
        return -1;
    }

    SLCR_REG(DDR_PLL_CTRL) &= ~PLL_BYPASS_FORCE;
    SLCR_REG(DDR_CLK_CTRL) = zynq_clk_cfg.ddr_clk;
#elif SDRAM_SIZE == 0
    /* if we're not using sdram and haven't been told to initialize sdram, stop the DDR pll */
    SLCR_REG(DDR_CLK_CTRL) = 0;
    SLCR_REG(DDR_PLL_CTRL) |= PLL_PWRDOWN;
#endif

    SLCR_REG(IO_PLL_CFG)  = PLL_CFG_LOCK_CNT(cfg->io.lock_cnt) | PLL_CFG_PLL_CP(cfg->io.cp) |
                            PLL_CFG_PLL_RES(cfg->io.res);
    SLCR_REG(IO_PLL_CTRL) = PLL_FDIV(cfg->io.fdiv) | PLL_BYPASS_FORCE | PLL_RESET;
    SLCR_REG(IO_PLL_CTRL) &= ~PLL_RESET;

    if (reg_poll((uintptr_t)&SLCR->PLL_STATUS, PLL_STATUS_IO_PLL_LOCK) == -1) {
        return -1;
    }

    SLCR_REG(IO_PLL_CTRL) &= ~PLL_BYPASS_FORCE;

    return 0;
}
int zynq_pll_init(void) {
    zynq_slcr_unlock();

    /* ARM PLL & Clock config
     * 375 cycles needed for pll lock
     * 26 divisor on pll
     * enable all ARM clocks
     * 2 divisor on ARM clocks
     * ARM clock source is ARM PLL
     */
    SLCR_REG(ARM_PLL_CFG)  = PLL_CFG_LOCK_CNT(375) | PLL_CFG_PLL_CP(2) | PLL_CFG_PLL_RES(12);
    SLCR_REG(ARM_PLL_CTRL) = PLL_FDIV(26) | PLL_BYPASS_FORCE | PLL_RESET;
    SLCR_REG(ARM_PLL_CTRL) &= ~PLL_RESET;
    if (pll_poll(PLL_STATUS_ARM_PLL_LOCK) == -1) {
        return -1;
    }
    SLCR_REG(ARM_PLL_CTRL) &= ~PLL_BYPASS_FORCE;

    SLCR_REG(ARM_CLK_CTRL) = ARM_CLK_CTRL_DIVISOR(2) | ARM_CLK_CTRL_CPU_6OR4XCLKACT |
                             ARM_CLK_CTRL_CPU_3OR2XCLKACT | ARM_CLK_CTRL_CPU_2XCLKACT |
                             ARM_CLK_CTRL_CPU_1XCLKACT | ARM_CLK_CTRL_PERI_CLKACT;

    /* DDR PLL & Clock config
     * 475 cycles needed for pll lock
     * 26 divisor on pll
     * enable all DDR clocks
     * 2 divisor for 3XCLK, 3 divisor for 2XCLK
     */
    SLCR_REG(DDR_PLL_CFG)  = PLL_CFG_LOCK_CNT(475) | PLL_CFG_PLL_CP(2) | PLL_CFG_PLL_RES(12);
    SLCR_REG(DDR_PLL_CTRL) = PLL_FDIV(26) | PLL_BYPASS_FORCE | PLL_RESET;
    SLCR_REG(DDR_PLL_CTRL) &= ~PLL_RESET;
    if (pll_poll(PLL_STATUS_DDR_PLL_LOCK) == -1) {
        return -1;
    }
    SLCR_REG(DDR_PLL_CTRL) &= ~PLL_BYPASS_FORCE;

    SLCR_REG(DDR_CLK_CTRL) = DDR_CLK_CTRL_DDR_3XCLKACT | DDR_CLK_CTRL_DDR_2XCLKACT |
                             DDR_CLK_CTRL_DDR_3XCLK_DIV(2) | DDR_CLK_CTRL_DDR_2XCLK_DIV(3);

    /* IO PLL config
     * 500 cycles needed for pll lock
     * 20 divisor on pll
     */
    SLCR_REG(IO_PLL_CFG)  = PLL_CFG_LOCK_CNT(500) | PLL_CFG_PLL_CP(2) | PLL_CFG_PLL_RES(12);
    SLCR_REG(IO_PLL_CTRL) = PLL_FDIV(20) | PLL_BYPASS_FORCE | PLL_RESET;
    SLCR_REG(IO_PLL_CTRL) &= ~PLL_RESET;
    if (pll_poll(PLL_STATUS_IO_PLL_LOCK) == -1) {
        return -1;
    }
    SLCR_REG(IO_PLL_CTRL) &= ~PLL_BYPASS_FORCE;

    zynq_slcr_lock();
    return 0;
}
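/* Worked example for the configuration above, assuming a 33.333 MHz PS_CLK
 * (EXTERNAL_CLOCK_FREQ) as found on many Zynq-7000 boards:
 *   ARM PLL: 33.333 MHz * 26 (fdiv) = ~866.7 MHz; /2 divisor -> ~433 MHz CPU 6x4x clock
 *   DDR PLL: 33.333 MHz * 26 (fdiv) = ~866.7 MHz; /2 -> ~433 MHz DDR_3x, /3 -> ~289 MHz DDR_2x
 *   IO PLL:  33.333 MHz * 20 (fdiv) = ~666.7 MHz, feeding the peripheral clock dividers
 */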
int zynq_mio_init(void) {
    zynq_slcr_unlock();

    /* This DDRIOB configuration applies to both zybo and uzed, but it's possible
     * it may not work for all boards in the future. Just something to keep in mind
     * with different memory configurations.
     */
#if ZYNQ_SDRAM_INIT
    SLCR_REG(GPIOB_CTRL) = GPIOB_CTRL_VREF_EN;

    SLCR_REG(DDRIOB_ADDR0) = DDRIOB_OUTPUT_EN(0x3);
    SLCR_REG(DDRIOB_ADDR1) = DDRIOB_OUTPUT_EN(0x3);
    SLCR_REG(DDRIOB_DATA0) = DDRIOB_INP_TYPE(1) | DDRIOB_TERM_EN | DDRIOB_DCI_TYPE(0x3) | DDRIOB_OUTPUT_EN(0x3);
    SLCR_REG(DDRIOB_DATA1) = DDRIOB_INP_TYPE(1) | DDRIOB_TERM_EN | DDRIOB_DCI_TYPE(0x3) | DDRIOB_OUTPUT_EN(0x3);
    SLCR_REG(DDRIOB_DIFF0) = DDRIOB_INP_TYPE(2) | DDRIOB_TERM_EN | DDRIOB_DCI_TYPE(0x3) | DDRIOB_OUTPUT_EN(0x3);
    SLCR_REG(DDRIOB_DIFF1) = DDRIOB_INP_TYPE(2) | DDRIOB_TERM_EN | DDRIOB_DCI_TYPE(0x3) | DDRIOB_OUTPUT_EN(0x3);
    SLCR_REG(DDRIOB_CLOCK) = DDRIOB_OUTPUT_EN(0x3);

    /* These register fields are not documented in the TRM. These
     * values represent the defaults generated via the Zynq tools */
    SLCR_REG(DDRIOB_DRIVE_SLEW_ADDR)  = 0x0018C61CU;
    SLCR_REG(DDRIOB_DRIVE_SLEW_DATA)  = 0x00F9861CU;
    SLCR_REG(DDRIOB_DRIVE_SLEW_DIFF)  = 0x00F9861CU;
    SLCR_REG(DDRIOB_DRIVE_SLEW_CLOCK) = 0x00F9861CU;
    SLCR_REG(DDRIOB_DDR_CTRL) = 0x00000E60U;
    SLCR_REG(DDRIOB_DCI_CTRL) = 0x00000001U;
    SLCR_REG(DDRIOB_DCI_CTRL) |= 0x00000020U;
    SLCR_REG(DDRIOB_DCI_CTRL) |= 0x00000823U;
#endif

    for (size_t pin = 0; pin < countof(zynq_mio_cfg); pin++) {
        if (zynq_mio_cfg[pin] != 0) {
            SLCR_REG(MIO_PIN_00 + (pin * sizeof(uint32_t))) = zynq_mio_cfg[pin];
        }
    }

    SLCR_REG(SD0_WP_CD_SEL) = SDIO0_WP_SEL(0x37) | SDIO0_CD_SEL(0x2F);

    zynq_slcr_lock();
    return 0;
}