synopsys_ddr32_in_self_refresh(MPE42_DDR_PCTL_BASE(1)),
#else
	/*
	 * NOTE(review): this enter path CLEARS MPE42_DDR_PWR_DWN_REQ and
	 * polls for the ACK bit to read 0, while the exit tables below SET
	 * the same bit and poll for ACK to read 1.  That implies REQ/ACK
	 * are active-low on this sysconf — confirm against the MPE42
	 * sysconf documentation before changing either side.
	 */
	UPDATE32(MPE42_DDR_PWR_DWN(1), ~MPE42_DDR_PWR_DWN_REQ, 0),
	WHILE_NE32(MPE42_DDR_PWR_STATUS(1), MPE42_DDR_PWR_STATUS_ACK, 0),
#endif
	/* finally put the DDR1 PHY into standby */
	synopsys_ddr32_phy_standby_enter(MPE42_DDR_PCTL_BASE(1)),
};

/*
 * Resume sequence for DDR controller 0: bring the PHY out of standby
 * first, then take the controller out of self-refresh (either directly
 * on the PCTL or via the sysconf power-down handshake).
 */
static const long stx_mpe42_ddr0_exit[] = {
	synopsys_ddr32_phy_standby_exit(MPE42_DDR_PCTL_BASE(0)),
#ifdef SELF_REFRESH_ON_PCTL
	synopsys_ddr32_out_of_self_refresh(MPE42_DDR_PCTL_BASE(0)),
#else
	OR32(MPE42_DDR_PWR_DWN(0), MPE42_DDR_PWR_DWN_REQ),
	WHILE_NE32(MPE42_DDR_PWR_STATUS(0), MPE42_DDR_PWR_STATUS_ACK,
		   MPE42_DDR_PWR_STATUS_ACK),
#endif
};

/* Resume sequence for DDR controller 1 — mirror of stx_mpe42_ddr0_exit */
static const long stx_mpe42_ddr1_exit[] = {
	synopsys_ddr32_phy_standby_exit(MPE42_DDR_PCTL_BASE(1)),
#ifdef SELF_REFRESH_ON_PCTL
	synopsys_ddr32_out_of_self_refresh(MPE42_DDR_PCTL_BASE(1)),
#else
	OR32(MPE42_DDR_PWR_DWN(1), MPE42_DDR_PWR_DWN_REQ),
	WHILE_NE32(MPE42_DDR_PWR_STATUS(1), MPE42_DDR_PWR_STATUS_ACK,
		   MPE42_DDR_PWR_STATUS_ACK),
#endif
#define SYSCONF_CPU_722 (STIG125_SYSCONF_CPU_BASE + 0x58) #define SYSCONF_CPU_760 (STIG125_SYSCONF_CPU_BASE + 0xf0) static void __iomem *clks_base[2]; static struct stm_wakeup_devices stig125_wkd; static long stig125_ddr0_enter[] = { synopsys_ddr32_in_self_refresh(STIG125_DDR_PCTL_BASE), synopsys_ddr32_phy_standby_enter(STIG125_DDR_PCTL_BASE), POKE32(CLK_A1_BASE + SASC_SWITCH_CFG(0), 0), POKE32(CLK_A1_BASE + SASC_POWER_CFG, 0x3), /* bypass and disable the A9.PLL */ OR32(SYSCONF_CPU_722, 1 << 2), OR32(SYSCONF_CPU_722, 1), END_MARKER, }; static long stig125_ddr0_exit[] = { /* enable, wait and don't bypass the A9.PLL */ UPDATE32(SYSCONF_CPU_722, ~1, 0), WHILE_NE32(SYSCONF_CPU_760, 1, 1), UPDATE32(SYSCONF_CPU_722, ~(1 << 2), 0), /* turn-on A1.PLLs */ POKE32(CLK_A1_BASE + SASC_POWER_CFG, 0x0), /* Wait A1.PLLs are locked */ WHILE_NE32(CLK_A1_BASE + SASC_PLL_LOCK_REG(0), SASC_PLL_LOCK_STATUS,
/*
 * LOR32() - OR two bit-masks into the register at @r1.
 * @r1: register address
 * @r2: first bit-mask to set
 * @r3: second bit-mask to set
 *
 * Fix: OR32() is used with exactly two arguments (address, bits)
 * everywhere else in this file; the original body invoked it with
 * three, which matches no other OR32() use.  Combining the two masks
 * into a single two-argument OR32() sets the union of both masks, the
 * only read-modify-write-OR semantics consistent with the rest of the
 * tables.
 */
void LOR32(reg_t r1, reg_t r2, reg_t r3)
{
	OR32(r1, (r2) | (r3));
}
*/
/* STANDBY: nothing to do — the table is empty (two END_MARKERs) */
static unsigned long stxh205_standby_table[] __cacheline_aligned = {
	END_MARKER,
	END_MARKER
};

/* *********************
 * MEM INSTRUCTION TABLE
 * *********************
 */
static unsigned long stxh205_mem_table[] __cacheline_aligned = {
	/* Put the DDR into self-refresh and its PHY into standby */
	synopsys_ddr32_in_self_refresh(DDR3SS_REG),
	synopsys_ddr32_phy_standby_enter(DDR3SS_REG),
	/* Power down the A1 PLLs (bits [1:0] of CKGA_POWER_CFG) */
	OR32(CLK_A1_BASE + CKGA_POWER_CFG, 3),
	/* END. */
	END_MARKER,

	/*
	 * Turn-on DDR clock:
	 * The DDR subsystem uses the channel coming from A1.HS_0
	 * this means there is _no_ ClockGen_D...
	 *
	 * - turn on the A1.PLLs
	 * - wait both the PLLs are locked
	 */
	/*
	 * Fix: every other UPDATE32() in this file is
	 * UPDATE32(addr, and_mask, or_bits).  The original arguments here
	 * were swapped as (0, ~0x3), which would zero the register and
	 * then set every bit EXCEPT [1:0] — leaving the PLL power-down
	 * bits set.  Clearing bits [1:0] (set by the OR32() in the
	 * suspend half above) is what powers the A1 PLLs back on.
	 */
	UPDATE32(CLK_A1_BASE + CKGA_POWER_CFG, ~0x3, 0),
	WHILE_NE32(SYSTEM_STATUS_160, 3, 3),
	synopsys_ddr32_phy_standby_exit(DDR3SS_REG),
/* SBC GPIO banks: 0x1000 apart, starting at 0xfe610000 */
#define SBC_GPIO_PORT(_nr)	(0xfe610000 + (_nr) * 0x1000)

/* GPIO driving the LMI (DDR) retention pin: PIO4[4] */
#define LMI_RET_GPIO_PORT	4
#define LMI_RET_GPIO_PIN	4
#define LMI_RETENTION_PIN	stm_gpio(LMI_RET_GPIO_PORT, LMI_RET_GPIO_PIN)

/* MPE41 system sysconf registers: numbered from 600, 4 bytes each */
#define SYSCONF_SYSTEM(x)	(MPE41_SYSTEM_SYSCONF_BASE + \
				 ((x) - 600) * 0x4)
#define SYSCONF_DDR0_PWR_DWN	SYSCONF_SYSTEM(608)
#define SYSCONF_DDR0_PWR_ACK	SYSCONF_SYSTEM(670)
#define SYSCONF_DDR1_PWR_DWN	SYSCONF_SYSTEM(613)
#define SYSCONF_DDR1_PWR_ACK	SYSCONF_SYSTEM(672)

/* HOM entry: enable the DTU, then put DDR0 into hibernation-over-memory */
static const unsigned long __fli7610_hom_ddr_0[] = {
	OR32(MPE41_DDR0_PCTL_BASE + DDR_DTU_CFG, DDR_DTU_CFG_ENABLE),
	synopsys_ddr32_in_hom(MPE41_DDR0_PCTL_BASE),
};

/* Same sequence for the second DDR controller */
static const unsigned long __fli7610_hom_ddr_1[] = {
	OR32(MPE41_DDR1_PCTL_BASE + DDR_DTU_CFG, DDR_DTU_CFG_ENABLE),
	synopsys_ddr32_in_hom(MPE41_DDR1_PCTL_BASE),
};

static const unsigned long __fli7610_hom_lmi_retention[] = {
	/*
	 * Enable retention mode gpio
	 *
	 * NOTE(review): this CLEARS the output (drives the pin low) —
	 * presumably the retention pin is active-low; confirm against
	 * the board schematics.
	 */
	POKE32(SBC_GPIO_PORT(LMI_RET_GPIO_PORT) + STM_GPIO_REG_CLR_POUT,
		1 << LMI_RET_GPIO_PIN),
};
END_MARKER
};

/* *********************
 * MEM INSTRUCTION TABLE
 * *********************
 */
static unsigned long fli7510_mem_table[] __cacheline_aligned = {
	/* 1. Enables the DDR self refresh mode based on paragraph 7.1.4
	 * -> from ACCESS to LowPower
	 */
	POKE32(DDR0_BASE_REG + DDR_SCTL, DDR_SCTL_SLEEP),
	WHILE_NE32(DDR0_BASE_REG + DDR_STAT, DDR_STAT_LOW_POWER,
		DDR_STAT_LOW_POWER),
#if 0
	/* disabled: PHY pad/IO low-power tweaks for DDR0 */
	OR32(DDR0_BASE_REG + DDR_PHY_IOCRV1, 1),
	OR32(DDR0_BASE_REG + DDR_PHY_DXCCR, 1),
#endif
	/* Same ACCESS -> LowPower transition for the second controller */
	POKE32(DDR1_BASE_REG + DDR_SCTL, DDR_SCTL_SLEEP),
	WHILE_NE32(DDR1_BASE_REG + DDR_STAT, DDR_STAT_LOW_POWER,
		DDR_STAT_LOW_POWER),
#if 0
	/* disabled: PHY low-power tweaks and PIR bit 7 for both controllers */
	OR32(DDR1_BASE_REG + DDR_PHY_IOCRV1, 1),
	OR32(DDR1_BASE_REG + DDR_PHY_DXCCR, 1),
	OR32(DDR0_BASE_REG + DDR_PHY_PIR, 1 << 7),
	OR32(DDR1_BASE_REG + DDR_PHY_PIR, 1 << 7),
#endif
	END_MARKER,
#if 0
END_MARKER,

/* reduces OSC_st40 */
POKE32(CGA + CKGA_OSC_DIV_CFG(17), 0),	/* ic_if_200 */
END_MARKER
};

/* *********************
 * MEM INSTRUCTION TABLE
 * *********************
 */
static unsigned long stx7111_mem_table[] __cacheline_aligned = {
	/* 1. Enables the DDR self refresh mode */
	OR32(_SYS_CFG(38), (1 << 20)),
	/* waits until the ack bit is zero */
	/*
	 * NOTE(review): WHILE_NE32(addr, mask, val) polls while
	 * (reg & mask) != val, so this waits for the bit to become 1;
	 * the "zero" above may refer to an active-low acknowledge —
	 * confirm against the datasheet.
	 */
	WHILE_NE32(_SYS_STA(4), 1, 1),
	/* Disable the analogue input buffers of the pads */
	OR32(_SYS_CFG(12), (1 << 10)),
	/* Disable the clock output */
	UPDATE32(_SYS_CFG(4), ~(1 << 2), 0),
	/* 1.1 Turn-off the ClockGenD */
	OR32(_SYS_CFG(11), (1 << 12)),
	/* wait clock gen lock */
	WHILE_NE32(_SYS_STA(3), 1, 1),
	/* slow the ic_if_200 clock while suspended */
	POKE32(CGA + CKGA_OSC_DIV_CFG(17), 31),	/* ic_if_200 */
	END_MARKER,
*/
/* STANDBY: nothing to do — the table is empty (two END_MARKERs) */
static unsigned long stx5206_standby_table[] __cacheline_aligned = {
	END_MARKER,
	END_MARKER
};

/* *********************
 * MEM INSTRUCTION TABLE
 * *********************
 */
static unsigned long stx5206_mem_table[] __cacheline_aligned = {
	/* 1. Enables the DDR self refresh mode */
	OR32(SYSCONF(38), (1 << 20)),
	/* waits until the ack bit is zero */
	/*
	 * NOTE(review): WHILE_NE32(addr, mask, val) polls while
	 * (reg & mask) != val, so this waits for the bit to become 1;
	 * the "zero" above may refer to an active-low acknowledge —
	 * confirm against the datasheet.
	 */
	WHILE_NE32(SYSSTA(4), 1, 1),
	/* Disable the clock output */
	UPDATE32(SYSCONF(4), ~(1 << 2), 0),
	/* Disable the analogue input buffers of the pads */
	OR32(SYSCONF(12), (1 << 10)),
	/* 1.1 Turn-off the ClockGenD */
	OR32(SYSCONF(11), (1 << 12)),
	/* wait clock gen lock */
	WHILE_NE32(SYSSTA(3), 1, 1),
	END_MARKER,

	/* Resume: re-enable the analogue input buffers of the pads */
	UPDATE32(SYSCONF(12), ~(1 << 10), 0),