/*
 * static function
 */
static inline void cpu_enter_lowpower(unsigned int cpu)
{
	HOTPLUG_INFO("cpu_enter_lowpower\n");

	/* Cluster off */
	if ((cpu == 3 && cpu_online(2) == 0) || (cpu == 2 && cpu_online(3) == 0)) {
		/* Clear the SCTLR C bit to prevent further data cache allocation */
		__disable_dcache();
		isb();
		dsb();

		/* Clean and invalidate all data from the L1 and L2 data caches */
		inner_dcache_flush_all();
		/* flush_cache_all(); */

		/* Switch the processor from SMP mode to AMP mode by clearing the ACTLR SMP bit */
		__switch_to_amp();
		isb();
		dsb();

		/* Disable the CA15 CCI */
		spm_write(CA15_CCI400_DVM_EN, spm_read(CA15_CCI400_DVM_EN) & ~0x3);

		/* Wait until the CCI change is no longer pending */
		while (spm_read(CCI400_STATUS) & 0x1)
			;

		/*
		 * Ensure the ACP master does not send further requests to the
		 * individual processor. Assert AINACTS to idle the ACP slave
		 * interface after all responses are received.
		 */
		/* mt65xx_reg_sync_writel(*CA15_MISC_DBG | 0x11, CA15_MISC_DBG); */
		spm_write(CA15_MISC_DBG, spm_read(CA15_MISC_DBG) | 0x11);
	} else {
		/* Clear the SCTLR C bit to prevent further data cache allocation */
		__disable_dcache();
		isb();
		dsb();

		/* Clean and invalidate all data from the L1 data cache */
		inner_dcache_flush_L1();	/* Just flush the cache. */
		/* flush_cache_all(); */

		/* Execute a CLREX instruction */
		__asm__ __volatile__("clrex");

		/* Switch the processor from SMP mode to AMP mode by clearing the ACTLR SMP bit */
		__switch_to_amp();
	}
}
/*
 * static function
 */
static inline void cpu_enter_lowpower(unsigned int cpu)
{
	//HOTPLUG_INFO("cpu_enter_lowpower\n");

#ifdef SPM_MCDI_FUNC
	spm_hot_plug_out_after(cpu);
#endif

	/* Clear the SCTLR C bit to prevent further data cache allocation */
	__disable_dcache();

	/* Clean and invalidate all data from the L1 data cache */
	inner_dcache_flush_L1();	//Just flush the cache.
	//flush_cache_all();

	/* Clean all data from the L2 data cache */
	__inner_clean_dcache_L2();

	/* Execute a CLREX instruction */
	__asm__ __volatile__("clrex");

	/* Switch the processor from SMP mode to AMP mode by clearing the ACTLR SMP bit */
	__switch_to_amp();
}
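/*
 * Illustrative sketch only (not part of this file): cpu_enter_lowpower()
 * is normally invoked from the platform's CPU-hotplug "die" path right
 * before the core is parked with WFI. The helper names platform_cpu_die(),
 * platform_do_lowpower() and cpu_leave_lowpower() below follow the common
 * ARM hotplug pattern and are assumptions, not identifiers confirmed from
 * this source.
 */
#if 0	/* example only, not compiled */
static void platform_cpu_die(unsigned int cpu)
{
	int spurious = 0;

	/* Take this CPU out of coherency before it is powered down */
	cpu_enter_lowpower(cpu);
	platform_do_lowpower(cpu, &spurious);

	/*
	 * If control returns here the power-down was aborted (spurious
	 * wakeup), so coherency must be restored before going back online.
	 */
	cpu_leave_lowpower(cpu);

	if (spurious)
		pr_warn("CPU%u: %u spurious wakeup calls\n", cpu, spurious);
}
#endif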
void __invalidate_dcache_range(unsigned long start, unsigned long end)
{
	unsigned int i;
	unsigned long flags;
	unsigned int align;

	if (cpuinfo.use_dcache) {
		/*
		 * No need to cover the entire cache range,
		 * just cover the cache footprint
		 */
		end = min(start + cpuinfo.dcache_size, end);
		align = ~(cpuinfo.dcache_line - 1);
		start &= align;	/* Make sure we are aligned */
		/* Push end up to the next cache line */
		end = ((end & align) + cpuinfo.dcache_line);

		local_irq_save(flags);
		__disable_dcache();

		for (i = start; i < end; i += cpuinfo.dcache_line)
			__invalidate_dcache(i);

		__enable_dcache();
		local_irq_restore(flags);
	}
}
void __invalidate_dcache_all(void)
{
	unsigned int i;
	unsigned long flags;

	if (cpuinfo.use_dcache) {
		local_irq_save(flags);
		__disable_dcache();

		/*
		 * Just loop through the cache size and invalidate;
		 * no need to add the CACHE_BASE address
		 */
		for (i = 0; i < cpuinfo.dcache_size; i += cpuinfo.dcache_line)
			__invalidate_dcache(i);

		__enable_dcache();
		local_irq_restore(flags);
	}
}
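/*
 * Illustrative sketch only: a typical caller of __invalidate_dcache_range()
 * drops the cached copy of a buffer that a device has just written via DMA,
 * so the CPU re-reads fresh data from memory. The buffer, length and helper
 * name below are hypothetical, not taken from this source.
 */
#if 0	/* example only, not compiled */
static void example_dma_rx_done(void *buf, size_t len)
{
	unsigned long start = (unsigned long)buf;

	/* Drop any stale cache lines covering the DMA'd region */
	__invalidate_dcache_range(start, start + len);
}
#endif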
/*
 * static function
 */
static inline void cpu_enter_lowpower(unsigned int cpu)
{
	//HOTPLUG_INFO("cpu_enter_lowpower\n");

	if (((cpu == 4) && (cpu_online(5) == 0) && (cpu_online(6) == 0) && (cpu_online(7) == 0)) ||
	    ((cpu == 5) && (cpu_online(4) == 0) && (cpu_online(6) == 0) && (cpu_online(7) == 0)) ||
	    ((cpu == 6) && (cpu_online(4) == 0) && (cpu_online(5) == 0) && (cpu_online(7) == 0)) ||
	    ((cpu == 7) && (cpu_online(4) == 0) && (cpu_online(5) == 0) && (cpu_online(6) == 0))) {
#if 0
		/* Clear the SCTLR C bit to prevent further data cache allocation */
		__disable_dcache();

		/* Clean and invalidate all data from the L1/L2 data cache */
		inner_dcache_flush_L1();
		//flush_cache_all();

		/* Execute a CLREX instruction */
		__asm__ __volatile__("clrex");

		/* Clean all data from the L2 data cache */
		inner_dcache_flush_L2();
#else
		__disable_dcache__inner_flush_dcache_L1__inner_flush_dcache_L2();
#endif

		/* Switch the processor from SMP mode to AMP mode by clearing the ACTLR SMP bit */
		__switch_to_amp();

		/*
		 * Execute an ISB instruction to ensure that all of the CP15 register
		 * changes from the previous steps have been committed.
		 */
		isb();

		/*
		 * Execute a DSB instruction to ensure that all cache, TLB and branch
		 * predictor maintenance operations issued by any processor in the
		 * multiprocessor device before the SMP bit was cleared have completed.
		 */
		dsb();

		/* Disable snoop requests and DVM message requests */
		REG_WRITE(CCI400_SI3_SNOOP_CONTROL,
			  REG_READ(CCI400_SI3_SNOOP_CONTROL) & ~(SNOOP_REQ | DVM_MSG_REQ));
		while (REG_READ(CCI400_STATUS) & CHANGE_PENDING)
			;

		/* Disable CA15L snoop function */
#if defined(CONFIG_ARM_PSCI) || defined(CONFIG_MTK_PSCI)
		mcusys_smc_write_phy(virt_to_phys(MP1_AXI_CONFIG),
				     REG_READ(MP1_AXI_CONFIG) | ACINACTM);
#else	/* !(CONFIG_ARM_PSCI || CONFIG_MTK_PSCI) */
		mcusys_smc_write(MP1_AXI_CONFIG, REG_READ(MP1_AXI_CONFIG) | ACINACTM);
#endif	/* CONFIG_ARM_PSCI || CONFIG_MTK_PSCI */
	} else {
#if 0
		/* Clear the SCTLR C bit to prevent further data cache allocation */
		__disable_dcache();

		/* Clean and invalidate all data from the L1 data cache */
		inner_dcache_flush_L1();	//Just flush the cache.
		//flush_cache_all();

		/* Clean all data from the L2 data cache */
		//__inner_clean_dcache_L2();
#else
		/* FIXME: why does __disable_dcache__inner_flush_dcache_L1() fail while doing it in two steps works? */
		//__disable_dcache__inner_flush_dcache_L1();
		__disable_dcache__inner_flush_dcache_L1__inner_clean_dcache_L2();
#endif

		/* Execute a CLREX instruction */
		__asm__ __volatile__("clrex");

		/* Switch the processor from SMP mode to AMP mode by clearing the ACTLR SMP bit */
		__switch_to_amp();
	}
}
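/*
 * Illustrative sketch only: the corresponding cpu_leave_lowpower() in this
 * kind of code base typically just reverses the core-local steps, re-entering
 * SMP mode and re-enabling the D-cache, while the cluster-level CCI/ACINACTM
 * settings are restored separately on the power-up path. The helpers
 * __switch_to_smp() and __enable_dcache() are assumed mirror images of
 * __switch_to_amp() and __disable_dcache(), not identifiers confirmed from
 * this file.
 */
#if 0	/* example only, not compiled */
static inline void cpu_leave_lowpower(unsigned int cpu)
{
	/* Re-enter SMP mode by setting the ACTLR SMP bit */
	__switch_to_smp();

	/* Re-enable data cache allocation by setting the SCTLR C bit */
	__enable_dcache();
}
#endif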