/*
 * static function
 */
static inline void cpu_enter_lowpower(unsigned int cpu)
{
	HOTPLUG_INFO("cpu_enter_lowpower\n");

	/* Cluster off */
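	/* cpu2 and cpu3 share the CA15 cluster: if the sibling core is already
	   offline, this CPU is the last one online and the whole cluster is
	   taken out of coherency below. */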
	if ((cpu == 3 && cpu_online(2) == 0) || (cpu == 2 && cpu_online(3) == 0)) {

		/* Clear the SCTLR C bit to prevent further data cache allocation */
		__disable_dcache();
		isb();
		dsb();

		/* Clean and invalidate all data from the L1 and L2 data caches */
		inner_dcache_flush_all();
		/* flush_cache_all(); */

		/* Switch the processor from SMP mode to AMP mode by clearing the ACTLR SMP bit */
		__switch_to_amp();

		isb();
		dsb();

		/* disable CA15 CCI */
		spm_write(CA15_CCI400_DVM_EN, spm_read(CA15_CCI400_DVM_EN) & ~0x3);
		/* Wait until the CCI-400 change-pending status clears */
		while (spm_read(CCI400_STATUS) & 0x1)
			;
		/* Ensure the ACP master does not send further requests to the individual processor.
		   Assert AINACTS to idle the ACP slave interface after all responses are received. */
		/* mt65xx_reg_sync_writel( *CA15_MISC_DBG | 0x11, CA15_MISC_DBG); */
		spm_write(CA15_MISC_DBG, spm_read(CA15_MISC_DBG) | 0x11);

	} else {
		/* Clear the SCTLR C bit to prevent further data cache allocation */
		__disable_dcache();
		isb();
		dsb();
		/* Clean and invalidate all data from the L1 data cache */
		inner_dcache_flush_L1();
		/* Just flush the cache. */
		/* flush_cache_all(); */
		/* Execute a CLREX instruction */
		__asm__ __volatile__("clrex");
		/* Switch the processor from SMP mode to AMP mode by clearing the ACTLR SMP bit */
		__switch_to_amp();
	}

}
Example #2
/*
 * static function
 */
static inline void cpu_enter_lowpower(unsigned int cpu)
{
    //HOTPLUG_INFO("cpu_enter_lowpower\n");

#ifdef SPM_MCDI_FUNC
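    /* Notify the SPM/MCDI power-management code that this CPU is going offline */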
    spm_hot_plug_out_after(cpu);
#endif

    /* Clear the SCTLR C bit to prevent further data cache allocation */
    __disable_dcache();

    /* Clean and invalidate all data from the L1 data cache */
    inner_dcache_flush_L1();
    //Just flush the cache.
    //flush_cache_all();

    /* Clean all data from the L2 data cache */
    __inner_clean_dcache_L2();

    /* Execute a CLREX instruction */
    __asm__ __volatile__("clrex");

    /* Switch the processor from SMP mode to AMP mode by clearing the ACTLR SMP bit */
    __switch_to_amp();
}
Example #3
/*
 * static function
 */
static inline void cpu_enter_lowpower(unsigned int cpu)
{
    //HOTPLUG_INFO("cpu_enter_lowpower\n");

    if (((cpu == 4) && (cpu_online(5) == 0) && (cpu_online(6) == 0) && (cpu_online(7) == 0)) ||
        ((cpu == 5) && (cpu_online(4) == 0) && (cpu_online(6) == 0) && (cpu_online(7) == 0)) ||
        ((cpu == 6) && (cpu_online(4) == 0) && (cpu_online(5) == 0) && (cpu_online(7) == 0)) ||
        ((cpu == 7) && (cpu_online(4) == 0) && (cpu_online(5) == 0) && (cpu_online(6) == 0)))
    {
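        /*
         * cpu4-cpu7 form the CA15L cluster; this branch is taken only when
         * the current CPU is the last online core in that cluster, so snoop
         * and DVM traffic for the whole cluster is shut off below.
         */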
    #if 0
        /* Clear the SCTLR C bit to prevent further data cache allocation */
        __disable_dcache();

        /* Clean and invalidate all data from the L1/L2 data cache */
        inner_dcache_flush_L1();
        //flush_cache_all();

        /* Execute a CLREX instruction */
        __asm__ __volatile__("clrex");

        /* Clean all data from the L2 data cache */
        inner_dcache_flush_L2();
    #else
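        /*
         * Combined helper that performs the steps from the #if 0 block above
         * as one sequence: disable the D-cache, then flush L1 and L2.
         */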
        __disable_dcache__inner_flush_dcache_L1__inner_flush_dcache_L2();
    #endif

        /* Switch the processor from SMP mode to AMP mode by clearing the ACTLR SMP bit */
        __switch_to_amp();

        /*
         * Execute an ISB instruction to ensure that all of the CP15 register
         * changes from the previous steps have been committed.
         */
        isb();

        /*
         * Execute a DSB instruction to ensure that all cache, TLB and branch
         * predictor maintenance operations issued by any processor in the
         * multiprocessor device before the SMP bit was cleared have completed.
         */
        dsb();

        /* Disable snoop requests and DVM message requests */
        REG_WRITE(CCI400_SI3_SNOOP_CONTROL, REG_READ(CCI400_SI3_SNOOP_CONTROL) & ~(SNOOP_REQ | DVM_MSG_REQ));
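        /* Wait until the CCI-400 status change is no longer pending */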
        while (REG_READ(CCI400_STATUS) & CHANGE_PENDING)
            ;

        /* Disable CA15L snoop function */
    #if defined(CONFIG_ARM_PSCI) || defined(CONFIG_MTK_PSCI)
        mcusys_smc_write_phy(virt_to_phys(MP1_AXI_CONFIG), REG_READ(MP1_AXI_CONFIG) | ACINACTM);
    #else //#if defined(CONFIG_ARM_PSCI) || defined(CONFIG_MTK_PSCI)
        mcusys_smc_write(MP1_AXI_CONFIG, REG_READ(MP1_AXI_CONFIG) | ACINACTM);
    #endif //#if defined(CONFIG_ARM_PSCI) || defined(CONFIG_MTK_PSCI)
    }
    else
    {
    #if 0
        /* Clear the SCTLR C bit to prevent further data cache allocation */
        __disable_dcache();

        /* Clean and invalidate all data from the L1 data cache */
        inner_dcache_flush_L1();
        //Just flush the cache.
        //flush_cache_all();

        /* Clean all data from the L2 data cache */
        //__inner_clean_dcache_L2();
    #else
        //FIXME: why does __disable_dcache__inner_flush_dcache_L1() fail when doing the same thing in two steps works?
        //__disable_dcache__inner_flush_dcache_L1();
        __disable_dcache__inner_flush_dcache_L1__inner_clean_dcache_L2();
    #endif

        /* Execute a CLREX instruction */
        __asm__ __volatile__("clrex");

        /* Switch the processor from SMP mode to AMP mode by clearing the ACTLR SMP bit */
        __switch_to_amp();
    }
}
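
For context, here is a minimal sketch of how a cpu_enter_lowpower() helper of this kind is typically invoked from the ARM CPU-hotplug "die" path. The platform_cpu_die() name and the WFI parking loop follow the common arch/arm hotplug pattern and are illustrative assumptions, not taken from the examples above.

void platform_cpu_die(unsigned int cpu)
{
	/* Flush caches and take this core out of coherency (one of the variants above) */
	cpu_enter_lowpower(cpu);

	/*
	 * Park the core in WFI until the power controller removes power; a real
	 * implementation would undo cpu_enter_lowpower() (re-enable the D-cache
	 * and SMP mode) if the power-down were aborted and the core woke up.
	 */
	for (;;)
		__asm__ __volatile__("wfi" : : : "memory");
}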