//============================================================== unsigned test_w_l1cache(unsigned fill_value, unsigned modify_value) { unsigned *addr; unsigned size, err_addr, val; int i; // current dcache is disable // clear no-cache memory block addr = (unsigned*)no_cache_mem_start; size = (cache_size)/sizeof(unsigned); for(i=0; i<size; i++, addr++) *addr = fill_value; // asm("dmb"); // asm("isb"); // map cache-memory data to cache addr = (unsigned*)cache_mem_start; size = cache_size/CONFIG_SYS_CACHE_LINE_SIZE; dcache_enable(); // asm("dmb"); // asm("isb"); for(i=0; i<size; i++, addr+=CONFIG_SYS_CACHE_LINE_SIZE) val = *addr; // write to cache addr = (unsigned*)cache_mem_start; size = cache_size/sizeof(unsigned); for(i=0; i<size; i++, addr++){ *addr = modify_value; } dcache_flush(); dcache_clean(); dcache_disable(); dcache_invalid(); asm("mov r0, r0"); asm("mov r0, r0"); asm("mov r0, r0"); err_addr = 0; addr = (unsigned*)no_cache_mem_start; for(i=0; i<size; i++, addr++){ if(*addr != modify_value){ err_addr = (unsigned)addr; break; } } return err_addr; }
/*
 * data cache only for ColdFire V4 such as MCF547x_8x, MCF5445x
 * the dcache will be dummy in ColdFire V2 and V3
 */
/*
 * Enable the data cache: invalidate it first, set the software status
 * flag, program the access-control registers for the chosen core
 * variant, and finally write CACR to turn the cache on.  The ACRs are
 * programmed before CACR so the region attributes are in place when
 * the cache becomes active.
 */
void dcache_enable(void)
{
	dcache_invalid();	/* start from a cache with no stale valid lines */
	*cf_dcache_status = 1;	/* presumably the flag dcache_status() reports -- confirm */

#ifdef CONFIG_CF_V4
	/* V4 cores: ACR0/ACR1 hold the data-region cache attributes */
	__asm__ __volatile__("movec %0, %%acr0"::"r"(CONFIG_SYS_CACHE_ACR0));
	__asm__ __volatile__("movec %0, %%acr1"::"r"(CONFIG_SYS_CACHE_ACR1));
#elif defined(CONFIG_CF_V4e)
	/* V4e cores: the data-side attributes live in ACR4/ACR5 instead */
	__asm__ __volatile__("movec %0, %%acr4"::"r"(CONFIG_SYS_CACHE_ACR4));
	__asm__ __volatile__("movec %0, %%acr5"::"r"(CONFIG_SYS_CACHE_ACR5));
#endif

	/* write the cache control register last; this enables the dcache */
	__asm__ __volatile__("movec %0, %%cacr"::"r"(CONFIG_SYS_CACHE_DCACR));
}
//============================================================== int l1cache_post_test(int flags) { int i; unsigned result, pattern=0; int status; status = dcache_status(); if(dcache_status() == OFF) dcache_enable(); dcache_flush(); icache_invalid(); dcache_clean(); dcache_disable(); // must invalid dcache after dcache_disable // if no valid dcache, dcache_enable() will jump here dcache_invalid(); asm("mov r0, r0"); asm("mov r0, r0"); asm("mov r0, r0"); for(i=0; i<ARRAY_SIZE(L1_cache_pattern); i++){ result = test_w_l1cache(0x55555555, L1_cache_pattern[i]); if(result != 0){ pattern = L1_cache_pattern[i]; break; } result = test_w_l1cache(0x55555555, ~L1_cache_pattern[i]); if(result != 0){ pattern = ~L1_cache_pattern[i]; break; } } if(status == ON) dcache_enable(); if(i<ARRAY_SIZE(L1_cache_pattern)){ post_log("<%d>%s:%d: l1cache: test fail: Error address=0x%x, pattern=0x%x\n", SYSTEST_INFO_L2, __FUNCTION__, __LINE__, result, pattern); return -1; } else{ post_log("<%d>l1cache test pattern count=%d\n", SYSTEST_INFO_L2, ARRAY_SIZE(L1_cache_pattern)); return 0; } }
/*
 * Disable the data cache: clear the software status flag, invalidate
 * the cache contents, then zero CACR (turning the cache off) and the
 * access-control registers for the active core variant.
 */
void dcache_disable(void)
{
	u32 temp = 0;	/* value written to CACR and the ACRs to clear them */

	*cf_dcache_status = 0;	/* presumably the flag dcache_status() reports -- confirm */
	dcache_invalid();	/* discard contents before the cache is switched off */

	/* clear CACR first: the cache is disabled from here on */
	__asm__ __volatile__("movec %0, %%cacr"::"r"(temp));

#ifdef CONFIG_CF_V4
	/* V4 cores: clear the data-region attribute registers ACR0/ACR1 */
	__asm__ __volatile__("movec %0, %%acr0"::"r"(temp));
	__asm__ __volatile__("movec %0, %%acr1"::"r"(temp));
#elif defined(CONFIG_CF_V4e)
	/* V4e cores: the data-side attributes live in ACR4/ACR5 instead */
	__asm__ __volatile__("movec %0, %%acr4"::"r"(temp));
	__asm__ __volatile__("movec %0, %%acr5"::"r"(temp));
#endif
}