/*
 * Populate the translation tables from the static memory map and record
 * the physical-address-size (PS) bits needed when programming the TCR.
 *
 * NOTE(review): max_pa and max_va are not declared locally, so they are
 * presumably file-scope variables updated while regions were added to
 * the memory map (or by init_xlation_table()) — confirm against the
 * rest of this file.
 */
void init_xlat_tables(void)
{
	/* Dump the memory map for debugging. */
	print_mmap();
	/* Build the tables, starting from the level-1 table at VA 0. */
	init_xlation_table(mmap, 0, l1_xlation_table, 1);
	/* Cache the PS field value derived from the highest mapped PA. */
	tcr_ps_bits = calc_physical_addr_size_bits(max_pa);
	/* The highest mapped VA must fit in the configured VA space. */
	assert(max_va < ADDR_SPACE_SIZE);
}
/*
 * Build the translation tables from the static memory map, then derive
 * the physical-address-size (PS) bits needed to program the TCR.
 */
void init_xlat_tables(void)
{
	uintptr_t va_top;
	unsigned long long pa_top;

	print_mmap();

	/*
	 * Walk the memory map from the base translation table; the
	 * highest virtual and physical addresses that were mapped are
	 * reported back through the two out-parameters.
	 */
	init_xlation_table(0, base_xlation_table, XLAT_TABLE_LEVEL_BASE,
			   &va_top, &pa_top);

	/* Remember the PS field value for when the TCR is programmed. */
	tcr_ps_bits = calc_physical_addr_size_bits(pa_top);

	/* Everything mapped must fit inside the configured VA space. */
	assert(va_top < ADDR_SPACE_SIZE);
}
/*
 * Build the translation tables from the memory map, check that the
 * mapped ranges fit inside the platform's configured virtual and
 * physical address spaces, and derive the PS bits for the TCR.
 */
void init_xlat_tables(void)
{
	unsigned long long highest_pa;
	uintptr_t highest_va;

	print_mmap();

	/* Map everything; get back the highest VA/PA actually mapped. */
	init_xlation_table(0, base_xlation_table, XLAT_TABLE_LEVEL_BASE,
			   &highest_va, &highest_pa);

	/* The highest mapped VA must lie inside the virtual space. */
	assert(highest_va <= PLAT_VIRT_ADDR_SPACE_SIZE - 1);
	/* The highest mapped PA must lie inside the physical space. */
	assert(highest_pa <= PLAT_PHY_ADDR_SPACE_SIZE - 1);
	/* The configured physical space must be supported by the CPU. */
	assert((PLAT_PHY_ADDR_SPACE_SIZE - 1) <= get_max_supported_pa());

	tcr_ps_bits = calc_physical_addr_size_bits(highest_pa);
}
void core_init_mmu_tables(struct tee_mmap_region *mm) { paddr_t max_pa = 0; uint64_t max_va = 0; size_t n; for (n = 0; mm[n].size; n++) { paddr_t pa_end; vaddr_t va_end; debug_print(" %010" PRIxVA " %010" PRIxPA " %10zx %x", mm[n].va, mm[n].pa, mm[n].size, mm[n].attr); assert(IS_PAGE_ALIGNED(mm[n].pa)); assert(IS_PAGE_ALIGNED(mm[n].size)); pa_end = mm[n].pa + mm[n].size - 1; va_end = mm[n].va + mm[n].size - 1; if (pa_end > max_pa) max_pa = pa_end; if (va_end > max_va) max_va = va_end; } /* Clear table before use */ memset(l1_xlation_table[0], 0, NUM_L1_ENTRIES * XLAT_ENTRY_SIZE); init_xlation_table(mm, 0, l1_xlation_table[0], 1); for (n = 1; n < CFG_TEE_CORE_NB_CORE; n++) memcpy(l1_xlation_table[n], l1_xlation_table[0], XLAT_ENTRY_SIZE * NUM_L1_ENTRIES); for (n = 0; n < NUM_L1_ENTRIES; n++) { if (!l1_xlation_table[0][n]) { user_va_idx = n; break; } } assert(user_va_idx != -1); tcr_ps_bits = calc_physical_addr_size_bits(max_pa); COMPILE_TIME_ASSERT(ADDR_SPACE_SIZE > 0); assert(max_va < ADDR_SPACE_SIZE); }