/*
 * Walk one page-directory level (512 entries of 8 bytes each) and record
 * every mapped region into @list.
 *
 * @list:            destination list; regions are merge-sorted as added
 * @pde_start_addr:  physical address of the first PDE of this directory
 * @a20_mask:        mask applied to physical addresses (A20-line gating;
 *                   sign-extends when ANDed with 64-bit addresses)
 * @start_line_addr: virtual address covered by entry 0 of this directory
 */
static void walk_pde(MemoryMappingList *list, hwaddr pde_start_addr,
                     int32_t a20_mask, target_ulong start_line_addr)
{
    int n;

    for (n = 0; n < 512; n++) {
        hwaddr entry_addr = (pde_start_addr + n * 8) & a20_mask;
        uint64_t entry = ldq_phys(entry_addr);
        target_ulong vaddr;

        if (!(entry & PG_PRESENT_MASK)) {
            /* not present */
            continue;
        }

        /* Each PDE maps 2 MB of virtual address space (bits 21..29). */
        vaddr = start_line_addr | ((n & 0x1ff) << 21);

        if (!(entry & PG_PSE_MASK)) {
            /* Entry points at a page table: descend one level. */
            hwaddr pte_base = (entry & PLM4_ADDR_MASK) & a20_mask;
            walk_pte(list, pte_base, a20_mask, vaddr);
            continue;
        }

        /* 2 MB page: strip the low flag bits and bit 63 (NX). */
        {
            hwaddr paddr = (entry & ~0x1fffff) & ~(0x1ULL << 63);

            if (cpu_physical_memory_is_io(paddr)) {
                /* I/O region */
                continue;
            }
            memory_mapping_list_add_merge_sorted(list, paddr, vaddr,
                                                 1 << 21);
        }
    }
}
/*
 * walk_pmd() - walk every PMD entry under @pud and report each one.
 * @st:    dump-state accumulator passed through to note_page()
 * @pud:   PUD entry whose PMD table is walked
 * @start: virtual address mapped by the first PMD entry
 *
 * Leaf or empty entries are recorded via note_page() at level 3; table
 * entries are descended into with walk_pte().
 */
static void walk_pmd(struct pg_state *st, pud_t *pud, unsigned long start)
{
	pmd_t *pmd = pmd_offset(pud, 0);
	unsigned long addr;
	unsigned i;

	for (i = 0; i < PTRS_PER_PMD; i++, pmd++) {
		addr = start + i * PMD_SIZE;
#ifdef CONFIG_ARM64
		/* arm64: block (section) mappings are detected with pmd_sect() */
		if (pmd_none(*pmd) || pmd_sect (*pmd)) {
#else
		/* other arches: large or non-present entries are leaves here */
		if (pmd_none(*pmd) || pmd_large(*pmd) || !pmd_present(*pmd)) {
#endif
			note_page(st, addr, 3, pmd_val(*pmd));
		} else {
			walk_pte(st, pmd, addr);
		}
#ifdef CONFIG_ARM
		/*
		 * 32-bit ARM: when a section is smaller than a PMD, the odd
		 * half of the entry pair (pmd[1]) describes the second
		 * section — report it separately.
		 * NOTE(review): assumes pmd[1] is always readable here when
		 * SECTION_SIZE < PMD_SIZE — confirm against the arch headers.
		 */
		if (SECTION_SIZE < PMD_SIZE && pmd_large(pmd[1]))
			note_page(st, addr + SECTION_SIZE, 3, pmd_val(pmd[1]));
#endif
	}
}

/*
 * walk_pud() - walk every PUD entry under @pgd and report each one.
 * @st:    dump-state accumulator passed through to note_page()
 * @pgd:   PGD entry whose PUD table is walked
 * @start: virtual address mapped by the first PUD entry
 *
 * Leaf or empty entries are recorded via note_page() at level 2; table
 * entries are descended into with walk_pmd().
 */
static void walk_pud(struct pg_state *st, pgd_t *pgd, unsigned long start)
{
	pud_t *pud = pud_offset(pgd, 0);
	unsigned long addr;
	unsigned i;

	for (i = 0; i < PTRS_PER_PUD; i++, pud++) {
		addr = start + i * PUD_SIZE;
#if defined CONFIG_ARM64 && !defined (CONFIG_ANDROID)
		/* arm64 (non-Android): empty or block entries are leaves */
		if (pud_none (*pud) || pud_sect (*pud)) {
			note_page (st, addr, 2, pud_val (*pud));
		} else {
			walk_pmd (st, pud, addr);
		}
#else
		/* otherwise: descend into any non-empty entry */
		if (!pud_none(*pud)) {
			walk_pmd (st, pud, addr);
		} else {
			note_page (st, addr, 2, pud_val (*pud));
		}
#endif
	}
}