/*
 * Map the virtual range [vaddr, vaddr_end) into the boot page tables.
 * If 'phys' is the PG_ALLOCATEME sentinel, a fresh physical page is
 * allocated (via pg_alloc_page) for every virtual page; otherwise the
 * range is mapped linearly starting at 'phys'.  'cbi' is only consulted
 * by the page allocator.  Must run while early allocation is still
 * permitted (kernel_may_alloc).
 */
void pg_map(phys_bytes phys, vir_bytes vaddr, vir_bytes vaddr_end, kinfo_t *cbi)
{
	/* Highest page-directory slot filled so far, and the page table
	 * backing it; both persist across calls so consecutive mappings
	 * keep appending into the same table.
	 */
	static int mapped_pde = -1;
	static u32_t *pt = NULL;
	int pde, pte;

	assert(kernel_may_alloc);

	if (phys != PG_ALLOCATEME) {
		/* Caller supplied physical memory: it must share its page
		 * offset with vaddr; align both down to a page boundary.
		 */
		assert((vaddr % ARM_PAGE_SIZE) == (phys % ARM_PAGE_SIZE));
		vaddr = pg_rounddown(vaddr);
		phys = pg_rounddown(phys);
	} else {
		/* Pages are allocated on the fly; vaddr must already be
		 * page aligned.
		 */
		assert(!(vaddr % ARM_PAGE_SIZE));
	}
	assert(vaddr < kern_vir_start);

	for (; vaddr < vaddr_end; vaddr += ARM_PAGE_SIZE) {
		phys_bytes frame;

		assert(!(vaddr % ARM_PAGE_SIZE));

		if (phys == PG_ALLOCATEME) {
			frame = pg_alloc_page(cbi);
		} else {
			assert(!(phys % ARM_PAGE_SIZE));
			frame = phys;
		}
		assert(!(frame % ARM_PAGE_SIZE));

		pde = ARM_VM_PDE(vaddr);
		pte = ARM_VM_PTE(vaddr);

		/* First touch of this directory slot: back it with a new
		 * page table and remember how far we got.  This relies on
		 * pg_map being called with monotonically increasing vaddr
		 * ranges across calls.
		 */
		if (pde > mapped_pde) {
			phys_bytes pt_phys;

			pt = alloc_pagetable(&pt_phys);
			pagedir[pde] = (pt_phys & ARM_VM_PDE_MASK)
				| ARM_VM_PAGEDIR | ARM_VM_PDE_DOMAIN;
			mapped_pde = pde;
		}
		assert(pt);

		/* Install the entry: write-through, user-accessible (per
		 * the ARM_VM_PTE_WT / ARM_VM_PTE_USER flag names).
		 */
		pt[pte] = (frame & ARM_VM_PTE_MASK)
			| ARM_VM_PAGETABLE | ARM_VM_PTE_WT | ARM_VM_PTE_USER;

		/* Re-test the sentinel each iteration, exactly like the
		 * original control flow: phys only advances when it is a
		 * real physical address.
		 */
		if (phys != PG_ALLOCATEME)
			phys += ARM_PAGE_SIZE;
	}
}
/*
 * Map the virtual range [vaddr, vaddr_end) into the boot page tables.
 * If 'phys' is the PG_ALLOCATEME sentinel, a fresh physical page is
 * allocated (via pg_alloc_page) for every virtual page; otherwise the
 * range is mapped linearly starting at 'phys'.  'cbi' is only consulted
 * by the page allocator.  Must run while early allocation is still
 * permitted (kernel_may_alloc).
 */
void pg_map(phys_bytes phys, vir_bytes vaddr, vir_bytes vaddr_end, kinfo_t *cbi)
{
	/* Highest page-directory slot filled so far, and the page table
	 * backing it; both persist across calls so consecutive mappings
	 * keep appending into the same table.
	 */
	static int mapped_pde = -1;
	static u32_t *pt = NULL;
	int pde, pte;

	assert(kernel_may_alloc);

	if (phys != PG_ALLOCATEME) {
		/* Caller supplied physical memory: it must share its page
		 * offset with vaddr; align both down to a page boundary.
		 */
		assert((vaddr % I386_PAGE_SIZE) == (phys % I386_PAGE_SIZE));
		vaddr = pg_rounddown(vaddr);
		phys = pg_rounddown(phys);
	} else {
		/* Pages are allocated on the fly; vaddr must already be
		 * page aligned.
		 */
		assert(!(vaddr % I386_PAGE_SIZE));
	}
	assert(vaddr < kern_vir_start);

	for (; vaddr < vaddr_end; vaddr += I386_PAGE_SIZE) {
		phys_bytes frame;

		assert(!(vaddr % I386_PAGE_SIZE));

		if (phys == PG_ALLOCATEME) {
			frame = pg_alloc_page(cbi);
		} else {
			assert(!(phys % I386_PAGE_SIZE));
			frame = phys;
		}
		assert(!(frame % I386_PAGE_SIZE));

		pde = I386_VM_PDE(vaddr);
		pte = I386_VM_PTE(vaddr);

		/* First touch of this directory slot: back it with a new
		 * page table and remember how far we got.  This relies on
		 * pg_map being called with monotonically increasing vaddr
		 * ranges across calls.
		 */
		if (pde > mapped_pde) {
			phys_bytes pt_phys;

			pt = alloc_pagetable(&pt_phys);
			pagedir[pde] = (pt_phys & I386_VM_ADDR_MASK)
				| I386_VM_PRESENT | I386_VM_USER | I386_VM_WRITE;
			mapped_pde = pde;
		}
		assert(pt);

		/* Install a present, user-accessible, writable entry. */
		pt[pte] = (frame & I386_VM_ADDR_MASK)
			| I386_VM_PRESENT | I386_VM_USER | I386_VM_WRITE;

		/* Re-test the sentinel each iteration, exactly like the
		 * original control flow: phys only advances when it is a
		 * real physical address.
		 */
		if (phys != PG_ALLOCATEME)
			phys += I386_PAGE_SIZE;
	}
}