/*inline NEEDS["mapped_alloc.h", <cstring>, "ram_quota.h", Generic_obj_space::cap_virt]*/
typename Generic_obj_space<SPACE>::Entry *
Generic_obj_space<SPACE>::caps_alloc(Address virt)
{
  // Kernel-visible address of the capability page covering 'virt'.
  Address const page_addr = (Address)cap_virt(virt);

  // Grab one page of quota-accounted kernel memory for the new cap table.
  void *page = Mapped_allocator::allocator()
                 ->q_unaligned_alloc(ram_quota(), Config::PAGE_SIZE);
  if (!page)
    return 0;

  add_dbg_info(page, this, virt);
  Mem::memset_mwords(page, 0, Config::PAGE_SIZE / sizeof(Mword));

  // Map the freshly cleared page at the capability-table address,
  // truncated to a page boundary.
  Mem_space::Status status = mem_space()->v_insert(
      Mem_space::Phys_addr::create(
          Mem_space::kernel_space()->virt_to_phys((Address)page)),
      Mem_space::Addr::create(page_addr)
          .trunc(Mem_space::Size::create(Config::PAGE_SIZE)),
      Mem_space::Size::create(Config::PAGE_SIZE),
      Mem_space::Page_cacheable | Mem_space::Page_writable
          | Mem_space::Page_referenced | Mem_space::Page_dirty);

  switch (status)
    {
    case Insert_ok:
    case Insert_warn_exists:
    case Insert_warn_attrib_upgrade:
    case Insert_err_exists:
      break;
    case Insert_err_nomem:
      // No memory for the page tables: roll back the page allocation.
      Mapped_allocator::allocator()
          ->q_unaligned_free(ram_quota(), Config::PAGE_SIZE, page);
      return 0;
    }

  // Combine the in-page offset of the requested slot with the kernel
  // address of the backing page ('&' binds tighter than '|').
  unsigned long cap = page_addr & (Config::PAGE_SIZE - 1) | (unsigned long)page;
  return reinterpret_cast<Entry*>(cap);
}
typename Generic_obj_space<SPACE>::Entry *
Generic_obj_space<SPACE>::caps_alloc(Address virt)
{
  // A Cap_table must fill exactly one page, because it is backed by a
  // single page-sized allocation below.
  static_assert(sizeof(Cap_table) == Config::PAGE_SIZE, "cap table size mismatch");

  // First-level directory slot that covers this capability index.
  unsigned const dir_idx = virt / Caps_per_page;
  if (EXPECT_FALSE(dir_idx >= Slots_per_dir))
    return 0; // capability index beyond the directory's range

  // One quota-accounted page backs the new second-level table.
  void *page = Kmem_alloc::allocator()
                 ->q_unaligned_alloc(ram_quota(), Config::PAGE_SIZE);
  if (!page)
    return 0;

  add_dbg_info(page, this, virt);
  Mem::memset_mwords(page, 0, Config::PAGE_SIZE / sizeof(Mword));

  // Hook the zeroed table into the directory and hand back the entry slot.
  Cap_table *table = (Cap_table*)page;
  _dir->d[dir_idx] = table;
  return &table->e[virt % Caps_per_page];
}