/* * Flush all user mappings from the TLBs. */ void zeus_tlb_flush_user(void) { u_long data, tag; u_int i, slot; for (i = 0; i < ZEUS_FTLB_ENTRIES; i++) { slot = TLB_DAR_SLOT(TLB_DAR_FTLB, i); data = ldxa(slot, ASI_DTLB_DATA_ACCESS_REG); tag = ldxa(slot, ASI_DTLB_TAG_READ_REG); if ((data & TD_V) != 0 && (data & TD_L) == 0 && TLB_TAR_CTX(tag) != TLB_CTX_KERNEL) stxa_sync(slot, ASI_DTLB_DATA_ACCESS_REG, 0); data = ldxa(slot, ASI_ITLB_DATA_ACCESS_REG); tag = ldxa(slot, ASI_ITLB_TAG_READ_REG); if ((data & TD_V) != 0 && (data & TD_L) == 0 && TLB_TAR_CTX(tag) != TLB_CTX_KERNEL) stxa_sync(slot, ASI_ITLB_DATA_ACCESS_REG, 0); } for (i = 0; i < ZEUS_STLB_ENTRIES; i++) { slot = TLB_DAR_SLOT(TLB_DAR_STLB, i); data = ldxa(slot, ASI_DTLB_DATA_ACCESS_REG); tag = ldxa(slot, ASI_DTLB_TAG_READ_REG); if ((data & TD_V) != 0 && (data & TD_L) == 0 && TLB_TAR_CTX(tag) != TLB_CTX_KERNEL) stxa_sync(slot, ASI_DTLB_DATA_ACCESS_REG, 0); data = ldxa(slot, ASI_ITLB_DATA_ACCESS_REG); tag = ldxa(slot, ASI_ITLB_TAG_READ_REG); if ((data & TD_V) != 0 && (data & TD_L) == 0 && TLB_TAR_CTX(tag) != TLB_CTX_KERNEL) stxa_sync(slot, ASI_ITLB_DATA_ACCESS_REG, 0); } }
/*
 * Arm the physical watchpoint: load the DMMU physical watchpoint register
 * with the address and enable physical watching with the given byte mask
 * in the LSU control register.  Always returns 0.
 */
int
watch_phys_set_mask(vm_offset_t pa, u_long mask)
{
	u_long csr;

	/* Only address bits <41:3> are kept in the watchpoint register. */
	stxa_sync(AA_DMMU_PWPR, ASI_DMMU, pa & (((2UL << 38) - 1) << 3));

	csr = ldxa(0, ASI_LSU_CTL_REG);
	csr |= LSU_PW;
	csr &= ~LSU_PM_MASK;
	csr |= mask << LSU_PM_SHIFT;
	stxa_sync(0, ASI_LSU_CTL_REG, csr);
	return (0);
}
/*
 * Arm the virtual watchpoint: load the DMMU virtual watchpoint register
 * with the address and enable virtual watching with the given byte mask
 * in the LSU control register.  Always returns 0.
 */
int
watch_virt_set_mask(vm_offset_t va, u_long mask)
{
	u_long csr;

	/* Only address bits <44:3> are kept in the watchpoint register. */
	stxa_sync(AA_DMMU_VWPR, ASI_DMMU, va & (((2UL << 41) - 1) << 3));

	csr = ldxa(0, ASI_LSU_CTL_REG);
	csr |= LSU_VW;
	csr &= ~LSU_VM_MASK;
	csr |= mask << LSU_VM_SHIFT;
	stxa_sync(0, ASI_LSU_CTL_REG, csr);
	return (0);
}
/*
 * Flush a physical page from the data cache.
 */
void
cheetah_dcache_page_inval(vm_paddr_t spa)
{
	vm_paddr_t addr;
	void *cookie;

	KASSERT((spa & PAGE_MASK) == 0, ("%s: pa not page aligned", __func__));

	/* Kick off the same invalidation on the other CPUs. */
	cookie = ipi_dcache_page_inval(tl_ipi_cheetah_dcache_page_inval, spa);

	/* Invalidate the page line by line on this CPU. */
	addr = spa;
	while (addr < spa + PAGE_SIZE) {
		stxa_sync(addr, ASI_DCACHE_INVALIDATE, 0);
		addr += PCPU_GET(cache.dc_linesize);
	}

	/* Wait for the remote CPUs to finish. */
	ipi_wait(cookie);
}
void watch_phys_clear() { stxa_sync(0, ASI_LSU_CTL_REG, ldxa(0, ASI_LSU_CTL_REG) & ~LSU_PW); }
void watch_virt_clear() { stxa_sync(0, ASI_LSU_CTL_REG, ldxa(0, ASI_LSU_CTL_REG) & ~LSU_VW); }
/*
 * Flush all lines from the level 1 caches.
 */
void
zeus_cache_flush(void)
{

	/*
	 * A single store via ASI_FLUSH_L1I triggers the flush; per the
	 * header comment this covers the L1 caches — presumably the
	 * hardware flushes both I- and D-side on this ASI (TODO confirm
	 * against the processor manual).
	 */
	stxa_sync(0, ASI_FLUSH_L1I, 0);
}
/* * Flush all user mappings from the TLBs. */ void cheetah_tlb_flush_user(void) { u_long data, tag; register_t s; u_int i, slot; /* * We read ASI_{D,I}TLB_DATA_ACCESS_REG twice back-to-back in order * to work around errata of USIII and beyond. */ for (i = 0; i < CHEETAH_T16_ENTRIES; i++) { slot = TLB_DAR_SLOT(TLB_DAR_T16, i); s = intr_disable(); (void)ldxa(slot, ASI_DTLB_DATA_ACCESS_REG); data = ldxa(slot, ASI_DTLB_DATA_ACCESS_REG); intr_restore(s); tag = ldxa(slot, ASI_DTLB_TAG_READ_REG); if ((data & TD_V) != 0 && (data & TD_L) == 0 && TLB_TAR_CTX(tag) != TLB_CTX_KERNEL) stxa_sync(slot, ASI_DTLB_DATA_ACCESS_REG, 0); s = intr_disable(); (void)ldxa(slot, ASI_ITLB_DATA_ACCESS_REG); data = ldxa(slot, ASI_ITLB_DATA_ACCESS_REG); intr_restore(s); tag = ldxa(slot, ASI_ITLB_TAG_READ_REG); if ((data & TD_V) != 0 && (data & TD_L) == 0 && TLB_TAR_CTX(tag) != TLB_CTX_KERNEL) stxa_sync(slot, ASI_ITLB_DATA_ACCESS_REG, 0); } for (i = 0; i < CHEETAH_DT512_ENTRIES; i++) { slot = TLB_DAR_SLOT(TLB_DAR_DT512_0, i); s = intr_disable(); (void)ldxa(slot, ASI_DTLB_DATA_ACCESS_REG); data = ldxa(slot, ASI_DTLB_DATA_ACCESS_REG); intr_restore(s); tag = ldxa(slot, ASI_DTLB_TAG_READ_REG); if ((data & TD_V) != 0 && TLB_TAR_CTX(tag) != TLB_CTX_KERNEL) stxa_sync(slot, ASI_DTLB_DATA_ACCESS_REG, 0); slot = TLB_DAR_SLOT(TLB_DAR_DT512_1, i); s = intr_disable(); (void)ldxa(slot, ASI_ITLB_DATA_ACCESS_REG); data = ldxa(slot, ASI_DTLB_DATA_ACCESS_REG); intr_restore(s); tag = ldxa(slot, ASI_DTLB_TAG_READ_REG); if ((data & TD_V) != 0 && TLB_TAR_CTX(tag) != TLB_CTX_KERNEL) stxa_sync(slot, ASI_DTLB_DATA_ACCESS_REG, 0); } if (PCPU_GET(impl) == CPU_IMPL_ULTRASPARCIVp) { for (i = 0; i < CHEETAH_IT512_ENTRIES; i++) { slot = TLB_DAR_SLOT(TLB_DAR_IT512, i); s = intr_disable(); (void)ldxa(slot, ASI_ITLB_DATA_ACCESS_REG); data = ldxa(slot, ASI_ITLB_DATA_ACCESS_REG); intr_restore(s); tag = ldxa(slot, ASI_ITLB_TAG_READ_REG); if ((data & TD_V) != 0 && TLB_TAR_CTX(tag) != TLB_CTX_KERNEL) stxa_sync(slot, 
ASI_ITLB_DATA_ACCESS_REG, 0); } } else { for (i = 0; i < CHEETAH_IT128_ENTRIES; i++) { slot = TLB_DAR_SLOT(TLB_DAR_IT128, i); s = intr_disable(); (void)ldxa(slot, ASI_ITLB_DATA_ACCESS_REG); data = ldxa(slot, ASI_ITLB_DATA_ACCESS_REG); tag = ldxa(slot, ASI_ITLB_TAG_READ_REG); intr_restore(s); if ((data & TD_V) != 0 && TLB_TAR_CTX(tag) != TLB_CTX_KERNEL) stxa_sync(slot, ASI_ITLB_DATA_ACCESS_REG, 0); } } }