/** * Creates the kernel input device. */ static int __init vboxguestLinuxCreateInputDevice(void) { int rc; rc = VbglGRAlloc((VMMDevRequestHeader **)&g_pMouseStatusReq, sizeof(*g_pMouseStatusReq), VMMDevReq_GetMouseStatus); if (RT_FAILURE(rc)) return -ENOMEM; g_pInputDevice = input_allocate_device(); if (!g_pInputDevice) { VbglGRFree(&g_pMouseStatusReq->header); return -ENOMEM; } g_pInputDevice->id.bustype = BUS_PCI; g_pInputDevice->id.vendor = VMMDEV_VENDORID; g_pInputDevice->id.product = VMMDEV_DEVICEID; g_pInputDevice->id.version = VBOX_SHORT_VERSION; g_pInputDevice->open = vboxguestOpenInputDevice; g_pInputDevice->close = vboxguestCloseInputDevice; # if LINUX_VERSION_CODE < KERNEL_VERSION(2, 6, 22) g_pInputDevice->cdev.dev = &g_pPciDev->dev; # else g_pInputDevice->dev.parent = &g_pPciDev->dev; # endif { int rc = input_register_device(g_pInputDevice); if (rc) { VbglGRFree(&g_pMouseStatusReq->header); input_free_device(g_pInputDevice); return rc; } } /* Do what one of our competitors apparently does as that works. */ ASMBitSet(g_pInputDevice->evbit, EV_ABS); ASMBitSet(g_pInputDevice->evbit, EV_KEY); # ifdef EV_SYN ASMBitSet(g_pInputDevice->evbit, EV_SYN); # endif input_set_abs_params(g_pInputDevice, ABS_X, VMMDEV_MOUSE_RANGE_MIN, VMMDEV_MOUSE_RANGE_MAX, 0, 0); input_set_abs_params(g_pInputDevice, ABS_Y, VMMDEV_MOUSE_RANGE_MIN, VMMDEV_MOUSE_RANGE_MAX, 0, 0); ASMBitSet(g_pInputDevice->keybit, BTN_MOUSE); /** @todo this string should be in a header file somewhere. */ g_pInputDevice->name = "VirtualBox mouse integration"; return 0; }
/**
 * Exercises the TEST2 chunk-id allocator and its backing bitmap.
 */
static void test2(RTTEST hTest)
{
    struct TestMap2 *p2 = (struct TestMap2 *)RTTestGuardedAllocTail(hTest, sizeof(TestMap2));
    uint32_t         iBit;
    uint32_t         cAllocsLeft;

    p2->idNil  = NIL_TEST2_ID;
    p2->idLast = TEST2_ID_LAST;

    /* Sanity: an all-zero bitmap must have no set bits... */
    RT_ZERO(p2->bmChunkId);
    RTTEST_CHECK(hTest, ASMBitFirstSet(&p2->bmChunkId[0], TEST2_ID_LAST + 1) == -1);
    for (iBit = 0; iBit <= TEST2_ID_LAST; iBit++)
        RTTEST_CHECK(hTest, !ASMBitTest(&p2->bmChunkId[0], iBit));

    /* ...and an all-ones bitmap must have no clear bits. */
    memset(&p2->bmChunkId[0], 0xff, sizeof(p2->bmChunkId));
    RTTEST_CHECK(hTest, ASMBitFirstClear(&p2->bmChunkId[0], TEST2_ID_LAST + 1) == -1);
    for (iBit = 0; iBit <= TEST2_ID_LAST; iBit++)
        RTTEST_CHECK(hTest, ASMBitTest(&p2->bmChunkId[0], iBit));

    /* The real test: allocate every non-nil ID and verify the bitmap ends up full. */
    p2->idChunkPrev = 0;
    RT_ZERO(p2->bmChunkId);
    ASMBitSet(p2->bmChunkId, NIL_TEST2_ID);
    for (cAllocsLeft = TEST2_ID_LAST; cAllocsLeft > 0; cAllocsLeft--)
        test2AllocId(p2);
    RTTEST_CHECK(hTest, ASMBitFirstClear(&p2->bmChunkId[0], TEST2_ID_LAST + 1) == -1);
}
/**
 * Tests bit @a iBit in @a pvBitmap and sets it if it was clear.
 *
 * @returns The previous state of the bit (true if it was already set).
 */
RTDECL(bool) ASMBitTestAndSet(volatile void *pvBitmap, int32_t iBit)
{
    bool const fWasSet = ASMBitTest(pvBitmap, iBit);
    if (!fWasSet)
        ASMBitSet(pvBitmap, iBit);
    return fWasSet;
}
/**
 * Initializes a slab control structure covering @a cbSlab bytes of
 * @a cbChunk sized chunks starting at flat address @a uFlatSlabPtr.
 */
BS3_DECL(void) Bs3SlabInit(PBS3SLABCTL pSlabCtl, size_t cbSlabCtl, uint32_t uFlatSlabPtr, uint32_t cbSlab, uint16_t cbChunk)
{
    uint16_t cBitsAligned;

    BS3_ASSERT(RT_IS_POWER_OF_TWO(cbChunk));
    BS3_ASSERT(cbSlab >= cbChunk * 4);
    BS3_ASSERT(!(uFlatSlabPtr & (cbChunk - 1)));

    BS3_XPTR_SET_FLAT(BS3SLABCTL, pSlabCtl->pNext, 0);
    BS3_XPTR_SET_FLAT(BS3SLABCTL, pSlabCtl->pHead, 0);
    BS3_XPTR_SET_FLAT(BS3SLABCTL, pSlabCtl->pbStart, uFlatSlabPtr);

    pSlabCtl->cbChunk     = cbChunk;
    pSlabCtl->cChunkShift = ASMBitFirstSetU16(cbChunk) - 1; /* log2 of the power-of-two chunk size */
    pSlabCtl->cChunks     = cbSlab >> pSlabCtl->cChunkShift;
    pSlabCtl->cFreeChunks = pSlabCtl->cChunks;

    /* The allocation bitmap is kept a whole multiple of 32 bits. */
    cBitsAligned = RT_ALIGN_T(pSlabCtl->cChunks, 32, uint16_t);
    BS3_ASSERT(cbSlabCtl >= RT_OFFSETOF(BS3SLABCTL, bmAllocated[cBitsAligned >> 3]));
    Bs3MemZero(&pSlabCtl->bmAllocated, cBitsAligned >> 3);

    /* Mark excess bitmap padding bits as allocated so they are never handed out. */
    if (cBitsAligned != pSlabCtl->cChunks)
    {
        uint16_t iBit = pSlabCtl->cChunks;
        while (iBit < cBitsAligned)
            ASMBitSet(pSlabCtl->bmAllocated, iBit++);
    }
}
/**
 * Tries to allocate a chunk of pages from a heap block.
 *
 * @retval VINF_SUCCESS on success.
 * @retval VERR_NO_MEMORY if the allocation failed.
 * @param   pBlock          The block to allocate from.
 * @param   cPages          The size of the allocation.
 * @param   fZero           Whether it should be zeroed or not.
 * @param   ppv             Where to return the allocation address on success.
 */
DECLINLINE(int) rtHeapPageAllocFromBlock(PRTHEAPPAGEBLOCK pBlock, size_t cPages, bool fZero, void **ppv)
{
    if (pBlock->cFreePages >= cPages)
    {
        /* There is at least one free page, so the first-clear scan must succeed. */
        int iPage = ASMBitFirstClear(&pBlock->bmAlloc[0], RTMEMPAGEPOSIX_BLOCK_PAGE_COUNT);
        Assert(iPage >= 0);

        /* special case: single page. */
        if (cPages == 1)
        {
            ASMBitSet(&pBlock->bmAlloc[0], iPage);
            return rtHeapPageAllocFromBlockSuccess(pBlock, iPage, cPages, fZero, ppv);
        }

        /* Multi-page: first-fit scan for a run of cPages clear bits, starting
           each probe at a clear bit that still leaves room for the whole run. */
        while (   iPage >= 0
               && (unsigned)iPage <= RTMEMPAGEPOSIX_BLOCK_PAGE_COUNT - cPages)
        {
            if (rtHeapPageIsPageRangeFree(pBlock, iPage + 1, cPages - 1))
            {
                ASMBitSetRange(&pBlock->bmAlloc[0], iPage, iPage + cPages);
                return rtHeapPageAllocFromBlockSuccess(pBlock, iPage, cPages, fZero, ppv);
            }

            /* next: skip past the allocated stretch that blocked us, then
               position on the next clear bit (the next candidate run start). */
            iPage = ASMBitNextSet(&pBlock->bmAlloc[0], RTMEMPAGEPOSIX_BLOCK_PAGE_COUNT, iPage);
            if (iPage < 0 || iPage >= RTMEMPAGEPOSIX_BLOCK_PAGE_COUNT - 1)
                break;
            iPage = ASMBitNextClear(&pBlock->bmAlloc[0], RTMEMPAGEPOSIX_BLOCK_PAGE_COUNT, iPage);
        }
    }

    return VERR_NO_MEMORY;
}
/**
 * Returns the CR_DISPLAY for the given screen, lazily initializing it on
 * first use; NULL on invalid screen id or init failure.
 */
static PCR_DISPLAY crServerDisplayGet(uint32_t idScreen)
{
    if (idScreen >= CR_MAX_GUEST_MONITORS)
    {
        crWarning("invalid idScreen %d", idScreen);
        return NULL;
    }

    /* Already initialized? */
    if (ASMBitTest(cr_server.DisplaysInitMap, idScreen))
        return &cr_server.aDispplays[idScreen];

    /* the display (screen id == 0) can be initialized while doing crServerCheckInitDisplayBlitter,
     * so re-check the bit map */
    /* NOTE(review): nothing executes between this check and the identical one
     * above, so as written the re-check can never observe a different value;
     * presumably a call (e.g. crServerCheckInitDisplayBlitter) was meant to
     * sit between the two tests -- verify against the revision history. */
    if (ASMBitTest(cr_server.DisplaysInitMap, idScreen))
        return &cr_server.aDispplays[idScreen];

    /* Initialize the display now and mark it done in the bitmap. */
    int rc = CrDpInit(&cr_server.aDispplays[idScreen]);
    if (RT_SUCCESS(rc))
    {
        CrDpResize(&cr_server.aDispplays[idScreen],
                   cr_server.screen[idScreen].w, cr_server.screen[idScreen].h,
                   cr_server.screen[idScreen].w, cr_server.screen[idScreen].h);
        ASMBitSet(cr_server.DisplaysInitMap, idScreen);
        return &cr_server.aDispplays[idScreen];
    }
    else
    {
        crWarning("CrDpInit failed for screen %d", idScreen);
    }

    return NULL;
}
/** * Grows the cache. * * @returns IPRT status code. * @param pThis The memory cache instance. */ static int rtMemCacheGrow(RTMEMCACHEINT *pThis) { /* * Enter the critical section here to avoid allocation races leading to * wasted memory (++) and make it easier to link in the new page. */ RTCritSectEnter(&pThis->CritSect); int rc = VINF_SUCCESS; if (pThis->cFree < 0) { /* * Allocate and initialize the new page. * * We put the constructor bitmap at the lower end right after cFree. * We then push the object array to the end of the page and place the * allocation bitmap below it. The hope is to increase the chance that * the allocation bitmap is in a different cache line than cFree since * this increases performance markably when lots of threads are beating * on the cache. */ PRTMEMCACHEPAGE pPage = (PRTMEMCACHEPAGE)RTMemPageAlloc(PAGE_SIZE); if (pPage) { uint32_t const cObjects = RT_MIN(pThis->cPerPage, pThis->cMax - pThis->cTotal); ASMMemZeroPage(pPage); pPage->pCache = pThis; pPage->pNext = NULL; pPage->cFree = cObjects; pPage->cObjects = cObjects; uint8_t *pb = (uint8_t *)(pPage + 1); pb = RT_ALIGN_PT(pb, 8, uint8_t *); pPage->pbmCtor = pb; pb = (uint8_t *)pPage + PAGE_SIZE - pThis->cbObject * cObjects; pPage->pbObjects = pb; Assert(RT_ALIGN_P(pb, pThis->cbAlignment) == pb); pb -= pThis->cBits / 8; pb = (uint8_t *)((uintptr_t)pb & ~(uintptr_t)7); pPage->pbmAlloc = pb; Assert((uintptr_t)pPage->pbmCtor + pThis->cBits / 8 <= (uintptr_t)pPage->pbmAlloc); /* Mark the bitmap padding and any unused objects as allocated. */ for (uint32_t iBit = cObjects; iBit < pThis->cBits; iBit++) ASMBitSet(pPage->pbmAlloc, iBit); /* Make it the hint. */ ASMAtomicWritePtr(&pThis->pPageHint, pPage); /* Link the page in at the end of the list. */ ASMAtomicWritePtr(pThis->ppPageNext, pPage); pThis->ppPageNext = &pPage->pNext; /* Add it to the page counts. */ ASMAtomicAddS32(&pThis->cFree, cObjects); ASMAtomicAddU32(&pThis->cTotal, cObjects); } else
/** * Mark a page as scanned/not scanned * * @note: we always mark it as scanned, even if we haven't completely done so * * @returns VBox status code. * @param pVM Pointer to the VM. * @param pPage GC page address (not necessarily aligned) * @param fScanned Mark as scanned or not scanned * */ VMM_INT_DECL(int) CSAMMarkPage(PVM pVM, RTRCUINTPTR pPage, bool fScanned) { int pgdir, bit; uintptr_t page; #ifdef LOG_ENABLED if (fScanned && !CSAMIsPageScanned(pVM, (RTRCPTR)pPage)) Log(("CSAMMarkPage %RRv\n", pPage)); #endif if (!CSAMIsEnabled(pVM)) return VINF_SUCCESS; Assert(!HMIsEnabled(pVM)); page = (uintptr_t)pPage; pgdir = page >> X86_PAGE_4M_SHIFT; bit = (page & X86_PAGE_4M_OFFSET_MASK) >> X86_PAGE_4K_SHIFT; Assert(pgdir < CSAM_PGDIRBMP_CHUNKS); Assert(bit < PAGE_SIZE); if(!CTXSUFF(pVM->csam.s.pPDBitmap)[pgdir]) { STAM_COUNTER_INC(&pVM->csam.s.StatBitmapAlloc); int rc = MMHyperAlloc(pVM, CSAM_PAGE_BITMAP_SIZE, 0, MM_TAG_CSAM, (void **)&pVM->csam.s.CTXSUFF(pPDBitmap)[pgdir]); if (RT_FAILURE(rc)) { Log(("MMHyperAlloc failed with %Rrc\n", rc)); return rc; } #ifdef IN_RC pVM->csam.s.pPDHCBitmapGC[pgdir] = MMHyperRCToR3(pVM, (RCPTRTYPE(void*))pVM->csam.s.pPDBitmapGC[pgdir]); if (!pVM->csam.s.pPDHCBitmapGC[pgdir]) { Log(("MMHyperHC2GC failed for %RRv\n", pVM->csam.s.pPDBitmapGC[pgdir])); return rc; } #else pVM->csam.s.pPDGCBitmapHC[pgdir] = MMHyperR3ToRC(pVM, pVM->csam.s.pPDBitmapHC[pgdir]); if (!pVM->csam.s.pPDGCBitmapHC[pgdir]) { Log(("MMHyperHC2GC failed for %RHv\n", pVM->csam.s.pPDBitmapHC[pgdir])); return rc; } #endif } if(fScanned) ASMBitSet((void *)pVM->csam.s.CTXSUFF(pPDBitmap)[pgdir], bit); else ASMBitClear((void *)pVM->csam.s.CTXSUFF(pPDBitmap)[pgdir], bit); return VINF_SUCCESS; }
/**
 * Avoids some gotos in rtHeapPageAllocFromBlock.
 *
 * @returns VINF_SUCCESS.
 * @param   pBlock          The block.
 * @param   iPage           The page to start allocating at.
 * @param   cPages          The number of pages.
 * @param   fZero           Whether to clear them.
 * @param   ppv             Where to return the allocation address.
 */
DECLINLINE(int) rtHeapPageAllocFromBlockSuccess(PRTHEAPPAGEBLOCK pBlock, uint32_t iPage, size_t cPages, bool fZero, void **ppv)
{
    PRTHEAPPAGE pHeap = pBlock->pHeap;
    uint8_t    *pbRet;

    /* Record the first page of the allocation and update the free counters. */
    ASMBitSet(&pBlock->bmFirst[0], iPage);
    pBlock->cFreePages -= cPages;
    pHeap->cFreePages  -= cPages;
    if (   !pHeap->pHint2
        || pHeap->pHint2->cFreePages < pBlock->cFreePages)
        pHeap->pHint2 = pBlock;
    pHeap->cAllocCalls++;

    /* Compute the address, optionally zero the pages, and hand it back. */
    pbRet = (uint8_t *)pBlock->Core.Key + (iPage << PAGE_SHIFT);
    if (fZero)
        RT_BZERO(pbRet, cPages << PAGE_SHIFT);
    *ppv = pbRet;
    return VINF_SUCCESS;
}
int main()
{
    /*
     * Init the runtime and stuff.
     */
    RTTEST hTest;
    int rc = RTTestInitAndCreate("tstRTBitOperations", &hTest);
    if (rc)
        return rc;
    RTTestBanner(hTest);

    int i;
    int j;
    int k;

    /*
     * Tests
     */
    struct TestMap
    {
        uint32_t au32[4];
    };
#if 0
    struct TestMap sTest;
    struct TestMap *p = &sTest;
#else
    /* Guarded allocation: overruns past the 128-bit map hit the guard page. */
    struct TestMap *p = (struct TestMap *)RTTestGuardedAllocTail(hTest, sizeof(*p));
#endif
#define DUMP() RTTestPrintf(hTest, RTTESTLVL_INFO, "au32={%08x,%08x,%08x,%08x}", p->au32[0], p->au32[1], p->au32[2], p->au32[3])
#define CHECK(expr) do { if (!(expr)) { RTTestFailed(hTest, "line %d: %s", __LINE__, #expr); DUMP(); } CHECK_GUARD(s); } while (0)
#define CHECK_BIT(expr, b1) do { if (!(expr)) { RTTestFailed(hTest, "line %d, b1=%d: %s", __LINE__, b1, #expr); } CHECK_GUARD(s); } while (0)
#define CHECK_BIT2(expr, b1, b2) do { if (!(expr)) { RTTestFailed(hTest, "line %d, b1=%d b2=%d: %s", __LINE__, b1, b2, #expr); } CHECK_GUARD(s); } while (0)
#define CHECK_BIT3(expr, b1, b2, b3) do { if (!(expr)) { RTTestFailed(hTest, "line %d, b1=%d b2=%d b3=%d: %s", __LINE__, b1, b2, b3, #expr); } CHECK_GUARD(s); } while (0)

#define GUARD_MAP(p) do { } while (0)
#define CHECK_GUARD(p) do { } while (0)
#define MAP_CLEAR(p) do { RT_ZERO(*(p)); GUARD_MAP(p); } while (0)
#define MAP_SET(p) do { memset(p, 0xff, sizeof(*(p))); GUARD_MAP(p); } while (0)

    /* self check. */
    MAP_CLEAR(p);
    CHECK_GUARD(p);

    /* bit set */
    MAP_CLEAR(p);
    ASMBitSet(&p->au32[0], 0);
    ASMBitSet(&p->au32[0], 31);
    ASMBitSet(&p->au32[0], 65);
    CHECK(p->au32[0] == 0x80000001U);
    CHECK(p->au32[2] == 0x00000002U);
    CHECK(ASMBitTestAndSet(&p->au32[0], 0)   && p->au32[0] == 0x80000001U);
    CHECK(!ASMBitTestAndSet(&p->au32[0], 16) && p->au32[0] == 0x80010001U);
    CHECK(ASMBitTestAndSet(&p->au32[0], 16)  && p->au32[0] == 0x80010001U);
    CHECK(!ASMBitTestAndSet(&p->au32[0], 80) && p->au32[2] == 0x00010002U);

    MAP_CLEAR(p);
    ASMAtomicBitSet(&p->au32[0], 0);
    ASMAtomicBitSet(&p->au32[0], 30);
    ASMAtomicBitSet(&p->au32[0], 64);
    CHECK(p->au32[0] == 0x40000001U);
    CHECK(p->au32[2] == 0x00000001U);
    CHECK(ASMAtomicBitTestAndSet(&p->au32[0], 0)   && p->au32[0] == 0x40000001U);
    CHECK(!ASMAtomicBitTestAndSet(&p->au32[0], 16) && p->au32[0] == 0x40010001U);
    CHECK(ASMAtomicBitTestAndSet(&p->au32[0], 16)  && p->au32[0] == 0x40010001U);
    CHECK(!ASMAtomicBitTestAndSet(&p->au32[0], 80) && p->au32[2] == 0x00010001U);

    /* bit clear */
    MAP_SET(p);
    ASMBitClear(&p->au32[0], 0);
    ASMBitClear(&p->au32[0], 31);
    ASMBitClear(&p->au32[0], 65);
    CHECK(p->au32[0] == ~0x80000001U);
    CHECK(p->au32[2] == ~0x00000002U);
    CHECK(!ASMBitTestAndClear(&p->au32[0], 0)  && p->au32[0] == ~0x80000001U);
    CHECK(ASMBitTestAndClear(&p->au32[0], 16)  && p->au32[0] == ~0x80010001U);
    CHECK(!ASMBitTestAndClear(&p->au32[0], 16) && p->au32[0] == ~0x80010001U);
    CHECK(ASMBitTestAndClear(&p->au32[0], 80)  && p->au32[2] == ~0x00010002U);

    MAP_SET(p);
    ASMAtomicBitClear(&p->au32[0], 0);
    ASMAtomicBitClear(&p->au32[0], 30);
    ASMAtomicBitClear(&p->au32[0], 64);
    CHECK(p->au32[0] == ~0x40000001U);
    CHECK(p->au32[2] == ~0x00000001U);
    CHECK(!ASMAtomicBitTestAndClear(&p->au32[0], 0)  && p->au32[0] == ~0x40000001U);
    CHECK(ASMAtomicBitTestAndClear(&p->au32[0], 16)  && p->au32[0] == ~0x40010001U);
    CHECK(!ASMAtomicBitTestAndClear(&p->au32[0], 16) && p->au32[0] == ~0x40010001U);
    CHECK(ASMAtomicBitTestAndClear(&p->au32[0], 80)  && p->au32[2] == ~0x00010001U);

    /* toggle */
    MAP_SET(p);
    ASMBitToggle(&p->au32[0], 0);
    ASMBitToggle(&p->au32[0], 31);
    ASMBitToggle(&p->au32[0], 65);
    ASMBitToggle(&p->au32[0], 47);
    ASMBitToggle(&p->au32[0], 47); /* toggling twice restores the bit */
    CHECK(p->au32[0] == ~0x80000001U);
    CHECK(p->au32[2] == ~0x00000002U);
    CHECK(!ASMBitTestAndToggle(&p->au32[0], 0)  && p->au32[0] == ~0x80000000U);
    CHECK(ASMBitTestAndToggle(&p->au32[0], 0)   && p->au32[0] == ~0x80000001U);
    CHECK(ASMBitTestAndToggle(&p->au32[0], 16)  && p->au32[0] == ~0x80010001U);
    CHECK(!ASMBitTestAndToggle(&p->au32[0], 16) && p->au32[0] == ~0x80000001U);
    CHECK(ASMBitTestAndToggle(&p->au32[0], 80)  && p->au32[2] == ~0x00010002U);

    MAP_SET(p);
    ASMAtomicBitToggle(&p->au32[0], 0);
    ASMAtomicBitToggle(&p->au32[0], 30);
    ASMAtomicBitToggle(&p->au32[0], 64);
    ASMAtomicBitToggle(&p->au32[0], 47);
    ASMAtomicBitToggle(&p->au32[0], 47); /* toggling twice restores the bit */
    CHECK(p->au32[0] == ~0x40000001U);
    CHECK(p->au32[2] == ~0x00000001U);
    CHECK(!ASMAtomicBitTestAndToggle(&p->au32[0], 0)  && p->au32[0] == ~0x40000000U);
    CHECK(ASMAtomicBitTestAndToggle(&p->au32[0], 0)   && p->au32[0] == ~0x40000001U);
    CHECK(ASMAtomicBitTestAndToggle(&p->au32[0], 16)  && p->au32[0] == ~0x40010001U);
    CHECK(!ASMAtomicBitTestAndToggle(&p->au32[0], 16) && p->au32[0] == ~0x40000001U);
    CHECK(ASMAtomicBitTestAndToggle(&p->au32[0], 80)  && p->au32[2] == ~0x00010001U);

    /* test bit. */
    for (i = 0; i < 128; i++)
    {
        MAP_SET(p);
        CHECK_BIT(ASMBitTest(&p->au32[0], i), i);
        ASMBitToggle(&p->au32[0], i);
        CHECK_BIT(!ASMBitTest(&p->au32[0], i), i);
        CHECK_BIT(!ASMBitTestAndToggle(&p->au32[0], i), i);
        CHECK_BIT(ASMBitTest(&p->au32[0], i), i);
        CHECK_BIT(ASMBitTestAndToggle(&p->au32[0], i), i);
        CHECK_BIT(!ASMBitTest(&p->au32[0], i), i);

        MAP_SET(p);
        CHECK_BIT(ASMBitTest(&p->au32[0], i), i);
        ASMAtomicBitToggle(&p->au32[0], i);
        CHECK_BIT(!ASMBitTest(&p->au32[0], i), i);
        CHECK_BIT(!ASMAtomicBitTestAndToggle(&p->au32[0], i), i);
        CHECK_BIT(ASMBitTest(&p->au32[0], i), i);
        CHECK_BIT(ASMAtomicBitTestAndToggle(&p->au32[0], i), i);
        CHECK_BIT(!ASMBitTest(&p->au32[0], i), i);
    }

    /* bit searching */
    MAP_SET(p);
    CHECK(ASMBitFirstClear(&p->au32[0], sizeof(p->au32) * 8) == -1);
    CHECK(ASMBitFirstSet(&p->au32[0], sizeof(p->au32) * 8) == 0);

    ASMBitClear(&p->au32[0], 1);
    CHECK(ASMBitFirstClear(&p->au32[0], sizeof(p->au32) * 8) == 1);
    CHECK(ASMBitFirstSet(&p->au32[0], sizeof(p->au32) * 8) == 0);

    MAP_SET(p);
    ASMBitClear(&p->au32[0], 95);
    CHECK(ASMBitFirstClear(&p->au32[0], sizeof(p->au32) * 8) == 95);
    CHECK(ASMBitFirstSet(&p->au32[0], sizeof(p->au32) * 8) == 0);

    MAP_SET(p);
    ASMBitClear(&p->au32[0], 127);
    CHECK(ASMBitFirstClear(&p->au32[0], sizeof(p->au32) * 8) == 127);
    CHECK(ASMBitFirstSet(&p->au32[0], sizeof(p->au32) * 8) == 0);
    CHECK(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, 0) == 1);
    CHECK(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, 1) == 2);
    CHECK(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, 2) == 3);

    MAP_SET(p);
    CHECK(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8, 0) == -1);
    ASMBitClear(&p->au32[0], 32);
    CHECK(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8, 32) == -1); /* search starts after bit 32 */
    ASMBitClear(&p->au32[0], 88);
    CHECK(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8, 57) == 88);

    MAP_SET(p);
    ASMBitClear(&p->au32[0], 31);
    ASMBitClear(&p->au32[0], 57);
    ASMBitClear(&p->au32[0], 88);
    ASMBitClear(&p->au32[0], 101);
    ASMBitClear(&p->au32[0], 126);
    ASMBitClear(&p->au32[0], 127);
    CHECK(ASMBitFirstClear(&p->au32[0], sizeof(p->au32) * 8) == 31);
    CHECK(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8, 31) == 57);
    CHECK(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8, 57) == 88);
    CHECK(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8, 88) == 101);
    CHECK(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8, 101) == 126);
    CHECK(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8, 126) == 127);
    CHECK(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8, 127) == -1);
    CHECK(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, 29) == 30);
    CHECK(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, 30) == 32);

    /* Exhaustive next-clear scans at every bit position. */
    MAP_CLEAR(p);
    for (i = 1; i < 128; i++)
        CHECK_BIT(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8, i - 1) == i, i);
    for (i = 0; i < 128; i++)
    {
        MAP_SET(p);
        ASMBitClear(&p->au32[0], i);
        CHECK_BIT(ASMBitFirstClear(&p->au32[0], sizeof(p->au32) * 8) == i, i);
        for (j = 0; j < i; j++)
            CHECK_BIT(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8, j) == i, i);
        for (j = i; j < 128; j++)
            CHECK_BIT(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8, j) == -1, i);
    }

    /* clear range. */
    MAP_SET(p);
    ASMBitClearRange(&p->au32, 0, 128);
    CHECK(!p->au32[0] && !p->au32[1] && !p->au32[2] && !p->au32[3]);
    /* Every [i, j) range: bits inside cleared, bits outside untouched. */
    for (i = 0; i < 128; i++)
    {
        for (j = i + 1; j <= 128; j++)
        {
            MAP_SET(p);
            ASMBitClearRange(&p->au32, i, j);
            for (k = 0; k < i; k++)
                CHECK_BIT3(ASMBitTest(&p->au32[0], k), i, j, k);
            for (k = i; k < j; k++)
                CHECK_BIT3(!ASMBitTest(&p->au32[0], k), i, j, k);
            for (k = j; k < 128; k++)
                CHECK_BIT3(ASMBitTest(&p->au32[0], k), i, j, k);
        }
    }

    /* set range. */
    MAP_CLEAR(p);
    ASMBitSetRange(&p->au32[0], 0, 5);
    ASMBitSetRange(&p->au32[0], 6, 44);
    ASMBitSetRange(&p->au32[0], 64, 65);
    CHECK(p->au32[0] == UINT32_C(0xFFFFFFDF));
    CHECK(p->au32[1] == UINT32_C(0x00000FFF));
    CHECK(p->au32[2] == UINT32_C(0x00000001));

    MAP_CLEAR(p);
    ASMBitSetRange(&p->au32[0], 0, 1);
    ASMBitSetRange(&p->au32[0], 62, 63);
    ASMBitSetRange(&p->au32[0], 63, 64);
    ASMBitSetRange(&p->au32[0], 127, 128);
    CHECK(p->au32[0] == UINT32_C(0x00000001) && p->au32[1] == UINT32_C(0xC0000000));
    CHECK(p->au32[2] == UINT32_C(0x00000000) && p->au32[3] == UINT32_C(0x80000000));

    MAP_CLEAR(p);
    ASMBitSetRange(&p->au32, 0, 128);
    CHECK(!~p->au32[0] && !~p->au32[1] && !~p->au32[2] && !~p->au32[3]);
    /* Every [i, j) range: bits inside set, bits outside untouched. */
    for (i = 0; i < 128; i++)
    {
        for (j = i + 1; j <= 128; j++)
        {
            MAP_CLEAR(p);
            ASMBitSetRange(&p->au32, i, j);
            for (k = 0; k < i; k++)
                CHECK_BIT3(!ASMBitTest(&p->au32[0], k), i, j, k);
            for (k = i; k < j; k++)
                CHECK_BIT3(ASMBitTest(&p->au32[0], k), i, j, k);
            for (k = j; k < 128; k++)
                CHECK_BIT3(!ASMBitTest(&p->au32[0], k), i, j, k);
        }
    }

    /* searching for set bits. */
    MAP_CLEAR(p);
    CHECK(ASMBitFirstSet(&p->au32[0], sizeof(p->au32) * 8) == -1);

    ASMBitSet(&p->au32[0], 65);
    CHECK(ASMBitFirstSet(&p->au32[0], sizeof(p->au32) * 8) == 65);
    CHECK(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, 65) == -1);
    for (i = 0; i < 65; i++)
        CHECK(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, i) == 65);
    for (i = 65; i < 128; i++)
        CHECK(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, i) == -1);

    ASMBitSet(&p->au32[0], 17);
    CHECK(ASMBitFirstSet(&p->au32[0], sizeof(p->au32) * 8) == 17);
    CHECK(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, 17) == 65);
    for (i = 0; i < 16; i++)
        CHECK(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, i) == 17);
    for (i = 17; i < 65; i++)
        CHECK(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, i) == 65);

    /* Exhaustive next-set scans at every bit position. */
    MAP_SET(p);
    for (i = 1; i < 128; i++)
        CHECK_BIT(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, i - 1) == i, i);
    for (i = 0; i < 128; i++)
    {
        MAP_CLEAR(p);
        ASMBitSet(&p->au32[0], i);
        CHECK_BIT(ASMBitFirstSet(&p->au32[0], sizeof(p->au32) * 8) == i, i);
        for (j = 0; j < i; j++)
            CHECK_BIT(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, j) == i, i);
        for (j = i; j < 128; j++)
            CHECK_BIT(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, j) == -1, i);
    }

    /* Scalar last/first-set helpers: 1-based bit index, 0 when no bit is set. */
    CHECK(ASMBitLastSetU32(0) == 0);
    CHECK(ASMBitLastSetU32(1) == 1);
    CHECK(ASMBitLastSetU32(0x80000000) == 32);
    CHECK(ASMBitLastSetU32(0xffffffff) == 32);
    CHECK(ASMBitLastSetU32(RT_BIT(23) | RT_BIT(11)) == 24);
    for (i = 0; i < 32; i++)
        CHECK(ASMBitLastSetU32(1 << i) == (unsigned)i + 1);

    CHECK(ASMBitFirstSetU32(0) == 0);
    CHECK(ASMBitFirstSetU32(1) == 1);
    CHECK(ASMBitFirstSetU32(0x80000000) == 32);
    CHECK(ASMBitFirstSetU32(0xffffffff) == 1);
    CHECK(ASMBitFirstSetU32(RT_BIT(23) | RT_BIT(11)) == 12);
    for (i = 0; i < 32; i++)
        CHECK(ASMBitFirstSetU32(1 << i) == (unsigned)i + 1);

    /*
     * Special tests.
     */
    test2(hTest);

    /*
     * Summary
     */
    return RTTestSummaryAndDestroy(hTest);
}
/**
 * Allocates a page from the page pool.
 *
 * @returns Pointer to allocated page(s).
 * @returns NULL on failure.
 * @param   pPool   Pointer to the page pool.
 * @thread  The Emulation Thread.
 */
DECLINLINE(void *) mmR3PagePoolAlloc(PMMPAGEPOOL pPool)
{
    VM_ASSERT_EMT(pPool->pVM);
    STAM_COUNTER_INC(&pPool->cAllocCalls);

    /*
     * Walk free list.
     */
    if (pPool->pHeadFree)
    {
        PMMPAGESUBPOOL pSub = pPool->pHeadFree;
        /* decrement free count and unlink if no more free entries. */
        if (!--pSub->cPagesFree)
            pPool->pHeadFree = pSub->pNextFree;
#ifdef VBOX_WITH_STATISTICS
        pPool->cFreePages--;
#endif

        /* find free spot in bitmap. */
#ifdef USE_INLINE_ASM_BIT_OPS
        const int iPage = ASMBitFirstClear(pSub->auBitmap, pSub->cPages);
        if (iPage >= 0)
        {
            Assert(!ASMBitTest(pSub->auBitmap, iPage));
            ASMBitSet(pSub->auBitmap, iPage);
            return (uint8_t *)pSub->pvPages + PAGE_SIZE * iPage;
        }
#else
        /* Manual fallback: scan word by word for a word with a clear bit. */
        unsigned *pu = &pSub->auBitmap[0];
        unsigned *puEnd = &pSub->auBitmap[pSub->cPages / (sizeof(pSub->auBitmap) * 8)];
        while (pu < puEnd)
        {
            unsigned u;
            if ((u = *pu) != ~0U)
            {
                unsigned iBit = 0;
                unsigned uMask = 1;
                while (iBit < sizeof(pSub->auBitmap[0]) * 8)
                {
                    if (!(u & uMask))
                    {
                        *pu |= uMask;
                        return (uint8_t *)pSub->pvPages
                             + PAGE_SIZE * (iBit + ((uint8_t *)pu - (uint8_t *)&pSub->auBitmap[0]) * 8);
                    }
                    iBit++;
                    uMask <<= 1;
                }
                STAM_COUNTER_INC(&pPool->cErrors);
                AssertMsgFailed(("how odd, expected to find a free bit in %#x, but didn't\n", u));
            }
            /* next */
            pu++;
        }
#endif
        /* Should be unreachable: cPagesFree promised a free page. */
        STAM_COUNTER_INC(&pPool->cErrors);
#ifdef VBOX_WITH_STATISTICS
        pPool->cFreePages++;
#endif
        AssertMsgFailed(("how strange, expected to find a free bit in %p, but didn't (%d pages supposed to be free!)\n",
                         pSub, pSub->cPagesFree + 1));
    }

    /*
     * Allocate new subpool.
     */
    unsigned cPages = !pPool->fLow ? 128 : 32;
    PMMPAGESUBPOOL pSub;
    /* One allocation holds the subpool header + bitmap, the physical page
       descriptors, the phys lookup records and the single virtual lookup record. */
    int rc = MMHyperAlloc(pPool->pVM,
                          RT_OFFSETOF(MMPAGESUBPOOL, auBitmap[cPages / (sizeof(pSub->auBitmap[0]) * 8)])
                          + (sizeof(SUPPAGE) + sizeof(MMPPLOOKUPHCPHYS)) * cPages
                          + sizeof(MMPPLOOKUPHCPTR),
                          0,
                          MM_TAG_MM_PAGE,
                          (void **)&pSub);
    if (RT_FAILURE(rc))
        return NULL;

    PSUPPAGE paPhysPages = (PSUPPAGE)&pSub->auBitmap[cPages / (sizeof(pSub->auBitmap[0]) * 8)];
    Assert((uintptr_t)paPhysPages >= (uintptr_t)&pSub->auBitmap[1]);
    if (!pPool->fLow)
    {
        rc = SUPR3PageAllocEx(cPages, 0 /* fFlags */, &pSub->pvPages, NULL, paPhysPages);
        if (RT_FAILURE(rc))
            rc = VMSetError(pPool->pVM, rc, RT_SRC_POS,
                            N_("Failed to lock host %zd bytes of memory (out of memory)"),
                            (size_t)cPages << PAGE_SHIFT);
    }
    else
        rc = SUPR3LowAlloc(cPages, &pSub->pvPages, NULL, paPhysPages);
    if (RT_SUCCESS(rc))
    {
        /*
         * Setup the sub structure and allocate the requested page.
         */
        pSub->cPages    = cPages;
        pSub->cPagesFree= cPages - 1;
        pSub->paPhysPages = paPhysPages;
        memset(pSub->auBitmap, 0, cPages / 8);
        /* allocate first page. */
        pSub->auBitmap[0] |= 1;
        /* link into free chain. */
        pSub->pNextFree = pPool->pHeadFree;
        pPool->pHeadFree= pSub;
        /* link into main chain. */
        pSub->pNext     = pPool->pHead;
        pPool->pHead    = pSub;
        /* update pool statistics. */
        pPool->cSubPools++;
        pPool->cPages  += cPages;
#ifdef VBOX_WITH_STATISTICS
        pPool->cFreePages += cPages - 1;
#endif

        /*
         * Initialize the physical pages with backpointer to subpool.
         */
        unsigned i = cPages;
        while (i-- > 0)
        {
            AssertMsg(paPhysPages[i].Phys && !(paPhysPages[i].Phys & PAGE_OFFSET_MASK),
                      ("i=%d Phys=%d\n", i, paPhysPages[i].Phys));
            paPhysPages[i].uReserved = (RTHCUINTPTR)pSub;
        }

        /*
         * Initialize the physical lookup record with backpointers to the physical pages.
         */
        PMMPPLOOKUPHCPHYS paLookupPhys = (PMMPPLOOKUPHCPHYS)&paPhysPages[cPages];
        i = cPages;
        while (i-- > 0)
        {
            paLookupPhys[i].pPhysPage = &paPhysPages[i];
            paLookupPhys[i].Core.Key = paPhysPages[i].Phys;
            RTAvlHCPhysInsert(&pPool->pLookupPhys, &paLookupPhys[i].Core);
        }

        /*
         * And the one record for virtual memory lookup.
         */
        PMMPPLOOKUPHCPTR pLookupVirt = (PMMPPLOOKUPHCPTR)&paLookupPhys[cPages];
        pLookupVirt->pSubPool = pSub;
        pLookupVirt->Core.Key = pSub->pvPages;
        RTAvlPVInsert(&pPool->pLookupVirt, &pLookupVirt->Core);

        /* return allocated page (first). */
        return pSub->pvPages;
    }

    /* Page allocation failed: release the subpool structure and bail out. */
    MMHyperFree(pPool->pVM, pSub);
    STAM_COUNTER_INC(&pPool->cErrors);
    if (pPool->fLow)
        VMSetError(pPool->pVM, rc, RT_SRC_POS,
                   N_("Failed to expand page pool for memory below 4GB. Current size: %d pages"),
                   pPool->cPages);
    AssertMsgFailed(("Failed to expand pool%s. rc=%Rrc poolsize=%d\n",
                     pPool->fLow ? " (<4GB)" : "", rc, pPool->cPages));
    return NULL;
}
/** * Worker for the --list and --extract commands. * * @returns The appropriate exit code. * @param pOpts The Unzip options. * @param pfnCallback The command specific callback. */ static RTEXITCODE rtZipUnzipDoWithMembers(PRTZIPUNZIPCMDOPS pOpts, PFNDOWITHMEMBER pfnCallback, uint32_t *pcFiles, PRTFOFF pcBytes) { /* * Allocate a bitmap to go with the file list. This will be used to * indicate which files we've processed and which not. */ uint32_t *pbmFound = NULL; if (pOpts->cFiles) { pbmFound = (uint32_t *)RTMemAllocZ(((pOpts->cFiles + 31) / 32) * sizeof(uint32_t)); if (!pbmFound) return RTMsgErrorExit(RTEXITCODE_FAILURE, "Failed to allocate the found-file-bitmap"); } uint32_t cFiles = 0; RTFOFF cBytesSum = 0; /* * Open the input archive. */ RTVFSFSSTREAM hVfsFssIn; RTEXITCODE rcExit = rtZipUnzipCmdOpenInputArchive(pOpts, &hVfsFssIn); if (rcExit == RTEXITCODE_SUCCESS) { /* * Process the stream. */ for (;;) { /* * Retrieve the next object. */ char *pszName; RTVFSOBJ hVfsObj; int rc = RTVfsFsStrmNext(hVfsFssIn, &pszName, NULL, &hVfsObj); if (RT_FAILURE(rc)) { if (rc != VERR_EOF) rcExit = RTMsgErrorExit(RTEXITCODE_FAILURE, "RTVfsFsStrmNext returned %Rrc", rc); break; } /* * Should we process this object? */ uint32_t iFile = UINT32_MAX; if ( !pOpts->cFiles || rtZipUnzipCmdIsNameInArray(pszName, pOpts->papszFiles, &iFile)) { if (pbmFound) ASMBitSet(pbmFound, iFile); RTFOFF cBytes = 0; rcExit = pfnCallback(pOpts, hVfsObj, pszName, rcExit, &cBytes); cBytesSum += cBytes; cFiles++; } /* * Release the current object and string. */ RTVfsObjRelease(hVfsObj); RTStrFree(pszName); } /* * Complain about any files we didn't find. */ for (uint32_t iFile = 0; iFile <pOpts->cFiles; iFile++) if (!ASMBitTest(pbmFound, iFile)) { RTMsgError("%s: Was not found in the archive", pOpts->papszFiles[iFile]); rcExit = RTEXITCODE_FAILURE; } RTVfsFsStrmRelease(hVfsFssIn); } RTMemFree(pbmFound); *pcFiles = cFiles; *pcBytes = cBytesSum; return RTEXITCODE_SUCCESS; }
RTDECL(void) ASMAtomicBitSet(volatile void *pvBitmap, int32_t iBit)
{
    /* NOTE(review): this out-of-line variant just forwards to the non-atomic
     * ASMBitSet, so despite the name it provides no atomicity guarantee by
     * itself -- presumably the plain bit set is atomic enough on whatever
     * platform this fallback targets; verify before relying on it for
     * bitmaps shared between threads. */
    ASMBitSet(pvBitmap, iBit);
}