Example #1
static void test2(RTTEST hTest)
{
    struct TestMap2 *p2 = (struct TestMap2 *)RTTestGuardedAllocTail(hTest, sizeof(TestMap2));
    p2->idNil  = NIL_TEST2_ID;
    p2->idLast = TEST2_ID_LAST;

    /* Some simple tests first. */
    RT_ZERO(p2->bmChunkId);
    RTTEST_CHECK(hTest, ASMBitFirstSet(&p2->bmChunkId[0], TEST2_ID_LAST + 1) == -1);
    for (uint32_t iBit = 0; iBit <= TEST2_ID_LAST; iBit++)
        RTTEST_CHECK(hTest, !ASMBitTest(&p2->bmChunkId[0], iBit));

    memset(&p2->bmChunkId[0], 0xff, sizeof(p2->bmChunkId));
    RTTEST_CHECK(hTest, ASMBitFirstClear(&p2->bmChunkId[0], TEST2_ID_LAST + 1) == -1);
    for (uint32_t iBit = 0; iBit <= TEST2_ID_LAST; iBit++)
        RTTEST_CHECK(hTest, ASMBitTest(&p2->bmChunkId[0], iBit));

    /* The real test. */
    p2->idChunkPrev = 0;
    RT_ZERO(p2->bmChunkId);
    ASMBitSet(p2->bmChunkId, NIL_TEST2_ID);
    uint32_t cLeft = TEST2_ID_LAST;
    while (cLeft-- > 0)
        test2AllocId(p2);

    RTTEST_CHECK(hTest, ASMBitFirstClear(&p2->bmChunkId[0], TEST2_ID_LAST + 1) == -1);
}
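The struct TestMap2 type and the NIL_TEST2_ID / TEST2_ID_LAST constants are not part of this excerpt. A minimal definition consistent with how test2() and test2AllocId() (Example #5) use them might look like the following sketch; the values are placeholders and the real tstRTBitOperations.cpp definition may differ.

/* Illustrative only: layout and values assumed from the usage in test2()
 * and test2AllocId(); the real tstRTBitOperations.cpp definition may differ. */
#define NIL_TEST2_ID    0
#define TEST2_ID_LAST   UINT32_C(0xffff)

struct TestMap2
{
    uint32_t idNil;                                     /* copy of NIL_TEST2_ID  */
    uint32_t idLast;                                    /* copy of TEST2_ID_LAST */
    uint32_t idChunkPrev;                               /* last ID handed out    */
    uint32_t bmChunkId[(TEST2_ID_LAST + 1 + 31) / 32];  /* one bit per ID        */
};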
Example #2
/**
 * Tries to allocate a chunk of pages from a heap block.
 *
 * @retval  VINF_SUCCESS on success.
 * @retval  VERR_NO_MEMORY if the allocation failed.
 * @param   pBlock          The block to allocate from.
 * @param   pBlock          The block to allocate from.
 * @param   cPages          The number of pages to allocate.
 * @param   fZero           Whether the allocated pages should be zeroed or not.
 * @param   ppv             Where to return the allocation address on success.
 */
DECLINLINE(int) rtHeapPageAllocFromBlock(PRTHEAPPAGEBLOCK pBlock, size_t cPages, bool fZero, void **ppv)
{
    if (pBlock->cFreePages >= cPages)
    {
        int iPage = ASMBitFirstClear(&pBlock->bmAlloc[0], RTMEMPAGEPOSIX_BLOCK_PAGE_COUNT);
        Assert(iPage >= 0);

        /* special case: single page. */
        if (cPages == 1)
        {
            ASMBitSet(&pBlock->bmAlloc[0], iPage);
            return rtHeapPageAllocFromBlockSuccess(pBlock, iPage, cPages, fZero, ppv);
        }

        while (   iPage >= 0
                  && (unsigned)iPage <= RTMEMPAGEPOSIX_BLOCK_PAGE_COUNT - cPages)
        {
            if (rtHeapPageIsPageRangeFree(pBlock, iPage + 1, cPages - 1))
            {
                ASMBitSetRange(&pBlock->bmAlloc[0], iPage, iPage + cPages);
                return rtHeapPageAllocFromBlockSuccess(pBlock, iPage, cPages, fZero, ppv);
            }

            /* next */
            iPage = ASMBitNextSet(&pBlock->bmAlloc[0], RTMEMPAGEPOSIX_BLOCK_PAGE_COUNT, iPage);
            if (iPage < 0 || iPage >= RTMEMPAGEPOSIX_BLOCK_PAGE_COUNT - 1)
                break;
            iPage = ASMBitNextClear(&pBlock->bmAlloc[0], RTMEMPAGEPOSIX_BLOCK_PAGE_COUNT, iPage);
        }
    }

    return VERR_NO_MEMORY;
}
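The helpers rtHeapPageIsPageRangeFree() and rtHeapPageAllocFromBlockSuccess() are not shown in this excerpt. The range check boils down to testing the bits following the first clear one; a minimal sketch consistent with how it is called above (the actual IPRT helper may differ):

/* Sketch only: returns true if the cPages bits starting at iFirst are all
 * clear in the block's allocation bitmap.  The real IPRT helper may differ. */
DECLINLINE(bool) rtHeapPageIsPageRangeFree(PRTHEAPPAGEBLOCK pBlock, uint32_t iFirst, uint32_t cPages)
{
    uint32_t i = iFirst + cPages;
    while (i-- > iFirst)
        if (ASMBitTest(&pBlock->bmAlloc[0], i))
            return false;
    return true;
}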
Example #3
DECLINLINE(int) vboxNetAdpGetNextAvailableUnit(void)
{
    bool fOld;
    int iUnit;
    /* There is absolutely no chance that all units are taken */
    do {
        iUnit = ASMBitFirstClear(g_aUnits, VBOXNETADP_MAX_UNITS);
        if (iUnit < 0)
            break;
        fOld = ASMAtomicBitTestAndSet(g_aUnits, iUnit);
    } while (fOld);

    return iUnit;
}
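The matching release path is not shown; conceptually it only has to clear the claimed bit again so a concurrent caller of the loop above can reuse the unit. An illustrative sketch, with the bitmap declaration and names assumed rather than taken from the real driver:

#include <iprt/asm.h>   /* ASMBitFirstClear, ASMAtomicBitTestAndSet, ASMAtomicBitClear */

/* Illustrative only: a 32-bit aligned bitmap with one bit per unit and a
 * release counterpart for the claim loop above; the real driver may differ. */
#define VBOXNETADP_MAX_UNITS    128
static uint32_t g_aUnits[VBOXNETADP_MAX_UNITS / 32];

DECLINLINE(void) vboxNetAdpReleaseUnit(int iUnit)
{
    ASMAtomicBitClear(g_aUnits, iUnit);     /* a concurrent allocator can now claim it again */
}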
Example #4
RTR3DECL(int) RTTlsAllocEx(PRTTLS piTls, PFNRTTLSDTOR pfnDestructor)
{
    for (unsigned i = 0; i < 128; i++)
    {
        int iTls = ASMBitFirstClear(&g_au32AllocatedBitmap[0], RTTHREAD_TLS_ENTRIES);
        if (iTls < 0)
        {
            *piTls = NIL_RTTLS;
            return VERR_NO_MEMORY;
        }
        if (!ASMAtomicBitTestAndSet(&g_au32AllocatedBitmap[0], iTls))
        {
            g_apfnDestructors[iTls] = pfnDestructor;
            *piTls = iTls;
            return VINF_SUCCESS;
        }
    }

    AssertFailed();
    return VERR_NO_MEMORY;
}
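A hedged usage sketch of the API implemented above: allocate an index with a destructor, attach a per-thread value, and free the index again. Error handling is abbreviated, the header choice and the destructor signature (taking the stored pointer) are assumptions.

#include <iprt/thread.h>    /* RTTlsAllocEx, RTTlsSet, RTTlsGet, RTTlsFree (assumed header) */
#include <iprt/mem.h>
#include <iprt/err.h>
#include <iprt/assert.h>

/* Sketch only: the destructor frees whatever the thread stored in the slot. */
static DECLCALLBACK(void) myTlsDtor(void *pvValue)
{
    RTMemFree(pvValue);
}

static int myTlsDemo(void)
{
    RTTLS iTls;
    int rc = RTTlsAllocEx(&iTls, myTlsDtor);
    if (RT_SUCCESS(rc))
    {
        RTTlsSet(iTls, RTMemAllocZ(64));    /* per-thread data                 */
        void *pv = RTTlsGet(iTls);          /* same pointer, this thread only  */
        Assert(pv);
        rc = RTTlsFree(iTls);
    }
    return rc;
}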
Example #5
static uint32_t test2AllocId(struct TestMap2 *p2)
{
    /*
     * Scan sequentially from the last one + 1.
     */
    int32_t idChunk = ++p2->idChunkPrev;
    if (    (uint32_t)idChunk < TEST2_ID_LAST
        &&  idChunk > NIL_TEST2_ID)
    {
        idChunk = ASMBitNextClear(&p2->bmChunkId[0], TEST2_ID_LAST + 1, idChunk);
        if (idChunk > NIL_TEST2_ID)
        {
            if (ASMAtomicBitTestAndSet(&p2->bmChunkId[0], idChunk))
            {
                RTTestFailed(NIL_RTTEST, "line %d: idChunk=%#x", __LINE__, idChunk);
                return NIL_TEST2_ID;
            }
            return p2->idChunkPrev = idChunk;
        }
    }

    /*
     * Ok, scan from the start.
     */
    idChunk = ASMBitFirstClear(&p2->bmChunkId[0], TEST2_ID_LAST + 1);
    if (idChunk <= NIL_TEST2_ID)
    {
        RTTestFailed(NIL_RTTEST, "line %d: idChunk=%#x", __LINE__, idChunk);
        return NIL_TEST2_ID;
    }
    if (ASMAtomicBitTestAndSet(&p2->bmChunkId[0], idChunk))
    {
        RTTestFailed(NIL_RTTEST, "line %d: idChunk=%#x", __LINE__, idChunk);
        return NIL_TEST2_ID;
    }

    return p2->idChunkPrev = idChunk;
}
Example #6
RTDECL(int) ASMBitNextClear(const volatile void *pvBitmap, uint32_t cBits, uint32_t iBitPrev)
{
    const volatile uint8_t *pau8Bitmap = (const volatile uint8_t *)pvBitmap;
    int                      iBit = ++iBitPrev & 7;
    if (iBit)
    {
        /*
         * Inspect the byte containing the unaligned bit.
         */
        /* Cast before the shift: the integer promotion done by ~ would otherwise
           shift set bits from above into the byte and fake a clear bit. */
        uint8_t u8 = (uint8_t)(~pau8Bitmap[iBitPrev / 8]) >> iBit;
        if (u8)
        {
            iBit = 0;
            while (!(u8 & 1))
            {
                u8 >>= 1;
                iBit++;
            }
            return iBitPrev + iBit;
        }

        /*
         * Skip ahead and see if there is anything left to search.
         */
        iBitPrev |= 7;
        iBitPrev++;
        if (cBits <= iBitPrev)
            return -1;
    }

    /*
     * Byte search, let ASMBitFirstClear do the dirty work.
     */
    iBit = ASMBitFirstClear(&pau8Bitmap[iBitPrev / 8], cBits - iBitPrev);
    if (iBit >= 0)
        iBit += iBitPrev;
    return iBit;
}
Example #7
RTDECL(int) ASMBitNextClear(const volatile void *pvBitmap, uint32_t cBits, uint32_t iBitPrev)
{
    const volatile uint32_t *pau32Bitmap = (const volatile uint32_t *)pvBitmap;
    int                      iBit = ++iBitPrev & 31;
    if (iBit)
    {
        /*
         * Inspect the 32-bit word containing the unaligned bit.
         */
        uint32_t u32 = ~pau32Bitmap[iBitPrev / 32] >> iBit;
        if (u32)
        {
            iBit = 0;
            while (!(u32 & 1))
            {
                u32 >>= 1;
                iBit++;
            }
            return iBitPrev + iBit;
        }

        /*
         * Skip ahead and see if there is anything left to search.
         */
        iBitPrev |= 31;
        iBitPrev++;
        if (cBits <= (uint32_t)iBitPrev)
            return -1;
    }

    /*
     * 32-bit aligned search, let ASMBitFirstClear do the dirty work.
     */
    iBit = ASMBitFirstClear(&pau32Bitmap[iBitPrev / 32], cBits - iBitPrev);
    if (iBit >= 0)
        iBit += iBitPrev;
    return iBit;
}
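In both variants iBitPrev is the bit returned by the previous call, so the search resumes at iBitPrev + 1. Enumerating every clear bit therefore pairs ASMBitFirstClear with ASMBitNextClear. A minimal sketch, assuming a 32-bit aligned bitmap whose size in bits is a multiple of 32:

#include <iprt/asm.h>   /* ASMBitFirstClear, ASMBitNextClear */

/* Sketch only: visits every clear bit in the bitmap exactly once. */
static void enumClearBits(const uint32_t *pau32Bitmap, uint32_t cBits)
{
    for (int iBit = ASMBitFirstClear(pau32Bitmap, cBits);
         iBit >= 0;
         iBit = ASMBitNextClear(pau32Bitmap, cBits, iBit))
    {
        /* ... process iBit ... */
    }
}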
Example #8
int main()
{
    /*
     * Init the runtime and stuff.
     */
    RTTEST hTest;
    int rc = RTTestInitAndCreate("tstRTBitOperations", &hTest);
    if (rc)
        return rc;
    RTTestBanner(hTest);

    int i;
    int j;
    int k;

    /*
     * Tests
     */
    struct TestMap
    {
        uint32_t au32[4];
    };
#if 0
    struct TestMap sTest;
    struct TestMap *p = &sTest;
#else
    struct TestMap *p = (struct TestMap *)RTTestGuardedAllocTail(hTest, sizeof(*p));
#endif
#define DUMP()          RTTestPrintf(hTest, RTTESTLVL_INFO, "au32={%08x,%08x,%08x,%08x}", p->au32[0], p->au32[1], p->au32[2], p->au32[3])
#define CHECK(expr)     do { if (!(expr)) { RTTestFailed(hTest, "line %d: %s", __LINE__, #expr); DUMP(); } CHECK_GUARD(s); } while (0)
#define CHECK_BIT(expr,  b1)            do { if (!(expr)) { RTTestFailed(hTest, "line %d, b1=%d: %s", __LINE__, b1, #expr); } CHECK_GUARD(s); } while (0)
#define CHECK_BIT2(expr, b1, b2)        do { if (!(expr)) { RTTestFailed(hTest, "line %d, b1=%d b2=%d: %s", __LINE__, b1, b2, #expr); } CHECK_GUARD(s); } while (0)
#define CHECK_BIT3(expr, b1, b2, b3)    do { if (!(expr)) { RTTestFailed(hTest, "line %d, b1=%d b2=%d b3=%d: %s", __LINE__, b1, b2, b3, #expr); } CHECK_GUARD(s); } while (0)

#define GUARD_MAP(p)    do {  } while (0)
#define CHECK_GUARD(p)  do {  } while (0)
#define MAP_CLEAR(p)    do { RT_ZERO(*(p)); GUARD_MAP(p); } while (0)
#define MAP_SET(p)      do { memset(p, 0xff, sizeof(*(p))); GUARD_MAP(p); } while (0)

    /* self check. */
    MAP_CLEAR(p);
    CHECK_GUARD(p);

    /* bit set */
    MAP_CLEAR(p);
    ASMBitSet(&p->au32[0], 0);
    ASMBitSet(&p->au32[0], 31);
    ASMBitSet(&p->au32[0], 65);
    CHECK(p->au32[0] == 0x80000001U);
    CHECK(p->au32[2] == 0x00000002U);
    CHECK(ASMBitTestAndSet(&p->au32[0], 0)   && p->au32[0] == 0x80000001U);
    CHECK(!ASMBitTestAndSet(&p->au32[0], 16) && p->au32[0] == 0x80010001U);
    CHECK(ASMBitTestAndSet(&p->au32[0], 16)  && p->au32[0] == 0x80010001U);
    CHECK(!ASMBitTestAndSet(&p->au32[0], 80) && p->au32[2] == 0x00010002U);

    MAP_CLEAR(p);
    ASMAtomicBitSet(&p->au32[0], 0);
    ASMAtomicBitSet(&p->au32[0], 30);
    ASMAtomicBitSet(&p->au32[0], 64);
    CHECK(p->au32[0] == 0x40000001U);
    CHECK(p->au32[2] == 0x00000001U);
    CHECK(ASMAtomicBitTestAndSet(&p->au32[0], 0)   && p->au32[0] == 0x40000001U);
    CHECK(!ASMAtomicBitTestAndSet(&p->au32[0], 16) && p->au32[0] == 0x40010001U);
    CHECK(ASMAtomicBitTestAndSet(&p->au32[0], 16)  && p->au32[0] == 0x40010001U);
    CHECK(!ASMAtomicBitTestAndSet(&p->au32[0], 80) && p->au32[2] == 0x00010001U);

    /* bit clear */
    MAP_SET(p);
    ASMBitClear(&p->au32[0], 0);
    ASMBitClear(&p->au32[0], 31);
    ASMBitClear(&p->au32[0], 65);
    CHECK(p->au32[0] == ~0x80000001U);
    CHECK(p->au32[2] == ~0x00000002U);
    CHECK(!ASMBitTestAndClear(&p->au32[0], 0)   && p->au32[0] == ~0x80000001U);
    CHECK(ASMBitTestAndClear(&p->au32[0], 16)   && p->au32[0] == ~0x80010001U);
    CHECK(!ASMBitTestAndClear(&p->au32[0], 16)  && p->au32[0] == ~0x80010001U);
    CHECK(ASMBitTestAndClear(&p->au32[0], 80)   && p->au32[2] == ~0x00010002U);

    MAP_SET(p);
    ASMAtomicBitClear(&p->au32[0], 0);
    ASMAtomicBitClear(&p->au32[0], 30);
    ASMAtomicBitClear(&p->au32[0], 64);
    CHECK(p->au32[0] == ~0x40000001U);
    CHECK(p->au32[2] == ~0x00000001U);
    CHECK(!ASMAtomicBitTestAndClear(&p->au32[0], 0)   && p->au32[0] == ~0x40000001U);
    CHECK(ASMAtomicBitTestAndClear(&p->au32[0], 16)   && p->au32[0] == ~0x40010001U);
    CHECK(!ASMAtomicBitTestAndClear(&p->au32[0], 16)  && p->au32[0] == ~0x40010001U);
    CHECK(ASMAtomicBitTestAndClear(&p->au32[0], 80)   && p->au32[2] == ~0x00010001U);

    /* toggle */
    MAP_SET(p);
    ASMBitToggle(&p->au32[0], 0);
    ASMBitToggle(&p->au32[0], 31);
    ASMBitToggle(&p->au32[0], 65);
    ASMBitToggle(&p->au32[0], 47);
    ASMBitToggle(&p->au32[0], 47);
    CHECK(p->au32[0] == ~0x80000001U);
    CHECK(p->au32[2] == ~0x00000002U);
    CHECK(!ASMBitTestAndToggle(&p->au32[0], 0)   && p->au32[0] == ~0x80000000U);
    CHECK(ASMBitTestAndToggle(&p->au32[0], 0)    && p->au32[0] == ~0x80000001U);
    CHECK(ASMBitTestAndToggle(&p->au32[0], 16)   && p->au32[0] == ~0x80010001U);
    CHECK(!ASMBitTestAndToggle(&p->au32[0], 16)  && p->au32[0] == ~0x80000001U);
    CHECK(ASMBitTestAndToggle(&p->au32[0], 80)   && p->au32[2] == ~0x00010002U);

    MAP_SET(p);
    ASMAtomicBitToggle(&p->au32[0], 0);
    ASMAtomicBitToggle(&p->au32[0], 30);
    ASMAtomicBitToggle(&p->au32[0], 64);
    ASMAtomicBitToggle(&p->au32[0], 47);
    ASMAtomicBitToggle(&p->au32[0], 47);
    CHECK(p->au32[0] == ~0x40000001U);
    CHECK(p->au32[2] == ~0x00000001U);
    CHECK(!ASMAtomicBitTestAndToggle(&p->au32[0], 0)   && p->au32[0] == ~0x40000000U);
    CHECK(ASMAtomicBitTestAndToggle(&p->au32[0], 0)    && p->au32[0] == ~0x40000001U);
    CHECK(ASMAtomicBitTestAndToggle(&p->au32[0], 16)   && p->au32[0] == ~0x40010001U);
    CHECK(!ASMAtomicBitTestAndToggle(&p->au32[0], 16)  && p->au32[0] == ~0x40000001U);
    CHECK(ASMAtomicBitTestAndToggle(&p->au32[0], 80)   && p->au32[2] == ~0x00010001U);

    /* test bit. */
    for (i = 0; i < 128; i++)
    {
        MAP_SET(p);
        CHECK_BIT(ASMBitTest(&p->au32[0], i), i);
        ASMBitToggle(&p->au32[0], i);
        CHECK_BIT(!ASMBitTest(&p->au32[0], i), i);
        CHECK_BIT(!ASMBitTestAndToggle(&p->au32[0], i), i);
        CHECK_BIT(ASMBitTest(&p->au32[0], i), i);
        CHECK_BIT(ASMBitTestAndToggle(&p->au32[0], i), i);
        CHECK_BIT(!ASMBitTest(&p->au32[0], i), i);

        MAP_SET(p);
        CHECK_BIT(ASMBitTest(&p->au32[0], i), i);
        ASMAtomicBitToggle(&p->au32[0], i);
        CHECK_BIT(!ASMBitTest(&p->au32[0], i), i);
        CHECK_BIT(!ASMAtomicBitTestAndToggle(&p->au32[0], i), i);
        CHECK_BIT(ASMBitTest(&p->au32[0], i), i);
        CHECK_BIT(ASMAtomicBitTestAndToggle(&p->au32[0], i), i);
        CHECK_BIT(!ASMBitTest(&p->au32[0], i), i);
    }

    /* bit searching */
    MAP_SET(p);
    CHECK(ASMBitFirstClear(&p->au32[0], sizeof(p->au32) * 8) == -1);
    CHECK(ASMBitFirstSet(&p->au32[0], sizeof(p->au32) * 8) == 0);

    ASMBitClear(&p->au32[0], 1);
    CHECK(ASMBitFirstClear(&p->au32[0], sizeof(p->au32) * 8) == 1);
    CHECK(ASMBitFirstSet(&p->au32[0], sizeof(p->au32) * 8) == 0);

    MAP_SET(p);
    ASMBitClear(&p->au32[0], 95);
    CHECK(ASMBitFirstClear(&p->au32[0], sizeof(p->au32) * 8) == 95);
    CHECK(ASMBitFirstSet(&p->au32[0], sizeof(p->au32) * 8) == 0);

    MAP_SET(p);
    ASMBitClear(&p->au32[0], 127);
    CHECK(ASMBitFirstClear(&p->au32[0], sizeof(p->au32) * 8) == 127);
    CHECK(ASMBitFirstSet(&p->au32[0], sizeof(p->au32) * 8) == 0);
    CHECK(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, 0) == 1);
    CHECK(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, 1) == 2);
    CHECK(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, 2) == 3);


    MAP_SET(p);
    CHECK(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8, 0) == -1);
    ASMBitClear(&p->au32[0], 32);
    CHECK(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8, 32) == -1);
    ASMBitClear(&p->au32[0], 88);
    CHECK(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8,  57) ==  88);

    MAP_SET(p);
    ASMBitClear(&p->au32[0], 31);
    ASMBitClear(&p->au32[0], 57);
    ASMBitClear(&p->au32[0], 88);
    ASMBitClear(&p->au32[0], 101);
    ASMBitClear(&p->au32[0], 126);
    ASMBitClear(&p->au32[0], 127);
    CHECK(ASMBitFirstClear(&p->au32[0], sizeof(p->au32) * 8) == 31);
    CHECK(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8,  31) ==  57);
    CHECK(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8,  57) ==  88);
    CHECK(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8,  88) == 101);
    CHECK(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8, 101) == 126);
    CHECK(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8, 126) == 127);
    CHECK(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8, 127) == -1);

    CHECK(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, 29) == 30);
    CHECK(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, 30) == 32);

    MAP_CLEAR(p);
    for (i = 1; i < 128; i++)
        CHECK_BIT(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8, i - 1) == i, i);
    for (i = 0; i < 128; i++)
    {
        MAP_SET(p);
        ASMBitClear(&p->au32[0], i);
        CHECK_BIT(ASMBitFirstClear(&p->au32[0], sizeof(p->au32) * 8) == i, i);
        for (j = 0; j < i; j++)
            CHECK_BIT(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8, j) == i, i);
        for (j = i; j < 128; j++)
            CHECK_BIT(ASMBitNextClear(&p->au32[0], sizeof(p->au32) * 8, j) == -1, i);
    }

    /* clear range. */
    MAP_SET(p);
    ASMBitClearRange(&p->au32, 0, 128);
    CHECK(!p->au32[0] && !p->au32[1] && !p->au32[2] && !p->au32[3]);
    for (i = 0; i < 128; i++)
    {
        for (j = i + 1; j <= 128; j++)
        {
            MAP_SET(p);
            ASMBitClearRange(&p->au32, i, j);
            for (k = 0; k < i; k++)
                CHECK_BIT3(ASMBitTest(&p->au32[0], k), i, j, k);
            for (k = i; k < j; k++)
                CHECK_BIT3(!ASMBitTest(&p->au32[0], k), i, j, k);
            for (k = j; k < 128; k++)
                CHECK_BIT3(ASMBitTest(&p->au32[0], k), i, j, k);
        }
    }

    /* set range. */
    MAP_CLEAR(p);
    ASMBitSetRange(&p->au32[0], 0, 5);
    ASMBitSetRange(&p->au32[0], 6, 44);
    ASMBitSetRange(&p->au32[0], 64, 65);
    CHECK(p->au32[0] == UINT32_C(0xFFFFFFDF));
    CHECK(p->au32[1] == UINT32_C(0x00000FFF));
    CHECK(p->au32[2] == UINT32_C(0x00000001));

    MAP_CLEAR(p);
    ASMBitSetRange(&p->au32[0], 0, 1);
    ASMBitSetRange(&p->au32[0], 62, 63);
    ASMBitSetRange(&p->au32[0], 63, 64);
    ASMBitSetRange(&p->au32[0], 127, 128);
    CHECK(p->au32[0] == UINT32_C(0x00000001) && p->au32[1] == UINT32_C(0xC0000000));
    CHECK(p->au32[2] == UINT32_C(0x00000000) && p->au32[3] == UINT32_C(0x80000000));

    MAP_CLEAR(p);
    ASMBitSetRange(&p->au32, 0, 128);
    CHECK(!~p->au32[0] && !~p->au32[1] && !~p->au32[2] && !~p->au32[3]);
    for (i = 0; i < 128; i++)
    {
        for (j = i + 1; j <= 128; j++)
        {
            MAP_CLEAR(p);
            ASMBitSetRange(&p->au32, i, j);
            for (k = 0; k < i; k++)
                CHECK_BIT3(!ASMBitTest(&p->au32[0], k), i, j, k);
            for (k = i; k < j; k++)
                CHECK_BIT3(ASMBitTest(&p->au32[0], k), i, j, k);
            for (k = j; k < 128; k++)
                CHECK_BIT3(!ASMBitTest(&p->au32[0], k), i, j, k);
        }
    }

    /* searching for set bits. */
    MAP_CLEAR(p);
    CHECK(ASMBitFirstSet(&p->au32[0], sizeof(p->au32) * 8) == -1);

    ASMBitSet(&p->au32[0], 65);
    CHECK(ASMBitFirstSet(&p->au32[0], sizeof(p->au32) * 8) == 65);
    CHECK(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, 65) == -1);
    for (i = 0; i < 65; i++)
        CHECK(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, i) == 65);
    for (i = 65; i < 128; i++)
        CHECK(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, i) == -1);

    ASMBitSet(&p->au32[0], 17);
    CHECK(ASMBitFirstSet(&p->au32[0], sizeof(p->au32) * 8) == 17);
    CHECK(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, 17) == 65);
    for (i = 0; i < 16; i++)
        CHECK(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, i) == 17);
    for (i = 17; i < 65; i++)
        CHECK(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, i) == 65);

    MAP_SET(p);
    for (i = 1; i < 128; i++)
        CHECK_BIT(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, i - 1) == i, i);
    for (i = 0; i < 128; i++)
    {
        MAP_CLEAR(p);
        ASMBitSet(&p->au32[0], i);
        CHECK_BIT(ASMBitFirstSet(&p->au32[0], sizeof(p->au32) * 8) == i, i);
        for (j = 0; j < i; j++)
            CHECK_BIT(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, j) == i, i);
        for (j = i; j < 128; j++)
            CHECK_BIT(ASMBitNextSet(&p->au32[0], sizeof(p->au32) * 8, j) == -1, i);
    }


    CHECK(ASMBitLastSetU32(0) == 0);
    CHECK(ASMBitLastSetU32(1) == 1);
    CHECK(ASMBitLastSetU32(0x80000000) == 32);
    CHECK(ASMBitLastSetU32(0xffffffff) == 32);
    CHECK(ASMBitLastSetU32(RT_BIT(23) | RT_BIT(11)) == 24);
    for (i = 0; i < 32; i++)
        CHECK(ASMBitLastSetU32(1 << i) == (unsigned)i + 1);

    CHECK(ASMBitFirstSetU32(0) == 0);
    CHECK(ASMBitFirstSetU32(1) == 1);
    CHECK(ASMBitFirstSetU32(0x80000000) == 32);
    CHECK(ASMBitFirstSetU32(0xffffffff) == 1);
    CHECK(ASMBitFirstSetU32(RT_BIT(23) | RT_BIT(11)) == 12);
    for (i = 0; i < 32; i++)
        CHECK(ASMBitFirstSetU32(1 << i) == (unsigned)i + 1);

    /*
     * Special tests.
     */
    test2(hTest);

    /*
     * Summary
     */
    return RTTestSummaryAndDestroy(hTest);
}
Example #9
/**
 * Allocates a page from the page pool.
 *
 * @returns Pointer to allocated page(s).
 * @returns NULL on failure.
 * @param   pPool   Pointer to the page pool.
 * @thread  The Emulation Thread.
 */
DECLINLINE(void *) mmR3PagePoolAlloc(PMMPAGEPOOL pPool)
{
    VM_ASSERT_EMT(pPool->pVM);
    STAM_COUNTER_INC(&pPool->cAllocCalls);

    /*
     * Walk free list.
     */
    if (pPool->pHeadFree)
    {
        PMMPAGESUBPOOL  pSub = pPool->pHeadFree;
        /* decrement free count and unlink if no more free entries. */
        if (!--pSub->cPagesFree)
            pPool->pHeadFree = pSub->pNextFree;
#ifdef VBOX_WITH_STATISTICS
        pPool->cFreePages--;
#endif

        /* find free spot in bitmap. */
#ifdef USE_INLINE_ASM_BIT_OPS
        const int iPage = ASMBitFirstClear(pSub->auBitmap, pSub->cPages);
        if (iPage >= 0)
        {
            Assert(!ASMBitTest(pSub->auBitmap, iPage));
            ASMBitSet(pSub->auBitmap, iPage);
            return (uint8_t *)pSub->pvPages + PAGE_SIZE * iPage;
        }
#else
        unsigned   *pu = &pSub->auBitmap[0];
        unsigned   *puEnd = &pSub->auBitmap[pSub->cPages / (sizeof(pSub->auBitmap) * 8)];
        while (pu < puEnd)
        {
            unsigned u;
            if ((u = *pu) != ~0U)
            {
                unsigned iBit = 0;
                unsigned uMask = 1;
                while (iBit < sizeof(pSub->auBitmap[0]) * 8)
                {
                    if (!(u & uMask))
                    {
                        *pu |= uMask;
                        return (uint8_t *)pSub->pvPages
                            + PAGE_SIZE * (iBit + ((uint8_t *)pu - (uint8_t *)&pSub->auBitmap[0]) * 8);
                    }
                    iBit++;
                    uMask <<= 1;
                }
                STAM_COUNTER_INC(&pPool->cErrors);
                AssertMsgFailed(("how odd, expected to find a free bit in %#x, but didn't\n", u));
            }
            /* next */
            pu++;
        }
#endif
        STAM_COUNTER_INC(&pPool->cErrors);
#ifdef VBOX_WITH_STATISTICS
        pPool->cFreePages++;
#endif
        AssertMsgFailed(("how strange, expected to find a free bit in %p, but didn't (%d pages supposed to be free!)\n", pSub, pSub->cPagesFree + 1));
    }

    /*
     * Allocate new subpool.
     */
    unsigned        cPages = !pPool->fLow ? 128 : 32;
    PMMPAGESUBPOOL  pSub;
    int rc = MMHyperAlloc(pPool->pVM,
                          RT_OFFSETOF(MMPAGESUBPOOL, auBitmap[cPages / (sizeof(pSub->auBitmap[0]) * 8)])
                          + (sizeof(SUPPAGE) + sizeof(MMPPLOOKUPHCPHYS)) * cPages
                          + sizeof(MMPPLOOKUPHCPTR),
                          0,
                          MM_TAG_MM_PAGE,
                          (void **)&pSub);
    if (RT_FAILURE(rc))
        return NULL;

    PSUPPAGE paPhysPages = (PSUPPAGE)&pSub->auBitmap[cPages / (sizeof(pSub->auBitmap[0]) * 8)];
    Assert((uintptr_t)paPhysPages >= (uintptr_t)&pSub->auBitmap[1]);
    if (!pPool->fLow)
    {
        rc = SUPR3PageAllocEx(cPages,
                              0 /* fFlags */,
                              &pSub->pvPages,
                              NULL,
                              paPhysPages);
        if (RT_FAILURE(rc))
            rc = VMSetError(pPool->pVM, rc, RT_SRC_POS,
                            N_("Failed to lock host %zd bytes of memory (out of memory)"), (size_t)cPages << PAGE_SHIFT);
    }
    else
        rc = SUPR3LowAlloc(cPages, &pSub->pvPages, NULL, paPhysPages);
    if (RT_SUCCESS(rc))
    {
        /*
         * Setup the sub structure and allocate the requested page.
         */
        pSub->cPages    = cPages;
        pSub->cPagesFree= cPages - 1;
        pSub->paPhysPages = paPhysPages;
        memset(pSub->auBitmap, 0, cPages / 8);
        /* allocate first page. */
        pSub->auBitmap[0] |= 1;
        /* link into free chain. */
        pSub->pNextFree = pPool->pHeadFree;
        pPool->pHeadFree= pSub;
        /* link into main chain. */
        pSub->pNext     = pPool->pHead;
        pPool->pHead    = pSub;
        /* update pool statistics. */
        pPool->cSubPools++;
        pPool->cPages  += cPages;
#ifdef VBOX_WITH_STATISTICS
        pPool->cFreePages += cPages - 1;
#endif

        /*
         * Initialize the physical pages with backpointer to subpool.
         */
        unsigned i = cPages;
        while (i-- > 0)
        {
            AssertMsg(paPhysPages[i].Phys && !(paPhysPages[i].Phys & PAGE_OFFSET_MASK),
                      ("i=%d Phys=%d\n", i, paPhysPages[i].Phys));
            paPhysPages[i].uReserved = (RTHCUINTPTR)pSub;
        }

        /*
         * Initialize the physical lookup record with backpointers to the physical pages.
         */
        PMMPPLOOKUPHCPHYS paLookupPhys = (PMMPPLOOKUPHCPHYS)&paPhysPages[cPages];
        i = cPages;
        while (i-- > 0)
        {
            paLookupPhys[i].pPhysPage = &paPhysPages[i];
            paLookupPhys[i].Core.Key = paPhysPages[i].Phys;
            RTAvlHCPhysInsert(&pPool->pLookupPhys, &paLookupPhys[i].Core);
        }

        /*
         * And the one record for virtual memory lookup.
         */
        PMMPPLOOKUPHCPTR   pLookupVirt = (PMMPPLOOKUPHCPTR)&paLookupPhys[cPages];
        pLookupVirt->pSubPool = pSub;
        pLookupVirt->Core.Key = pSub->pvPages;
        RTAvlPVInsert(&pPool->pLookupVirt, &pLookupVirt->Core);

        /* return allocated page (first). */
        return pSub->pvPages;
    }

    MMHyperFree(pPool->pVM, pSub);
    STAM_COUNTER_INC(&pPool->cErrors);
    if (pPool->fLow)
        VMSetError(pPool->pVM, rc, RT_SRC_POS,
                   N_("Failed to expand page pool for memory below 4GB. Current size: %d pages"),
                   pPool->cPages);
    AssertMsgFailed(("Failed to expand pool%s. rc=%Rrc poolsize=%d\n",
                     pPool->fLow ? " (<4GB)" : "", rc, pPool->cPages));
    return NULL;
}
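The corresponding free path is not part of this excerpt; once the owning subpool has been looked up, returning a page comes down to clearing its bit and relinking the subpool onto the free list. An illustrative sketch of just that core (the real mmR3PagePoolFree() also does the AVL lookup and maintains the statistics):

/* Sketch only: the bitmap/free-list core of returning page iPage to pSub. */
DECLINLINE(void) mmR3PagePoolFreePageInSub(PMMPAGEPOOL pPool, PMMPAGESUBPOOL pSub, unsigned iPage)
{
    Assert(ASMBitTest(pSub->auBitmap, iPage));
    ASMBitClear(pSub->auBitmap, iPage);
    if (!pSub->cPagesFree++)            /* was full -> back onto the free list */
    {
        pSub->pNextFree  = pPool->pHeadFree;
        pPool->pHeadFree = pSub;
    }
}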