/* TreeFind -- binary search for a key in a tree
 *
 * Descends from root comparing each node against key with the client
 * compare function.  If a node compares EQUAL, returns it via
 * treeReturn with result CompareEQUAL.  Otherwise returns the last
 * node visited (the would-be parent of key, or NULL for an empty
 * tree) and the result of the final comparison.
 */
Compare TreeFind(Tree *treeReturn, Tree root, TreeKey key,
                 TreeCompare compare)
{
  Tree current, lastVisited;
  Compare result = CompareEQUAL; /* returned as-is if root is empty */

  AVERT_CRITICAL(Tree, root);
  AVER_CRITICAL(treeReturn != NULL);
  AVER_CRITICAL(FUNCHECK(compare));
  /* key is arbitrary */

  lastVisited = NULL;
  current = root;
  while (current != TreeEMPTY) {
    lastVisited = current;
    result = compare(current, key);
    if (result == CompareLESS) {
      current = current->left;
    } else if (result == CompareGREATER) {
      current = current->right;
    } else if (result == CompareEQUAL) {
      *treeReturn = current;
      return result;
    } else {
      NOTREACHED;
      *treeReturn = NULL;
      return result;
    }
  }

  *treeReturn = lastVisited;
  return result;
}
/* PoolAlloc -- allocate a block of memory from a pool
 *
 * Dispatches to the pool class's alloc method, then checks the result
 * and accounts for the allocation.  Returns the method's result code;
 * on success *pReturn holds the allocated address.
 */
Res PoolAlloc(Addr *pReturn, Pool pool, Size size)
{
  Res res;
  Addr base;

  AVER(pReturn != NULL);
  AVERT(Pool, pool);
  AVER(size > 0);

  res = Method(Pool, pool, alloc)(pReturn, pool, size);
  if (res != ResOK)
    return res;
  base = *pReturn;

  /* Make sure that the allocated address was in the pool's memory. */
  /* .hasaddr.critical: The PoolHasAddr check is expensive, and in */
  /* allocation-bound programs this is on the critical path. */
  AVER_CRITICAL(PoolHasAddr(pool, base));
  /* All allocations should be aligned to the pool's alignment */
  AVER_CRITICAL(AddrIsAligned(base, pool->alignment));

  /* All PoolAllocs should advance the allocation clock, so we count */
  /* it all in the fillMutatorSize field. */
  ArenaGlobals(PoolArena(pool))->fillMutatorSize += size;

  EVENT3(PoolAlloc, pool, base, size);
  return ResOK;
}
/* SplayFindFirst -- find the leftmost node satisfying a client test
 *
 * Searches the splay tree for the first node (in tree order) for
 * which testNode returns true, using testTree to prune subtrees that
 * cannot contain a satisfying node.  On success the found node is
 * left at the root and returned via nodeReturn; returns FALSE if no
 * node satisfies the test.  The closure struct is passed to
 * SplaySplay through the key argument and unpacked by
 * SplayFindFirstCompare.
 */
Bool SplayFindFirst(Tree *nodeReturn, SplayTree splay,
                    SplayTestNodeFunction testNode,
                    SplayTestTreeFunction testTree, void *testClosure)
{
  SplayFindClosureStruct closureStruct;
  Bool found;

  AVER_CRITICAL(nodeReturn != NULL);
  AVERT_CRITICAL(SplayTree, splay);
  AVER_CRITICAL(FUNCHECK(testNode));
  AVER_CRITICAL(FUNCHECK(testTree));

  if (SplayTreeIsEmpty(splay) ||
      !testTree(splay, SplayTreeRoot(splay), testClosure))
    return FALSE; /* no suitable nodes in tree */

  /* Pack the tests and closure for SplayFindFirstCompare. */
  closureStruct.testClosure = testClosure;
  closureStruct.testNode = testNode;
  closureStruct.testTree = testTree;
  closureStruct.splay = splay;
  closureStruct.found = FALSE;

  /* SplaySplay may return CompareEQUAL without a satisfying node (see
     SplayFindFirstCompare), so closureStruct.found must be checked too. */
  found = SplaySplay(splay, &closureStruct,
                     SplayFindFirstCompare) == CompareEQUAL
          && closureStruct.found;

  while (!found) {
    Tree oldRoot, newRoot;

    /* FIXME: Rename to "seen" and "not yet seen" or something. */
    oldRoot = SplayTreeRoot(splay);
    newRoot = TreeRight(oldRoot);

    if (newRoot == TreeEMPTY || !(*testTree)(splay, newRoot, testClosure))
      return FALSE; /* no suitable nodes in the rest of the tree */

    /* Temporarily chop off the left half-tree, inclusive of root, so
       that the search excludes any nodes we've seen already. */
    SplayTreeSetRoot(splay, newRoot);
    TreeSetRight(oldRoot, TreeEMPTY);

    found = SplaySplay(splay, &closureStruct,
                       SplayFindFirstCompare) == CompareEQUAL
            && closureStruct.found;

    /* Restore the left tree, then rotate left so that the node we
       just splayed is at the root.  Update both. */
    newRoot = SplayTreeRoot(splay);
    TreeSetRight(oldRoot, newRoot);
    SplayTreeSetRoot(splay, oldRoot);
    TreeRotateLeft(&splay->root);
    splay->updateNode(splay, oldRoot);
    splay->updateNode(splay, newRoot);
  }

  *nodeReturn = SplayTreeRoot(splay);
  return TRUE;
}
/* SplayTreeNeighbours -- find nodes bracketing a key
 *
 * Splays the tree about key.  Returns FALSE if a node exactly
 * matching key is present.  Otherwise returns TRUE with *leftReturn
 * set to the neighbouring node below key and *rightReturn to the
 * neighbouring node above it (TreeEMPTY when the tree is empty; for a
 * non-empty tree the spine ends come from SplaySplit's state --
 * presumably TreeEMPTY when no such neighbour exists; confirm against
 * SplaySplit).
 */
Bool SplayTreeNeighbours(Tree *leftReturn, Tree *rightReturn,
                         SplayTree splay, TreeKey key)
{
  SplayStateStruct stateStruct;
  Bool found;
  Compare cmp;
#ifdef SPLAY_DEBUG
  /* Node count must be preserved by the splay operation. */
  Count count = SplayDebugCount(splay);
#endif

  AVERT_CRITICAL(SplayTree, splay);
  AVER_CRITICAL(leftReturn != NULL);
  AVER_CRITICAL(rightReturn != NULL);

  if (SplayTreeIsEmpty(splay)) {
    *leftReturn = *rightReturn = TreeEMPTY;
    return TRUE;
  }

  cmp = SplaySplit(&stateStruct, splay, key, splay->compare);

  switch (cmp) {
  default:
    NOTREACHED;
    /* fall through */
  case CompareEQUAL:
    /* Key is present in the tree, so it has no strict neighbours. */
    found = FALSE;
    break;
  case CompareLESS:
    /* Key sorts before the middle node: middle is the right
       neighbour, and the last node of the left spine is the left
       neighbour. */
    AVER_CRITICAL(!TreeHasLeft(stateStruct.middle));
    *rightReturn = stateStruct.middle;
    *leftReturn = stateStruct.leftLast;
    found = TRUE;
    break;
  case CompareGREATER:
    /* Mirror image of the CompareLESS case. */
    AVER_CRITICAL(!TreeHasRight(stateStruct.middle));
    *leftReturn = stateStruct.middle;
    *rightReturn = stateStruct.rightFirst;
    found = TRUE;
    break;
  }

  /* Rebuild the tree with the middle node at the root. */
  SplayAssemble(splay, &stateStruct);
  SplayTreeSetRoot(splay, stateStruct.middle);

#ifdef SPLAY_DEBUG
  AVER(count == SplayDebugCount(splay));
#endif

  return found;
}
/* PoolFix -- fix a reference into a segment owned by a pool
 *
 * Forwards to the pool's fix method.  Assertions use the _CRITICAL
 * variants because fixing is performance-critical (compare
 * .hasaddr.critical in PoolAlloc).
 */
Res PoolFix(Pool pool, ScanState ss, Seg seg, Addr *refIO)
{
  AVERT_CRITICAL(Pool, pool);
  AVERT_CRITICAL(ScanState, ss);
  AVERT_CRITICAL(Seg, seg);
  AVER_CRITICAL(pool == SegPool(seg));
  AVER_CRITICAL(refIO != NULL);

  /* Should only be fixing references to white segments. */
  AVER_CRITICAL(TraceSetInter(SegWhite(seg), ss->traces) != TraceSetEMPTY);

  return pool->fix(pool, ss, seg, refIO);
}
/* ShieldCover -- declare a segment's contents no longer exposed
 *
 * Pairs with ShieldExpose: decrements the segment's depth and the
 * arena's total shield depth.  The parentheses around the name allow
 * this definition to coexist with a same-named macro.
 */
void (ShieldCover)(Arena arena, Seg seg)
{
  /* <design/trace/#fix.noaver> */
  AVERT_CRITICAL(Arena, arena);
  AVERT_CRITICAL(Seg, seg);
  /* The segment must not be protected in any mode while exposed. */
  AVER_CRITICAL(SegPM(seg) == AccessSetEMPTY);
  AVER_CRITICAL(arena->shDepth > 0);
  AVER_CRITICAL(SegDepth(seg) > 0);

  SegSetDepth(seg, SegDepth(seg) - 1);
  --arena->shDepth;

  /* ensure inv.unsynced.depth */
  cache(arena, seg);
}
/* PoolReclaim -- reclaim a segment for a trace
 *
 * Checks the reclaim preconditions and forwards to the pool class's
 * reclaim method.
 */
void PoolReclaim(Pool pool, Trace trace, Seg seg)
{
  AVERT_CRITICAL(Pool, pool);
  AVERT_CRITICAL(Trace, trace);
  AVERT_CRITICAL(Seg, seg);
  AVER_CRITICAL(pool->arena == trace->arena);
  AVER_CRITICAL(SegPool(seg) == pool);

  /* There shouldn't be any grey things left for this trace. */
  AVER_CRITICAL(!TraceSetIsMember(SegGrey(seg), trace));
  /* Should only be reclaiming segments which are still white. */
  AVER_CRITICAL(TraceSetIsMember(SegWhite(seg), trace));

  Method(Pool, pool, reclaim)(pool, trace, seg);
}
/* SplayFindLastCompare -- search comparison for finding the last
 * satisfying node
 *
 * Drives the splay rightwards towards the last node satisfying the
 * node test, pruning subtrees with the tree test.  The closure is
 * smuggled in through the key argument.  Mirror image of
 * SplayFindFirstCompare.
 */
static Compare SplayFindLastCompare(Tree node, TreeKey key)
{
  SplayFindClosure cl;
  SplayTestNodeFunction nodeTest;
  SplayTestTreeFunction treeTest;
  void *env;
  SplayTree tree;

  AVERT_CRITICAL(Tree, node);
  AVER_CRITICAL(key != NULL);

  /* Copy the closure fields into locals so that calls to the test
     functions can't alias them. */
  cl = (SplayFindClosure)key;
  env = cl->testClosure;
  nodeTest = cl->testNode;
  treeTest = cl->testTree;
  tree = cl->splay;

  if (TreeHasRight(node) && (*treeTest)(tree, TreeRight(node), env))
    return CompareGREATER;

  if ((*nodeTest)(tree, node, env)) {
    cl->found = TRUE;
    return CompareEQUAL;
  }

  /* See SplayFindFirstCompare. */
  if (TreeHasLeft(node) && !(*treeTest)(tree, TreeLeft(node), env)) {
    cl->found = FALSE;
    return CompareEQUAL;
  }
  return CompareLESS;
}
/* TractOfAddr -- return the tract containing an address, if any
 *
 * Returns FALSE if addr lies in no chunk, or if its page is recorded
 * as free in the chunk's allocation table.
 */
Bool TractOfAddr(Tract *tractReturn, Arena arena, Addr addr)
{
  Chunk chunk;
  Index pi;

  /* <design/trace/#fix.noaver> */
  AVER_CRITICAL(tractReturn != NULL); /* .tract.critical */
  AVERT_CRITICAL(Arena, arena);

  if (!ChunkOfAddr(&chunk, arena, addr))
    return FALSE;

  /* <design/trace/#fix.tractofaddr> */
  pi = INDEX_OF_ADDR(chunk, addr);

  /* .addr.free: If the page is recorded as being free then */
  /* either the page is free or it is */
  /* part of the arena tables (see .ullagepages). */
  if (!BTGet(chunk->allocTable, pi))
    return FALSE;

  *tractReturn = PageTract(ChunkPage(chunk, pi));
  return TRUE;
}
/* PoolFixEmergency -- fix a reference in emergency mode
 *
 * Like PoolFix, but dispatches to the pool class's fixEmergency
 * method, which is asserted never to fail.
 */
Res PoolFixEmergency(Pool pool, ScanState ss, Seg seg, Addr *refIO)
{
  Res res;

  AVERT_CRITICAL(Pool, pool);
  AVERT_CRITICAL(ScanState, ss);
  AVERT_CRITICAL(Seg, seg);
  AVER_CRITICAL(pool == SegPool(seg));
  AVER_CRITICAL(refIO != NULL);

  /* Should only be fixing references to white segments. */
  AVER_CRITICAL(TraceSetInter(SegWhite(seg), ss->traces) != TraceSetEMPTY);

  res = Method(Pool, pool, fixEmergency)(pool, ss, seg, refIO);
  /* Emergency fixing must always succeed. */
  AVER_CRITICAL(res == ResOK);
  return res;
}
/* ShieldExpose -- allow the MPS to access a segment's memory
 *
 * Increments the segment's depth and the arena's total shield depth,
 * suspends the mutator if the segment is protected against the
 * required access, and lowers hardware protection.  The parentheses
 * around the name allow this definition to coexist with a same-named
 * macro.
 */
void (ShieldExpose)(Arena arena, Seg seg)
{
  AccessSet mode = AccessREAD | AccessWRITE;

  /* <design/trace/#fix.noaver> */
  AVERT_CRITICAL(Arena, arena);
  AVER_CRITICAL(arena->insideShield);

  SegSetDepth(seg, SegDepth(seg) + 1);
  ++arena->shDepth;
  /* <design/trace/#fix.noaver> */
  AVER_CRITICAL(arena->shDepth > 0);
  AVER_CRITICAL(SegDepth(seg) > 0);
  /* If the segment is protected in the requested mode, the mutator
     must be kept suspended while it is exposed. */
  if (SegPM(seg) & mode)
    ShieldSuspend(arena);

  /* This ensures inv.expose.prot */
  protLower(arena, seg, mode);
}
/* SplayAssembleDown -- reassemble the tree after a top-down splay
 *
 * Joins the accumulated left and right spine trees back onto the
 * middle tree.  Used only when the tree has no client update function
 * (asserted via !SplayHasUpdate), so no node updates are needed.
 *
 * Fix: the first three assertions used plain AVERT/AVER while the
 * interior assertions and the sibling SplayAssembleRev use the
 * _CRITICAL variants; made them consistent, since this runs on the
 * same splay path.
 */
static void SplayAssembleDown(SplayTree splay, SplayState state)
{
  AVERT_CRITICAL(SplayTree, splay);
  AVER_CRITICAL(state->middle != TreeEMPTY);
  AVER_CRITICAL(!SplayHasUpdate(splay));

  if (state->left != TreeEMPTY) {
    AVER_CRITICAL(state->leftLast != TreeEMPTY);
    /* Hang middle's left subtree off the end of the left spine, then
       hang the whole left tree under middle. */
    TreeSetRight(state->leftLast, TreeLeft(state->middle));
    TreeSetLeft(state->middle, state->left);
  }

  if (state->right != TreeEMPTY) {
    AVER_CRITICAL(state->rightFirst != TreeEMPTY);
    /* Mirror image of the left case. */
    TreeSetLeft(state->rightFirst, TreeRight(state->middle));
    TreeSetRight(state->middle, state->right);
  }
}
/* ShieldCover -- declare a segment's contents no longer exposed
 *
 * Pairs with ShieldExpose: decrements the segment's depth and the
 * shield's total depth, then queues the segment for eventual
 * re-protection via shieldQueue.  The parentheses around the name
 * allow this definition to coexist with a same-named macro.
 */
void (ShieldCover)(Arena arena, Seg seg)
{
  Shield shield;

  /* <design/trace/#fix.noaver> */
  AVERT_CRITICAL(Arena, arena);
  shield = ArenaShield(arena);
  AVERT_CRITICAL(Seg, seg);
  /* The segment must not be protected in any mode while exposed. */
  AVER_CRITICAL(SegPM(seg) == AccessSetEMPTY);
  AVER_CRITICAL(SegDepth(seg) > 0);

  SegSetDepth(seg, SegDepth(seg) - 1);
  AVER_CRITICAL(shield->depth > 0);
  --shield->depth;

  /* Ensure design.mps.shield.inv.unsynced.depth. */
  shieldQueue(arena, seg);
}
/* PageAlloc -- allocate page pi of a chunk to a pool
 *
 * Marks the page as allocated in the chunk's allocation table and
 * initializes its tract for the pool.
 */
void PageAlloc(Chunk chunk, Index pi, Pool pool)
{
  Page page;
  Addr pageBase;

  AVERT_CRITICAL(Chunk, chunk);
  AVER_CRITICAL(pi >= chunk->allocBase);
  AVER_CRITICAL(pi < chunk->pages);
  AVER_CRITICAL(!BTGet(chunk->allocTable, pi)); /* must be free */
  AVERT_CRITICAL(Pool, pool);

  page = ChunkPage(chunk, pi);
  pageBase = PageIndexBase(chunk, pi);
  BTSet(chunk->allocTable, pi);
  TractInit(PageTract(page), pool, pageBase);
}
/* SplayZag -- link a node onto the left spine and descend right
 *
 * Attaches middle as the rightmost node of the accumulated left tree,
 * reports the previous end of that spine via leftPrevReturn, makes
 * middle the new end, and returns middle's right child to continue
 * the descent.  Mirror image of SplayZig.
 */
static Tree SplayZag(Tree middle, Tree *leftLastIO, Tree *leftPrevReturn)
{
  Tree prevLast;

  AVERT_CRITICAL(Tree, middle);
  AVER_CRITICAL(leftLastIO != NULL);
  AVERT_CRITICAL(Tree, *leftLastIO);

  prevLast = *leftLastIO;
  TreeSetRight(prevLast, middle);
  *leftPrevReturn = prevLast;
  *leftLastIO = middle;
  return TreeRight(middle);
}
/* SplayZig -- link a node onto the right spine and descend left
 *
 * Attaches middle as the leftmost node of the accumulated right tree,
 * reports the previous start of that spine via rightNextReturn, makes
 * middle the new start, and returns middle's left child to continue
 * the descent.  Mirror image of SplayZag.
 */
static Tree SplayZig(Tree middle, Tree *rightFirstIO, Tree *rightNextReturn)
{
  Tree prevFirst;

  AVERT_CRITICAL(Tree, middle);
  AVER_CRITICAL(rightFirstIO != NULL);
  AVERT_CRITICAL(Tree, *rightFirstIO);

  prevFirst = *rightFirstIO;
  TreeSetLeft(prevFirst, middle);
  *rightNextReturn = prevFirst;
  *rightFirstIO = middle;
  return TreeLeft(middle);
}
/* TractOfBaseAddr -- return the tract whose base is addr
 *
 * addr must be aligned to the arena grain size and must be the base
 * of some tract (asserted).  Consults the one-entry cache before
 * falling back to the full lookup.
 */
Tract TractOfBaseAddr(Arena arena, Addr addr)
{
  Tract tract = NULL;

  AVERT_CRITICAL(Arena, arena);
  AVER_CRITICAL(AddrIsAligned(addr, ArenaGrainSize(arena)));

  /* Check first in the cache, see <design/arena/#tract.cache>. */
  if (arena->lastTractBase == addr) {
    tract = arena->lastTract;
  } else {
    Bool found = TractOfAddr(&tract, arena, addr);
    AVER_CRITICAL(found);
  }

  AVER_CRITICAL(TractBase(tract) == addr);
  return tract;
}
/* ChunkOfAddr -- find the chunk containing an address
 *
 * Searches the arena's chunk tree.  Returns FALSE if no chunk
 * contains addr; otherwise sets *chunkReturn and returns TRUE.
 */
Bool ChunkOfAddr(Chunk *chunkReturn, Arena arena, Addr addr)
{
  Tree tree;
  Chunk chunk;

  AVER_CRITICAL(chunkReturn != NULL);
  AVERT_CRITICAL(Arena, arena);
  /* addr is arbitrary */

  if (TreeFind(&tree, ArenaChunkTree(arena), TreeKeyOfAddrVar(addr),
               ChunkCompare) != CompareEQUAL)
    return FALSE;

  chunk = ChunkOfTree(tree);
  AVER_CRITICAL(chunk->base <= addr);
  AVER_CRITICAL(addr < chunk->limit);
  *chunkReturn = chunk;
  return TRUE;
}
/* SplayZagRev -- push a node onto the reversed left spine, descend right
 *
 * Makes middle the new head of the reversed left spine (its right
 * pointer temporarily holds the previous spine head) and returns
 * middle's original right child to continue the descent.  Mirror
 * image of SplayZigRev.
 */
static Tree SplayZagRev(Tree middle, Tree *leftLastIO)
{
  Tree descend;

  AVERT_CRITICAL(Tree, middle);
  AVER_CRITICAL(leftLastIO != NULL);
  AVERT_CRITICAL(Tree, *leftLastIO);

  /* Save the right child before overwriting the right pointer. */
  descend = TreeRight(middle);
  TreeSetRight(middle, *leftLastIO);
  *leftLastIO = middle;
  return descend;
}
/* SplayZigRev -- push a node onto the reversed right spine, descend left
 *
 * Makes middle the new head of the reversed right spine (its left
 * pointer temporarily holds the previous spine head) and returns
 * middle's original left child to continue the descent.  Mirror
 * image of SplayZagRev.
 */
static Tree SplayZigRev(Tree middle, Tree *rightFirstIO)
{
  Tree descend;

  AVERT_CRITICAL(Tree, middle);
  AVER_CRITICAL(rightFirstIO != NULL);
  AVERT_CRITICAL(Tree, *rightFirstIO);

  /* Save the left child before overwriting the left pointer. */
  descend = TreeLeft(middle);
  TreeSetLeft(middle, *rightFirstIO);
  *rightFirstIO = middle;
  return descend;
}
/* TractInit -- initialize a tract for use by a pool
 *
 * base is the address of the memory the tract describes.  The tract
 * starts with no segment, no white traces, and a null client pointer.
 */
void TractInit(Tract tract, Pool pool, Addr base)
{
  AVER_CRITICAL(tract != NULL);
  AVERT_CRITICAL(Pool, pool);

  tract->base = base;
  tract->pool.pool = pool;
  tract->p = NULL;
  tract->hasSeg = FALSE;
  tract->white = TraceSetEMPTY;

  AVERT(Tract, tract);
}
/* PageInit -- initialize page pi of a chunk to the free state
 *
 * Resets the page's allocation bit and clears its descriptor fields.
 */
void PageInit(Chunk chunk, Index pi)
{
  Page page;

  AVERT_CRITICAL(Chunk, chunk);
  AVER_CRITICAL(pi < chunk->pages);

  page = ChunkPage(chunk, pi);
  PageSetPool(page, NULL);
  PageSetType(page, PageStateFREE);
  RingInit(PageSpareRing(page));
  /* Reset the page's bit in the chunk's allocation table (compare
     BTSet in PageAlloc). */
  BTRes(chunk->allocTable, pi);
}
/* ShieldExpose -- allow the MPS to access a segment's memory
 *
 * Increments the segment's depth and the shield's total depth,
 * suspends the mutator if the segment is protected against the
 * required access, and lowers hardware protection.  The parentheses
 * around the name allow this definition to coexist with a same-named
 * macro.
 */
void (ShieldExpose)(Arena arena, Seg seg)
{
  Shield shield;
  AccessSet mode = AccessREAD | AccessWRITE;

  /* <design/trace/#fix.noaver> */
  AVERT_CRITICAL(Arena, arena);
  shield = ArenaShield(arena);
  AVER_CRITICAL(shield->inside);

  SegSetDepth(seg, SegDepth(seg) + 1);
  AVER_CRITICAL(SegDepth(seg) > 0); /* overflow */
  ++shield->depth;
  AVER_CRITICAL(shield->depth > 0); /* overflow */

  /* If the segment is protected in the requested mode, the mutator
     must be kept suspended while it is exposed. */
  if (BS_INTER(SegPM(seg), mode) != AccessSetEMPTY)
    shieldSuspend(arena);

  /* Ensure design.mps.shield.inv.expose.prot. */
  /* TODO: Mass exposure -- see design.mps.shield.improv.mass-expose. */
  shieldProtLower(shield, seg, mode);
}
/* TagCompare -- order a tag tree node against an address key
 *
 * key points at an Addr.  Asserts that the key does not fall inside
 * the object described by the tag (between its addr and addr+size).
 */
static Compare TagCompare(Tree node, TreeKey key)
{
  Addr probe, tagAddr;

  probe = *(Addr *)key;
  tagAddr = TagOfTree(node)->addr;
  if (probe < tagAddr)
    return CompareLESS;
  if (probe == tagAddr)
    return CompareEQUAL;
  /* Check key is not inside the object of this tag */
  AVER_CRITICAL(AddrAdd(tagAddr, TagOfTree(node)->size) <= probe);
  return CompareGREATER;
}
/* TagComp -- order a tag splay node against an address key
 *
 * key points at an Addr.  Asserts that the key does not fall inside
 * the object described by the tag.  Splay-node counterpart of
 * TagCompare.
 */
static Compare TagComp(void *key, SplayNode node)
{
  Addr probe, tagAddr;

  probe = *(Addr *)key;
  tagAddr = SplayNode2Tag(node)->addr;
  if (probe < tagAddr)
    return CompareLESS;
  if (probe == tagAddr)
    return CompareEQUAL;
  /* Check key is not inside the object of this tag */
  AVER_CRITICAL(AddrAdd(tagAddr, SplayNode2Tag(node)->size) <= probe);
  return CompareGREATER;
}
/* BTIsResRange -- test whether all bits in [base, limit) are reset
 *
 * Returns TRUE iff no bit in the half-open index range is set.
 * ACT_ON_RANGE dispatches to the single-bit, partial-word, and
 * whole-word case macros for speed; each macro returns FALSE from
 * this function as soon as a set bit is found.
 */
Bool BTIsResRange(BT bt, Index base, Index limit)
{
  AVERT_CRITICAL(BT, bt); /* See .aver.critical */
  AVER_CRITICAL(base < limit);
  /* Can't check range of base or limit */

#define SINGLE_IS_RES_RANGE(i) \
  if (BTGet(bt, (i))) return FALSE
#define BITS_IS_RES_RANGE(i,base,limit) \
  if ((bt[(i)] & BTMask((base),(limit))) != (Word)0) return FALSE
#define WORD_IS_RES_RANGE(i) \
  if (bt[(i)] != (Word)0) return FALSE

  ACT_ON_RANGE(base, limit, SINGLE_IS_RES_RANGE, BITS_IS_RES_RANGE,
               WORD_IS_RES_RANGE);
  return TRUE;
}
/* SplayAssembleRev -- reassemble the tree after a splay with reversed
 * spines
 *
 * Reattaches the left and right spine trees under the middle node,
 * running the client update along each spine via
 * SplayUpdateRightSpine / SplayUpdateLeftSpine, and finally updates
 * the middle node itself.  Counterpart of SplayAssembleDown for trees
 * that have a client update function.
 */
static void SplayAssembleRev(SplayTree splay, SplayState state)
{
  Tree left, right;

  AVERT_CRITICAL(SplayTree, splay);
  AVER_CRITICAL(state->middle != TreeEMPTY);

  /* Rebuild and update the left spine, then hang it under middle. */
  left = TreeLeft(state->middle);
  left = SplayUpdateRightSpine(splay, state->leftLast, left);
  TreeSetLeft(state->middle, left);

  /* Mirror image for the right spine. */
  right = TreeRight(state->middle);
  right = SplayUpdateLeftSpine(splay, state->rightFirst, right);
  TreeSetRight(state->middle, right);

  /* Middle's children changed, so it must be updated last. */
  splay->updateNode(splay, state->middle);
}
/* SplayFindFirstCompare -- search comparison for SplayFindFirst
 *
 * Drives the splay leftwards towards the first node satisfying the
 * node test, pruning subtrees with the tree test.  The closure is
 * smuggled in through the key argument.  Mirror image of
 * SplayFindLastCompare.
 */
static Compare SplayFindFirstCompare(Tree node, TreeKey key)
{
  SplayFindClosure cl;
  SplayTestNodeFunction nodeTest;
  SplayTestTreeFunction treeTest;
  void *env;
  SplayTree tree;

  AVERT_CRITICAL(Tree, node);
  AVER_CRITICAL(key != NULL);

  /* Copy the closure fields into locals so that calls to the test
     functions can't alias them. */
  cl = (SplayFindClosure)key;
  env = cl->testClosure;
  nodeTest = cl->testNode;
  treeTest = cl->testTree;
  tree = cl->splay;

  if (TreeHasLeft(node) && (*treeTest)(tree, TreeLeft(node), env))
    return CompareLESS;

  if ((*nodeTest)(tree, node, env)) {
    cl->found = TRUE;
    return CompareEQUAL;
  }

  /* If there's a right subtree but it doesn't satisfy the tree test
     then we want to terminate the splay right now.  SplaySplay will
     return TRUE, so the caller must check closure->found to find out
     whether the result node actually satisfies testNode. */
  if (TreeHasRight(node) && !(*treeTest)(tree, TreeRight(node), env)) {
    cl->found = FALSE;
    return CompareEQUAL;
  }
  return CompareGREATER;
}
/* ChunkCompare -- order a chunk tree node against an address key
 *
 * Returns CompareEQUAL when the key address falls within the chunk's
 * [base, limit) range, otherwise LESS/GREATER according to which side
 * of the chunk the address lies.
 */
Compare ChunkCompare(Tree tree, TreeKey key)
{
  Addr probe;
  Chunk chunk;

  AVERT_CRITICAL(Tree, tree);
  AVER_CRITICAL(tree != TreeEMPTY); /* See .chunk.at.base. */

  chunk = ChunkOfTree(tree);
  AVERT_CRITICAL(Chunk, chunk);

  probe = AddrOfTreeKey(key);
  if (probe < chunk->base)
    return CompareLESS;
  if (probe >= chunk->limit)
    return CompareGREATER;
  return CompareEQUAL;
}
/* shieldQueue -- put a segment on the shield queue for deferred sync
 *
 * Queues an unsynced segment so that its hardware protection can be
 * updated lazily.  Grows the queue by doubling when full; if
 * allocation fails, degrades to an LRU circular buffer over the
 * existing array.  Falls back to synchronizing immediately when no
 * queue exists or the mutator is not suspended.
 */
static void shieldQueue(Arena arena, Seg seg)
{
  Shield shield;

  /* <design/trace/#fix.noaver> */
  AVERT_CRITICAL(Arena, arena);
  shield = ArenaShield(arena);
  SHIELD_AVERT_CRITICAL(Seg, seg);

  /* Nothing to do if the segment is already synced or queued. */
  if (SegIsSynced(seg) || seg->queued)
    return;

  if (SegIsExposed(seg)) {
    /* This can occur if the mutator isn't suspended, we expose a
       segment, then raise the shield on it.  In this case, the
       mutator isn't allowed to see the segment, but we don't need to
       queue it until its covered. */
    shieldSuspend(arena);
    return;
  }

  /* Allocate or extend the shield queue if necessary. */
  if (shield->next >= shield->length) {
    void *p;
    Res res;
    Count length;

    AVER(shield->next == shield->length);

    /* Double the queue size (starting from ShieldQueueLENGTH). */
    if (shield->length == 0)
      length = ShieldQueueLENGTH;
    else
      length = shield->length * 2;

    res = ControlAlloc(&p, arena, length * sizeof shield->queue[0]);
    if (res != ResOK) {
      AVER(ResIsAllocFailure(res));
      /* Carry on with the existing queue. */
    } else {
      if (shield->length > 0) {
        /* Copy entries from the old queue and free it. */
        Size oldSize = shield->length * sizeof shield->queue[0];
        AVER(shield->queue != NULL);
        mps_lib_memcpy(p, shield->queue, oldSize);
        ControlFree(arena, shield->queue, oldSize);
      }
      shield->queue = p;
      shield->length = length;
    }
  }

  /* Queue unavailable, so synchronize now.  Or if the mutator is not
     yet suspended and the code raises the shield on a covered
     segment, protect it now, because that's probably better than
     suspending the mutator. */
  if (shield->length == 0 || !shield->suspended) {
    shieldSync(shield, seg);
    return;
  }

  AVER_CRITICAL(shield->limit <= shield->length);
  AVER_CRITICAL(shield->next <= shield->limit);

  /* If we failed to extend the shield queue array, degrade to an LRU
     circular buffer. */
  if (shield->next >= shield->length)
    shield->next = 0;
  AVER_CRITICAL(shield->next < shield->length);
  AVER_CRITICAL(shield->length > 0);

  /* If the limit is less than the length, then the queue array has
     yet to be filled, and next is an uninitialized entry.  Otherwise
     it's the tail end from last time around, and needs to be
     flushed. */
  if (shield->limit >= shield->length) {
    AVER_CRITICAL(shield->limit == shield->length);
    shieldFlushEntry(shield, shield->next);
  }

  shield->queue[shield->next] = seg;
  ++shield->next;
  seg->queued = TRUE;

  /* Advance the high-water mark of valid queue entries. */
  if (shield->next >= shield->limit)
    shield->limit = shield->next;
}