HXRET _hxcheckbuf(HXLOCAL const *locp, HXBUF const *bufp)
{
    HXFILE *hp = locp->file;

    if (IS_MAP(hp, bufp->pgno)) {
        if (!(bufp->data[0] & 1))
            return bad_map_self;
        if (bufp->pgno) {       // extended map page.
            if (bufp->next || bufp->used)
                return bad_map_head;
        } else {                // root page.
            if (bufp->used >= DATASIZE(hp) || bufp->used != hp->uleng)
                return bad_map_head;
            // Todo: check that HXROOT{pgsize,version} are good.
        }

        int lastbit, pos;
        PAGENO lastmap = locp->npages - 1;

        lastmap = _hxmap(hp, lastmap - lastmap % HXPGRATE, &lastbit);
        if (bufp->pgno == lastmap) {
            // All bits beyond lastbit must be zero.
            BYTE mask = -2 << (lastbit & 7);

            for (pos = lastbit >> 3; pos < (int)DATASIZE(hp); ++pos, mask = -1)
                if (bufp->data[pos] & mask)
                    return bad_overmap;
        }
    } else {                    // DATA page.
const char *typeStr(Value v) {
    const char *s = "?";
    if (IS_NIL(v))         { s = "nil"; }
    else if (IS_NUM(v))    { s = "number"; }
    else if (IS_STRING(v)) { s = "string"; }
    else if (IS_ARRAY(v))  { s = "array"; }
    else if (IS_MAP(v))    { s = "map"; }
    else if (IS_FUNC(v))   { s = "func"; }
    else if (IS_CFUNC(v))  { s = "cfunc"; }
    else if (IS_CF(v))     { s = "cf"; }
    else if (IS_CP(v))     { s = "cp"; }
    else if (IS_PROTO(v))  { s = "proto"; }
    else if (IS_REG(v))    { s = "reg"; }
    return s;
}
HXRET hxstat(HXFILE *hp, HXSTAT *sp)
{
    HXLOCAL loc, *locp = &loc;
    PAGENO pg;
    int i, nchains;

    if (!hp || !sp)
        return HXERR_BAD_REQUEST;

    ENTER(locp, hp, 0, 1);
    HXBUF *bufp = &locp->buf[0];

    locp->mode = F_RDLCK;
    _hxlock(locp, 0, 0);
    _hxsize(locp);
    memset(sp, 0, sizeof *sp);
    sp->npages = locp->npages;
    _hxinitRefs(locp);

    for (pg = 1; pg < (unsigned)locp->npages; ++pg) {
        _hxload(locp, bufp, pg);
        if (IS_MAP(hp, pg))
            continue;

        _hxsetRef(locp, pg, bufp->next);
        if (IS_HEAD(bufp->pgno)) {
            sp->head_bytes += bufp->used;
        } else if (bufp->used) {
            ++sp->ovfl_pages;
            sp->ovfl_bytes += bufp->used;
        }

        char *recp, *endp;
        FOR_EACH_REC(recp, bufp, endp) {
            ++sp->nrecs;
            sp->hash ^= RECHASH(recp);
        }

        if (!IS_HEAD(pg)) {
            COUNT nheads = 0;

            _hxfindHeads(locp, bufp);
            while (locp->vprev[nheads])
                ++nheads;
            if (nheads > HX_MAX_SHARE)
                nheads = HX_MAX_SHARE;
            ++sp->share_hist[nheads];
        }
    }
//
//  Uncolor_Array: C
//
void Uncolor_Array(REBARR *a)
{
    if (Is_Series_White(SER(a)))
        return; // avoid loop

    Flip_Series_To_White(SER(a));

    RELVAL *val;
    for (val = ARR_HEAD(a); NOT_END(val); ++val)
        if (ANY_ARRAY_OR_PATH(val) or IS_MAP(val) or ANY_CONTEXT(val))
            Uncolor(val);
}
inline Value doAdd(GC *gc, Value a, Value b) {
    int ta = TAG(a);
    if (IS_NUM_TAG(ta) && IS_NUM(b)) {
        return VAL_NUM(GET_NUM(a) + GET_NUM(b));
    } else if (IS_STRING(a)) {
        return String::concat(gc, a, b);
    } else {
        if (ta != T_OBJ) { return E_WRONG_TYPE; }
        int type = O_TYPE(a);
        if (type == O_ARRAY && (IS_ARRAY(b) || IS_MAP(b) || IS_STRING(b))) {
            Array *array = Array::alloc(gc);
            array->add(a);
            array->add(b);
            return VAL_OBJ(array);
        } else if (type == O_MAP && (IS_ARRAY(b) || IS_MAP(b) || IS_STRING(b))) {
            Map *map = MAP(a)->copy(gc);
            map->add(b);
            return VAL_OBJ(map);
        } else {
            return VERR;
        }
    }
}
void StringBuilder::append(Value v, bool raw) {
    void *ptr = GET_PTR(v);
    if (IS_STRING(v)) {
        if (!raw) { append('\''); }
        append(GET_CSTR(v), len(v));
        if (!raw) { append('\''); }
    } else if (IS_NUM(v)) {
        append(GET_NUM(v));
        return;
    } else if (IS_ARRAY(v)) {
        Array *a = (Array *) ptr;
        int size = a->size();
        append('[');
        for (int i = 0; i < size; ++i) {
            append(i ? ", " : "");
            append(a->getI(i));
        }
        append(']');
    } else if (IS_MAP(v)) {
        Map *m = (Map *) ptr;
        int size = m->size();
        append('{');
        Value *keys = m->keyBuf();
        Value *vals = m->valBuf();
        for (int i = 0; i < size; ++i) {
            append(i ? ", " : "");
            append(keys[i]);
            append(':');
            append(vals[i]);
        }
        append('}');
    } else if (IS_NIL(v)) {
        append("nil");
    } else {
        append('<');
        append(typeStr(v));
        append('>');
        char tmp[32];
        snprintf(tmp, sizeof(tmp), "%p", ptr);
        append(tmp);
    }
}
//
//  MT_Map: C
//
REBFLG MT_Map(REBVAL *out, REBVAL *data, enum Reb_Kind type)
{
    REBCNT n;
    REBSER *series;

    if (!IS_BLOCK(data) && !IS_MAP(data)) return FALSE;

    n = VAL_BLK_LEN(data);
    if (n & 1) return FALSE;

    series = Make_Map(n / 2);

    Append_Map(series, data, UNKNOWN);
    Rehash_Hash(series);
    Val_Init_Map(out, series);

    return TRUE;
}
//
//  Uncolor: C
//
// Clear the recursion markers for series and object trees.
//
void Uncolor(RELVAL *v)
{
    REBARR *array;

    if (ANY_ARRAY_OR_PATH(v))
        array = VAL_ARRAY(v);
    else if (IS_MAP(v))
        array = MAP_PAIRLIST(VAL_MAP(v));
    else if (ANY_CONTEXT(v))
        array = CTX_VARLIST(VAL_CONTEXT(v));
    else {
        // Shouldn't have marked recursively any non-array series (no need)
        //
        assert( not ANY_SERIES(v) or Is_Series_White(VAL_SERIES(v)) );
        return;
    }

    Uncolor_Array(array);
}
*/  REBFLG MT_Map(REBVAL *out, REBVAL *data, REBCNT type)
/*
***********************************************************************/
{
    REBCNT n;
    REBSER *series;

    if (!IS_BLOCK(data) && !IS_MAP(data)) return FALSE;

    n = VAL_BLK_LEN(data);
    if (n & 1) return FALSE;

    series = Make_Map(n/2);
    //COPY_BLK_PART(series, VAL_BLK_DATA(data), n);
    Append_Map(series, data, UNKNOWN);
    Rehash_Hash(series);
    Set_Series(REB_MAP, out, series);

    return TRUE;
}
STOID Mold_Block(REBVAL *value, REB_MOLD *mold)
{
    REBYTE *sep;
    REBOOL all = GET_MOPT(mold, MOPT_MOLD_ALL);
    REBSER *series = mold->series;
    REBFLG over = FALSE;

    if (SERIES_WIDE(VAL_SERIES(value)) == 0)
        Crash(RP_BAD_WIDTH, sizeof(REBVAL), 0, VAL_TYPE(value));

    // Optimize when no index needed:
    if (VAL_INDEX(value) == 0 && !IS_MAP(value)) // && (VAL_TYPE(value) <= REB_LIT_PATH))
        all = FALSE;

    // If out of range, do not cause error to avoid error looping.
    if (VAL_INDEX(value) >= VAL_TAIL(value)) over = TRUE;

    // Force it into []
    if (all || (over && !IS_BLOCK(value) && !IS_PAREN(value))) {
        SET_FLAG(mold->opts, MOPT_MOLD_ALL);
        Pre_Mold(value, mold); // #[block! part
        //if (over) Append_Bytes(mold->series, "[]");
        //else
        Mold_Block_Series(mold, VAL_SERIES(value), 0, 0);
        Post_Mold(value, mold);
    }
    else {
        switch (VAL_TYPE(value)) {
        case REB_MAP:
            Pre_Mold(value, mold);
            sep = 0;

        case REB_BLOCK:
            if (GET_MOPT(mold, MOPT_ONLY)) {
                CLR_FLAG(mold->opts, MOPT_ONLY); // only top level
                sep = "\000\000";
            }
            else sep = 0;
            break;

        case REB_PAREN:
            sep = "()";
            break;

        case REB_GET_PATH:
            series = Append_Byte(series, ':');
            sep = "/";
            break;

        case REB_LIT_PATH:
            series = Append_Byte(series, '\'');
            /* fall through */
        case REB_PATH:
        case REB_SET_PATH:
            sep = "/";
            break;
        }

        if (over) Append_Bytes(mold->series, sep ? sep : (REBYTE*)("[]"));
        else Mold_Block_Series(mold, VAL_SERIES(value), VAL_INDEX(value), sep);

        if (VAL_TYPE(value) == REB_SET_PATH)
            Append_Byte(series, ':');
    }
}
// extern __thread jmp_buf jumpBuf;

int VM::call(Value A, int nEffArgs, Value *regs, Stack *stack) {
    Vector<RetInfo> retInfo; // only used if FAST_CALL

    if (!(IS_O_TYPE(A, O_FUNC) || IS_CF(A) || IS_O_TYPE(A, O_CFUNC))) {
        return -1;
    }

    regs = stack->maybeGrow(regs, 256);
    int nExpectedArgs = IS_O_TYPE(A, O_FUNC) ?
        ((Func *)GET_OBJ(A))->proto->nArgs : NARGS_CFUNC;
    nEffArgs = prepareStackForCall(regs, nExpectedArgs, nEffArgs, gc);

    if (IS_CF(A) || IS_O_TYPE(A, O_CFUNC)) {
        if (IS_CF(A)) {
            tfunc f = GET_CF(A);
            *regs = f(this, CFunc::CFUNC_CALL, 0, regs, nEffArgs);
        } else {
            ((CFunc *) GET_OBJ(A))->call(this, regs, nEffArgs);
        }
        return 0;
    }

    unsigned code = 0;
    Value B;
    Value *ptrC;
    Func *activeFunc = (Func *) GET_OBJ(A);
    unsigned *pc = (unsigned *) activeFunc->proto->code.buf();

    static void *dispatch[] = {
#define _(name) &&name
#include "opcodes.inc"
#undef _
    };
    assert(sizeof(dispatch)/sizeof(dispatch[0]) == N_OPCODES);

    copyUpvals(activeFunc, regs);
    STEP;

JMP:  pc += OD(code); STEP;
JT:   if (!IS_FALSE(*ptrC)) { pc += OD(code); } STEP;
JF:   if ( IS_FALSE(*ptrC)) { pc += OD(code); } STEP;
JLT:  if (lessThan(A, B)) { pc += OSC(code); } STEP;
JNIS: if (A != B) { pc += OSC(code); } STEP;

FOR:
    A = *(ptrC + 1);
    B = *(ptrC + 2);
    if (!IS_NUM(A) || !IS_NUM(B)) { goto error; } // E_FOR_NOT_NUMBER
    *ptrC = B;
    if (!(GET_NUM(B) < GET_NUM(A))) { pc += OD(code); }
    STEP;

LOOP: {
    const double counter = GET_NUM(*ptrC) + 1;
    if (counter < GET_NUM(*(ptrC+1))) { pc += OD(code); }
    *ptrC = VAL_NUM(counter);
    STEP;
}

FUNC:
    assert(IS_PROTO(A));
    *ptrC = VAL_OBJ(Func::alloc(gc, PROTO(A), regs + 256, regs, OB(code)));
    STEP;

    // index, A[B]
GETI: *ptrC = types->type(A)->indexGet(A, B); if (*ptrC == VERR) { goto error; } STEP;
GETF: *ptrC = types->type(A)->fieldGet(A, B); if (*ptrC == VERR) { goto error; } STEP;

SETI: if (!types->type(*ptrC)->indexSet(*ptrC, A, B)) { goto error; } STEP;
SETF: if (!types->type(*ptrC)->fieldSet(*ptrC, A, B)) { goto error; } STEP;

    /*
    const int oa = OA(code);
    const int ob = OB(code);
    int top = max(oa, ob) + 1;
    top = max(top, activeFunc->proto->localsTop);
    Value *base = regs + top;
    printf("top %d\n", top);
    base[0] = A;
    base[1] = B;
    int cPos = ptrC - regs;
    DO_CALL(v, 2, regs, base, stack);
    regs[cPos] = base[0];
    break;
    if (*ptrC == VERR) { goto error; }
    */

GETS: *ptrC = getSlice(gc, A, B, regs[OB(code)+1]); if (*ptrC == VERR) { goto error; } STEP;
SETS: if (setSlice(*ptrC, A, regs[OA(code)+1], B)) { goto error; } STEP;

RET: {
    regs[0] = A;
    Value *root = stack->base;
    gc->maybeCollect(root, regs - root + 1);
#if FAST_CALL
    if (!retInfo.size()) { return 0; }
    RetInfo *ri = retInfo.top();
    pc = ri->pc;
    regs = stack->base + ri->base;
    activeFunc = ri->func;
    retInfo.pop();
    copyUpvals(activeFunc, regs);
    STEP;
#else
    return 0;
#endif
}

CALL: {
    if (!IS_OBJ(A) && !IS_CF(A)) { goto error; } // E_CALL_NOT_FUNC
    int nEffArgs = OSB(code);
    assert(nEffArgs != 0);
    Value *base = ptrC;
#if FAST_CALL
    if (IS_O_TYPE(A, O_FUNC)) {
        Func *f = (Func *) GET_OBJ(A);
        Proto *proto = f->proto;
        prepareStackForCall(base, proto->nArgs, nEffArgs, gc);
        RetInfo *ret = retInfo.push();
        ret->pc   = pc;
        ret->base = regs - stack->base;
        ret->func = activeFunc;
        regs = stack->maybeGrow(base, 256);
        copyUpvals(f, regs);
        pc = proto->code.buf();
        activeFunc = f;
    } else {
#endif
        int ret = DO_CALL(A, nEffArgs, regs, base, stack);
        if (ret) { goto error; }
#if FAST_CALL
    }
#endif
    STEP;
}

MOVEUP: {
    const int slot = regs + 256 - ptrC;
    activeFunc->setUp(slot, A);
}
MOVE_R: *ptrC = A; STEP;
MOVE_I: *ptrC = VAL_NUM(OD(code)); STEP;

MOVE_V: {
    int id = OA(code);
    *ptrC =
        id == CONST_NIL          ? VNIL :
        id == CONST_EMPTY_STRING ? EMPTY_STRING :
        id == CONST_EMPTY_ARRAY  ? VAL_OBJ(emptyArray->copy(gc)) :
                                   VAL_OBJ(emptyMap->copy(gc));
    STEP;
}

MOVE_C: {
    Value v = *pc | (((u64) *(pc+1)) << 32);
    pc += 2;
    if (IS_ARRAY(v)) {
        v = VAL_OBJ(ARRAY(v)->copy(gc));
    } else if (IS_MAP(v)) {
        v = VAL_OBJ(MAP(v)->copy(gc));
    }
    *ptrC = v;
    STEP;
}

LEN:  *ptrC = VAL_NUM(len(A)); STEP;
NOTL: *ptrC = IS_FALSE(A) ? TRUE : FALSE; STEP;
// notb: *ptrC = IS_INT(A)? VAL_INT(~getInteger(A)):ERROR(E_WRONG_TYPE); STEP;

ADD: *ptrC = doAdd(gc, A, B); if (*ptrC == VERR) { goto error; } STEP;
SUB: *ptrC = BINOP(-, A, B); STEP;
MUL: *ptrC = BINOP(*, A, B); STEP;
DIV: *ptrC = BINOP(/, A, B); STEP;
MOD: *ptrC = doMod(A, B); if (*ptrC == VERR) { goto error; } STEP;
POW: *ptrC = doPow(A, B); if (*ptrC == VERR) { goto error; } STEP;

AND: *ptrC = BITOP(&, A, B); STEP;
OR:  *ptrC = BITOP(|, A, B); STEP;
XOR: *ptrC = BITOP(^, A, B); STEP;

SHL_RR: ERR(!IS_NUM(B), E_WRONG_TYPE); *ptrC = doSHL(A, (int)GET_NUM(B)); STEP;
SHR_RR: ERR(!IS_NUM(B), E_WRONG_TYPE); *ptrC = doSHR(A, (int)GET_NUM(B)); STEP;
SHL_RI: *ptrC = doSHL(A, OSB(code)); STEP;
SHR_RI: *ptrC = doSHR(A, OSB(code)); STEP;

EQ:  *ptrC = equals(A, B)  ? TRUE : FALSE; STEP;
NEQ: *ptrC = !equals(A, B) ? TRUE : FALSE; STEP;
IS:  *ptrC = A == B ? TRUE : FALSE; STEP;
NIS: *ptrC = A != B ? TRUE : FALSE; STEP;
LT:  *ptrC = lessThan(A, B) ? TRUE : FALSE; STEP;
LE:  *ptrC = (equals(A, B) || lessThan(A, B)) ? TRUE : FALSE; STEP;

error:
    return pc - (unsigned *) activeFunc->proto->code.buf();
}
bool is_map(Value *self) {
    return IS_MAP(self);
}
*/  void Make_Block_Type(REBFLG make, REBVAL *value, REBVAL *arg)
/*
**      Value can be:
**          1. a datatype (e.g. BLOCK!)
**          2. a value (e.g. [...])
**
**      Arg can be:
**          1. integer (length of block)
**          2. block (copy it)
**          3. value (convert to a block)
**
***********************************************************************/
{
    REBCNT type;
    REBCNT len;
    REBSER *ser;

    // make block! ...
    if (IS_DATATYPE(value))
        type = VAL_DATATYPE(value);
    else  // make [...] ....
        type = VAL_TYPE(value);

    // make block! [1 2 3]
    if (ANY_BLOCK(arg)) {
        len = VAL_BLK_LEN(arg);
        if (len > 0 && type >= REB_PATH && type <= REB_LIT_PATH)
            No_Nones(arg);
        ser = Copy_Values(VAL_BLK_DATA(arg), len);
        goto done;
    }

    if (IS_STRING(arg)) {
        REBCNT index, len = 0;
        VAL_SERIES(arg) = Prep_Bin_Str(arg, &index, &len); // (keeps safe)
        ser = Scan_Source(VAL_BIN(arg), VAL_LEN(arg));
        goto done;
    }

    if (IS_BINARY(arg)) {
        ser = Scan_Source(VAL_BIN_DATA(arg), VAL_LEN(arg));
        goto done;
    }

    if (IS_MAP(arg)) {
        ser = Map_To_Block(VAL_SERIES(arg), 0);
        goto done;
    }

    if (ANY_OBJECT(arg)) {
        ser = Make_Object_Block(VAL_OBJ_FRAME(arg), 3);
        goto done;
    }

    if (IS_VECTOR(arg)) {
        ser = Make_Vector_Block(arg);
        goto done;
    }

//  if (make && IS_NONE(arg)) {
//      ser = Make_Block(0);
//      goto done;
//  }

    // to block! typeset
    if (!make && IS_TYPESET(arg) && type == REB_BLOCK) {
        Set_Block(value, Typeset_To_Block(arg));
        return;
    }

    if (make) {
        // make block! 10
        if (IS_INTEGER(arg) || IS_DECIMAL(arg)) {
            len = Int32s(arg, 0);
            Set_Series(type, value, Make_Block(len));
            return;
        }
        Trap_Arg(arg);
    }

    ser = Copy_Values(arg, 1);

done:
    Set_Series(type, value, ser);
    return;
}
*/  static REB_R Loop_Each(struct Reb_Call *call_, REBINT mode)
/*
**      Supports these natives (modes):
**          0: foreach
**          1: remove-each
**          2: map
**
***********************************************************************/
{
    REBSER *body;
    REBVAL *vars;
    REBVAL *words;
    REBSER *frame;
    REBVAL *value;
    REBSER *series;
    REBSER *out;    // output block (for MAP, mode = 2)

    REBINT index;   // !!!! should these be REBCNT?
    REBINT tail;
    REBINT windex;  // write
    REBINT rindex;  // read
    REBINT err;
    REBCNT i;
    REBCNT j;
    REBVAL *ds;

    assert(mode >= 0 && mode < 3);

    value = D_ARG(2); // series
    if (IS_NONE(value)) return R_NONE;

    body = Init_Loop(D_ARG(1), D_ARG(3), &frame); // vars, body
    SET_OBJECT(D_ARG(1), frame); // keep GC safe
    Set_Block(D_ARG(3), body); // keep GC safe

    SET_NONE(D_OUT); // Default result to NONE if the loop does not run

    // If it's MAP, create result block:
    if (mode == 2) {
        out = Make_Block(VAL_LEN(value));
        SAVE_SERIES(out);
    }

    // Get series info:
    if (ANY_OBJECT(value)) {
        series = VAL_OBJ_FRAME(value);
        out = FRM_WORD_SERIES(series); // words (the out local reused)
        index = 1;
        //if (frame->tail > 3) Trap_Arg_DEAD_END(FRM_WORD(frame, 3));
    }
    else if (IS_MAP(value)) {
        series = VAL_SERIES(value);
        index = 0;
        //if (frame->tail > 3) Trap_Arg_DEAD_END(FRM_WORD(frame, 3));
    }
    else {
        series = VAL_SERIES(value);
        index = VAL_INDEX(value);
        if (index >= cast(REBINT, SERIES_TAIL(series))) {
            if (mode == 1) {
                SET_INTEGER(D_OUT, 0);
            }
            else if (mode == 2) {
                Set_Block(D_OUT, out);
                UNSAVE_SERIES(out);
            }
            return R_OUT;
        }
    }

    windex = index;

    // Iterate over each value in the series block:
    while (index < (tail = SERIES_TAIL(series))) {

        rindex = index;  // remember starting spot
        j = 0;

        // Set the FOREACH loop variables from the series:
        for (i = 1; i < frame->tail; i++) {

            vars = FRM_VALUE(frame, i);
            words = FRM_WORD(frame, i);

            // var spec is WORD
            if (IS_WORD(words)) {

                if (index < tail) {

                    if (ANY_BLOCK(value)) {
                        *vars = *BLK_SKIP(series, index);
                    }
                    else if (ANY_OBJECT(value)) {
                        if (!VAL_GET_EXT(BLK_SKIP(out, index), EXT_WORD_HIDE)) {
                            // Alternate between word and value parts of object:
                            if (j == 0) {
                                Init_Word(vars, REB_WORD, VAL_WORD_SYM(BLK_SKIP(out, index)), series, index);
                                if (NOT_END(vars+1)) index--; // reset index for the value part
                            }
                            else if (j == 1)
                                *vars = *BLK_SKIP(series, index);
                            else
                                Trap_Arg_DEAD_END(words);
                            j++;
                        }
                        else {
                            // Do not evaluate this iteration
                            index++;
                            goto skip_hidden;
                        }
                    }
                    else if (IS_VECTOR(value)) {
                        Set_Vector_Value(vars, series, index);
                    }
                    else if (IS_MAP(value)) {
                        REBVAL *val = BLK_SKIP(series, index | 1);
                        if (!IS_NONE(val)) {
                            if (j == 0) {
                                *vars = *BLK_SKIP(series, index & ~1);
                                if (IS_END(vars+1)) index++; // only words
                            }
                            else if (j == 1)
                                *vars = *BLK_SKIP(series, index);
                            else
                                Trap_Arg_DEAD_END(words);
                            j++;
                        }
                        else {
                            index += 2;
                            goto skip_hidden;
                        }
                    }
                    else { // A string or binary
                        if (IS_BINARY(value)) {
                            SET_INTEGER(vars, (REBI64)(BIN_HEAD(series)[index]));
                        }
                        else if (IS_IMAGE(value)) {
                            Set_Tuple_Pixel(BIN_SKIP(series, index), vars);
                        }
                        else {
                            VAL_SET(vars, REB_CHAR);
                            VAL_CHAR(vars) = GET_ANY_CHAR(series, index);
                        }
                    }
                    index++;
                }
                else SET_NONE(vars);
            }
            // var spec is SET_WORD:
            else if (IS_SET_WORD(words)) {
                if (ANY_OBJECT(value) || IS_MAP(value)) {
                    *vars = *value;
                }
                else {
                    VAL_SET(vars, REB_BLOCK);
                    VAL_SERIES(vars) = series;
                    VAL_INDEX(vars) = index;
                }
                //if (index < tail) index++; // do not increment block.
            }
            else Trap_Arg_DEAD_END(words);
        }

        if (index == rindex) index++; //the word block has only set-words: foreach [a:] [1 2 3][]

        if (!DO_BLOCK(D_OUT, body, 0)) {
            if ((err = Check_Error(D_OUT)) >= 0) {
                index = rindex;
                break;
            }
            // else CONTINUE:
            if (mode == 1) SET_FALSE(D_OUT); // keep the value (for mode == 1)
        }
        else {
            err = 0; // prevent later test against uninitialized value
        }

        if (mode > 0) {
            //if (ANY_OBJECT(value)) Trap_Types_DEAD_END(words, REB_BLOCK, VAL_TYPE(value)); //check not needed

            // If FALSE return, copy values to the write location:
            if (mode == 1) { // remove-each
                if (IS_CONDITIONAL_FALSE(D_OUT)) {
                    REBCNT wide = SERIES_WIDE(series);
                    // memory areas may overlap, so use memmove and not memcpy!
                    memmove(series->data + (windex * wide), series->data + (rindex * wide), (index - rindex) * wide);
                    windex += index - rindex;
                    // old: while (rindex < index) *BLK_SKIP(series, windex++) = *BLK_SKIP(series, rindex++);
                }
            }
            else if (!IS_UNSET(D_OUT)) Append_Value(out, D_OUT); // (mode == 2)
        }
skip_hidden: ;
    }

    // Finish up:
    if (mode == 1) {
        // Remove hole (updates tail):
        if (windex < index) Remove_Series(series, windex, index - windex);
        SET_INTEGER(D_OUT, index - windex);
        return R_OUT;
    }

    // If MAP...
    if (mode == 2) {
        UNSAVE_SERIES(out);
        if (err != 2) {
            // ...and not BREAK/RETURN:
            Set_Block(D_OUT, out);
            return R_OUT;
        }
    }

    return R_OUT;
}
//--------------|---------------------------------------------
HXRET hxshape(HXFILE *hp, double overload)
{
    HXLOCAL loc, *locp = &loc;
    HXBUF *srcp, *dstp, *oldp;
    int pos, bitpos;
    PGINFO *atail, *ztail;
    PAGENO pg, pm, *aprev, *afree, *zfree;
    double totbytes = 0, fullbytes = 0, fullpages = 0;

    if (!hp || hp->buffer.pgno || hp->mode & HX_MMAP || !(hp->mode & HX_UPDATE))
        return HXERR_BAD_REQUEST;

    ENTER(locp, hp, NULL, 3);
    _hxlock(locp, 0, 0);
    _hxsize(locp);

    srcp = &locp->buf[0];
    dstp = &locp->buf[1];
    oldp = &locp->buf[2];

    _hxinitRefs(locp);

    // Populate vnext,vrefs,vtail for tail-merging:
    ztail = calloc(locp->npages / HXPGRATE + 1, sizeof(PGINFO));
    locp->vtail = ztail;

    for (pg = 1; pg < locp->npages; ++pg) {
        PGINFO x = _hxpginfo(locp, pg);

        _hxsetRef(locp, pg, x.pgno);
        totbytes += x.used;
        if (x.pgno)     // i.e. page.next != 0
            ++fullpages, fullbytes += x.used;
        else if (x.used && !IS_HEAD(pg))
            x.pgno = pg, *ztail++ = x;
    }

    // Sort vtail by (used), so that smallest+largest (used)
    // counts can be matched up; simple greedy-fill algorithm.
    qsort(locp->vtail, ztail - locp->vtail, sizeof *locp->vtail,
          (cmpfn_t) cmpused);

    // Combine tail pages where possible:
    for (atail = locp->vtail, --ztail; atail < ztail;) {

        if (!_FITS(hp, atail->used, atail->recs, ztail->used, ztail->recs)) {
            --ztail;
            continue;
        }

        // Merge is always from [atail] to [ztail], to maintain
        // ([ztail].used >= [ztail-1].used).
        if (atail->pgno < ztail->pgno) {
            PGINFO tmp = *atail;
            *atail = *ztail;
            *ztail = tmp;
        }

        _hxload(locp, srcp, atail->pgno);
        _hxload(locp, dstp, ztail->pgno);
        _hxappend(dstp, srcp->data, srcp->used);
        dstp->recs += srcp->recs;
        _hxsave(locp, dstp);

        _hxfindRefs(locp, srcp, srcp->pgno);
        for (aprev = locp->vprev; *aprev; ++aprev)
            PUTLINK(locp, *aprev, dstp->pgno);

        ztail->used += srcp->used;
        srcp->used = srcp->recs = 0;
        BUFLINK(locp, srcp, 0);
        _hxsave(locp, srcp);
        _hxalloc(locp, srcp->pgno, 0);

        ++atail;
    }

    // Now decide whether to grow or shrink the file.
    PAGENO overflows = 0;

    for (pg = 1; pg < locp->npages; ++pg) {
        if (IS_HEAD(pg)) {
            int loops = HX_MAX_CHAIN;

            for (pm = pg; (pm = locp->vnext[pm]); ++overflows)
                if (!--loops)
                    LEAVE(locp, HXERR_BAD_FILE);
        }
    }

    PAGENO dpages = locp->dpages + overflows;
    PAGENO goodsize = dpages / (1.0 + overload);

    DEBUG("%.0f/%.0f=%0.f %.0f %lu/%.2f=%lu => %lu",
          fullbytes, fullpages, fullbytes / fullpages, totbytes,
          dpages, overload + 1, goodsize, _hxd2f(goodsize));

    // "+1" for the root page
    goodsize = goodsize ? _hxd2f(goodsize) + 1 : 2;

    if (locp->npages <= goodsize) {  // Increase dpages.
        // Note that _hxgrow always returns an ALLOCATED
        // overflow. It would be smarter to clear all the
        // map bits in one step at the end, the way
        // hxbuild sets all the map bits in one load/save.
        PAGENO junk = 0;

        while (locp->npages < goodsize) {
            _hxgrow(locp, dstp, DATASIZE(hp), &junk);
            _hxsave(locp, dstp);
            _hxalloc(locp, dstp->pgno, 0);
        }

        _hxflushfreed(locp, dstp);
        LEAVE(locp, HXNOTE);
    }

    // Build a list of free pgnos
    assert(sizeof *afree <= sizeof *locp->vtail);
    afree = zfree = (PAGENO *) locp->vtail;
    for (pg = pm = 0; pg < locp->npages; pg += HXPGRATE) {
        if (!VREF(locp, pg) && !IS_MAP(hp, pg))
            *zfree++ = pg;
    }

    // Since we are decrementing npages BEFORE reading last page,
    // set locked such that _hxislocked gives correct answer.
    hp->locked |= LOCKED_BEYOND;

    // Work backward from end of file, trimming pages.
    for (; locp->npages > goodsize; --locp->npages) {
        PAGENO srchead = locp->npages - 1;
        PAGENO srctail, dsthead, dstneck, dsttail;
        int loops = HX_MAX_CHAIN;

        // EASIEST CASE: a map or unreferenced overflow page
        if (srchead == zfree[-1] || IS_MAP(hp, srchead)) {
            assert(!VREF(locp, srchead) && !IS_HEAD(srchead));
            --zfree;
            continue;
        }

        // EASIER CASE: an empty head page
        _hxload(locp, srcp, srchead);
        if (!srcp->used)
            continue;

        // Anything from here on might need 2 free pages
        if (zfree - afree < 2)
            break;

        --VREF(locp, srcp->next);
        STAIN(srcp);
        srcp->pgno = *afree++;
# if 0 // hxshape does not work with MMAP as yet.
        if (hp->mmap)
            memcpy(&hp->mmap[(off_t) srcp->pgno * hp->pgsize], srcp->page, hp->pgsize);
# endif
        _hxalloc(locp, srcp->pgno, 1);
        _hxsetRef(locp, srcp->pgno, srcp->next);

        // EASY CASE: an overflow page to relocate:
        if (!IS_HEAD(srchead)) {
            _hxsave(locp, srcp);
            _hxfindRefs(locp, srcp, srchead);
            for (aprev = locp->vprev; *aprev; ++aprev)
                PUTLINK(locp, *aprev, srcp->pgno);
            continue;
        }

        // HARD CASE: a head page to desplit:
        locp->hash = RECHASH(srcp->data);
        _hxpoint(locp);     // recalc (dpages,head)
        dsthead = locp->head;
        dstneck = locp->vnext[dsthead];
        dsttail = _hxgetRef(locp, dsthead, 0);
        srctail = _hxgetRef(locp, srchead, 0);

        // Append srchead to dsttail, or insert chain between
        // dsthead and vnext[dsthead]. If srctail is shared,
        // make a copy of it first.
        if (dsttail == dsthead || VREF(locp, dsttail) == 1) {
            dsthead = dsttail;
        } else if (srctail == srchead) {
            BUFLINK(locp, srcp, dstneck);
        } else if (VREF(locp, srctail) == 1) {
            PUTLINK(locp, srctail, dstneck);
        } else if (srctail == dsttail) {
            if (dsttail != dstneck) {
                srctail = _hxgetRef(locp, srcp->pgno, srctail);
                if (srctail == srcp->pgno) {
                    BUFLINK(locp, srcp, dstneck);
                } else {
                    PUTLINK(locp, srctail, dstneck);
                }
            }
        } else {
            _hxload(locp, oldp, srctail);
            if (!oldp->used)
                LEAVE(locp, HXERR_BAD_FILE);

            _hxfresh(locp, dstp, *afree++);
            _hxalloc(locp, dstp->pgno, 1);
            _hxshift(locp, locp->head, srchead, oldp, dstp, dstp);
            _hxsave(locp, oldp);

            // This hack prevents the CHECK in _hxlink from
            // aborting when oldp contains a page that the
            // next iteration wants to PUTLINK. This ONLY occurs
            // in this code (I think).
            // TODO: can this happen in (hxfix,hxshape)??
            oldp->pgno = -1;

            BUFLINK(locp, dstp, dstneck);
            _hxsave(locp, dstp);

            if (srcp->next == srctail) {
                BUFLINK(locp, srcp, dstp->pgno);
            } else {
                pg = _hxgetRef(locp, srchead, srctail);
                PUTLINK(locp, pg, dstp->pgno);
            }
        }

        _hxload(locp, dstp, dsthead);
        BUFLINK(locp, dstp, srcp->pgno);

        // Cannot early-out on !SHRUNK here (as "hxput" does);
        // new chain may have vacancies in two places.
        while (1) {
            if (!--loops)
                LEAVE(locp, HXERR_BAD_FILE);

            if (_hxshift(locp, locp->head, srchead, srcp, dstp, dstp)) {
                SWAP(srcp, dstp);
            } else {
                BUFLINK(locp, dstp, srcp->next);
                if (!srcp->used != !VREF(locp, srcp->pgno))
                    LEAVE(locp, HXERR_BAD_FILE);
                if (!srcp->used) {
                    BUFLINK(locp, srcp, 0);
                    *--afree = srcp->pgno;
                    _hxalloc(locp, srcp->pgno, 0);
                }
            }

            _hxsave(locp, srcp);
            if (!dstp->next)
                break;
            _hxload(locp, srcp, dstp->next);
        }

        _hxsave(locp, dstp);
    }

    // npages was overdecremented by one in loop
    _hxresize(locp, locp->npages + 1);

    // Zero the freemap for all truncated overflow pages:
    pg = _hxmap(hp, locp->npages + HXPGRATE - locp->npages % HXPGRATE, &bitpos);
    _hxload(locp, dstp, pg);
    DEBUG2("clear map %lu from bit %d onward", pg, bitpos);
    pos = bitpos >> 3;
    dstp->data[pos++] &= ~(-1 << (bitpos & 7));
    memset(dstp->data + pos, 0, DATASIZE(hp) - pos);
    STAIN(dstp);
    _hxsave(locp, dstp);

    LEAVE(locp, HXOKAY);
}
*/  static REB_R Loop_Each(struct Reb_Call *call_, LOOP_MODE mode)
/*
**      Common implementation code of FOR-EACH, REMOVE-EACH, MAP-EACH,
**      and EVERY.
**
***********************************************************************/
{
    REBSER *body;
    REBVAL *vars;
    REBVAL *words;
    REBSER *frame;

    // `data` is the series/object/map/etc. being iterated over
    // Note: `data_is_object` flag is optimized out, but hints static analyzer
    REBVAL *data = D_ARG(2);
    REBSER *series;
    const REBOOL data_is_object = ANY_OBJECT(data);

    REBSER *out;    // output block (needed for MAP-EACH)

    REBINT index;   // !!!! should these be REBCNT?
    REBINT tail;
    REBINT windex;  // write
    REBINT rindex;  // read
    REBOOL break_with = FALSE;
    REBOOL every_true = TRUE;
    REBCNT i;
    REBCNT j;
    REBVAL *ds;

    if (IS_NONE(data)) return R_NONE;

    body = Init_Loop(D_ARG(1), D_ARG(3), &frame); // vars, body
    Val_Init_Object(D_ARG(1), frame); // keep GC safe
    Val_Init_Block(D_ARG(3), body); // keep GC safe

    SET_NONE(D_OUT); // Default result to NONE if the loop does not run

    if (mode == LOOP_MAP_EACH) {
        // Must be managed *and* saved...because we are accumulating results
        // into it, and those results must be protected from GC

        // !!! This means we cannot Free_Series in case of a BREAK, we
        // have to leave it to the GC.  Should there be a variant which
        // lets a series be a GC root for a temporary time even if it is
        // not SER_KEEP?

        out = Make_Array(VAL_LEN(data));
        MANAGE_SERIES(out);
        SAVE_SERIES(out);
    }

    // Get series info:
    if (data_is_object) {
        series = VAL_OBJ_FRAME(data);
        out = FRM_WORD_SERIES(series); // words (the out local reused)
        index = 1;
        //if (frame->tail > 3) raise Error_Invalid_Arg(FRM_WORD(frame, 3));
    }
    else if (IS_MAP(data)) {
        series = VAL_SERIES(data);
        index = 0;
        //if (frame->tail > 3) raise Error_Invalid_Arg(FRM_WORD(frame, 3));
    }
    else {
        series = VAL_SERIES(data);
        index = VAL_INDEX(data);
        if (index >= cast(REBINT, SERIES_TAIL(series))) {
            if (mode == LOOP_REMOVE_EACH) {
                SET_INTEGER(D_OUT, 0);
            }
            else if (mode == LOOP_MAP_EACH) {
                UNSAVE_SERIES(out);
                Val_Init_Block(D_OUT, out);
            }
            return R_OUT;
        }
    }

    windex = index;

    // Iterate over each value in the data series block:
    while (index < (tail = SERIES_TAIL(series))) {

        rindex = index;  // remember starting spot
        j = 0;

        // Set the FOREACH loop variables from the series:
        for (i = 1; i < frame->tail; i++) {

            vars = FRM_VALUE(frame, i);
            words = FRM_WORD(frame, i);

            // var spec is WORD
            if (IS_WORD(words)) {

                if (index < tail) {

                    if (ANY_BLOCK(data)) {
                        *vars = *BLK_SKIP(series, index);
                    }
                    else if (data_is_object) {
                        if (!VAL_GET_EXT(BLK_SKIP(out, index), EXT_WORD_HIDE)) {
                            // Alternate between word and value parts of object:
                            if (j == 0) {
                                Val_Init_Word(vars, REB_WORD, VAL_WORD_SYM(BLK_SKIP(out, index)), series, index);
                                if (NOT_END(vars+1)) index--; // reset index for the value part
                            }
                            else if (j == 1)
                                *vars = *BLK_SKIP(series, index);
                            else
                                raise Error_Invalid_Arg(words);
                            j++;
                        }
                        else {
                            // Do not evaluate this iteration
                            index++;
                            goto skip_hidden;
                        }
                    }
                    else if (IS_VECTOR(data)) {
                        Set_Vector_Value(vars, series, index);
                    }
                    else if (IS_MAP(data)) {
                        REBVAL *val = BLK_SKIP(series, index | 1);
                        if (!IS_NONE(val)) {
                            if (j == 0) {
                                *vars = *BLK_SKIP(series, index & ~1);
                                if (IS_END(vars+1)) index++; // only words
                            }
                            else if (j == 1)
                                *vars = *BLK_SKIP(series, index);
                            else
                                raise Error_Invalid_Arg(words);
                            j++;
                        }
                        else {
                            index += 2;
                            goto skip_hidden;
                        }
                    }
                    else { // A string or binary
                        if (IS_BINARY(data)) {
                            SET_INTEGER(vars, (REBI64)(BIN_HEAD(series)[index]));
                        }
                        else if (IS_IMAGE(data)) {
                            Set_Tuple_Pixel(BIN_SKIP(series, index), vars);
                        }
                        else {
                            VAL_SET(vars, REB_CHAR);
                            VAL_CHAR(vars) = GET_ANY_CHAR(series, index);
                        }
                    }
                    index++;
                }
                else SET_NONE(vars);
            }
            // var spec is SET_WORD:
            else if (IS_SET_WORD(words)) {
                if (ANY_OBJECT(data) || IS_MAP(data))
                    *vars = *data;
                else
                    Val_Init_Block_Index(vars, series, index);
                //if (index < tail) index++; // do not increment block.
            }
            else raise Error_Invalid_Arg(words);
        }

        if (index == rindex) {
            // the word block has only set-words: for-each [a:] [1 2 3][]
            index++;
        }

        if (Do_Block_Throws(D_OUT, body, 0)) {
            if (IS_WORD(D_OUT) && VAL_WORD_SYM(D_OUT) == SYM_CONTINUE) {
                if (mode == LOOP_REMOVE_EACH) {
                    // signal the post-body-execution processing that we
                    // *do not* want to remove the element on a CONTINUE
                    SET_FALSE(D_OUT);
                }
                else {
                    // CONTINUE otherwise acts "as if" the loop body execution
                    // returned an UNSET!
                    SET_UNSET(D_OUT);
                }
            }
            else if (IS_WORD(D_OUT) && VAL_WORD_SYM(D_OUT) == SYM_BREAK) {
                // If it's a BREAK, get the /WITH value (UNSET! if no /WITH)
                // Though technically this doesn't really tell us if a
                // BREAK/WITH happened, as you can BREAK/WITH an UNSET!
                TAKE_THROWN_ARG(D_OUT, D_OUT);
                if (!IS_UNSET(D_OUT)) break_with = TRUE;
                index = rindex;
                break;
            }
            else {
                // Any other kind of throw, with a WORD! name or otherwise...
                index = rindex;
                break;
            }
        }

        switch (mode) {
        case LOOP_FOR_EACH:
            // no action needed after body is run
            break;

        case LOOP_REMOVE_EACH:
            // If FALSE return, copy values to the write location
            // !!! Should UNSET! also act as conditional false here?  Error?
            if (IS_CONDITIONAL_FALSE(D_OUT)) {
                REBYTE wide = SERIES_WIDE(series);
                // memory areas may overlap, so use memmove and not memcpy!

                // !!! This seems a slow way to do it, but there's probably
                // not a lot that can be done as the series is expected to
                // be in a good state for the next iteration of the body. :-/
                memmove(
                    series->data + (windex * wide),
                    series->data + (rindex * wide),
                    (index - rindex) * wide
                );
                windex += index - rindex;
            }
            break;

        case LOOP_MAP_EACH:
            // anything that's not an UNSET! will be added to the result
            if (!IS_UNSET(D_OUT)) Append_Value(out, D_OUT);
            break;

        case LOOP_EVERY:
            if (every_true) {
                // !!! This currently treats UNSET! as true, which ALL
                // effectively does right now.  That's likely a bad idea.
                // When ALL changes, so should this.
                //
                every_true = IS_CONDITIONAL_TRUE(D_OUT);
            }
            break;

        default:
            assert(FALSE);
        }
skip_hidden: ;
    }

    switch (mode) {
    case LOOP_FOR_EACH:
        // Nothing to do but return last result (will be UNSET! if an
        // ordinary BREAK was used, the /WITH if a BREAK/WITH was used,
        // and an UNSET! if the last loop iteration did a CONTINUE.)
        return R_OUT;

    case LOOP_REMOVE_EACH:
        // Remove hole (updates tail):
        if (windex < index) Remove_Series(series, windex, index - windex);
        SET_INTEGER(D_OUT, index - windex);
        return R_OUT;

    case LOOP_MAP_EACH:
        UNSAVE_SERIES(out);
        if (break_with) {
            // If BREAK is given a /WITH parameter that is not an UNSET!, it
            // is assumed that you want to override the accumulated mapped
            // data so far and return the /WITH value. (which will be in
            // D_OUT when the loop above is `break`-ed)

            // !!! Would be nice if we could Free_Series(out), but it is owned
            // by GC (we had to make it that way to use SAVE_SERIES on it)
            return R_OUT;
        }

        // If you BREAK/WITH an UNSET! (or just use a BREAK that has no
        // /WITH, which is indistinguishable in the thrown value) then it
        // returns the accumulated results so far up to the break.

        Val_Init_Block(D_OUT, out);
        return R_OUT;

    case LOOP_EVERY:
        // Result is the cumulative TRUE? state of all the input (with any
        // unsets taken out of the consideration).  The last TRUE? input
        // if all valid and NONE! otherwise.  (Like ALL.)  If the loop
        // never runs, `every_true` will be TRUE *but* D_OUT will be NONE!
        if (!every_true) SET_NONE(D_OUT);
        return R_OUT;
    }

    DEAD_END;
}