/*
 * The routine VALconvert transforms a value for interpretation in a
 * certain type. It uses some standard cast conventions to do this.
 * The result, a pointer to a value, is returned. If there are illegal
 * values, or type combinations involved, it gives up with an
 * ILLEGALVALUE.
 *
 * Note: T is converted in place; on success it holds the converted
 * value with vtype TYP, and the returned pointer points into T.
 */
ptr
VALconvert(int typ, ValPtr t)
{
	int src_tpe = t->vtype;
	ValRecord dst;

	dst.vtype = typ;

	/* use base types for user types */
	if (src_tpe > TYPE_str)
		src_tpe = ATOMstorage(src_tpe);
	if (dst.vtype > TYPE_str)
		dst.vtype = ATOMstorage(dst.vtype);
	else if (dst.vtype == TYPE_void)
		dst.vtype = TYPE_oid;	/* a void destination is materialized as oid */

	/* first convert into a new location */
	if (VARconvert(&dst, t, 0) == GDK_FAIL)
		return ILLEGALVALUE;

	/* then maybe free the old: only when the storage types really
	 * differ and at least one side is external (str and beyond),
	 * i.e. when T may own heap memory that DST does not share */
	if (src_tpe != dst.vtype &&
	    t->vtype != typ &&
	    dst.vtype != TYPE_void &&
	    (src_tpe >= TYPE_str || dst.vtype >= TYPE_str))
		VALclear(t);

	/* and finally copy the result */
	*t = dst;
	/* make sure we return the correct type (not the storage type) */
	t->vtype = typ;
	return VALget(t);
}
/* Sort the N-element array H in reverse order, moving the parallel
 * tail array T (may be NULL, TS == 0) along with it.  HS and TS are
 * the head and tail element widths in bytes.  BASE is the base
 * pointer used by the comparison of varsized atoms (presumably the
 * string/vheap base -- TODO confirm against GDKqsort_impl_var_rev).
 * Dispatches to a width-specialized implementation when possible. */
void
GDKqsort_rev(void *h, void *t, const void *base, size_t n, int hs, int ts, int tpe)
{
	struct qsort_t buf;

	assert(hs > 0);
	assert(ts >= 0);
	assert(tpe != TYPE_void);

	buf.hs = (unsigned int) hs;
	buf.ts = (unsigned int) ts;
	buf.cmp = BATatoms[tpe].atomCmp;
	buf.base = base;

	if (ATOMvarsized(tpe)) {
		assert(base != NULL);
		GDKqsort_impl_var_rev(&buf, h, t, n);
		return;
	}
	if (base)
		tpe = TYPE_str;	/* we need the default case */

	/* fall back to the storage type's specialization, but only when
	 * the nil representation and the comparison function agree
	 * between the type and its storage type */
	if (tpe != ATOMstorage(tpe) &&
	    ATOMnilptr(ATOMstorage(tpe)) == ATOMnilptr(tpe) &&
	    BATatoms[ATOMstorage(tpe)].atomCmp == BATatoms[tpe].atomCmp)
		tpe = ATOMstorage(tpe);

	switch (tpe) {
	case TYPE_bte:
		GDKqsort_impl_bte_rev(&buf, h, t, n);
		break;
	case TYPE_sht:
		GDKqsort_impl_sht_rev(&buf, h, t, n);
		break;
	case TYPE_int:
		GDKqsort_impl_int_rev(&buf, h, t, n);
		break;
	case TYPE_lng:
		GDKqsort_impl_lng_rev(&buf, h, t, n);
		break;
#ifdef HAVE_HGE
	case TYPE_hge:
		GDKqsort_impl_hge_rev(&buf, h, t, n);
		break;
#endif
	case TYPE_flt:
		GDKqsort_impl_flt_rev(&buf, h, t, n);
		break;
	case TYPE_dbl:
		GDKqsort_impl_dbl_rev(&buf, h, t, n);
		break;
	default:
		/* any remaining fixed-size type: generic cmp-based sort */
		GDKqsort_impl_any_rev(&buf, h, t, n);
		break;
	}
}
/* Create a copy of the type value combination in TPE/S, allocating
 * space for external values (non-fixed sized values). See VALcopy
 * for a version where the source is in a ValRecord, and see VALset
 * for a version where ownership of the source is transferred.
 *
 * Returns NULL in case of (malloc) failure.
 *
 * Note: d->len is only filled in for str, ptr, and external atoms;
 * fixed-size atoms leave it untouched. */
ValPtr
VALinit(ValPtr d, int tpe, const void *s)
{
	/* the switch dispatches on the storage type while d->vtype
	 * records the (possibly user-defined) type itself */
	switch (ATOMstorage(d->vtype = tpe)) {
	case TYPE_void:
		d->val.oval = *(const oid *) s;
		break;
	case TYPE_bte:
		d->val.btval = *(const bte *) s;
		break;
	case TYPE_sht:
		d->val.shval = *(const sht *) s;
		break;
	case TYPE_int:
		d->val.ival = *(const int *) s;
		break;
	case TYPE_flt:
		d->val.fval = *(const flt *) s;
		break;
	case TYPE_dbl:
		d->val.dval = *(const dbl *) s;
		break;
	case TYPE_lng:
		d->val.lval = *(const lng *) s;
		break;
#ifdef HAVE_HGE
	case TYPE_hge:
		d->val.hval = *(const hge *) s;
		break;
#endif
	case TYPE_str:
		/* strings are duplicated: D owns the copy */
		d->val.sval = GDKstrdup(s);
		if (d->val.sval == NULL)
			return NULL;
		d->len = strLen(s);
		break;
	case TYPE_ptr:
		/* pointers are copied by value, not deep-copied */
		d->val.pval = *(const ptr *) s;
		d->len = ATOMlen(tpe, *(const ptr *) s);
		break;
	default:
		/* remaining atoms must be external: deep-copy the blob */
		assert(ATOMextern(ATOMstorage(tpe)));
		d->len = ATOMlen(tpe, s);
		d->val.pval = GDKmalloc(d->len);
		if (d->val.pval == NULL)
			return NULL;
		memcpy(d->val.pval, s, d->len);
		break;
	}
	return d;
}
/* Register a new MAL atom NAME, optionally inheriting behaviour from
 * the existing atom TPE (TPE == 0 creates a fresh, non-linear atom).
 * Returns MAL_SUCCEED or a MAL exception string. */
str
malAtomDefinition(str name, int tpe)
{
	int i;

	if (strlen(name) >= IDLENGTH) {
		throw (SYNTAX, "atomDefinition", "Atom name '%s' too long", name);
	}
	if (ATOMindex(name) >= 0) {
#ifndef HAVE_EMBEDDED /* we can restart embedded MonetDB, making this an expected error */
		throw(TYPE, "atomDefinition", "Redefinition of atom '%s'", name);
#endif
	}
	if (tpe < 0 || tpe >= GDKatomcnt) {
		throw(TYPE, "atomDefinition", "Undefined atom inheritance '%s'", name);
	}
	/* NOTE(review): looks redundant with the IDLENGTH check above
	 * unless IDLENGTH differs from sizeof(BATatoms[0].name) -- verify */
	if (strlen(name) >= sizeof(BATatoms[0].name))
		throw(TYPE, "atomDefinition", "Atom name too long '%s'", name);
	i = ATOMallocate(name);
	if (is_int_nil(i))
		throw(TYPE,"atomDefinition", SQLSTATE(HY001) MAL_MALLOC_FAIL);
	/* overload atom ? */
	if (tpe) {
		/* inherit the full atom descriptor, then fix up name and storage */
		BATatoms[i] = BATatoms[tpe];
		strncpy(BATatoms[i].name, name, sizeof(BATatoms[i].name));
		BATatoms[i].name[sizeof(BATatoms[i].name) - 1] = 0; /* make coverity happy */
		BATatoms[i].storage = ATOMstorage(tpe);
	} else { /* cannot overload void atoms */
		BATatoms[i].storage = i;
		BATatoms[i].linear = false;
	}
	return MAL_SUCCEED;
}
/* Register a new MAL atom NAME, optionally inheriting behaviour from
 * the existing atom TPE (TPE == 0 creates a fresh, non-linear atom).
 * Errors are reported on OUT; this stream variant returns nothing.
 *
 * Fixes relative to the previous version (cf. the str-returning
 * sibling, which already did both):
 *  - the result of ATOMallocate is now checked; a failure used to be
 *    used directly as an index into BATatoms (out-of-bounds write);
 *  - an over-long name for the BATatoms name field is now reported
 *    instead of failing silently. */
void
malAtomDefinition(stream *out, str name, int tpe)
{
	int i;

	if (strlen(name) >= IDLENGTH) {
		showException(out, SYNTAX, "atomDefinition", "Atom name '%s' too long", name);
		return;
	}
	if (ATOMindex(name) >= 0) {
#ifndef HAVE_EMBEDDED /* we can restart embedded MonetDB, making this an expected error */
		showException(out, TYPE, "atomDefinition", "Redefinition of atom '%s'", name);
#endif
		return;
	}
	if (tpe < 0 || tpe >= GDKatomcnt) {
		showException(out, TYPE, "atomDefinition", "Undefined atom inheritance '%s'", name);
		return;
	}
	if (strlen(name) >= sizeof(BATatoms[0].name)) {
		/* used to return silently; report it like the other limits */
		showException(out, SYNTAX, "atomDefinition", "Atom name '%s' too long", name);
		return;
	}
	i = ATOMallocate(name);
	if (i < 0) {
		/* covers both a -1 and an int_nil failure convention */
		showException(out, TYPE, "atomDefinition", "Cannot allocate atom '%s'", name);
		return;
	}
	/* overload atom ? */
	if (tpe) {
		/* inherit the full atom descriptor, then fix up name and storage */
		BATatoms[i] = BATatoms[tpe];
		strncpy(BATatoms[i].name, name, sizeof(BATatoms[i].name));
		BATatoms[i].name[sizeof(BATatoms[i].name) - 1] = 0; /* make coverity happy */
		BATatoms[i].storage = ATOMstorage(tpe);
	} else { /* cannot overload void atoms */
		BATatoms[i].storage = i;
		BATatoms[i].linear = 0;
	}
}
/* Dispatch a MAT projection through MAP over the LEN input BATs,
 * specialized by the physical element width of the tail type of the
 * first input; varsized atoms and unknown widths take generic paths.
 * Returns the projected BAT, or NULL on error. */
static BAT *
MATproject_( BAT *map, BAT **bats, int len )
{
	BAT *res = NULL;

	if (ATOMstorage(bats[0]->ttype) <= TYPE_void) {
		/*error*/
		/* NOTE(review): void/invalid tail types are silently not
		 * handled; res stays NULL and the caller must treat that
		 * as failure -- confirm callers do */
	} else if (ATOMvarsized(bats[0]->ttype)) {
		res = MATproject_var(map, bats, len);
	} else if (ATOMsize(bats[0]->ttype) == sizeof(bte)) {
		res = MATproject_bte(map, bats, len, bats[0]->ttype);
	} else if (ATOMsize(bats[0]->ttype) == sizeof(sht)) {
		res = MATproject_sht(map, bats, len, bats[0]->ttype);
	} else if (ATOMsize(bats[0]->ttype) == sizeof(int)) {
		res = MATproject_int(map, bats, len, bats[0]->ttype);
	} else if (ATOMsize(bats[0]->ttype) == sizeof(lng)) {
		res = MATproject_lng(map, bats, len, bats[0]->ttype);
#ifdef HAVE_HGE
	} else if (ATOMsize(bats[0]->ttype) == sizeof(hge)) {
		res = MATproject_hge(map, bats, len, bats[0]->ttype);
#endif
	} else {
		res = MATproject_any(map, bats, len);
	}
	if(res){
		/* projecting through the map destroys any ordering */
		res->tsorted = 0;
		res->trevsorted = 0;
		res->T->nonil = MATnonil(bats, len);
	}
	return res;
}
/*
 * Enable incremental packing. The SQL front-end requires
 * fixed oid sequences.
 *
 * Two-phase protocol: when argument 2 is an int, this is the first
 * step and a result BAT is allocated with some slack for the given
 * number of pieces (the remaining count is parked in H->align);
 * otherwise argument 2 is the next piece to append.
 *
 * Fix: the error paths after BATdescriptor(b) succeeded used to
 * throw without releasing b (and, for the HEAPextend failure, bn),
 * leaking the descriptors' reference counts.
 */
str
MATpackIncrement(Client cntxt, MalBlkPtr mb, MalStkPtr stk, InstrPtr p)
{
	bat *ret = getArgReference_bat(stk,p,0);
	int	pieces;
	BAT *b, *bb, *bn;
	size_t newsize;

	(void) cntxt;
	b = BATdescriptor( stk->stk[getArg(p,1)].val.ival);
	if ( b == NULL)
		throw(MAL, "mat.pack", RUNTIME_OBJECT_MISSING);

	if ( getArgType(mb,p,2) == TYPE_int){
		/* first step, estimate with some slack */
		pieces = stk->stk[getArg(p,2)].val.ival;
		bn = BATnew(TYPE_void, b->ttype?b->ttype:TYPE_oid, (BUN)(1.2 * BATcount(b) * pieces), TRANSIENT);
		if (bn == NULL) {
			BBPunfix(b->batCacheid);	/* was leaked */
			throw(MAL, "mat.pack", MAL_MALLOC_FAIL);
		}
		/* allocate enough space for the vheap, but not for strings,
		 * since BATappend does clever things for strings */
		if ( b->T->vheap && bn->T->vheap && ATOMstorage(b->ttype) != TYPE_str){
			newsize = b->T->vheap->size * pieces;
			if (HEAPextend(bn->T->vheap, newsize, TRUE) != GDK_SUCCEED) {
				BBPunfix(b->batCacheid);	/* was leaked */
				BBPunfix(bn->batCacheid);	/* was leaked */
				throw(MAL, "mat.pack", MAL_MALLOC_FAIL);
			}
		}
		BATseqbase(bn, b->H->seq);
		BATseqbase(BATmirror(bn), b->T->seq);
		BATappend(bn,b,FALSE);
		assert(!bn->H->nil || !bn->H->nonil);
		assert(!bn->T->nil || !bn->T->nonil);
		/* remember how many pieces are still to come */
		bn->H->align = (pieces-1);
		BBPkeepref(*ret = bn->batCacheid);
		BBPunfix(b->batCacheid);
	} else {
		/* remaining steps: append piece bb onto the accumulator b */
		bb = BATdescriptor(stk->stk[getArg(p,2)].val.ival);
		if ( bb ){
			if (BATcount(b) == 0) BATseqbase(b, bb->H->seq);
			if (BATcount(b) == 0) BATseqbase(BATmirror(b), bb->T->seq);
			BATappend(b,bb,FALSE);
		}
		b->H->align--;
		if(b->H->align == 0)
			BATsetaccess(b, BAT_READ);	/* all pieces arrived: freeze */
		assert(!b->H->nil || !b->H->nonil);
		assert(!b->T->nil || !b->T->nonil);
		BBPkeepref(*ret = b->batCacheid);
		if( bb)
			BBPunfix(bb->batCacheid);
	}
	return MAL_SUCCEED;
}
/* Build an SQL atom holding the integral value VAL in subtype TPE.
 * Float subtypes are delegated to atom_float; otherwise the value is
 * narrowed into the union member matching the storage type.  The
 * full value is also mirrored into a->d as a double. */
atom *
atom_int( sql_allocator *sa, sql_subtype *tpe,
#ifdef HAVE_HGE
	hge val
#else
	lng val
#endif
	)
{
	if (tpe->type->eclass == EC_FLT) {
		return atom_float(sa, tpe, (double) val);
	} else {
		atom *a = atom_create(sa);

		a->isnull = 0;
		a->tpe = *tpe;
		a->data.vtype = tpe->type->localtype;
		switch (ATOMstorage(a->data.vtype)) {
		case TYPE_bte:
			a->data.val.btval = (bte) val;
			break;
		case TYPE_sht:
			a->data.val.shval = (sht) val;
			break;
		case TYPE_int:
			a->data.val.ival = (int) val;
			break;
		case TYPE_wrd:
			a->data.val.wval = (wrd) val;
			break;
		case TYPE_oid:
			a->data.val.oval = (oid) val;
			break;
		case TYPE_lng:
			a->data.val.lval = (lng) val;
			break;
#ifdef HAVE_HGE
		case TYPE_hge:
			a->data.val.hval = val;
			break;
#endif
		default:
			/* unexpected storage type for an integral atom */
			printf("atom_int %d\n", a->data.vtype);
			assert(0);
		}
		a->d = (dbl) val;
		a->data.len = 0;
		if (atom_debug)
			fprintf(stderr, "atom_int(%s,%.40g)\n", tpe->type->sqlname, (dbl)val);
		return a;
	}
}
/* Report whether PARENT occurs on the storage-type chain of TPE.
 * The chain is followed via ATOMstorage until it reaches a fixed
 * point (a type that is its own storage type). */
bool
ATOMisdescendant(int tpe, int parent)
{
	for (int prev = -1; prev != tpe; tpe = ATOMstorage(tpe)) {
		prev = tpe;
		if (tpe == parent)
			return true;
	}
	return false;
}
/* Build an SQL atom of subtype TPE from its string representation
 * VAL; a NULL VAL produces the SQL NULL atom.  Returns NULL when the
 * string cannot be parsed into a non-nil value (SQL has NULL, not
 * nil, so a nil parse result is treated as an error). */
atom *
atom_general(sql_allocator *sa, sql_subtype *tpe, char *val)
{
	atom *a;
	ptr p = NULL;

	if (atom_debug)
		fprintf(stderr, "atom_general(%s,%s)\n", tpe->type->sqlname, val);
	if (tpe->type->localtype == TYPE_str)
		return atom_string(sa, tpe, val);
	a = atom_create(sa);
	a->tpe = *tpe;
	a->data.val.pval = NULL;
	a->data.vtype = tpe->type->localtype;
	a->data.len = 0;

	assert(a->data.vtype >= 0);

	if (val) {
		int type = a->data.vtype;

		a->isnull = 0;
		if (ATOMstorage(type) == TYPE_str) {
			a->isnull = 0;
			a->data.val.sval = sql2str(sa_strdup(sa, val));
			a->data.len = (int)strlen(a->data.val.sval);
		} else {
			int res = ATOMfromstr(type, &p, &a->data.len, val);

			/* no result or nil means error (SQL has NULL not nil) */
			if (res < 0 || !p ||
			    ATOMcmp(type, p, ATOMnilptr(type)) == 0) {
				/*_DELETE(val);*/
				if (p)
					GDKfree(p);
				return NULL;
			}
			VALset(&a->data, a->data.vtype, p);
			/* re-home the value into the sa allocator (in place) */
			SA_VALcopy(sa, &a->data, &a->data);
			/* non-external values were copied into the union, so the
			 * parse buffer can be released */
			if (p && ATOMextern(a->data.vtype) == 0)
				GDKfree(p);
			/*_DELETE(val);*/
		}
	} else {
		p = ATOMnilptr(a->data.vtype);
		VALset(&a->data, a->data.vtype, p);
		a->isnull = 1;
	}
	return a;
}
/* Return a pointer to the value contained in V: for fixed-size atoms
 * a pointer at the union member inside V itself, for str and external
 * (varsized) atoms the stored pointer.  Also see VALptr.
 *
 * Fix: TYPE_hge was missing; under HAVE_HGE a hge value used to fall
 * into the default case and return the pval union member *as* the
 * pointer instead of the address of hval (cf. VALset/VALinit, which
 * both handle TYPE_hge). */
void *
VALget(ValPtr v)
{
	switch (ATOMstorage(v->vtype)) {
	case TYPE_void: return (void *) &v->val.oval;
	case TYPE_bte: return (void *) &v->val.btval;
	case TYPE_sht: return (void *) &v->val.shval;
	case TYPE_int: return (void *) &v->val.ival;
	case TYPE_flt: return (void *) &v->val.fval;
	case TYPE_dbl: return (void *) &v->val.dval;
	case TYPE_lng: return (void *) &v->val.lval;
#ifdef HAVE_HGE
	case TYPE_hge: return (void *) &v->val.hval;
#endif
	case TYPE_str: return (void *) v->val.sval;
	default:       return (void *) v->val.pval;
	}
}
/* Set V to the type/value combination in T/P. Also see VALinit. In
 * this version, if P refers to an external type, no new memory is
 * allocated, but instead the pointer P is given to V (ownership
 * transfer: V must not outlive P's allocation unless it takes over
 * freeing it).
 *
 * Note: v->len is only filled in for str, ptr, and external atoms;
 * fixed-size atoms leave it untouched. */
ValPtr
VALset(ValPtr v, int t, ptr p)
{
	switch (ATOMstorage(v->vtype = t)) {
	case TYPE_void:
		v->val.oval = *(oid *) p;
		break;
	case TYPE_bte:
		v->val.btval = *(bte *) p;
		break;
	case TYPE_sht:
		v->val.shval = *(sht *) p;
		break;
	case TYPE_int:
		v->val.ival = *(int *) p;
		break;
	case TYPE_flt:
		v->val.fval = *(flt *) p;
		break;
	case TYPE_dbl:
		v->val.dval = *(dbl *) p;
		break;
	case TYPE_lng:
		v->val.lval = *(lng *) p;
		break;
#ifdef HAVE_HGE
	case TYPE_hge:
		v->val.hval = *(hge *) p;
		break;
#endif
	case TYPE_str:
		/* the string pointer itself is adopted, not copied */
		v->val.sval = (str) p;
		v->len = ATOMlen(t, p);
		break;
	case TYPE_ptr:
		v->val.pval = *(ptr *) p;
		v->len = ATOMlen(t, *(ptr *) p);
		break;
	default:
		/* external atom: adopt the pointer */
		v->val.pval = p;
		v->len = ATOMlen(t, p);
		break;
	}
	return v;
}
hge #else lng #endif atom_get_int(atom *a) { #ifdef HAVE_HGE hge r = 0; #else lng r = 0; #endif if (!a->isnull) { switch (ATOMstorage(a->data.vtype)) { case TYPE_bte: r = a->data.val.btval; break; case TYPE_sht: r = a->data.val.shval; break; case TYPE_int: r = a->data.val.ival; break; case TYPE_oid: r = a->data.val.oval; break; case TYPE_wrd: r = a->data.val.wval; break; case TYPE_lng: r = a->data.val.lval; break; #ifdef HAVE_HGE case TYPE_hge: r = a->data.val.hval; break; #endif } } return r; }
/* Initialize D with the type/value combination TPE/S, allocating
 * space for external (non-fixed-size) values.
 *
 * Fix: the results of GDKstrdup and GDKmalloc were unchecked; an
 * allocation failure led to a NULL store (or a NULL-dereferencing
 * memcpy).  Now returns NULL on allocation failure, consistent with
 * the documented contract of the other VALinit variant. */
ValPtr
VALinit(ValPtr d, int tpe, const void *s)
{
	if (ATOMextern(tpe) == 0) {
		/* fixed-size value: copy straight into the union */
		d->vtype = tpe;
		memcpy(&d->val.ival, s, ATOMlen(tpe, s));
	} else if (s == 0) {
		GDKerror("VALinit:unsupported init\n");
		d->vtype = TYPE_int;
	} else if (tpe >= TYPE_str && ATOMstorage(tpe) == TYPE_str) {
		d->vtype = TYPE_str;
		d->val.sval = GDKstrdup(s);
		if (d->val.sval == NULL)
			return NULL;	/* was unchecked */
		d->len = strLen(s);
	} else {
		/* other external atom: deep-copy the blob */
		d->vtype = tpe;
		d->len = ATOMlen(tpe, s);
		d->val.pval = GDKmalloc(d->len);
		if (d->val.pval == NULL)
			return NULL;	/* was unchecked: memcpy would crash */
		memcpy(d->val.pval, s, d->len);
	}
	return d;
}
/* MAL wrapper: hash the single value in argument 1 into the wrd
 * result of argument 0, dispatching on the value's storage width
 * (flt shares the int hash, dbl shares the lng hash). */
str
MKEYhash(Client cntxt, MalBlkPtr mb, MalStkPtr stk, InstrPtr p)
{
	wrd *res;
	ptr val;
	int tpe = getArgType(mb,p,1);

	(void) cntxt;
	res= getArgReference_wrd(stk,p,0);
	val= getArgReference(stk,p,1);
	switch (ATOMstorage(tpe)) {
	case TYPE_bte:
		*res = MKEYHASH_bte(val);
		break;
	case TYPE_sht:
		*res = MKEYHASH_sht(val);
		break;
	case TYPE_int:
	case TYPE_flt:
		*res = MKEYHASH_int(val);
		break;
	case TYPE_lng:
	case TYPE_dbl:
		*res = MKEYHASH_lng(val);
		break;
#ifdef HAVE_HGE
	case TYPE_hge:
		*res = MKEYHASH_hge(val);
		break;
#endif
	default:
		/* external values are stored by reference on the stack,
		 * so dereference once before hashing */
		if (ATOMextern(tpe))
			*res = ATOMhash(tpe, *(ptr*)val);
		else
			*res = ATOMhash(tpe, val);
		break;
	}
	return MAL_SUCCEED;
}
/* Delete the BUN with head oid *H (and matching tail value T) from
 * the BAT identified by *BID; the bat id is returned through R. */
char *
BKCdelete_bun(bat *r, const bat *bid, const oid *h, const void *t)
{
	BAT *b = BATdescriptor(*bid);

	if (b == NULL)
		throw(MAL, "bat.delete_bun", RUNTIME_OBJECT_MISSING);
	b = setaccess(b, BAT_WRITE);
	if (b == NULL)
		throw(MAL, "bat.delete_bun", OPERATION_FAILED);
	if (b->ttype >= TYPE_str && ATOMstorage(b->ttype) >= TYPE_str) {
		/* string tails arrive wrapped as str*; unwrap, mapping a
		 * missing value to the string nil */
		t = (t == 0 || *(str *) t == 0) ? (ptr) str_nil : (ptr) *(str *) t;
	}
	if (BUNdel(b, h, t, FALSE) != GDK_SUCCEED) {
		BBPunfix(b->batCacheid);
		throw(MAL, "bat.delete_bun", GDK_EXCEPTION);
	}
	BBPkeepref(*r = b->batCacheid);
	return MAL_SUCCEED;
}
/* Append the single value *U to the BAT identified by *BID, forcing
 * the append when *FORCE is set; the bat id is returned through R. */
str
BKCappend_val_force_wrap(bat *r, const bat *bid, const void *u, const bit *force)
{
	BAT *b = BATdescriptor(*bid);

	if (b == NULL)
		throw(MAL, "bat.append", RUNTIME_OBJECT_MISSING);
	b = setaccess(b, BAT_WRITE);
	if (b == NULL)
		throw(MAL, "bat.append", OPERATION_FAILED);
	if (b->ttype >= TYPE_str && ATOMstorage(b->ttype) >= TYPE_str) {
		/* string values arrive wrapped as str*; unwrap, mapping a
		 * missing value to the string nil */
		u = (u == 0 || *(str *) u == 0) ? (ptr) str_nil : (ptr) *(str *) u;
	}
	if (BUNappend(b, u, *force) != GDK_SUCCEED) {
		BBPunfix(b->batCacheid);
		throw(MAL, "bat.append", GDK_EXCEPTION);
	}
	BBPkeepref(*r = b->batCacheid);
	return MAL_SUCCEED;
}
/* * The prime routine for the BAT layer is to create a new hash index. * Its argument is the element type and the maximum number of BUNs be * stored under the hash function. */ BAT * BAThash(BAT *b, BUN masksize) { BAT *o = NULL; lng t0,t1; (void) t0; (void) t1; if (VIEWhparent(b)) { bat p = VIEWhparent(b); o = b; b = BATdescriptor(p); if (!ALIGNsynced(o, b) || BUNfirst(o) != BUNfirst(b)) { BBPunfix(b->batCacheid); b = o; o = NULL; } } MT_lock_set(&GDKhashLock(ABS(b->batCacheid)), "BAThash"); if (b->H->hash == NULL) { unsigned int tpe = ATOMstorage(b->htype); BUN cnt = BATcount(b); BUN mask; BUN p = BUNfirst(b), q = BUNlast(b), r; Hash *h = NULL; Heap *hp = NULL; str nme = BBP_physical(b->batCacheid); BATiter bi = bat_iterator(b); ALGODEBUG fprintf(stderr, "#BAThash: create hash(" BUNFMT ");\n", BATcount(b)); /* cnt = 0, hopefully there is a proper capacity from * which we can derive enough information */ if (!cnt) cnt = BATcapacity(b); if (b->htype == TYPE_void) { if (b->hseqbase == oid_nil) { MT_lock_unset(&GDKhashLock(ABS(b->batCacheid)), "BAThash"); ALGODEBUG fprintf(stderr, "#BAThash: cannot create hash-table on void-NIL column.\n"); return NULL; } ALGODEBUG fprintf(stderr, "#BAThash: creating hash-table on void column..\n"); tpe = TYPE_void; } /* determine hash mask size p = first; then no dynamic * scheme */ if (masksize > 0) { mask = HASHmask(masksize); } else if (ATOMsize(ATOMstorage(tpe)) == 1) { mask = (1 << 8); } else if (ATOMsize(ATOMstorage(tpe)) == 2) { mask = (1 << 12); } else if (b->hkey) { mask = HASHmask(cnt); } else { /* dynamic hash: we start with * HASHmask(cnt/64); if there are too many * collisions we try HASHmask(cnt/16), then * HASHmask(cnt/4), and finally * HASHmask(cnt). 
*/ mask = HASHmask(cnt >> 6); p += (cnt >> 2); /* try out on first 25% of b */ if (p > q) p = q; } if (mask < 1024) mask = 1024; t0 = GDKusec(); do { BUN nslots = mask >> 3; /* 1/8 full is too full */ r = BUNfirst(b); if (hp) { HEAPfree(hp); GDKfree(hp); } if (h) { ALGODEBUG fprintf(stderr, "#BAThash: retry hash construction\n"); GDKfree(h); } /* create the hash structures */ hp = (Heap *) GDKzalloc(sizeof(Heap)); if (hp && (hp->filename = GDKmalloc(strlen(nme) + 12)) != NULL) sprintf(hp->filename, "%s.%chash", nme, b->batCacheid > 0 ? 'h' : 't'); if (hp == NULL || hp->filename == NULL || (h = HASHnew(hp, ATOMtype(b->htype), BATcapacity(b), mask)) == NULL) { MT_lock_unset(&GDKhashLock(ABS(b->batCacheid)), "BAThash"); if (hp != NULL) { GDKfree(hp->filename); GDKfree(hp); } return NULL; } switch (tpe) { case TYPE_bte: starthash(bte); break; case TYPE_sht: starthash(sht); break; case TYPE_int: case TYPE_flt: starthash(int); break; case TYPE_dbl: case TYPE_lng: starthash(lng); break; default: for (; r < p; r++) { ptr v = BUNhead(bi, r); BUN c = (BUN) heap_hash_any(b->H->vheap, h, v); if ( HASHget(h,c) == HASHnil(h) && nslots-- == 0) break; /* mask too full */ HASHputlink(h,r, HASHget(h,c)); HASHput(h,c, r); } break; } } while (r < p && mask < cnt && (mask <<= 2)); /* finish the hashtable with the current mask */ p = r; switch (tpe) { case TYPE_bte: finishhash(bte); break; case TYPE_sht: finishhash(sht); break; case TYPE_int: case TYPE_flt: finishhash(int); break; case TYPE_dbl: case TYPE_lng: finishhash(lng); break; default: for (; p < q; p++) { ptr v = BUNhead(bi, p); BUN c = (BUN) heap_hash_any(b->H->vheap, h, v); HASHputlink(h,p, HASHget(h,c)); HASHput(h,c,p); } break; } b->H->hash = h; t1 = GDKusec(); ALGODEBUG fprintf(stderr, "#BAThash: hash construction "LLFMT" usec\n", t1-t0); ALGODEBUG HASHcollisions(b,b->H->hash); }
/* Compact the BAT *BID by filling the holes listed in the deletion
 * BAT *DID: surviving tuples from the tail of B are moved into the
 * deleted positions so the result BN is dense again.  B must have a
 * void head.  The reuseloop macro implements the move for a given
 * fixed element width; varsized tails take the BUNappend path. */
str
BKCreuseBAT(bat *ret, const bat *bid, const bat *did)
{
	BAT *b, *d, *bn, *bs;
	oid oidx = 0, bidx, *o, *ol;
	gdk_return res;

	if ((b = BATdescriptor(*bid)) == NULL) {
		throw(MAL, "bat.reuse", RUNTIME_OBJECT_MISSING);
	}
	if ( b->htype != TYPE_void) {
		BBPunfix(b->batCacheid);
		throw(MAL, "bat.reuse", SEMANTIC_TYPE_MISMATCH);
	}
	if ((d = BATdescriptor(*did)) == NULL) {
		BBPunfix(b->batCacheid);
		throw(MAL, "bat.reuse", RUNTIME_OBJECT_MISSING);
	}
	bn= BATnew(b->htype, b->ttype, BATcount(b) - BATcount(d), TRANSIENT);
	if (bn == NULL) {
		BBPunfix(b->batCacheid);
		BBPunfix(d->batCacheid);
		throw(MAL, "bat.reuse", MAL_MALLOC_FAIL );
	}
	/* sort the deleted oids so they can be walked front-to-back
	 * (o) and back-to-front (ol) simultaneously */
	res = BATsubsort(&bs, NULL, NULL, d, NULL, NULL, 0, 0);
	BBPunfix(d->batCacheid);
	if (res != GDK_SUCCEED) {
		BBPunfix(b->batCacheid);
		BBPunfix(bn->batCacheid);
		throw(MAL, "bat.reuse", MAL_MALLOC_FAIL );
	}

	oidx = b->hseqbase;		/* first oid of B */
	bidx = oidx + BATcount(b)-1;	/* last oid of B */
	o = (oid*)Tloc(bs, BUNfirst(bs));
	ol= (oid*)Tloc(bs, BUNlast(bs));

	switch(ATOMstorage(b->ttype) ){
	case TYPE_bte: reuseloop(bte); break;
	case TYPE_sht: reuseloop(sht); break;
	case TYPE_int: reuseloop(int); break;
	case TYPE_lng: reuseloop(lng); break;
#ifdef HAVE_HGE
	case TYPE_hge: reuseloop(hge); break;
#endif
	case TYPE_flt: reuseloop(flt); break;
	case TYPE_dbl: reuseloop(dbl); break;
	case TYPE_oid: reuseloop(oid); break;
	case TYPE_str: /* to be done based on its index width */
		/* fallthrough: str is handled by the default clause */
	default:
		if (ATOMvarsized(bn->ttype)) {
			BUN p = BUNfirst(b);
			BUN q = BUNlast(b);
			BATiter bi = bat_iterator(b);

			for (;p<q; oidx++, p++) {
				/* current position was deleted: fill it with a
				 * surviving tuple taken from the end of B */
				if ( *o == oidx ){
					/* skip trailing tuples that are themselves
					 * deleted */
					while ( ol > o && ol[-1] == bidx) {
						bidx--;
						q--;
						ol--;
					}
					BUNappend(bn, BUNtail(bi, --q), FALSE);
					o += (o < ol);
					bidx--;
				} else {
					BUNappend(bn, BUNtail(bi, p), FALSE);
				}
			}
		} else {
			/* fixed-size value of non-standard type: dispatch on
			 * the physical width instead */
			switch( b->T->width){
			case 1:reuseloop(bte); break;
			case 2:reuseloop(sht); break;
			case 4:reuseloop(int); break;
			case 8:reuseloop(lng); break;
#ifdef HAVE_HGE
			case 16:reuseloop(hge); break;
#endif
			default:
				throw(MAL, "bat.shrink", "Illegal argument type");
			}
		}
	}

	BATsetcount(bn, BATcount(b) - BATcount(bs));
	BATseqbase(bn, b->hseqbase);
	/* moving tuples from the tail destroys order and density */
	bn->tsorted = 0;
	bn->trevsorted = 0;
	bn->tdense = 0;
	bn->tkey = b->tkey;
	if (!(bn->batDirty&2)) BATsetaccess(bn, BAT_READ);

	BBPunfix(b->batCacheid);
	BBPunfix(bs->batCacheid);
	BBPkeepref(*ret= bn->batCacheid);
	return MAL_SUCCEED;
}
str MKEYbathash(bat *res, const bat *bid) { BAT *b, *dst; wrd *r; BUN n; if ((b = BATdescriptor(*bid)) == NULL) throw(SQL, "mkey.bathash", RUNTIME_OBJECT_MISSING); assert(BAThvoid(b) || BAThrestricted(b)); n = BATcount(b); dst = BATnew(TYPE_void, TYPE_wrd, n, TRANSIENT); if (dst == NULL) { BBPunfix(b->batCacheid); throw(SQL, "mkey.bathash", MAL_MALLOC_FAIL); } BATseqbase(dst, b->hseqbase); BATsetcount(dst, n); r = (wrd *) Tloc(dst, BUNfirst(dst)); switch (ATOMstorage(b->ttype)) { case TYPE_void: { oid o = b->tseqbase; if (o == oid_nil) while (n-- > 0) *r++ = wrd_nil; else while (n-- > 0) *r++ = (wrd) o++; break; } case TYPE_bte: { bte *v = (bte *) Tloc(b, BUNfirst(b)); while (n-- > 0) { *r++ = MKEYHASH_bte(v); v++; } break; } case TYPE_sht: { sht *v = (sht *) Tloc(b, BUNfirst(b)); while (n-- > 0) { *r++ = MKEYHASH_sht(v); v++; } break; } case TYPE_int: case TYPE_flt: { int *v = (int *) Tloc(b, BUNfirst(b)); while (n-- > 0) { *r++ = MKEYHASH_int(v); v++; } break; } case TYPE_lng: case TYPE_dbl: { lng *v = (lng *) Tloc(b, BUNfirst(b)); while (n-- > 0) { *r++ = MKEYHASH_lng(v); v++; } break; } #ifdef HAVE_HGE case TYPE_hge: { hge *v = (hge *) Tloc(b, BUNfirst(b)); while (n-- > 0) { *r++ = MKEYHASH_hge(v); v++; } break; } #endif default: { BATiter bi = bat_iterator(b); BUN (*hash)(const void *) = BATatoms[b->ttype].atomHash; int (*cmp)(const void *, const void *) = ATOMcompare(b->ttype); void *nil = ATOMnilptr(b->ttype); BUN i; const void *v; BATloop(b, i, n) { v = BUNtail(bi, i); if ((*cmp)(v, nil) == 0) *r++ = wrd_nil; else *r++ = (wrd) (*hash)(v); } break; } }
static str MATsort(Client cntxt, MalBlkPtr mb, MalStkPtr stk, InstrPtr pci, int rev) { bat *res_id = (bat*) getArgReference(stk,pci,0); /* result sorted */ bat *map_id = (bat*) getArgReference(stk,pci,1); /* result map */ BAT *res = NULL, *map = NULL; /* rest of the args are sorted parts, (excluding sorted and map) */ BAT **bats = GDKzalloc(sizeof(BAT*) * pci->argc - 2); BUN pcnt = 0; int i, len = pci->argc-2; (void) cntxt; (void) mb; (void) stk; if( bats == NULL) throw(SQL, "mat.sortTail",MAL_MALLOC_FAIL); for (i=2; i<pci->argc; i++) { bat id = *(bat*) getArgReference(stk,pci,i); bats[i-2] = BATdescriptor(id); if (!bats[i-2]) goto error; pcnt += BATcount(bats[i-2]); } if (ATOMstorage(bats[0]->ttype) <= TYPE_void) { /*error*/ } else if (ATOMvarsized(bats[0]->ttype)) { res = MATsort_any(&map, bats, len, pcnt, rev); } else if (ATOMsize(bats[0]->ttype) == sizeof(bte)) { res = MATsort_bte(&map, bats, len, pcnt, rev); } else if (ATOMsize(bats[0]->ttype) == sizeof(sht)) { res = MATsort_sht(&map, bats, len, pcnt, rev); } else if (ATOMsize(bats[0]->ttype) == sizeof(int)) { res = MATsort_int(&map, bats, len, pcnt, rev); } else if (ATOMsize(bats[0]->ttype) == sizeof(lng)) { res = MATsort_lng(&map, bats, len, pcnt, rev); #ifdef HAVE_HGE } else if (ATOMsize(bats[0]->ttype) == sizeof(hge)) { res = MATsort_hge(&map, bats, len, pcnt, rev); #endif } else { res = MATsort_any(&map, bats, len, pcnt, rev); } if (res) { res->T->nonil = MATnonil(bats, len); if (rev) { res->trevsorted = 1; res->tsorted = res->batCount <= 1; } else { res->tsorted = 1; res->trevsorted = res->batCount <= 1; } } error: for (i=0; i<len && bats[i]; i++) BBPunfix(bats[i]->batCacheid); GDKfree(bats); if (map && res) { map->tsorted = 0; map->trevsorted = 0; BBPkeepref( *map_id = map->batCacheid); BBPkeepref( *res_id = res->batCacheid); return MAL_SUCCEED; } if (map) BBPunfix(map->batCacheid); throw(SQL, "mat.sortTail","Cannot access descriptor"); }