Example 1
int
ai_btree_delete(as_sindex_metadata *imd, as_sindex_pmetadata *pimd, as_sindex_key *skey, void *val)
{
	int ret = AS_SINDEX_OK;
	uint64_t uk = *(uint64_t *)val;
	uint64_t bv = skey->b[0].u.i64;

	if (!pimd->ibtr) {
		SITRACE(imd->si, DML, debug, "AI_BTREE_FAIL: Delete failed no ibtr %lu %lu", bv, uk);
		return AS_SINDEX_KEY_NOTFOUND;
	}
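	// Build the index column key: from the bin's digest for digest-typed
	// indexes, otherwise from its 64-bit integer value.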
	ai_obj ncol;
	if (C_IS_Y(imd->dtype)) {
		init_ai_objFromDigest(&ncol, &skey->b[0].digest);
	}
	else {
		init_ai_objLong(&ncol, skey->b[0].u.i64);
	}

	ai_obj apk;
	init_ai_objFromDigest(&apk, (cf_digest *)val);
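	// Sample the btree's memory footprint before and after the remove; the
	// difference is released from the sindex data-memory accounting.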
	ulong bb = pimd->ibtr->msize + pimd->ibtr->nsize;
	ret = reduced_iRem(pimd->ibtr, &ncol, &apk);
	ulong ab = pimd->ibtr->msize + pimd->ibtr->nsize;
	as_sindex_release_data_memory(imd, (bb - ab));
	SITRACE(imd->si, DML, debug, "ai__btree_delete(N): key: %lu - %lu", bv, uk);
	return ret;
}
Example 2
int
ai_btree_delete(as_sindex_metadata *imd, as_sindex_pmetadata *pimd, void * skey, cf_digest * value)
{
	int ret = AS_SINDEX_OK;

	if (!pimd->ibtr) {
		return AS_SINDEX_KEY_NOTFOUND;
	}

	ai_obj ncol;
	if (C_IS_DG(imd->sktype)) {
		init_ai_objFromDigest(&ncol, (cf_digest *)skey);
	}
	else {
		// TODO - ai_obj type is LONG for both Geo and Long
		init_ai_objLong(&ncol, *(ulong *)skey);
	}

	ai_obj apk;
	init_ai_objFromDigest(&apk, value);

	uint64_t before = pimd->ibtr->msize + pimd->ibtr->nsize;
	ret = reduced_iRem(pimd->ibtr, &ncol, &apk);
	uint64_t after = pimd->ibtr->msize + pimd->ibtr->nsize;
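	// Subtract the freed bytes from the namespace's sindex memory counter.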
	cf_atomic64_sub(&imd->si->ns->n_bytes_sindex_memory, (before - after));

	return ret;
}
Example 3
int
ai_btree_put(as_sindex_metadata *imd, as_sindex_pmetadata *pimd, void *skey, cf_digest *value)
{
	ai_obj ncol;
	if (C_IS_DG(imd->sktype)) {
		init_ai_objFromDigest(&ncol, (cf_digest*)skey);
	}
	else {
		// TODO - ai_obj type is LONG for both Geo and Long
		init_ai_objLong(&ncol, *(ulong *)skey);
	}

	ai_obj apk;
	init_ai_objFromDigest(&apk, value);

	// Sample btree memory before and after the insert; the growth is added
	// to the namespace's sindex memory counter.
	uint64_t before = pimd->ibtr->msize + pimd->ibtr->nsize;
	int ret = reduced_iAdd(pimd->ibtr, &ncol, &apk, COL_TYPE_DIGEST);
	uint64_t after = pimd->ibtr->msize + pimd->ibtr->nsize;
	cf_atomic64_add(&imd->si->ns->n_bytes_sindex_memory, (after - before));

	if (ret && ret != AS_SINDEX_KEY_FOUND) {
		cf_warning(AS_SINDEX, "Insert into the btree failed");
		return AS_SINDEX_ERR_NO_MEMORY;
	}
	return ret;
}
Example 4
int
ai_btree_delete(as_sindex_metadata *imd, as_sindex_pmetadata *pimd, void * skey, cf_digest * value)
{
	int ret = AS_SINDEX_OK;

	if (!pimd->ibtr) {
		return AS_SINDEX_KEY_NOTFOUND;
	}

	ai_obj ncol;
	if (C_IS_Y(imd->dtype)) {
		init_ai_objFromDigest(&ncol, (cf_digest *)skey);
	}
	else {
		init_ai_objLong(&ncol, *(ulong *)skey);
	}

	ai_obj apk;
	init_ai_objFromDigest(&apk, value);
	ulong bb = pimd->ibtr->msize + pimd->ibtr->nsize;
	ret = reduced_iRem(pimd->ibtr, &ncol, &apk);
	ulong ab = pimd->ibtr->msize + pimd->ibtr->nsize;
	as_sindex_release_data_memory(imd, (bb - ab));
	return ret;
}
Example 5
int
ai_btree_query(as_sindex_metadata *imd, as_sindex_range *srange, as_sindex_qctx *qctx)
{
	bool err = true;
	if (!srange->isrange) { // EQUALITY LOOKUP
		ai_obj afk;
		if (C_IS_Y(imd->dtype)) {
			init_ai_objFromDigest(&afk, &srange->start.digest);
		}
		else {
			init_ai_objLong(&afk, srange->start.u.i64);
		}
		err = get_recl(imd, &afk, qctx);
	} else {                // RANGE LOOKUP
		err = get_numeric_range_recl(imd, srange->start.u.i64, srange->end.u.i64, qctx);
	}
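	// Out of memory fails the query; a full batch tells the caller to
	// continue from where we stopped; otherwise the lookup is complete.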
	return (err ? AS_SINDEX_ERR_NO_MEMORY :
			(qctx->n_bdigs >= qctx->bsize) ? AS_SINDEX_CONTINUE : AS_SINDEX_OK);
}
Example 6
int
ai_btree_put(as_sindex_metadata *imd, as_sindex_pmetadata *pimd, as_sindex_key *skey, void *value)
{
	int ret = AS_SINDEX_OK;
	uint64_t uk = *(uint64_t *)value;
	cf_digest *keyd = (cf_digest *)value;
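	// The same pointer is used both as the record digest (keyd) and,
	// reinterpreted, as a 64-bit value for trace logging (uk).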

	ai_obj ncol;
	if (C_IS_Y(imd->dtype)) {
		init_ai_objFromDigest(&ncol, &skey->b[0].digest);
	}
	else {
		init_ai_objLong(&ncol, skey->b[0].u.i64);
	}
	ai_obj apk;
	init_ai_objFromDigest(&apk, keyd);

	cf_detail(AS_SINDEX, "Insert: %lu %lu %lu", *(uint64_t *)&ncol.y, *(uint64_t *)&skey->b[0].digest, *(uint64_t *)&apk.y);

	ulong bb = pimd->ibtr->msize + pimd->ibtr->nsize;
	ret = reduced_iAdd(pimd->ibtr, &ncol, &apk, COL_TYPE_U160);
	if (ret == AS_SINDEX_KEY_FOUND) {
		goto END;
	} else if (ret != AS_SINDEX_OK) {
		cf_warning(AS_SINDEX, "Insert into the btree failed");
		ret = AS_SINDEX_ERR_NO_MEMORY;
		goto END;
	}
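	// Charge the memory consumed by the insert; if the reservation fails,
	// undo the insert so the index stays consistent with the accounting.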
	ulong ab = pimd->ibtr->msize + pimd->ibtr->nsize;
	if (!as_sindex_reserve_data_memory(imd, (ab - bb))) {
		reduced_iRem(pimd->ibtr, &ncol, &apk);
		ret = AS_SINDEX_ERR_NO_MEMORY;
		goto END;
	}
	SITRACE(imd->si, DML, debug, "ai__btree_insert(N): %s key: %ld val %lu", imd->iname, skey->b[0].u.i64, uk);

END:

	return ret;
}
Example 7
int
ai_btree_put(as_sindex_metadata *imd, as_sindex_pmetadata *pimd, void *skey, cf_digest *value)
{
	int ret = AS_SINDEX_OK;

	ai_obj ncol;
	if (C_IS_Y(imd->dtype)) {
		init_ai_objFromDigest(&ncol, (cf_digest*)skey);
	}
	else {
		init_ai_objLong(&ncol, *(ulong *)skey);
	}

	ai_obj apk;
	init_ai_objFromDigest(&apk, value);

	ulong bb = pimd->ibtr->msize + pimd->ibtr->nsize;
	ret = reduced_iAdd(pimd->ibtr, &ncol, &apk, COL_TYPE_U160);
	if (ret == AS_SINDEX_KEY_FOUND) {
		goto END;
	} else if (ret != AS_SINDEX_OK) {
		cf_warning(AS_SINDEX, "Insert into the btree failed");
		ret = AS_SINDEX_ERR_NO_MEMORY;
		goto END;
	}
	ulong ab = pimd->ibtr->msize + pimd->ibtr->nsize;
	if (!as_sindex_reserve_data_memory(imd, (ab - bb))) {
		reduced_iRem(pimd->ibtr, &ncol, &apk);
		ret = AS_SINDEX_ERR_NO_MEMORY;
		goto END;
	}

END:

	return ret;
}
Example 8
/*
 * Return 0  in case of success
 *        -1 in case of failure
 */
static int
get_numeric_range_recl(as_sindex_metadata *imd, uint64_t begk, uint64_t endk, as_sindex_qctx *qctx)
{
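	// Start key: the beginning of the range on a fresh query, otherwise the
	// key where the previous batch stopped, so iteration resumes in place.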
	ai_obj sfk;
	init_ai_objLong(&sfk, qctx->new_ibtr ? begk : qctx->bkey->l);
	ai_obj efk;
	init_ai_objLong(&efk, endk);
	as_sindex_pmetadata *pimd = &imd->pimd[qctx->pimd_idx];
	bool fullrng              = qctx->new_ibtr;
	int ret                   = 0;
	btSIter *bi               = btGetRangeIter(pimd->ibtr, &sfk, &efk, 1);
	btEntry *be;

	if (bi) {
		while ((be = btRangeNext(bi, 1))) {
			ai_obj  *ikey  = be->key;
			ai_nbtr *anbtr = be->val;

			if (!anbtr) {
				ret = -1;
				break;
			}

			// Figure out which nbtr to work on. If the key used last time
			// has vanished, move on to the next key. If the key exists but
			// its last entry was already added to the list in the previous
			// iteration, move on to the next nbtr.
			if (!fullrng) {
				if (!ai_objEQ(&sfk, ikey)) {
					fullrng = 1; // bkey disappeared
				} else if (qctx->nbtr_done) {
					qctx->nbtr_done = false;
					// If we are moving to the next key, we need 
					// to search the full range.
					fullrng = 1;
					continue;
				}
			}

			if (anbtr->is_btree) {
				if (add_recs_from_nbtr(imd, ikey, anbtr->u.nbtr, qctx, fullrng)) {
					ret = -1;
					break;
				}
			} else {
				if (add_recs_from_arr(imd, ikey, anbtr->u.arr, qctx)) {
					ret = -1;
					break;
				}
			}

			// add_recs_from_arr() returns the entire array and does not honor
			// the batch limit, so the >= operator is needed here.
			if (qctx->n_bdigs >= qctx->bsize) {
				break;
			}

			// If we reach here, the last key could not fill the batch, so any
			// new key must be searched over its full range, and its nbtr is
			// clearly not done.
			fullrng         = 1;
			qctx->nbtr_done = false;
		}
		btReleaseRangeIterator(bi);
	}
	return ret;
}