Example no. 1
0
/*
 * Remove the entry (skey -> record digest) from the per-partition index
 * b-tree. 'val' is the record's cf_digest (its leading 8 bytes are also
 * read as a uint64_t purely for tracing).
 *
 * Returns AS_SINDEX_KEY_NOTFOUND when there is no index tree, otherwise
 * the result of reduced_iRem().
 */
int
ai_btree_delete(as_sindex_metadata *imd, as_sindex_pmetadata *pimd, as_sindex_key *skey, void *val)
{
	int ret = AS_SINDEX_OK;
	uint64_t uk = * (uint64_t *) val;
	uint64_t bv = skey->b[0].u.i64;

	if (!pimd->ibtr) {
		// Fixed format string: bv and uk are uint64_t, so %lu - the old
		// "%d" for bv is a printf type mismatch (undefined behavior when
		// the trace is enabled).
		SITRACE(imd->si, DML, debug, "AI_BTREE_FAIL: Delete failed no ibtr %lu %lu", bv, uk);
		return AS_SINDEX_KEY_NOTFOUND;
	}

	// Build the index-key object: digest-typed or integer-typed column.
	ai_obj ncol;
	if (C_IS_Y(imd->dtype)) {
		init_ai_objFromDigest(&ncol, &skey->b[0].digest);
	}
	else {
		init_ai_objLong(&ncol, skey->b[0].u.i64);
	}

	// Primary-key object built from the record digest.
	ai_obj apk;
	init_ai_objFromDigest(&apk, (cf_digest *)val);

	// Snapshot b-tree memory before/after so the released bytes can be
	// returned to the sindex data-memory accounting.
	ulong bb = pimd->ibtr->msize + pimd->ibtr->nsize;
	ret = reduced_iRem(pimd->ibtr, &ncol, &apk);
	ulong ab = pimd->ibtr->msize + pimd->ibtr->nsize;
	as_sindex_release_data_memory(imd, (bb - ab));

	// Fixed format string: %lu matches uint64_t 'bv' (was %d).
	SITRACE(imd->si, DML, debug, "ai__btree_delete(N): key: %lu - %lu", bv, uk);
	return ret;
}
Example no. 2
0
/*
 * Remove the entry (skey -> record digest 'value') from the per-partition
 * index b-tree, returning any released b-tree memory to the namespace
 * sindex memory counter.
 */
int
ai_btree_delete(as_sindex_metadata *imd, as_sindex_pmetadata *pimd, void * skey, cf_digest * value)
{
	// No index tree means there is nothing to delete from.
	if (! pimd->ibtr) {
		return AS_SINDEX_KEY_NOTFOUND;
	}

	// Index-key object: digest column or integer column.
	ai_obj icol;
	if (C_IS_DG(imd->sktype)) {
		init_ai_objFromDigest(&icol, (cf_digest *)skey);
	}
	else {
		// TODO - ai_obj type is LONG for both Geo and Long
		init_ai_objLong(&icol, *(ulong *)skey);
	}

	// Primary-key object built from the record digest.
	ai_obj pk;
	init_ai_objFromDigest(&pk, value);

	uint64_t sz_before = pimd->ibtr->msize + pimd->ibtr->nsize;
	int rv = reduced_iRem(pimd->ibtr, &icol, &pk);
	uint64_t sz_after = pimd->ibtr->msize + pimd->ibtr->nsize;

	// Subtract whatever memory the removal freed from the running total.
	cf_atomic64_sub(&imd->si->ns->n_bytes_sindex_memory, (sz_before - sz_after));

	return rv;
}
Example no. 3
0
/*
 * Insert (skey -> record digest 'value') into the per-partition index
 * b-tree, adding the newly consumed b-tree memory to the namespace
 * sindex memory counter.
 *
 * Returns AS_SINDEX_OK, AS_SINDEX_KEY_FOUND (duplicate - nothing added),
 * or AS_SINDEX_ERR_NO_MEMORY on insert failure.
 */
int
ai_btree_put(as_sindex_metadata *imd, as_sindex_pmetadata *pimd, void *skey, cf_digest *value)
{
	// Index-key object: digest column or integer column.
	ai_obj icol;
	if (C_IS_DG(imd->sktype)) {
		init_ai_objFromDigest(&icol, (cf_digest *)skey);
	}
	else {
		// TODO - ai_obj type is LONG for both Geo and Long
		init_ai_objLong(&icol, *(ulong *)skey);
	}

	// Primary-key object built from the record digest.
	ai_obj pk;
	init_ai_objFromDigest(&pk, value);

	uint64_t sz_before = pimd->ibtr->msize + pimd->ibtr->nsize;
	int rv = reduced_iAdd(pimd->ibtr, &icol, &pk, COL_TYPE_DIGEST);
	uint64_t sz_after = pimd->ibtr->msize + pimd->ibtr->nsize;

	// Account the memory the insert consumed (zero if nothing was added).
	cf_atomic64_add(&imd->si->ns->n_bytes_sindex_memory, (sz_after - sz_before));

	if (rv && rv != AS_SINDEX_KEY_FOUND) {
		cf_warning(AS_SINDEX, "Insert into the btree failed");
		return AS_SINDEX_ERR_NO_MEMORY;
	}
	return rv;
}
Example no. 4
0
/*
 * Remove the entry (skey -> record digest 'value') from the per-partition
 * index b-tree and release the freed b-tree memory back to the sindex
 * data-memory accounting.
 */
int
ai_btree_delete(as_sindex_metadata *imd, as_sindex_pmetadata *pimd, void * skey, cf_digest * value)
{
	// Nothing to delete from if the index tree was never created.
	if (! pimd->ibtr) {
		return AS_SINDEX_KEY_NOTFOUND;
	}

	// Index-key object: digest column or integer column.
	ai_obj icol;
	if (C_IS_Y(imd->dtype)) {
		init_ai_objFromDigest(&icol, (cf_digest *)skey);
	}
	else {
		init_ai_objLong(&icol, *(ulong *)skey);
	}

	// Primary-key object built from the record digest.
	ai_obj pk;
	init_ai_objFromDigest(&pk, value);

	ulong sz_before = pimd->ibtr->msize + pimd->ibtr->nsize;
	int rv = reduced_iRem(pimd->ibtr, &icol, &pk);
	ulong sz_after = pimd->ibtr->msize + pimd->ibtr->nsize;

	// Return whatever memory the removal freed.
	as_sindex_release_data_memory(imd, (sz_before - sz_after));

	return rv;
}
Example no. 5
0
/*
 * Deletes the digests in the passed-in gc_list from the secondary index,
 * bound by n2del elements per call; *deleted is incremented by the number
 * of successful deletes.
 *
 * Returns true when gc_list still has elements left to process (caller
 * should invoke again), false when the list is drained or n2del was 0.
 */
bool
ai_btree_defrag_list(as_sindex_metadata *imd, as_sindex_pmetadata *pimd, cf_ll *gc_list, ulong n2del, ulong *deleted)
{
	// If n2del is zero here, that means caller do not want to defrag
	if (n2del == 0 ) {
		return false;
	}
	ulong success = 0;
	as_namespace *ns = imd->si->ns;
	// STEP 3: go thru the PKtoDeleteList and delete the keys
	// Snapshot b-tree memory usage so the bytes freed by this pass can be
	// released in one shot at END.
	ulong bb = pimd->ibtr->msize + pimd->ibtr->nsize;
	uint64_t validation_time_ns = 0;
	uint64_t deletion_time_ns   = 0;
	while (cf_ll_size(gc_list)) {
		cf_ll_element        * ele  = cf_ll_get_head(gc_list);
		ll_sindex_gc_element * node = (ll_sindex_gc_element * )ele;
		objs_to_defrag_arr   * dt   = node->objs_to_defrag;

		// check before deleting. The digest may re-appear after the list
		// creation and before deletion from the secondary index

		int i = 0;
		// Drain the array back-to-front; dt->num always counts what's left,
		// so a partial pass (goto END) resumes where it stopped.
		while (dt->num != 0) {
			i = dt->num - 1;
			SET_TIME_FOR_SINDEX_GC_HIST(validation_time_ns);
			int ret = as_sindex_can_defrag_record(ns, &(dt->acol_digs[i].dig));
			SINDEX_GC_HIST_INSERT_DATA_POINT(sindex_gc_validate_obj_hist, validation_time_ns);
			validation_time_ns = 0;
			if (ret == AS_SINDEX_GC_SKIP_ITERATION) {
				// NOTE(review): validation says stop this pass entirely -
				// exact skip semantics live in as_sindex_can_defrag_record.
				goto END;
			} else if (ret == AS_SINDEX_GC_OK) {
				ai_obj           apk;
				init_ai_objFromDigest(&apk, &(dt->acol_digs[i].dig));
				ai_obj          *acol = &(dt->acol_digs[i].acol);
				cf_detail(AS_SINDEX, "Defragged %lu %ld", acol->l, *((uint64_t *)&apk.y));
				
				SET_TIME_FOR_SINDEX_GC_HIST(deletion_time_ns);
				if (reduced_iRem(pimd->ibtr, acol, &apk) == AS_SINDEX_OK) {
					success++;
					SINDEX_GC_HIST_INSERT_DATA_POINT(sindex_gc_delete_obj_hist, deletion_time_ns);
				}
				deletion_time_ns = 0;
			}
			// Entry consumed whether it was deleted, skipped, or not found.
			dt->num -= 1;
			n2del--;
			if (n2del == 0) {
				goto END;
			}
		}
		// Whole array processed - drop this node from the GC list.
		cf_ll_delete(gc_list, (cf_ll_element*)node);
	}

END:
	// Release the net b-tree memory freed by this pass.
	as_sindex_release_data_memory(imd, (bb -  pimd->ibtr->msize - pimd->ibtr->nsize));
	*deleted += success;
	return cf_ll_size(gc_list) ? true : false;
}
Example no. 6
0
/*
 * Migrate every digest stored in the flat array 'arr' into the nbtr
 * b-tree 'nbtr'. A failed insert is logged and skipped rather than
 * aborting the whole migration (previously it was swallowed silently
 * behind a "what to do ??" comment).
 */
static void
ai_arr_move_to_tree(ai_arr *arr, bt *nbtr)
{
	for (int i = 0; i < arr->used; i++) {
		ai_obj apk;
		// Each array slot is a raw digest of CF_DIGEST_KEY_SZ bytes.
		init_ai_objFromDigest(&apk, (cf_digest *)&arr->data[i * CF_DIGEST_KEY_SZ]);
		if (!btIndNodeAdd(nbtr, &apk)) {
			// Surface the failure instead of losing the entry silently.
			cf_warning(AS_SINDEX, "ai_arr_move_to_tree: btIndNodeAdd failed for entry %d", i);
		}
	}
}
Example no. 7
0
/*
 * Insert (skey -> record digest 'value') into the per-partition index
 * b-tree, reserving the extra b-tree memory against the sindex quota;
 * the insert is rolled back if the reservation fails.
 *
 * Returns AS_SINDEX_OK, AS_SINDEX_KEY_FOUND (duplicate - nothing added),
 * or AS_SINDEX_ERR_NO_MEMORY.
 */
int
ai_btree_put(as_sindex_metadata *imd, as_sindex_pmetadata *pimd, as_sindex_key *skey, void *value)
{
	int ret = AS_SINDEX_OK;
	uint64_t uk = *(uint64_t *)value;
	cf_digest *keyd = (cf_digest *)value;

	// Build the index-key object: digest-typed or integer-typed column.
	ai_obj ncol;
	if (C_IS_Y(imd->dtype)) {
		init_ai_objFromDigest(&ncol, &skey->b[0].digest);
	}
	else {
		init_ai_objLong(&ncol, skey->b[0].u.i64);
	}
	// Primary-key object built from the record digest.
	ai_obj apk;
	init_ai_objFromDigest(&apk, keyd);

	cf_detail(AS_SINDEX, "Insert: %ld %ld %ld", *(uint64_t *) &ncol.y, *(uint64_t *) &skey->b[0].digest, *((uint64_t *) &apk.y));

	ulong bb = pimd->ibtr->msize + pimd->ibtr->nsize;
	ret = reduced_iAdd(pimd->ibtr, &ncol, &apk, COL_TYPE_U160);
	if (ret == AS_SINDEX_KEY_FOUND) {
		goto END;
	} else if (ret != AS_SINDEX_OK) {
		cf_warning(AS_SINDEX, "Insert into the btree failed");
		ret = AS_SINDEX_ERR_NO_MEMORY;
		goto END;
	}
	// Charge the newly consumed b-tree memory; undo the insert on failure.
	ulong ab = pimd->ibtr->msize + pimd->ibtr->nsize;
	if (!as_sindex_reserve_data_memory(imd, (ab - bb))) {
		reduced_iRem(pimd->ibtr, &ncol, &apk);
		ret = AS_SINDEX_ERR_NO_MEMORY;
		goto END;
	}
	// Fixed format string: skey->b[0].u.i64 is a 64-bit integer, so %ld -
	// the old "%d" is a printf type mismatch (undefined behavior when the
	// trace is enabled).
	SITRACE(imd->si, DML, debug, "ai__btree_insert(N): %s key: %ld val %lu", imd->iname, skey->b[0].u.i64, uk);

END:

	return ret;
}
Example no. 8
0
/*
 * Insert (skey -> record digest 'value') into the per-partition index
 * b-tree, reserving the extra b-tree memory against the sindex quota;
 * the insert is rolled back if the reservation fails.
 *
 * Returns AS_SINDEX_OK, AS_SINDEX_KEY_FOUND (duplicate - nothing added),
 * or AS_SINDEX_ERR_NO_MEMORY.
 */
int
ai_btree_put(as_sindex_metadata *imd, as_sindex_pmetadata *pimd, void *skey, cf_digest *value)
{
	// Index-key object: digest column or integer column.
	ai_obj icol;
	if (C_IS_Y(imd->dtype)) {
		init_ai_objFromDigest(&icol, (cf_digest *)skey);
	}
	else {
		init_ai_objLong(&icol, *(ulong *)skey);
	}

	// Primary-key object built from the record digest.
	ai_obj pk;
	init_ai_objFromDigest(&pk, value);

	ulong sz_before = pimd->ibtr->msize + pimd->ibtr->nsize;
	int rv = reduced_iAdd(pimd->ibtr, &icol, &pk, COL_TYPE_U160);

	if (rv == AS_SINDEX_KEY_FOUND) {
		// Duplicate entry - nothing was inserted, nothing to account.
		return rv;
	}
	if (rv != AS_SINDEX_OK) {
		cf_warning(AS_SINDEX, "Insert into the btree failed");
		return AS_SINDEX_ERR_NO_MEMORY;
	}

	// Charge the newly consumed b-tree memory; undo the insert on failure.
	ulong sz_after = pimd->ibtr->msize + pimd->ibtr->nsize;
	if (!as_sindex_reserve_data_memory(imd, (sz_after - sz_before))) {
		reduced_iRem(pimd->ibtr, &icol, &pk);
		return AS_SINDEX_ERR_NO_MEMORY;
	}

	return rv;
}
Example no. 9
0
/*
 * Walk the node b-tree 'nbtr' (all record digests under one index key
 * 'ikey') and append each digest to the query context's record list.
 *
 * fullrng = true scans the whole tree; false resumes a batched query from
 * the last batch's end-point digest (qctx->bdig), skipping that repeated
 * first entry.
 *
 * Return 0 in case of success
 *       -1 in case of failure
 */
static int
add_recs_from_nbtr(as_sindex_metadata *imd, ai_obj *ikey, bt *nbtr, as_sindex_qctx *qctx, bool fullrng)
{
	int ret = 0;
	ai_obj sfk, efk;
	init_ai_obj(&sfk);
	init_ai_obj(&efk);
	btSIter *nbi;
	btEntry *nbe;
	btSIter stack_nbi;

	if (fullrng) {
		nbi = btSetFullRangeIter(&stack_nbi, nbtr, 1, NULL);
	} else { // search from LAST batches end-point
		init_ai_objFromDigest(&sfk, &qctx->bdig);
		assignMaxKey(nbtr, &efk);
		nbi = btSetRangeIter(&stack_nbi, nbtr, &sfk, &efk, 1);
	}
 	if (nbi) {
		while ((nbe = btRangeNext(nbi, 1))) {
			ai_obj *akey = nbe->key;
			// FIRST can be REPEAT (last batch)
			if (!fullrng && ai_objEQ(&sfk, akey)) {
				continue;
			}
			// Append this digest; non-zero means the record list is full
			// or reservation failed - abort with failure.
			if (btree_addsinglerec(imd, ikey, (cf_digest *)&akey->y, qctx->recl, &qctx->n_bdigs,
									qctx->can_partition_query, qctx->partitions_pre_reserved)) {
				ret = -1;
				break;
			}
			// Batch is full: remember the index key and this digest as the
			// resume point for the next batch, then stop.
			if (qctx->n_bdigs == qctx->bsize) {
				if (ikey) {
					ai_objClone(qctx->bkey, ikey);
				}
				cloneDigestFromai_obj(&qctx->bdig, akey);
				break;
			}
		}
		btReleaseRangeIterator(nbi);
	} else {
		cf_warning(AS_QUERY, "Could not find nbtr iterator.. skipping !!");
	}
	return ret;
}
Example no. 10
0
/*
 * Run a secondary-index lookup: an equality lookup for a point predicate,
 * or a numeric range lookup otherwise, filling the query context.
 *
 * Returns AS_SINDEX_ERR_NO_MEMORY on failure, AS_SINDEX_CONTINUE when the
 * batch filled up (more results remain), AS_SINDEX_OK otherwise.
 */
int
ai_btree_query(as_sindex_metadata *imd, as_sindex_range *srange, as_sindex_qctx *qctx)
{
	bool err = 1;

	if (srange->isrange) {
		// RANGE LOOKUP
		err = get_numeric_range_recl(imd, srange->start.u.i64, srange->end.u.i64, qctx);
	} else {
		// EQUALITY LOOKUP
		ai_obj eqkey;
		if (C_IS_Y(imd->dtype)) {
			init_ai_objFromDigest(&eqkey, &srange->start.digest);
		} else {
			init_ai_objLong(&eqkey, srange->start.u.i64);
		}
		err = get_recl(imd, &eqkey, qctx);
	}

	if (err) {
		return AS_SINDEX_ERR_NO_MEMORY;
	}
	return (qctx->n_bdigs >= qctx->bsize) ? AS_SINDEX_CONTINUE : AS_SINDEX_OK;
}