Example #1
// Called when writes encounter a "doomed" record, to delete the doomed record
// and create a new one in place without giving up the record lock.
// FIXME - won't be able to "rescue" with future sindex method - will go away.
void
as_record_rescue(as_index_ref *r_ref, as_namespace *ns)
{
	record_delete_adjust_sindex(r_ref->r, ns);
	as_record_destroy(r_ref->r, ns);
	as_index_clear_record_info(r_ref->r);
	cf_atomic64_incr(&ns->n_objects);
}
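For context, here is a minimal sketch of how a write path might call as_record_rescue() once it holds a locked reference to a record that turns out to be doomed. The names write_sketch() and record_is_doomed() are hypothetical stand-ins for illustration only; they are not part of the source above.

// Hypothetical caller sketch - assumes r_ref already holds the record lock,
// and record_is_doomed() stands in for the real doom check.
static void
write_sketch(as_index_ref *r_ref, as_namespace *ns)
{
	if (record_is_doomed(r_ref->r, ns)) {
		// Reuse the index element in place - no need to drop the record
		// lock, delete the element, and re-insert a new one.
		as_record_rescue(r_ref, ns);
	}

	// ... proceed with the write as if this were a freshly created record ...
}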
Example #2
// If there's an element with specified digest in the tree, return a locked
// and reserved reference to it in index_ref. If not, create an element with
// this digest, insert it into the tree, and return a locked and reserved
// reference to it in index_ref.
//
// Returns:
//		 1 - created and inserted (reference returned in index_ref)
//		 0 - found already existing (reference returned in index_ref)
//		-1 - error (index_ref untouched)
int
as_index_get_insert_vlock(as_index_tree *tree, cf_digest *keyd,
		as_index_ref *index_ref)
{
	int cmp = 0;
	bool retry;

	// Save parents as we search for the specified element's insertion point.
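	// (A red-black tree's height is at most 2*log2(n + 1), so 64 is plenty.)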
	as_index_ele eles[64];
	as_index_ele *ele;

	do {
		ele = eles;

		pthread_mutex_lock(&tree->lock);

		// Search for the specified element, or a parent to insert it under.

		ele->parent = NULL; // we'll never look this far up
		ele->me_h = tree->root_h;
		ele->me = tree->root;

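		// The root is a dummy element - the real tree hangs off its left child.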
		cf_arenax_handle t_h = tree->root->left_h;
		as_index *t = RESOLVE_H(t_h);

		while (t_h != tree->sentinel_h) {
			ele++;
			ele->parent = ele - 1;
			ele->me_h = t_h;
			ele->me = t;

			if ((cmp = cf_digest_compare(keyd, &t->key)) == 0) {
				// The element already exists - simply return it.

				as_index_reserve(t);
				cf_atomic_int_incr(&g_config.global_record_ref_count);

				pthread_mutex_unlock(&tree->lock);

				if (! index_ref->skip_lock) {
					olock_vlock(g_config.record_locks, keyd, &index_ref->olock);
					cf_atomic_int_incr(&g_config.global_record_lock_count);
				}

				index_ref->r = t;
				index_ref->r_h = t_h;

				return 0;
			}

			t_h = cmp > 0 ? t->left_h : t->right_h;
			t = RESOLVE_H(t_h);
		}

		// We didn't find the tree element, so we'll be inserting it.

		retry = false;

		if (EBUSY == pthread_mutex_trylock(&tree->reduce_lock)) {
			// The tree is being reduced - this could take a while, so unlock
			// to avoid blocking reads and overwrites.
			pthread_mutex_unlock(&tree->lock);

			// Wait until the tree reduce is done...
			pthread_mutex_lock(&tree->reduce_lock);
			pthread_mutex_unlock(&tree->reduce_lock);

			// ... and start over - we unlocked, so the tree may have changed.
			retry = true;
		}
	} while (retry);
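	// Trylock succeeded - we now hold both the tree lock and the reduce lock.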

	// Create a new element and insert it.

	// Make the new element.
	cf_arenax_handle n_h = cf_arenax_alloc(tree->arena);

	if (n_h == 0) {
		cf_warning(AS_INDEX, "arenax alloc failed");
		pthread_mutex_unlock(&tree->reduce_lock);
		pthread_mutex_unlock(&tree->lock);
		return -1;
	}

	as_index *n = RESOLVE_H(n_h);

	n->rc = 2; // one for create (eventually balanced by delete), one for caller
	cf_atomic_int_add(&g_config.global_record_ref_count, 2);

	n->key = *keyd;

	n->left_h = n->right_h = tree->sentinel_h; // n starts as a leaf element
	n->color = AS_RED; // n's color starts as red

	// Make sure we can detect that the record isn't initialized.
	as_index_clear_record_info(n);

	// Insert the new element n under parent ele.
	if (ele->me == tree->root || 0 < cmp) {
		ele->me->left_h = n_h;
	}
	else {
		ele->me->right_h = n_h;
	}

	ele++;
	ele->parent = ele - 1;
	ele->me_h = n_h;
	ele->me = n;

	// Rebalance the tree as needed.
	as_index_insert_rebalance(tree, ele);

	tree->elements++;

	pthread_mutex_unlock(&tree->reduce_lock);
	pthread_mutex_unlock(&tree->lock);

	if (! index_ref->skip_lock) {
		olock_vlock(g_config.record_locks, keyd, &index_ref->olock);
		cf_atomic_int_incr(&g_config.global_record_lock_count);
	}

	index_ref->r = n;
	index_ref->r_h = n_h;

	return 1;
}
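To round this out, here is a minimal sketch of the calling pattern, covering all three return values. release_record() is a hypothetical stand-in for whatever unlock/release helper the surrounding code provides; it is not defined in the source above.

// Hypothetical caller sketch - handles all three return values.
static int
get_insert_sketch(as_index_tree *tree, cf_digest *keyd, as_namespace *ns)
{
	as_index_ref r_ref;
	r_ref.skip_lock = false;

	int rv = as_index_get_insert_vlock(tree, keyd, &r_ref);

	if (rv == -1) {
		// Arena allocation failed - r_ref was untouched, nothing to release.
		return -1;
	}

	if (rv == 1) {
		// Created - the record info was cleared, so initialize it before use.
	}
	// rv == 0 - the element already existed.

	// ... read or modify the record via r_ref.r ...

	release_record(&r_ref, ns); // hypothetical: drops the olock and reference

	return 0;
}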