/*
 * Destroy an rtree: tear down every node reachable from the root, then
 * release the rtree structure itself through its registered deallocation
 * hook.  After this returns, `rtree` must not be used again.
 */
void
rtree_delete(rtree_t *rtree) {
	/* Recursively free the tree's nodes starting at level 0. */
	rtree_delete_subtree(rtree, rtree->root, 0);
	/* Hand the rtree object back to whoever allocated it. */
	rtree->dalloc(rtree);
}
/*
 * Recursively tear down the subtree rooted at `subtree`, which sits at
 * depth `level`.  Children are interior nodes unless the next level is the
 * last one (`level + 2 >= RTREE_HEIGHT`), in which case they are leaf
 * element arrays and go through rtree_leaf_dalloc instead.  Child pointers
 * are read with relaxed atomics; no other thread may be mutating the tree
 * during deletion.  The root itself is deliberately never freed here.
 */
static void
rtree_delete_subtree(tsdn_t *tsdn, rtree_t *rtree, rtree_node_elm_t *subtree,
    unsigned level) {
	size_t fanout = ZU(1) << rtree_levels[level].bits;
	/* Hoist the interior-vs-leaf decision out of the slot loop. */
	bool children_are_leaves = (level + 2 >= RTREE_HEIGHT);

	for (size_t slot = 0; slot < fanout; slot++) {
		void *child = atomic_load_p(&subtree[slot].child,
		    ATOMIC_RELAXED);
		if (child == NULL) {
			continue;
		}
		if (children_are_leaves) {
			rtree_leaf_dalloc(tsdn, rtree,
			    (rtree_leaf_elm_t *)child);
		} else {
			rtree_delete_subtree(tsdn, rtree,
			    (rtree_node_elm_t *)child, level + 1);
		}
	}

	/* Never free through this path when `subtree` is the root node. */
	if (subtree != rtree->root) {
		rtree_node_dalloc(tsdn, rtree, subtree);
	}
}
/*
 * Destroy an rtree built over a pool allocator.
 *
 * FIX: the mutex is embedded in *rtree, so it must be destroyed BEFORE the
 * rtree object is returned to the pool.  The previous order called
 * rtree->dalloc(rtree->pool, rtree) first and then touched &rtree->mutex,
 * i.e. it read freed memory (use-after-free; CERT MEM30-C).  This assumes
 * the dalloc hook releases the rtree object itself -- consistent with
 * `rtree` being passed to it -- TODO confirm against the hook's contract.
 */
void
rtree_delete(rtree_t *rtree) {
	/* Free all dynamically allocated tree nodes first. */
	rtree_delete_subtree(rtree, rtree->root, 0);
	/* Destroy the embedded mutex while *rtree is still valid memory. */
	malloc_mutex_destroy(&rtree->mutex);
	/* Finally return the rtree itself to its pool, if a hook is set. */
	if (rtree->dalloc != NULL) {
		rtree->dalloc(rtree->pool, rtree);
	}
}
/*
 * Post-order teardown of the subtree rooted at `node` at depth `level`.
 * Interior levels (every level except the last) recurse into each non-NULL
 * child slot first; the node itself is then unconditionally handed back to
 * rtree->dalloc -- the root included, so the caller must not free it again.
 */
static void
rtree_delete_subtree(rtree_t *rtree, void **node, unsigned level) {
	if (level < rtree->height - 1) {
		size_t width = ZU(1) << rtree->level2bits[level];
		for (size_t slot = 0; slot < width; slot++) {
			void **subchild = (void **)node[slot];
			if (subchild != NULL) {
				rtree_delete_subtree(rtree, subchild,
				    level + 1);
			}
		}
	}
	/* Children are gone; release this node last (post-order). */
	rtree->dalloc(node);
}
/*
 * Destroy an rtree's node hierarchy.  When RTREE_HEIGHT is 1 there is no
 * subtree below the root to walk, so the whole body compiles away and the
 * function is a no-op.
 */
void
rtree_delete(tsdn_t *tsdn, rtree_t *rtree) {
#if RTREE_HEIGHT > 1
	rtree_delete_subtree(tsdn, rtree, rtree->root, 0);
#endif
}