/**
 * Compute the union of 'number' bitmaps using a heap. This can
 * sometimes be faster than roaring_bitmap_or_many which uses
 * a naive algorithm. Caller is responsible for freeing the
 * result.
 */
roaring_bitmap_t *roaring_bitmap_or_many_heap(uint32_t number,
                                              const roaring_bitmap_t **x) {
    if (number == 0) {
        return roaring_bitmap_create();
    }
    if (number == 1) {
        return roaring_bitmap_copy(x[0]);
    }
    roaring_pq_t *pq = create_pq(x, number);
    while (pq->size > 1) {
        roaring_pq_element_t x1 = pq_poll(pq);
        roaring_pq_element_t x2 = pq_poll(pq);

        if (x1.is_temporary && x2.is_temporary) {
            roaring_bitmap_t *newb =
                lazy_or_from_lazy_inputs(x1.bitmap, x2.bitmap);
            // should normally return a fresh new bitmap *except* that
            // it can return x1.bitmap or x2.bitmap in degenerate cases
            bool temporary = !((newb == x1.bitmap) && (newb == x2.bitmap));
            uint64_t bsize = roaring_bitmap_portable_size_in_bytes(newb);
            roaring_pq_element_t newelement = {
                .size = bsize, .is_temporary = temporary, .bitmap = newb};
            pq_add(pq, &newelement);
        } else if (x2.is_temporary) {
            roaring_bitmap_lazy_or_inplace(x2.bitmap, x1.bitmap);
            x2.size = roaring_bitmap_portable_size_in_bytes(x2.bitmap);
            pq_add(pq, &x2);
        } else if (x1.is_temporary) {
            roaring_bitmap_lazy_or_inplace(x1.bitmap, x2.bitmap);
            x1.size = roaring_bitmap_portable_size_in_bytes(x1.bitmap);
            pq_add(pq, &x1);
        } else {
            roaring_bitmap_t *newb =
                roaring_bitmap_lazy_or(x1.bitmap, x2.bitmap);
            uint64_t bsize = roaring_bitmap_portable_size_in_bytes(newb);
            roaring_pq_element_t newelement = {
                .size = bsize, .is_temporary = true, .bitmap = newb};
            pq_add(pq, &newelement);
        }
    }
    roaring_pq_element_t X = pq_poll(pq);
    roaring_bitmap_t *answer = X.bitmap;
    roaring_bitmap_repair_after_lazy(answer);
    pq_free(pq);
    return answer;
}
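/*
 * Hedged usage sketch (added for illustration, not part of the original
 * file): build a few small bitmaps, union them with the heap-based routine
 * above, then free everything. Only common CRoaring calls are assumed
 * (roaring_bitmap_create, roaring_bitmap_add, roaring_bitmap_get_cardinality,
 * roaring_bitmap_free).
 */
static void example_or_many_heap(void) {
    enum { N = 3 };
    roaring_bitmap_t *inputs[N];
    for (uint32_t i = 0; i < N; i++) {
        inputs[i] = roaring_bitmap_create();
        /* give each input a distinct block of 1000 values */
        for (uint32_t v = 0; v < 1000; v++) {
            roaring_bitmap_add(inputs[i], i * 1000 + v);
        }
    }
    roaring_bitmap_t *result =
        roaring_bitmap_or_many_heap(N, (const roaring_bitmap_t **)inputs);
    uint64_t card = roaring_bitmap_get_cardinality(result); /* expected: 3000 */
    (void)card;
    /* the caller frees both the result and the inputs */
    roaring_bitmap_free(result);
    for (uint32_t i = 0; i < N; i++) {
        roaring_bitmap_free(inputs[i]);
    }
}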
Example #3
/**
 * Repeatedly merge the closest pair of the given markers until they don't overlap.
 *
 * The centroid rule is used for merging.  I.e. each marker's area represents
 * a population of points. Merging two markers removes the originals and creates a
 * new marker with area the sum of the originals and center at the average position
 * of the two merged populations.
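 *
 * Illustrative arithmetic (an added example, not from the original text):
 * merging a marker of area 4 centered at x = 0 with a marker of area 1
 * centered at x = 10 gives a marker of area 5 centered at
 * x = (4*0 + 1*10) / 5 = 2.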
 *
 * The markers array must include extra buffer space.  If there are n markers, the
 * array must have 2n-1 spaces, with only the first n initialized.
 *
 * The number of markers after merging is returned. Zero or more of these will be
 * marked deleted_p and should be ignored.
 *
 * This algorithm is O(n k log n), where k is the maximum number of simultaneously
 * overlapping markers in the original, unmerged set.
 */
int merge_markers_fast(MARKER_INFO *info, MARKER *markers, int n_markers) {
    int augmented_length = 2 * n_markers - 1;
    NewArrayDecl(int, n_nghbr, augmented_length);
    NewArrayDecl(MARKER_DISTANCE, mindist, augmented_length);
    NewArrayDecl(int, inv_nghbr_head, augmented_length);
    NewArrayDecl(int, inv_nghbr_next, augmented_length);
    NewArrayDecl(int, tmp, augmented_length); // Too big
    // Do not free the following array!  It's owned by the priority queue.
    NewArrayDecl(int, heap, augmented_length); // Too big

    // Specialized quadtree supports finding closest marker to any given one.
    QUADTREE_DECL(qt);

    // Priority queue keyed on distances of overlapping pairs of markers.
    PRIORITY_QUEUE_DECL(pq);

    // Extent of markers in the domain.
    MARKER_EXTENT ext[1];

    // Get a bounding box for the whole collection of markers.
    get_marker_array_extent(markers, n_markers, ext);

    // Set up the quadtree with the bounding box. Choose tree depth heuristically.
    int max_depth = high_bit_position(n_markers) / 4 + 3;
    qt_setup(qt, max_depth, ext->x, ext->y, ext->w, ext->h, info);

    // Insert all the markers in the quadtree.
    for (int i = 0; i < n_markers; i++)
        qt_insert(qt, markers + i);

    // Set all the inverse nearest neighbor links to null.
    for (int i = 0; i < augmented_length; i++)
        inv_nghbr_head[i] = inv_nghbr_next[i] = -1;

    // Initialize the heap by adding an index for each overlapping pair.
    // The heap holds indices into the array of min-distance keys. An index for
    // pair a->b is added iff markers with indices a and b overlap and b < a.
    int heap_size = 0;
    for (int a = 0; a < n_markers; a++) {
        int b = qt_nearest_wrt(markers, qt, a);
        if (0 <= b && b < a) {
            n_nghbr[a] = b;
            mindist[a] = mr_distance(info, markers + a, markers + b);
            heap[heap_size++] = a;

            // Here we are building a linked list of markers that have b as nearest.
            inv_nghbr_next[a] = inv_nghbr_head[b];
            inv_nghbr_head[b] = a;
        }
    }

    // Now install the raw heap array into the priority queue. After this it's owned by the queue.
    pq_set_up_heap(pq, heap, heap_size, mindist, augmented_length); // Too big

    while (!pq_empty_p(pq)) {

        // Get nearest pair from priority queue.
        int a = pq_get_min(pq);
        int b = n_nghbr[a];

        // Delete both of the nearest pair from all data structures.
        pq_delete(pq, b);
        qt_delete(qt, markers + a);
        qt_delete(qt, markers + b);
        mr_set_deleted(markers + a);
        mr_set_deleted(markers + b);

        // Capture the inv lists of both a and b in tmp.
        int tmp_size = 0;
        for (int p = inv_nghbr_head[a]; p >= 0; p = inv_nghbr_next[p])
            if (!markers[p].deleted_p)
                tmp[tmp_size++] = p;
        for (int p = inv_nghbr_head[b]; p >= 0; p = inv_nghbr_next[p])
            if (!markers[p].deleted_p)
                tmp[tmp_size++] = p;

        // Create a new merged marker. Adding it after all others means
        // nothing already in the heap could have it as nearest.
        int aa = n_markers++;
        mr_merge(info, markers, aa, a, b);

        // Add to quadtree.
        qt_insert(qt, markers + aa);

        // Find nearest overlapping neighbor of the merged marker, if any.
        int bb = qt_nearest_wrt(markers, qt, aa);
        if (0 <= bb) {
            n_nghbr[aa] = bb;
            mindist[aa] = mr_distance(info, markers + aa, markers + bb);
            pq_add(pq, aa);
            inv_nghbr_next[aa] = inv_nghbr_head[bb];
            inv_nghbr_head[bb] = aa;
        }

        // Reset the nearest neighbors of the inverse neighbors of the deletions.
        for (int i = 0; i < tmp_size; i++) {
            int aa = tmp[i];
            int bb = qt_nearest_wrt(markers, qt, aa);
            if (0 <= bb && bb < aa) {
                n_nghbr[aa] = bb;
                mindist[aa] = mr_distance(info, markers + aa, markers + bb);
                pq_update(pq, aa);
                inv_nghbr_next[aa] = inv_nghbr_head[bb];
                inv_nghbr_head[bb] = aa;
            } else {
                pq_delete(pq, aa);
            }
        }
    }
    qt_clear(qt);
    pq_clear(pq);
    Free(n_nghbr);
    Free(mindist);
    Free(inv_nghbr_head);
    Free(inv_nghbr_next);
    Free(tmp);
    return n_markers;
}
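/*
 * Hedged usage sketch (added for illustration; MARKER, MARKER_INFO and the
 * deleted_p field come from this codebase and are only assumed here, and
 * <stdlib.h>/<string.h> are needed for malloc/memcpy/free): it demonstrates
 * the buffer contract documented above, i.e. n input markers need an array
 * of 2n-1 slots with only the first n initialized, and entries flagged
 * deleted_p must be skipped after merging.
 */
static int count_surviving_markers(MARKER_INFO *info, const MARKER *input, int n) {
    // Allocate the augmented array required by merge_markers_fast.
    MARKER *buf = malloc((size_t)(2 * n - 1) * sizeof(MARKER));
    memcpy(buf, input, (size_t)n * sizeof(MARKER)); // only the first n are initialized

    int total = merge_markers_fast(info, buf, n);

    // Zero or more of the returned markers are flagged deleted_p.
    int survivors = 0;
    for (int i = 0; i < total; i++)
        if (!buf[i].deleted_p)
            survivors++;

    free(buf);
    return survivors;
}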
Example #4
/*
 * return 0 if nothing was modified
 * return 1 if elevation was modified
 */
int do_flatarea(int index, CELL ele, CELL *alt_org, CELL *alt_new)
{
    int upr, upc, r, c, ct_dir;
    CELL is_in_list, is_worked, this_in_list;
    int index_doer, index_up;
    int n_flat_cells = 0, counter;
    CELL ele_nbr, min_ele_diff;
    int uphill_order, downhill_order, max_uphill_order, max_downhill_order;
    int last_order;

    struct pq *up_pq = pq_create();
    struct pq *down_pq = pq_create();

    struct orders inc_order, *order_found, *nbr_order_found;
    struct RB_TREE *order_tree = rbtree_create(cmp_orders, sizeof(struct orders));

    pq_add(index, down_pq);
    pq_add(index, up_pq);
    inc_order.downhill = -1;
    inc_order.uphill = 0;
    inc_order.index = index;
    inc_order.flag = 0;
    rbtree_insert(order_tree, &inc_order);

    n_flat_cells = 1;

    min_ele_diff = INT_MAX;
    max_uphill_order = max_downhill_order = 0;

    /* get uphill start points */
    G_debug(2, "get uphill start points");
    counter = 0;
    while (down_pq->size) {
	if ((index_doer = pq_drop(down_pq)) == -1)
	    G_fatal_error("get start points: no more points in down queue");

	seg_index_rc(alt_seg, index_doer, &r, &c);

	FLAG_SET(flat_done, r, c);

	/* check all neighbours, breadth first search */
	for (ct_dir = 0; ct_dir < sides; ct_dir++) {
	    /* get r, c (upr, upc) for this neighbour */
	    upr = r + nextdr[ct_dir];
	    upc = c + nextdc[ct_dir];
	    /* check if r, c are within region */
	    if (upr >= 0 && upr < nrows && upc >= 0 && upc < ncols) {
		index_up = SEG_INDEX(alt_seg, upr, upc);
		is_in_list = FLAG_GET(in_list, upr, upc);
		is_worked = FLAG_GET(worked, upr, upc);
		ele_nbr = alt_org[index_up];

		if (ele_nbr == ele && !is_worked) {

		    inc_order.downhill = -1;
		    inc_order.uphill = -1;
		    inc_order.index = index_up;
		    inc_order.flag = 0;
		    /* not yet added to queue */
		    if ((order_found = rbtree_find(order_tree, &inc_order)) == NULL) {
			n_flat_cells++;

			/* add to down queue if not yet in there */
			pq_add(index_up, down_pq);

			/* add to up queue if not yet in there */
			if (is_in_list) {
			    pq_add(index_up, up_pq);
			    /* set uphill order to 0 */
			    inc_order.uphill = 0;
			    counter++;
			}
			rbtree_insert(order_tree, &inc_order);
		    }
		}
	    }
	}
    }
    /* flat area too small, not worth the effort */
    if (n_flat_cells < 5) {
	/* clean up */
	pq_destroy(up_pq);
	pq_destroy(down_pq);
	rbtree_destroy(order_tree);

	return 0;
    }
    
    G_debug(2, "%d flat cells, %d cells in tree, %d start cells",
               n_flat_cells, (int)order_tree->count, counter);

    pq_destroy(down_pq);
    down_pq = pq_create();

    /* got uphill start points, do uphill correction */
    G_debug(2, "got uphill start points, do uphill correction");
    counter = 0;
    uphill_order = 1;
    while (up_pq->size) {
	int is_in_down_queue = 0;

	if ((index_doer = pq_drop(up_pq)) == -1)
	    G_fatal_error("uphill order: no more points in up queue");

	seg_index_rc(alt_seg, index_doer, &r, &c);
	this_in_list = FLAG_GET(in_list, r, c);

	/* get uphill order for this point */
	inc_order.index = index_doer;
	if ((order_found = rbtree_find(order_tree, &inc_order)) == NULL)
	    G_fatal_error(_("flat cell escaped for uphill correction"));

	last_order = uphill_order - 1;
	uphill_order = order_found->uphill;

	if (last_order > uphill_order)
	    G_warning(_("queue error: last uphill order %d > current uphill order %d"),
	              last_order, uphill_order);

	/* debug */
	if (uphill_order == -1)
	    G_fatal_error(_("uphill order not set"));

	if (max_uphill_order < uphill_order)
	    max_uphill_order = uphill_order;

	uphill_order++;
	counter++;

	/* check all neighbours, breadth first search */
	for (ct_dir = 0; ct_dir < sides; ct_dir++) {
	    /* get r, c (upr, upc) for this neighbour */
	    upr = r + nextdr[ct_dir];
	    upc = c + nextdc[ct_dir];
	    /* check if r, c are within region */
	    if (upr >= 0 && upr < nrows && upc >= 0 && upc < ncols) {
		index_up = SEG_INDEX(alt_seg, upr, upc);
		is_in_list = FLAG_GET(in_list, upr, upc);
		is_worked = FLAG_GET(worked, upr, upc);
		ele_nbr = alt_org[index_up];

		/* all cells that are in_list should have been added
		 * previously as uphill start points */
		if (ele_nbr == ele && !is_worked) {

		    inc_order.index = index_up;
		    if ((nbr_order_found = rbtree_find(order_tree, &inc_order)) == NULL) {
			G_fatal_error(_("flat cell escaped in uphill correction"));
		    }

		    /* not yet added to queue */
		    if (nbr_order_found->uphill == -1) {
			if (is_in_list)
			    G_warning("cell should be in queue");
			/* add to up queue */
			pq_add(index_up, up_pq);
			/* set nbr uphill order = current uphill order + 1 */
			nbr_order_found->uphill = uphill_order;
		    }
		}
		/* add focus cell to down queue */
		if (!this_in_list && !is_in_down_queue &&
		    ele_nbr != ele && !is_in_list && !is_worked) {
		    pq_add(index_doer, down_pq);
		    /* set downhill order to 0 */
		    order_found->downhill = 0;
		    is_in_down_queue = 1;
		}
		if (ele_nbr > ele && min_ele_diff > ele_nbr - ele)
		    min_ele_diff = ele_nbr - ele;
	    }
	}
    }
    /* debug: all flags should be set to 0 */

    pq_destroy(up_pq);
    up_pq = pq_create();

    /* got downhill start points, do downhill correction */
    G_debug(2, "got downhill start points, do downhill correction");
    downhill_order = 1;
    while (down_pq->size) {
	if ((index_doer = pq_drop(down_pq)) == -1)
	    G_fatal_error(_("downhill order: no more points in down queue"));

	seg_index_rc(alt_seg, index_doer, &r, &c);
	this_in_list = FLAG_GET(in_list, r, c);

	/* get downhill order for this point */
	inc_order.index = index_doer;
	if ((order_found = rbtree_find(order_tree, &inc_order)) == NULL)
	    G_fatal_error(_("flat cell escaped for downhill correction"));

	last_order = downhill_order - 1;
	downhill_order = order_found->downhill;

	if (last_order > downhill_order)
	    G_warning(_("queue error: last downhill order %d > current downhill order %d"),
	              last_order, downhill_order);

	/* debug */
	if (downhill_order == -1)
	    G_fatal_error(_("downhill order: downhill order not set"));

	if (max_downhill_order < downhill_order)
	    max_downhill_order = downhill_order;

	downhill_order++;

	/* check all neighbours, breadth first search */
	for (ct_dir = 0; ct_dir < sides; ct_dir++) {
	    /* get r, c (upr, upc) for this neighbour */
	    upr = r + nextdr[ct_dir];
	    upc = c + nextdc[ct_dir];
	    /* check if r, c are within region */
	    if (upr >= 0 && upr < nrows && upc >= 0 && upc < ncols) {
		index_up = SEG_INDEX(alt_seg, upr, upc);
		is_in_list = FLAG_GET(in_list, upr, upc);
		is_worked = FLAG_GET(worked, upr, upc);
		ele_nbr = alt_org[index_up];

		if (ele_nbr == ele && !is_worked) {

		    inc_order.index = index_up;
		    if ((nbr_order_found = rbtree_find(order_tree, &inc_order)) == NULL)
			G_fatal_error(_("flat cell escaped in downhill correction"));

		    /* not yet added to queue */
		    if (nbr_order_found->downhill == -1) {
			
			/* add to down queue */
			pq_add(index_up, down_pq);
			/* set nbr downhill order = current downhill order + 1 */
			nbr_order_found->downhill = downhill_order;

			/* add to up queue */
			if (is_in_list) {
			    pq_add(index_up, up_pq);
			    /* set flag */
			    nbr_order_found->flag = 1;
			}
		    }
		}
	    }
	}
    }

    /* got uphill and downhill order, adjust ele */

    /* increment: ele += uphill_order +  max_downhill_order - downhill_order */
    /* decrement: ele += uphill_order - max_uphill_order - downhill_order */

    G_debug(2, "adjust ele");
    while (up_pq->size) {
	if ((index_doer = pq_drop(up_pq)) == -1)
	    G_fatal_error("no more points in up queue");

	seg_index_rc(alt_seg, index_doer, &r, &c);
	this_in_list = FLAG_GET(in_list, r, c);

	/* get uphill and downhill order for this point */
	inc_order.index = index_doer;
	if ((order_found = rbtree_find(order_tree, &inc_order)) == NULL)
	    G_fatal_error(_("flat cell escaped for adjustment"));

	uphill_order = order_found->uphill;
	downhill_order = order_found->downhill;

	/* debug */
	if (uphill_order == -1)
	    G_fatal_error(_("adjustment: uphill order not set"));
	if (!this_in_list && downhill_order == -1)
	    G_fatal_error(_("adjustment: downhill order not set"));

	/* increment */
	if (this_in_list) {
	    downhill_order = max_downhill_order;
	    uphill_order = 0;
	}
	alt_new[index_doer] +=
	    (uphill_order + (double)(max_downhill_order - downhill_order) / 2.0 + 0.5) / 2.0 + 0.5;

	/* check all neighbours, breadth first search */
	for (ct_dir = 0; ct_dir < sides; ct_dir++) {
	    /* get r, c (upr, upc) for this neighbour */
	    upr = r + nextdr[ct_dir];
	    upc = c + nextdc[ct_dir];
	    /* check if r, c are within region */
	    if (upr >= 0 && upr < nrows && upc >= 0 && upc < ncols) {
		index_up = SEG_INDEX(alt_seg, upr, upc);
		is_in_list = FLAG_GET(in_list, upr, upc);
		is_worked = FLAG_GET(worked, upr, upc);
		ele_nbr = alt_org[index_up];

		if (ele_nbr == ele && !is_worked) {

		    inc_order.index = index_up;
		    if ((nbr_order_found = rbtree_find(order_tree, &inc_order)) == NULL)
			G_fatal_error(_("flat cell escaped in adjustment"));

		    /* not yet added to queue */
		    if (nbr_order_found->flag == 0) {
			if (is_in_list)
			    G_warning("adjustment: in_list cell should be in queue");
			/* add to up queue */
			pq_add(index_up, up_pq);
			nbr_order_found->flag = 1;
		    }
		}
	    }
	}
    }

    /* clean up */
    pq_destroy(up_pq);
    pq_destroy(down_pq);
    rbtree_destroy(order_tree);
    
    return 1;
}
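/*
 * Hedged illustration (added; not part of the GRASS source): the three
 * breadth-first passes above all scan neighbours through the nextdr/nextdc
 * offset tables. The dr/dc arrays below are hypothetical stand-ins for
 * those tables, shown only to make the scan pattern concrete; the real
 * offsets are defined elsewhere in r.watershed.
 */
static void visit_neighbours_sketch(int r, int c, int n_rows, int n_cols)
{
    static const int dr[8] = { -1, -1, -1, 0, 0, 1, 1, 1 };
    static const int dc[8] = { -1, 0, 1, -1, 1, -1, 0, 1 };
    int ct_dir;

    for (ct_dir = 0; ct_dir < 8; ct_dir++) {
	int upr = r + dr[ct_dir];
	int upc = c + dc[ct_dir];

	/* skip neighbours outside the region, as the loops above do */
	if (upr >= 0 && upr < n_rows && upc >= 0 && upc < n_cols) {
	    /* a real pass would compute SEG_INDEX(alt_seg, upr, upc) here */
	}
    }
}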