A range tower is a bitmap, implemented as an augmented binary tree.
Beyond the usual features of a bitmap, a range tower can efficiently
implement a "splice" operation that shifts ranges of bits left or right.
This feature does cost memory and time, so use a range tower only if this
feature is actually needed. Otherwise, use a range set (see range-set.h),
which can do everything that a range tower can do except the "splice"
operation.

Each operation has O(lg N) cost, where N is the number of contiguous
regions of 1-bits in the bitmap. Also, a cache reduces the second and
subsequent containment tests within a single contiguous region to O(1).
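
A brief sketch of intended usage, in terms of the functions declared in
range-tower.h below:

    struct range_tower *rt = range_tower_create ();
    range_tower_set1 (rt, 10, 5);              /* Set bits 10...14. */
    assert (range_tower_contains (rt, 12));
    range_tower_insert0 (rt, 12, 3);           /* Bits 12...14 shift up to 15...17. */
    assert (range_tower_scan (rt, 12) == 15);  /* First 1-bit at or above 12. */
    range_tower_destroy (rt);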
src/libpspp/range-map.h \
src/libpspp/range-set.c \
src/libpspp/range-set.h \
+ src/libpspp/range-tower.c \
+ src/libpspp/range-tower.h \
src/libpspp/sparse-array.c \
src/libpspp/sparse-array.h \
src/libpspp/sparse-xarray.c \
--- /dev/null
+/* PSPP - a program for statistical analysis.
+ Copyright (C) 2007, 2009, 2011 Free Software Foundation, Inc.
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>. */
+
+/* Bitmap, implemented as an augmented binary tree. */
+
+/* If you add routines in this file, please add a corresponding
+ test to range-tower-test.c. This test program should achieve
+ 100% coverage of lines and branches in this code, as reported
+ by "gcov -b". */
+
+#include <config.h>
+
+#include "libpspp/range-tower.h"
+
+#include <limits.h>
+#include <stdlib.h>
+
+#include "libpspp/assertion.h"
+#include "libpspp/compiler.h"
+#include "libpspp/pool.h"
+
+#include "gl/minmax.h"
+#include "gl/xalloc.h"
+
+static void reaugment_range_tower_node (struct abt_node *, const void *aux);
+static void delete_node (struct range_tower *, struct range_tower_node *);
+
+static void destroy_pool (void *);
+
+static void
+print_structure (const struct abt_node *node_)
+{
+ struct range_tower_node *node;
+
+ if (node_ == NULL)
+ return;
+ node = abt_data (node_, struct range_tower_node, abt_node);
+ printf ("%lu+%lu/%d", node->n_zeros, node->n_ones, node->abt_node.level);
+ if (node->abt_node.down[0] || node->abt_node.down[1])
+ {
+ printf ("(");
+ print_structure (node->abt_node.down[0]);
+ printf (",");
+ print_structure (node->abt_node.down[1]);
+ printf (")");
+ }
+}
+
+/* Prints the regions in RT to stdout, preceded by TITLE. */
+static void UNUSED
+print_regions (const char *title, const struct range_tower *rt)
+{
+ const struct range_tower_node *node;
+
+ printf ("%s:", title);
+ for (node = range_tower_first__ (rt); node != NULL;
+ node = range_tower_next__ (rt, node))
+ printf (" (%lu,%lu)", node->n_zeros, node->n_ones);
+ printf ("\n");
+ printf ("structure:");
+ print_structure (rt->abt.root);
+ printf ("\n");
+}
+
+static struct range_tower_node *
+range_tower_node_from_abt_node (const struct abt_node *abt_node)
+{
+ return abt_data (abt_node, struct range_tower_node, abt_node);
+}
+
+/* Returns the total width (zeros and ones) of the nodes in the subtree rooted
+ at P, or 0 if P is null. */
+static unsigned long int
+subtree_width (const struct abt_node *p)
+{
+ return p != NULL ? range_tower_node_from_abt_node (p)->subtree_width : 0;
+}
+
+/* Returns the position of the first 1-bit in NODE.
+
+   The performance of this function is O(lg n) in the number of nodes in the
+   range tower.  It is often possible to avoid calling this function, either
+   by taking advantage of the NODE_START parameter to range_tower_lookup or
+   by incrementally keeping track of the start position while iterating
+   through the tower.  In the former case the asymptotic performance is no
+   different, since range_tower_lookup is also O(lg n), but in the latter
+   case performance improves from O(lg n) to O(1).  (See the example sketch
+   following this function.) */
+unsigned long int
+range_tower_node_get_start (const struct range_tower_node *node)
+{
+ const struct abt_node *p = &node->abt_node;
+  unsigned long int start = subtree_width (p->down[0]) + node->n_zeros;
+ while (p->up != NULL)
+ {
+ if (p == p->up->down[1])
+ {
+ const struct range_tower_node *up
+ = range_tower_node_from_abt_node (p->up);
+ start += subtree_width (p->up->down[0]) + up->n_zeros + up->n_ones;
+ }
+ p = p->up;
+ }
+ return start;
+}
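+
+/* For example (a sketch), the following visits every node while tracking each
+   node's start position incrementally, in O(1) per node, rather than calling
+   range_tower_node_get_start() on each one:
+
+       unsigned long int position = 0;
+       const struct range_tower_node *node;
+       for (node = range_tower_first__ (rt); node != NULL;
+            node = range_tower_next__ (rt, node))
+         {
+           unsigned long int node_start = position + node->n_zeros;
+           ...use NODE, whose first 1-bit is at NODE_START...
+           position = node_start + node->n_ones;
+         }
+
+   The RANGE_TOWER_FOR_EACH macro in range-tower.h wraps this pattern using
+   the public iteration functions. */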
+
+/* Returns one past the position of the last 1-bit in NODE.
+
+ Like range_tower_node_get_start(), the performance of this function is O(lg
+ n) in the number of nodes in the range tower. */
+unsigned long int
+range_tower_node_get_end (const struct range_tower_node *node)
+{
+ return range_tower_node_get_start (node) + node->n_ones;
+}
+
+/* Creates and returns a new, empty range tower. */
+struct range_tower *
+range_tower_create (void)
+{
+ return range_tower_create_pool (NULL);
+}
+
+static struct range_tower *
+range_tower_create_pool__ (struct pool *pool)
+{
+ struct range_tower *rt = xmalloc (sizeof *rt);
+
+ rt->pool = pool;
+ if (pool != NULL)
+ pool_register (pool, destroy_pool, rt);
+
+ abt_init (&rt->abt, NULL, reaugment_range_tower_node, NULL);
+ rt->cache_end = 0;
+
+ return rt;
+}
+
+/* Creates and returns a new, empty range tower in the given POOL. */
+struct range_tower *
+range_tower_create_pool (struct pool *pool)
+{
+ struct range_tower_node *node;
+ struct range_tower *rt;
+
+ rt = range_tower_create_pool__ (pool);
+
+ node = xmalloc (sizeof *node);
+ node->n_zeros = ULONG_MAX;
+ node->n_ones = 0;
+ abt_insert_after (&rt->abt, NULL, &node->abt_node);
+
+ return rt;
+}
+
+/* Creates and returns a clone of OLD range tower in the given POOL
+ (which may be null). */
+struct range_tower *
+range_tower_clone (const struct range_tower *old, struct pool *pool)
+{
+ const struct range_tower_node *old_node;
+ struct abt_node *prev_node;
+ struct range_tower *new;
+
+ new = range_tower_create_pool__ (pool);
+ prev_node = NULL;
+ for (old_node = range_tower_first__ (old); old_node != NULL;
+ old_node = range_tower_next__ (old, old_node))
+ {
+ struct range_tower_node *new_node;
+
+ new_node = xmalloc (sizeof *new_node);
+ new_node->n_zeros = old_node->n_zeros;
+ new_node->n_ones = old_node->n_ones;
+
+ abt_insert_after (&new->abt, prev_node, &new_node->abt_node);
+ prev_node = &new_node->abt_node;
+ }
+ return new;
+}
+
+/* Destroys range tower RT. */
+void
+range_tower_destroy (struct range_tower *rt)
+{
+ if (rt != NULL)
+ {
+ if (rt->pool != NULL)
+ pool_unregister (rt->pool, rt);
+ while (!abt_is_empty (&rt->abt))
+ delete_node (rt, range_tower_first__ (rt));
+ free (rt);
+ }
+}
+
+/* Sets the WIDTH bits starting at START in RT to 1-bits. */
+void
+range_tower_set1 (struct range_tower *rt,
+ unsigned long int start, unsigned long int width)
+{
+ struct range_tower_node *node;
+ unsigned long int node_start;
+
+ assert (width == 0 || start + width - 1 >= start);
+
+ node = range_tower_lookup (rt, start, &node_start);
+ while (width > 0)
+ {
+ unsigned long int node_ofs = start - node_start;
+
+ if (node_ofs >= node->n_zeros)
+ {
+ /* There are already some 1-bits here, so skip them. */
+ unsigned long ones_left = (node->n_zeros + node->n_ones) - node_ofs;
+ if (width <= ones_left)
+ return;
+
+ start += ones_left;
+ width -= ones_left;
+ node_start += node->n_zeros + node->n_ones;
+ node_ofs = 0;
+ node = range_tower_next__ (rt, node);
+ }
+
+ /* Invalidate cache. */
+ rt->cache_end = 0;
+
+ if (node_ofs == 0)
+ {
+ if (node_start > 0)
+ {
+ struct range_tower_node *prev = range_tower_prev__ (rt, node);
+ if (width >= node->n_zeros)
+ {
+ /* All zeros in NODE are replaced by ones. Change NODE's
+ entire width into PREV's trailing ones, e.g. 00001111
+ 00001111 becomes 0000111111111111. */
+                  unsigned long int node_width = node->n_zeros + node->n_ones;
+ abt_delete (&rt->abt, &node->abt_node);
+ prev->n_ones += node_width;
+ abt_reaugmented (&rt->abt, &prev->abt_node);
+ if (width <= node_width)
+ return;
+
+ /* Go around again with NODE replaced by PREV's new
+ successor. */
+ width -= node_width;
+ start += node_width;
+ node = range_tower_next__ (rt, prev);
+ node_start += node_width;
+ }
+ else
+ {
+ /* Leading zeros in NODE change into trailing ones in PREV,
+ but trailing zeros in NODE remain, e.g. 00001111 00001111
+ becomes 0000111111 001111. */
+ node->n_zeros -= width;
+ abt_reaugmented (&rt->abt, &node->abt_node);
+
+ prev->n_ones += width;
+ abt_reaugmented (&rt->abt, &prev->abt_node);
+ return;
+ }
+ }
+ else
+ {
+ if (width >= node->n_zeros)
+ {
+ /* All zeros in NODE are replaced by ones, e.g. 00001111
+ becomes 11111111. */
+ node->n_ones += node->n_zeros;
+ node->n_zeros = 0;
+ if (width <= node->n_ones)
+ return;
+
+ start += node->n_ones;
+ node_start += node->n_ones;
+ width -= node->n_ones;
+ node = range_tower_next__ (rt, node);
+ }
+ else
+ {
+ /* Leading zeros in NODE (which starts at offset 0) are
+ replaced by ones, but some zeros remain. This requires a
+ node split, e.g. 00001111 becomes 11 001111. */
+ struct range_tower_node *new_node;
+
+ node->n_zeros -= width;
+ abt_reaugmented (&rt->abt, &node->abt_node);
+
+ new_node = xmalloc (sizeof *new_node);
+ new_node->n_zeros = 0;
+ new_node->n_ones = width;
+ abt_insert_before (&rt->abt, &node->abt_node,
+ &new_node->abt_node);
+ return;
+ }
+ }
+ }
+ else
+ {
+ unsigned long int zeros_left = node->n_zeros - node_ofs;
+ if (width >= zeros_left)
+ {
+ /* Trailing zeros in NODE are replaced by ones, but leading
+ zeros remain, e.g. 00001111 becomes 00111111. */
+ node->n_zeros -= zeros_left;
+ node->n_ones += zeros_left;
+ if (width <= node->n_ones)
+ return;
+ start += node->n_ones;
+ width -= node->n_ones;
+ node_start += node->n_zeros + node->n_ones;
+ node = range_tower_next__ (rt, node);
+ }
+ else
+ {
+              /* Zeros that are neither leading nor trailing turn into ones.
+ Split the node into two nodes, e.g. 00001111 becomes 011
+ 01111. */
+ struct range_tower_node *new_node;
+
+ new_node = xmalloc (sizeof *new_node);
+ new_node->n_ones = node->n_ones;
+ new_node->n_zeros = zeros_left - width;
+
+ node->n_zeros = node_ofs;
+ node->n_ones = width;
+ abt_reaugmented (&rt->abt, &node->abt_node);
+
+ abt_insert_after (&rt->abt, &node->abt_node,
+ &new_node->abt_node);
+ return;
+ }
+ }
+ }
+}
+
+/* Sets the WIDTH bits starting at START in RT to 0-bits. */
+void
+range_tower_set0 (struct range_tower *rt,
+ unsigned long int start, unsigned long int width)
+{
+ struct range_tower_node *node;
+ unsigned long int node_start;
+
+ assert (width == 0 || start + width - 1 >= start);
+
+ node = range_tower_lookup (rt, start, &node_start);
+ while (width > 0)
+ {
+ unsigned long int node_ofs = start - node_start;
+
+ if (node_ofs < node->n_zeros)
+ {
+ /* Deleting zeros is a no-op, so skip them. */
+ unsigned long zeros_left = node->n_zeros - node_ofs;
+ if (zeros_left >= width)
+ {
+ /* We are deleting only existing zeros. Nothing to do. */
+ return;
+ }
+
+ width -= zeros_left;
+ start += zeros_left;
+ node_ofs = node->n_zeros;
+ }
+
+ rt->cache_end = 0;
+
+ if (node_ofs == node->n_zeros)
+ {
+ if (node->n_ones > width)
+ {
+              /* WIDTH leading ones within NODE turn into zeros, but some
+                 ones remain, e.g. 00001111 becomes 00000011.  No
+                 reaugmentation because n_zeros + n_ones doesn't change. */
+ node->n_zeros += width;
+ node->n_ones -= width;
+ return;
+ }
+ else
+ {
+ /* All ones in NODE turn into zeros, so merge NODE with the
+ following node, e.g. 00001111 00001111 becomes
+ 0000000000001111, and then do it again with the merged
+ node. */
+ unsigned long int next_zeros, next_ones;
+ struct range_tower_node *next;
+
+ next = range_tower_next__ (rt, node);
+ if (next == NULL)
+ {
+ node->n_zeros += node->n_ones;
+ node->n_ones = 0;
+ return;
+ }
+
+ next_zeros = next->n_zeros;
+ next_ones = next->n_ones;
+ abt_delete (&rt->abt, &next->abt_node);
+
+ node->n_zeros += node->n_ones + next_zeros;
+ node->n_ones = next_ones;
+ abt_reaugmented (&rt->abt, &node->abt_node);
+ }
+ }
+ else if (node_ofs + width >= node->n_zeros + node->n_ones)
+ {
+ /* Trailing ones in NODE turn into zeros, but leading ones remain,
+ e.g. 000011{11} 00001111 becomes 000011 {00}00001111. Give the
+ trailing ones to the next node as zeros and go around again with
+ the next node. */
+ struct range_tower_node *next;
+ unsigned long int delta;
+
+ delta = node->n_ones - (node_ofs - node->n_zeros);
+ node->n_ones -= delta;
+ abt_reaugmented (&rt->abt, &node->abt_node);
+
+ next = range_tower_next__ (rt, node);
+ if (next == NULL)
+ {
+ struct range_tower_node *new_node;
+
+ new_node = xmalloc (sizeof *new_node);
+ new_node->n_zeros = delta;
+ new_node->n_ones = 0;
+
+ abt_insert_before (&rt->abt, NULL, &new_node->abt_node);
+ return;
+ }
+
+ next->n_zeros += delta;
+ abt_reaugmented (&rt->abt, &next->abt_node);
+
+ node_start += node->n_zeros + node->n_ones;
+ start = node_start;
+ node = next;
+ }
+ else
+ {
+          /* Ones that are neither leading nor trailing turn into zeros,
+ e.g. 00001111 becomes 00001 001. Split the node into two nodes
+ and we're done. */
+ unsigned long int end = start + width;
+ struct range_tower_node *new_node;
+
+ new_node = xmalloc (sizeof *new_node);
+ new_node->n_zeros = width;
+ new_node->n_ones = (node_start + node->n_zeros + node->n_ones) - end;
+
+ node->n_ones = node_ofs - node->n_zeros;
+ abt_reaugmented (&rt->abt, &node->abt_node);
+
+ abt_insert_after (&rt->abt, &node->abt_node, &new_node->abt_node);
+ return;
+ }
+ }
+}
+
+static void
+range_tower_delete__ (struct range_tower *rt,
+ unsigned long int start, unsigned long int width)
+{
+ struct range_tower_node *node;
+ unsigned long int node_start;
+
+ rt->cache_end = 0;
+ node = range_tower_lookup (rt, start, &node_start);
+ for (;;)
+ {
+ unsigned long int node_ofs = start - node_start;
+
+ if (node_ofs < node->n_zeros)
+ {
+ if (node_ofs + width < node->n_zeros)
+ {
+ node->n_zeros -= width;
+ abt_reaugmented (&rt->abt, &node->abt_node);
+ break;
+ }
+ else if (node_ofs > 0)
+ {
+ width -= node->n_zeros - node_ofs;
+ node->n_zeros = node_ofs;
+ abt_reaugmented (&rt->abt, &node->abt_node);
+ if (width == 0)
+ break;
+ /* Continue with 1-bits. */
+ }
+ else if (width < node->n_zeros + node->n_ones)
+ {
+ struct range_tower_node *prev = range_tower_prev__ (rt, node);
+ unsigned long int ones_left;
+
+ ones_left = (node->n_zeros + node->n_ones) - width;
+ if (prev != NULL)
+ {
+ abt_delete (&rt->abt, &node->abt_node);
+ prev->n_ones += ones_left;
+ abt_reaugmented (&rt->abt, &prev->abt_node);
+ }
+ else
+ {
+ node->n_zeros = 0;
+ node->n_ones = ones_left;
+ abt_reaugmented (&rt->abt, &node->abt_node);
+ }
+ break;
+ }
+ else
+ {
+ /* Delete entire node. */
+ struct range_tower_node *next = range_tower_next__ (rt, node);
+
+ width -= node->n_zeros + node->n_ones;
+ abt_delete (&rt->abt, &node->abt_node);
+ if (next == NULL)
+ break;
+
+ node = next;
+ continue;
+ }
+ }
+
+ if (node_ofs + width < node->n_zeros + node->n_ones)
+ {
+ node->n_ones -= width;
+ abt_reaugmented (&rt->abt, &node->abt_node);
+ break;
+ }
+ else if (node_ofs > node->n_zeros)
+ {
+ unsigned long int ones_ofs = node_ofs - node->n_zeros;
+ width -= node->n_ones - ones_ofs;
+ node->n_ones = ones_ofs;
+ abt_reaugmented (&rt->abt, &node->abt_node);
+ if (width == 0)
+ break;
+          /* Continue with the next node. */
+ node_start += node->n_zeros + node->n_ones;
+ node = range_tower_next__ (rt, node);
+ }
+ else
+ {
+          /* Merge with the next node. */
+ struct range_tower_node *next = range_tower_next__ (rt, node);
+ if (next != NULL)
+ {
+ unsigned long int next_zeros = next->n_zeros;
+ unsigned long int next_ones = next->n_ones;
+
+ abt_delete (&rt->abt, &next->abt_node);
+
+ width -= node->n_ones;
+
+ node->n_zeros += next_zeros;
+ node->n_ones = next_ones;
+ abt_reaugmented (&rt->abt, &node->abt_node);
+
+ if (width == 0)
+ break;
+ }
+ else
+ {
+ node->n_ones = 0;
+ abt_reaugmented (&rt->abt, &node->abt_node);
+ break;
+ }
+ }
+ }
+}
+
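+/* Deletes the WIDTH bits starting at position START in RT, shifting the
+   following bits down to take their place and appending WIDTH 0-bits at the
+   top so that the tower's total width remains ULONG_MAX. */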
+void
+range_tower_delete (struct range_tower *rt,
+ unsigned long int start, unsigned long int width)
+{
+ struct range_tower_node *node;
+
+ if (width == 0)
+ return;
+
+ assert (start + width - 1 >= start);
+
+ range_tower_delete__ (rt, start, width);
+
+ node = range_tower_last__ (rt);
+ if (node != NULL && node->n_ones == 0)
+ {
+ node->n_zeros += width;
+ abt_reaugmented (&rt->abt, &node->abt_node);
+ }
+ else
+ {
+ struct range_tower_node *new_node;
+
+ new_node = xmalloc (sizeof *new_node);
+ new_node->n_zeros = width;
+ new_node->n_ones = 0;
+
+ abt_insert_before (&rt->abt, NULL, &new_node->abt_node);
+ }
+}
+
+static struct range_tower_node *
+range_tower_insert0__ (struct range_tower *rt, struct range_tower_node *node,
+ unsigned long int *node_startp,
+ unsigned long int start, unsigned long int width)
+{
+ unsigned long int node_ofs = start - *node_startp;
+
+ if (node_ofs <= node->n_zeros)
+ {
+ /* 00+00+001111 => 0000001111. */
+ node->n_zeros += width;
+ abt_reaugmented (&rt->abt, &node->abt_node);
+
+ return node;
+ }
+ else
+ {
+ /* 000011+00+11 => 000011 0011. */
+ struct range_tower_node *new_node;
+
+ new_node = xmalloc (sizeof *new_node);
+ new_node->n_zeros = width;
+ new_node->n_ones = node->n_zeros + node->n_ones - node_ofs;
+
+ node->n_ones -= new_node->n_ones;
+ abt_reaugmented (&rt->abt, &node->abt_node);
+ abt_insert_after (&rt->abt, &node->abt_node, &new_node->abt_node);
+
+ *node_startp += node->n_zeros + node->n_ones;
+ return new_node;
+ }
+}
+
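+/* Inserts WIDTH 0-bits into RT starting at position START, shifting the bits
+   at and above START up by WIDTH; the topmost WIDTH bits are discarded so
+   that the tower's total width remains ULONG_MAX. */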
+void
+range_tower_insert0 (struct range_tower *rt,
+ unsigned long int start, unsigned long int width)
+{
+ if (width == 0)
+ return;
+
+  assert (start + width - 1 >= start);
+
+ if (start + width == ULONG_MAX)
+ range_tower_set0 (rt, start, width);
+ else
+ {
+ struct range_tower_node *node;
+ unsigned long int node_start;
+
+ range_tower_delete__ (rt, ULONG_MAX - width, width);
+
+ node = range_tower_lookup (rt, start, &node_start);
+ range_tower_insert0__ (rt, node, &node_start, start, width);
+ }
+}
+
+static struct range_tower_node *
+range_tower_insert1__ (struct range_tower *rt, struct range_tower_node *node,
+ unsigned long int *node_startp,
+ unsigned long int start, unsigned long int width)
+{
+ unsigned long int node_start = *node_startp;
+ unsigned long int node_ofs = start - node_start;
+
+ if (node_ofs >= node->n_zeros)
+ {
+ node->n_ones += width;
+ abt_reaugmented (&rt->abt, &node->abt_node);
+ return node;
+ }
+ else if (node_ofs == 0 && node_start > 0)
+ {
+ struct range_tower_node *prev = range_tower_prev__ (rt, node);
+
+ prev->n_ones += width;
+ abt_reaugmented (&rt->abt, &prev->abt_node);
+
+ *node_startp += width;
+ return node;
+ }
+ else
+ {
+ /* 00001111 => 0+11+0001111 => 011 0001111 */
+ struct range_tower_node *new_node;
+
+ new_node = xmalloc (sizeof *new_node);
+ new_node->n_zeros = node->n_zeros - node_ofs;
+ new_node->n_ones = node->n_ones;
+
+ node->n_zeros = node_ofs;
+ node->n_ones = width;
+ abt_reaugmented (&rt->abt, &node->abt_node);
+
+ abt_insert_after (&rt->abt, &node->abt_node, &new_node->abt_node);
+
+ *node_startp += node->n_zeros + node->n_ones;
+ return new_node;
+ }
+}
+
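+/* Inserts WIDTH 1-bits into RT starting at position START, shifting the bits
+   at and above START up by WIDTH; the topmost WIDTH bits are discarded so
+   that the tower's total width remains ULONG_MAX. */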
+void
+range_tower_insert1 (struct range_tower *rt,
+ unsigned long int start, unsigned long int width)
+{
+ struct range_tower_node *node;
+ unsigned long int node_start;
+
+ if (width == 0)
+ return;
+
+  assert (start + width - 1 >= start);
+
+  range_tower_delete__ (rt, ULONG_MAX - width, width);
+
+ node = range_tower_lookup (rt, start, &node_start);
+ range_tower_insert1__ (rt, node, &node_start, start, width);
+}
+
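+/* Moves the WIDTH bits starting at position OLD_START in RT so that they
+   begin at position NEW_START instead, shifting the bits in between to make
+   room.  The tower's total width remains unchanged. */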
+void
+range_tower_move (struct range_tower *rt,
+ unsigned long int old_start,
+ unsigned long int new_start,
+ unsigned long int width)
+{
+ unsigned long int node_start;
+
+ if (width == 0 || old_start == new_start)
+ return;
+
+ assert (old_start + width - 1 >= old_start);
+ assert (new_start + width - 1 >= new_start);
+
+ do
+ {
+ struct range_tower_node *node;
+ unsigned long int node_ofs;
+ unsigned long int zeros, ones;
+
+ node = range_tower_lookup (rt, old_start, &node_start);
+ node_ofs = old_start - node_start;
+
+ if (node_ofs >= node->n_zeros)
+ {
+ unsigned long int max_ones;
+
+ zeros = 0;
+ max_ones = (node->n_zeros + node->n_ones) - node_ofs;
+ ones = MIN (width, max_ones);
+ }
+ else
+ {
+ unsigned long int max_zeros;
+
+ max_zeros = node->n_zeros - node_ofs;
+ zeros = MIN (width, max_zeros);
+ if (zeros < width)
+ ones = MIN (width - zeros, node->n_ones);
+ else
+ ones = 0;
+ }
+
+ node->n_zeros -= zeros;
+ node->n_ones -= ones;
+ abt_reaugmented (&rt->abt, &node->abt_node);
+
+ if (node->n_zeros == 0)
+ {
+ if (node->n_ones == 0)
+ abt_delete (&rt->abt, &node->abt_node);
+ else if (node_start > 0)
+ {
+ /* Merge with previous. */
+ unsigned long int n_ones = node->n_ones;
+ struct range_tower_node *prev = range_tower_prev__ (rt, node);
+
+ abt_delete (&rt->abt, &node->abt_node);
+ prev->n_ones += n_ones;
+ abt_reaugmented (&rt->abt, &prev->abt_node);
+ }
+ }
+ else if (node->n_ones == 0)
+ {
+ struct range_tower_node *next = range_tower_next__ (rt, node);
+ if (next != NULL)
+ {
+ /* Merge with next. */
+ unsigned long int n_zeros = node->n_zeros;
+
+ abt_delete (&rt->abt, &node->abt_node);
+ next->n_zeros += n_zeros;
+ abt_reaugmented (&rt->abt, &next->abt_node);
+ }
+ }
+
+ if (new_start < old_start)
+ {
+ node = range_tower_lookup (rt, new_start, &node_start);
+ if (zeros)
+ {
+ node = range_tower_insert0__ (rt, node, &node_start,
+ new_start, zeros);
+ old_start += zeros;
+ new_start += zeros;
+ }
+
+ if (ones)
+ {
+ node = range_tower_insert1__ (rt, node, &node_start,
+ new_start, ones);
+ old_start += ones;
+ new_start += ones;
+ }
+ }
+ else
+ {
+ unsigned long int remaining = width - (zeros + ones);
+
+ if (new_start + remaining < ULONG_MAX - (zeros + ones))
+ {
+ node = range_tower_lookup (rt, new_start + remaining,
+ &node_start);
+ if (zeros)
+ {
+ node = range_tower_insert0__ (rt, node, &node_start,
+ new_start + remaining, zeros);
+ new_start += zeros;
+ }
+
+ if (ones)
+ {
+ node = range_tower_insert1__ (rt, node, &node_start,
+ new_start + remaining, ones);
+ new_start += ones;
+ }
+ }
+ else
+ {
+ node = range_tower_last__ (rt);
+ if (zeros)
+ {
+ if (node->n_ones)
+ {
+ struct range_tower_node *new_node;
+
+ new_node = xmalloc (sizeof *new_node);
+ new_node->n_zeros = zeros;
+ new_node->n_ones = 0;
+
+ abt_insert_after (&rt->abt, &node->abt_node,
+ &new_node->abt_node);
+
+ node_start += node->n_zeros + node->n_ones;
+ node = new_node;
+ }
+ else
+ {
+ node->n_zeros += zeros;
+ abt_reaugmented (&rt->abt, &node->abt_node);
+ }
+ }
+ if (ones)
+ {
+ node->n_ones += ones;
+ abt_reaugmented (&rt->abt, &node->abt_node);
+ }
+
+ new_start += zeros + ones;
+ }
+ }
+ width -= zeros + ones;
+ }
+ while (width > 0);
+}
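+
+/* range-tower.h declares range_tower_splice(), which this file does not
+   define.  A minimal sketch in terms of the operations above, assuming the
+   intended semantics are "change the width of the OLD_WIDTH bits at START to
+   NEW_WIDTH, shifting the following bits to compensate and filling any added
+   bits with zeros", might be:
+
+       void
+       range_tower_splice (struct range_tower *rt,
+                           unsigned long int start,
+                           unsigned long int old_width,
+                           unsigned long int new_width)
+       {
+         if (new_width > old_width)
+           range_tower_insert0 (rt, start + old_width, new_width - old_width);
+         else if (new_width < old_width)
+           range_tower_delete (rt, start + new_width, old_width - new_width);
+       }
+*/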
+
+/* Returns true if there is a 1-bit at the given POSITION in RT,
+ false otherwise. */
+bool
+range_tower_contains (const struct range_tower *rt_, unsigned long int position)
+{
+ struct range_tower *rt = CONST_CAST (struct range_tower *, rt_);
+ if (position >= rt->cache_end || position < rt->cache_start)
+ {
+ struct range_tower_node *node;
+ unsigned long int node_start;
+
+ node = range_tower_lookup (rt, position, &node_start);
+ if (position < node_start + node->n_zeros)
+ {
+ rt->cache_start = node_start;
+ rt->cache_end = node_start + node->n_zeros;
+ rt->cache_value = false;
+ }
+ else
+ {
+ rt->cache_start = node_start + node->n_zeros;
+ rt->cache_end = rt->cache_start + node->n_ones;
+ rt->cache_value = true;
+ }
+ }
+ return rt->cache_value;
+}
+
+/* Returns the smallest position of a 1-bit greater than or
+ equal to START. Returns ULONG_MAX if there is no 1-bit with
+ position greater than or equal to START. */
+unsigned long int
+range_tower_scan (const struct range_tower *rt_, unsigned long int start)
+{
+ struct range_tower *rt = CONST_CAST (struct range_tower *, rt_);
+
+ if (start < rt->cache_end && start >= rt->cache_start && rt->cache_value)
+ return start;
+
+ if (start != ULONG_MAX)
+ {
+ struct range_tower_node *node;
+ unsigned long int node_start;
+
+ node = range_tower_lookup (rt, start, &node_start);
+ if (node->n_ones)
+ {
+ rt->cache_start = node_start + node->n_zeros;
+ rt->cache_end = rt->cache_start + node->n_ones;
+ rt->cache_value = true;
+ return MAX (start, rt->cache_start);
+ }
+ else
+ {
+ rt->cache_start = node_start;
+ rt->cache_end = ULONG_MAX;
+ rt->cache_value = false;
+ }
+ }
+ return ULONG_MAX;
+}
+\f
+/* Recalculates the subtree_width of NODE based on the subtree_widths of its
+   left and right children. */
+static void
+reaugment_range_tower_node (struct abt_node *node_, const void *aux UNUSED)
+{
+ struct range_tower_node *node = range_tower_node_from_abt_node (node_);
+
+ node->subtree_width = node->n_zeros + node->n_ones;
+ if (node->abt_node.down[0] != NULL)
+ {
+ struct range_tower_node *left;
+
+ left = range_tower_node_from_abt_node (node->abt_node.down[0]);
+ node->subtree_width += left->subtree_width;
+ }
+ if (node->abt_node.down[1] != NULL)
+ {
+ struct range_tower_node *right;
+
+ right = range_tower_node_from_abt_node (node->abt_node.down[1]);
+ node->subtree_width += right->subtree_width;
+ }
+}
+
+/* Deletes NODE from RT and frees it. */
+static void
+delete_node (struct range_tower *rt, struct range_tower_node *node)
+{
+ abt_delete (&rt->abt, &node->abt_node);
+ free (node);
+}
+
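+/* Returns the node that contains bit POSITION within RT, and stores the
+   position of the node's first bit in *NODE_START.  POSITION must be less
+   than ULONG_MAX, so that it falls within some node (a range tower always
+   spans exactly ULONG_MAX bits). */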
+struct range_tower_node *
+range_tower_lookup (const struct range_tower *rt, unsigned long int position,
+ unsigned long int *node_start)
+{
+ const struct range_tower_node *node;
+ const struct abt_node *abt_node;
+
+ abt_node = rt->abt.root;
+ node = range_tower_node_from_abt_node (abt_node);
+ *node_start = subtree_width (abt_node->down[0]);
+ for (;;)
+ {
+ unsigned long int left_width = subtree_width (abt_node->down[0]);
+
+ if (position < left_width)
+ {
+ abt_node = abt_node->down[0];
+ *node_start -= left_width - subtree_width (abt_node->down[0]);
+ }
+ else
+ {
+ unsigned long int node_width = node->n_zeros + node->n_ones;
+
+ position -= left_width;
+ if (position < node_width)
+ return CONST_CAST (struct range_tower_node *, node);
+
+ position -= node_width;
+ abt_node = abt_node->down[1];
+ left_width = subtree_width (abt_node->down[0]);
+ *node_start += node_width + left_width;
+ }
+ node = range_tower_node_from_abt_node (abt_node);
+ }
+}
+
+/* Destroys range tower RT.
+ Helper function for range_tower_create_pool. */
+static void
+destroy_pool (void *rt_)
+{
+ struct range_tower *rt = rt_;
+ rt->pool = NULL;
+ range_tower_destroy (rt);
+}
--- /dev/null
+/* PSPP - a program for statistical analysis.
+ Copyright (C) 2007, 2009, 2011 Free Software Foundation, Inc.
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>. */
+
+/* Bitmap, implemented as an augmented binary tree.
+
+ Beyond the usual features of a bitmap, a range tower can efficiently
+ implement a "splice" operation that shifts ranges of bits left or right.
+ This feature does cost memory and time, so use a range tower only if this
+ feature is actually needed. Otherwise, use a range set (see range-set.h),
+ which can do everything that a range tower can do except the "splice"
+ operation.
+
+ Each operation has O(lg N) cost, where N is the number of contiguous regions
+ of 1-bits in the bitmap. Also, a cache reduces the second and subsequent
+ containment tests within a single contiguous region to O(1). */
+
+#ifndef LIBPSPP_RANGE_TOWER_H
+#define LIBPSPP_RANGE_TOWER_H
+
+#include <limits.h>
+#include <stdbool.h>
+#include "libpspp/abt.h"
+#include "libpspp/cast.h"
+
+/* A tower of ranges. */
+struct range_tower
+ {
+ struct pool *pool; /* Pool for freeing range_tower. */
+ struct abt abt; /* Tree of range_tower_nodes. */
+
+ /* Cache. */
+ unsigned long int cache_start; /* Start of region. */
+ unsigned long int cache_end; /* One past end of region. */
+    bool cache_value;                  /* Whether the cached region is 1-bits. */
+ };
+
+/* A node in the range tower. */
+struct range_tower_node
+ {
+ struct abt_node abt_node; /* Augmented binary tree node. */
+ unsigned long int n_zeros; /* Number of leading zeros. */
+ unsigned long int n_ones; /* Number of ones following the zeros. */
+ unsigned long int subtree_width; /* n_zeros + n_ones + sum of descendants. */
+ };
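+
+/* For example (a sketch of the representation): a tower whose 1-bits are at
+   positions 4...7 and 12...13 consists of three nodes, in order:
+   (n_zeros,n_ones) = (4,4), then (4,2), then (ULONG_MAX - 14, 0).  The
+   nodes' widths always sum to exactly ULONG_MAX. */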
+
+struct range_tower *range_tower_create (void);
+struct range_tower *range_tower_create_pool (struct pool *);
+struct range_tower *range_tower_clone (const struct range_tower *,
+ struct pool *);
+void range_tower_destroy (struct range_tower *);
+
+void range_tower_splice (struct range_tower *,
+ unsigned long int start,
+ unsigned long int old_width,
+ unsigned long int new_width);
+
+void range_tower_set1 (struct range_tower *,
+ unsigned long int start, unsigned long int width);
+void range_tower_set0 (struct range_tower *,
+ unsigned long int start, unsigned long int width);
+
+void range_tower_insert1 (struct range_tower *,
+ unsigned long int start, unsigned long int width);
+void range_tower_insert0 (struct range_tower *,
+ unsigned long int start, unsigned long int width);
+
+void range_tower_delete (struct range_tower *,
+ unsigned long int start, unsigned long int width);
+
+void range_tower_move (struct range_tower *,
+ unsigned long int old_start,
+ unsigned long int new_start,
+ unsigned long int width);
+
+bool range_tower_contains (const struct range_tower *,
+ unsigned long int position);
+unsigned long int range_tower_scan (const struct range_tower *,
+ unsigned long int start);
+
+static inline bool range_tower_is_empty (const struct range_tower *);
+
+#define RANGE_TOWER_FOR_EACH(NODE, START, RANGE_TOWER) \
+ for ((NODE) = range_tower_first (RANGE_TOWER), (START) = 0; \
+ (NODE) && ((START) += (NODE)->n_zeros, true); \
+ (START) += (NODE)->n_ones, \
+ (NODE) = range_tower_next (RANGE_TOWER, NODE))
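+
+/* For example (a sketch), this prints each contiguous region of 1-bits in RT:
+
+       const struct range_tower_node *node;
+       unsigned long int start;
+
+       RANGE_TOWER_FOR_EACH (node, start, rt)
+         printf ("%lu...%lu\n", start,
+                 start + range_tower_node_get_width (node) - 1);
+*/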
+
+static inline const struct range_tower_node *range_tower_first (
+ const struct range_tower *);
+static inline const struct range_tower_node *range_tower_next (
+ const struct range_tower *, const struct range_tower_node *);
+static inline const struct range_tower_node *range_tower_last (
+ const struct range_tower *);
+static inline const struct range_tower_node *range_tower_prev (
+ const struct range_tower *, const struct range_tower_node *);
+unsigned long int range_tower_node_get_start (const struct range_tower_node *);
+unsigned long int range_tower_node_get_end (const struct range_tower_node *);
+static inline unsigned long int range_tower_node_get_width (
+ const struct range_tower_node *);
+\f
+/* Inline functions. */
+
+static inline struct range_tower_node *range_tower_node_from_abt__ (
+ const struct abt_node *);
+static inline struct range_tower_node *range_tower_next__ (
+ const struct range_tower *, const struct range_tower_node *);
+static inline struct range_tower_node *range_tower_first__ (
+ const struct range_tower *);
+static inline struct range_tower_node *range_tower_prev__ (
+ const struct range_tower *, const struct range_tower_node *);
+static inline struct range_tower_node *range_tower_last__ (
+ const struct range_tower *);
+
+/* Returns true if RS contains no 1-bits, false otherwise. */
+static inline bool
+range_tower_is_empty (const struct range_tower *rs)
+{
+ const struct range_tower_node *node =
+ abt_data (rs->abt.root, struct range_tower_node, abt_node);
+
+ return node->n_zeros == ULONG_MAX;
+}
+
+/* Returns the node representing the first contiguous region of
+   1-bits in RS, or a null pointer if RS is empty.
+   Any call to a function that modifies RS, such as
+   range_tower_set1 or range_tower_set0, invalidates the returned
+   node. */
+static inline const struct range_tower_node *
+range_tower_first (const struct range_tower *rs)
+{
+ const struct range_tower_node *node = range_tower_first__ (rs);
+ return node->n_ones ? node : NULL;
+}
+
+/* If NODE is nonnull, returns the node representing the next
+   contiguous region of 1-bits in RS following NODE, or a null
+   pointer if NODE is the last region in RS.
+   If NODE is null, returns the first region in RS, as for
+   range_tower_first.
+   Any call to a function that modifies RS, such as
+   range_tower_set1 or range_tower_set0, invalidates the returned
+   node. */
+static inline const struct range_tower_node *
+range_tower_next (const struct range_tower *rs,
+ const struct range_tower_node *node)
+{
+ if (node != NULL)
+ {
+ const struct range_tower_node *next = range_tower_next__ (rs, node);
+ return next != NULL && next->n_ones ? next : NULL;
+ }
+ else
+ return range_tower_first (rs);
+}
+
+/* Returns the node representing the last contiguous region of
+   1-bits in RS, or a null pointer if RS is empty.
+   Any call to a function that modifies RS, such as
+   range_tower_set1 or range_tower_set0, invalidates the returned
+   node. */
+static inline const struct range_tower_node *
+range_tower_last (const struct range_tower *rs)
+{
+ const struct range_tower_node *node = range_tower_last__ (rs);
+  return node->n_ones ? node : range_tower_prev__ (rs, node);
+}
+
+/* If NODE is nonnull, returns the node representing the previous
+   contiguous region of 1-bits in RS preceding NODE, or a null
+   pointer if NODE is the first region in RS.
+   If NODE is null, returns the last region in RS, as for
+   range_tower_last.
+   Any call to a function that modifies RS, such as
+   range_tower_set1 or range_tower_set0, invalidates the returned
+   node. */
+static inline const struct range_tower_node *
+range_tower_prev (const struct range_tower *rs,
+ const struct range_tower_node *node)
+{
+ return node != NULL ? range_tower_prev__ (rs, node) : range_tower_last (rs);
+}
+
+/* Returns the number of contiguous 1-bits in NODE. */
+static inline unsigned long int
+range_tower_node_get_width (const struct range_tower_node *node)
+{
+ return node->n_ones;
+}
+\f
+/* Internal helper functions. */
+
+/* Returns the range_tower_node corresponding to the given
+ ABT_NODE. Returns a null pointer if ABT_NODE is null. */
+static inline struct range_tower_node *
+range_tower_node_from_abt__ (const struct abt_node *abt_node)
+{
+ return (abt_node
+ ? abt_data (abt_node, struct range_tower_node, abt_node)
+ : NULL);
+}
+
+/* Returns the next range_tower_node in RS after NODE,
+ or a null pointer if NODE is the last node in RS. */
+static inline struct range_tower_node *
+range_tower_next__ (const struct range_tower *rs,
+ const struct range_tower_node *node)
+{
+ return range_tower_node_from_abt__ (abt_next (&rs->abt, &node->abt_node));
+}
+
+/* Returns the first range_tower_node in RS,
+ or a null pointer if RS is empty. */
+static inline struct range_tower_node *
+range_tower_first__ (const struct range_tower *rs)
+{
+ return range_tower_node_from_abt__ (abt_first (&rs->abt));
+}
+
+/* Returns the previous range_tower_node in RS before NODE,
+   or a null pointer if NODE is the first node in RS. */
+static inline struct range_tower_node *
+range_tower_prev__ (const struct range_tower *rs,
+ const struct range_tower_node *node)
+{
+ return range_tower_node_from_abt__ (abt_prev (&rs->abt, &node->abt_node));
+}
+
+/* Returns the last range_tower_node in RS,
+ or a null pointer if RS is empty. */
+static inline struct range_tower_node *
+range_tower_last__ (const struct range_tower *rs)
+{
+ return range_tower_node_from_abt__ (abt_last (&rs->abt));
+}
+
+struct range_tower_node *range_tower_lookup (
+ const struct range_tower *, unsigned long int position,
+ unsigned long int *node_start);
+
+#endif /* libpspp/range-tower.h */
tests/libpspp/llx-test \
tests/libpspp/range-map-test \
tests/libpspp/range-set-test \
+ tests/libpspp/range-tower-test \
tests/libpspp/sparse-array-test \
tests/libpspp/sparse-xarray-test \
tests/libpspp/str-test \
tests_libpspp_range_set_test_CPPFLAGS = $(AM_CPPFLAGS) -DASSERT_LEVEL=10
tests_libpspp_range_set_test_LDADD = src/libpspp/liblibpspp.la gl/libgl.la
+tests_libpspp_range_tower_test_SOURCES = \
+ tests/libpspp/range-tower-test.c
+tests_libpspp_range_tower_test_CPPFLAGS = $(AM_CPPFLAGS) -DASSERT_LEVEL=10
+tests_libpspp_range_tower_test_LDADD = src/libpspp/liblibpspp.la gl/libgl.la
+
tests_libpspp_str_test_SOURCES = \
tests/libpspp/str-test.c
tests_libpspp_str_test_LDADD = src/libpspp/liblibpspp.la gl/libgl.la
tests/libpspp/llx.at \
tests/libpspp/range-map.at \
tests/libpspp/range-set.at \
+ tests/libpspp/range-tower.at \
tests/libpspp/sparse-array.at \
tests/libpspp/sparse-xarray-test.at \
tests/libpspp/str.at \
tests/valgrind/llx-test \
tests/valgrind/range-map-test \
tests/valgrind/range-set-test \
+ tests/valgrind/range-tower-test \
tests/valgrind/sparse-array-test \
tests/valgrind/sparse-xarray-test \
tests/valgrind/str-test \
--- /dev/null
+/* PSPP - a program for statistical analysis.
+ Copyright (C) 2007, 2009, 2010, 2011 Free Software Foundation, Inc.
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>. */
+
+/* This is a test program for the routines defined in
+ range-tower.c. This test program aims to be as comprehensive as
+ possible. With -DNDEBUG, "gcov -b" should report 100%
+ coverage of lines and branches in range-tower.c routines.
+ (Without -DNDEBUG, branches caused by failed assertions will
+ not be taken.) "valgrind --leak-check=yes
+ --show-reachable=yes" should give a clean report, both with
+ and without -DNDEBUG. */
+
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include "libpspp/range-tower.h"
+
+#include <assert.h>
+#include <limits.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "libpspp/compiler.h"
+#include "libpspp/pool.h"
+
+#include "gl/minmax.h"
+#include "gl/xalloc.h"
+\f
+/* Exit with a failure code.
+ (Place a breakpoint on this function while debugging.) */
+static void
+check_die (void)
+{
+ abort ();
+}
+
+/* If OK is not true, prints a message about failure on the
+ current source file and the given LINE and terminates. */
+static void
+check_func (bool ok, int line)
+{
+ if (!ok)
+ {
+ fprintf (stderr, "%s:%d: check failed\n", __FILE__, line);
+ check_die ();
+ }
+}
+
+/* Verifies that EXPR evaluates to true.
+ If not, prints a message citing the calling line number and
+ terminates. */
+#define check(EXPR) check_func ((EXPR), __LINE__)
+\f
+/* A contiguous region. */
+struct region
+ {
+ unsigned long int start; /* Start of region. */
+ unsigned long int end; /* One past the end. */
+ };
+
+/* Number of bits in an unsigned int. */
+#define UINT_BIT (CHAR_BIT * sizeof (unsigned int))
+
+/* Returns the number of contiguous 1-bits in X starting from bit
+ 0.
+ This implementation is designed to be obviously correct, not
+ to be efficient. */
+static int
+count_one_bits (unsigned long int x)
+{
+ int count = 0;
+ while (x & 1)
+ {
+ count++;
+ x >>= 1;
+ }
+ return count;
+}
+
+/* Searches the bits in PATTERN from right to left starting from
+ bit OFFSET for one or more 1-bits. If any are found, sets
+ *START to the bit index of the first and *WIDTH to the number
+ of contiguous 1-bits and returns true. Otherwise, returns
+ false.
+ This implementation is designed to be obviously correct, not
+ to be efficient. */
+static bool
+next_region (unsigned int pattern, unsigned int offset,
+ unsigned long int *start, unsigned long int *width)
+{
+ unsigned int i;
+
+ assert (offset <= UINT_BIT);
+ for (i = offset; i < UINT_BIT; i++)
+ if (pattern & (1u << i))
+ {
+ *start = i;
+ *width = count_one_bits (pattern >> i);
+ return true;
+ }
+ return false;
+}
+
+/* Searches the bits in PATTERN from left to right starting from
+ just beyond bit OFFSET for one or more 1-bits. If any are
+ found, sets *START to the bit index of the first and *WIDTH to
+ the number of contiguous 1-bits and returns true. Otherwise,
+ returns false.
+ This implementation is designed to be obviously correct, not
+ to be efficient. */
+static bool
+prev_region (unsigned int pattern, unsigned int offset,
+ unsigned long int *start, unsigned long int *width)
+{
+ unsigned int i;
+
+ assert (offset <= UINT_BIT);
+ for (i = offset; i-- > 0; )
+ if (pattern & (1u << i))
+ {
+ *start = i;
+ *width = 1;
+ while (i-- > 0 && pattern & (1u << i))
+ {
+ ++*width;
+ --*start;
+ }
+ return true;
+ }
+ return false;
+}
+
+/* Searches the bits in PATTERN from right to left starting from
+   bit OFFSET.  Returns the bit index of the first 1-bit found
+   plus PATTERN_OFFSET, or ULONG_MAX if none is found. */
+static unsigned long int
+next_1bit (unsigned int pattern, unsigned int offset,
+ unsigned int pattern_offset)
+{
+ for (; offset < UINT_BIT; offset++)
+ if (pattern & (1u << offset))
+ return offset + pattern_offset;
+ return ULONG_MAX;
+}
+
+static void
+print_structure (const struct abt_node *node_)
+{
+ struct range_tower_node *node;
+
+ if (node_ == NULL)
+ return;
+ node = abt_data (node_, struct range_tower_node, abt_node);
+ printf ("%lu+%lu/%d", node->n_zeros, node->n_ones, node->abt_node.level);
+ if (node->abt_node.down[0] || node->abt_node.down[1])
+ {
+ printf ("(");
+ print_structure (node->abt_node.down[0]);
+ printf (",");
+ print_structure (node->abt_node.down[1]);
+ printf (")");
+ }
+}
+
+/* Prints the regions in RT to stdout. */
+static void UNUSED
+print_regions (const struct range_tower *rt)
+{
+ const struct range_tower_node *node;
+
+ printf ("contents:");
+ for (node = range_tower_first__ (rt); node != NULL;
+ node = range_tower_next__ (rt, node))
+ printf (" (%lu,%lu)", node->n_zeros, node->n_ones);
+ printf ("\n");
+ printf ("structure:");
+ print_structure (rt->abt.root);
+ printf ("\n");
+}
+
+static void
+check_tree (const struct abt_node *abt_node, unsigned long int *subtree_width)
+{
+ const struct range_tower_node *node = range_tower_node_from_abt__ (abt_node);
+ unsigned long int left_width, right_width;
+
+ if (node == NULL)
+ {
+ *subtree_width = 0;
+ return;
+ }
+
+ check_tree (node->abt_node.down[0], &left_width);
+ check_tree (node->abt_node.down[1], &right_width);
+
+ *subtree_width = node->n_zeros + node->n_ones + left_width + right_width;
+ check (node->subtree_width == *subtree_width);
+}
+
+/* Checks that the regions in RT match the bits in PATTERN. */
+static void
+check_pattern (const struct range_tower *rt, unsigned int pattern,
+ unsigned long int offset)
+{
+ const struct range_tower_node *node;
+ unsigned long int start, start2, width;
+ unsigned long int tree_width;
+ unsigned long int s1, s2;
+ int i;
+
+ check_tree (rt->abt.root, &tree_width);
+ check (tree_width == ULONG_MAX);
+
+ if (offset > ULONG_MAX - 32)
+ {
+ pattern <<= offset - (ULONG_MAX - 32);
+ offset = ULONG_MAX - 32;
+ }
+
+ for (node = rand () % 2 ? range_tower_first (rt) : range_tower_next (rt, NULL),
+ start = width = 0;
+ next_region (pattern, start + width, &start, &width);
+ node = range_tower_next (rt, node))
+ {
+ unsigned long int node_start;
+ unsigned long int x;
+
+ check (node != NULL);
+ check (range_tower_node_get_start (node) == start + offset);
+ check (range_tower_node_get_end (node) == start + offset + width);
+ check (range_tower_node_get_width (node) == width);
+
+ x = start + offset - node->n_zeros;
+ check (range_tower_lookup (rt, x, &node_start) == node);
+ check (node_start == start + offset - node->n_zeros);
+
+ x = start + offset + width - 1;
+ check (range_tower_lookup (rt, x, &node_start) == node);
+ check (node_start == start + offset - node->n_zeros);
+ }
+ check (node == NULL);
+
+ start = width = 0;
+ RANGE_TOWER_FOR_EACH (node, start2, rt)
+ {
+ check (next_region (pattern, start + width, &start, &width));
+ check (start + offset == start2);
+ check (range_tower_node_get_width (node) == width);
+ }
+ check (!next_region (pattern, start + width, &start, &width));
+
+ for (node = rand () % 2 ? range_tower_last (rt) : range_tower_prev (rt, NULL),
+ start = UINT_BIT;
+ prev_region (pattern, start, &start, &width);
+ node = range_tower_prev (rt, node))
+ {
+ check (node != NULL);
+ check (range_tower_node_get_start (node) == offset + start);
+ check (range_tower_node_get_end (node) == offset + start + width);
+ check (range_tower_node_get_width (node) == width);
+ }
+ check (node == NULL);
+
+ /* Scan from all possible positions, resetting the cache each
+ time, to ensure that we get the correct answers without
+ caching. */
+ for (start = 0; start <= 32; start++)
+ {
+ struct range_tower *nonconst_rt = CONST_CAST (struct range_tower *, rt);
+
+ nonconst_rt->cache_end = 0;
+ s1 = range_tower_scan (rt, offset + start);
+ s2 = next_1bit (pattern, start, offset);
+ check (s1 == s2);
+ }
+
+ /* Scan in forward order to exercise expected cache behavior. */
+ for (s1 = range_tower_scan (rt, 0), s2 = next_1bit (pattern, 0, offset); ;
+ s1 = range_tower_scan (rt, s1 + 1), s2 = next_1bit (pattern, (s2 - offset) + 1, offset))
+ {
+ check (s1 == s2);
+ if (s1 == ULONG_MAX)
+ break;
+ }
+
+ /* Scan in random order to frustrate cache. */
+ for (i = 0; i < 32; i++)
+ {
+ start = rand () % 32;
+ s1 = range_tower_scan (rt, start + offset);
+ s2 = next_1bit (pattern, start, offset);
+ check (s1 == s2);
+ }
+
+ /* Test range_tower_scan() with negative cache. */
+ check (!range_tower_contains (rt, 999));
+ if (offset < 1111)
+ check (range_tower_scan (rt, 1111) == ULONG_MAX);
+
+ /* Check for containment without caching. */
+ for (i = 0; i < UINT_BIT; i++)
+ {
+ struct range_tower *nonconst_rt = CONST_CAST (struct range_tower *, rt);
+ nonconst_rt->cache_end = 0;
+ check (range_tower_contains (rt, i + offset)
+ == ((pattern & (1u << i)) != 0));
+ }
+
+ /* Check for containment with caching. */
+ for (i = 0; i < UINT_BIT; i++)
+ check (range_tower_contains (rt, i + offset)
+ == ((pattern & (1u << i)) != 0));
+
+ check (!range_tower_contains (rt,
+ UINT_BIT + rand () % (ULONG_MAX - UINT_BIT * 2)));
+
+ check (range_tower_is_empty (rt) == (pattern == 0));
+}
+
+/* Creates and returns a range tower that contains regions for the
+   bits set in PATTERN. */
+static struct range_tower *
+make_pattern (unsigned int pattern, unsigned long int offset)
+{
+ unsigned long int start = 0;
+ unsigned long int width = 0;
+ struct range_tower *rt = range_tower_create_pool (NULL);
+ while (next_region (pattern, start + width, &start, &width))
+ range_tower_set1 (rt, start + offset, width);
+ check_pattern (rt, pattern, offset);
+ return rt;
+}
+
+/* Returns an unsigned int with bits OFS...OFS+CNT (exclusive)
+   set to 1, other bits set to 0. */
+static unsigned int
+bit_range (unsigned int ofs, unsigned int cnt)
+{
+ assert (ofs < UINT_BIT);
+ assert (cnt <= UINT_BIT);
+ assert (ofs + cnt <= UINT_BIT);
+
+ return cnt < UINT_BIT ? ((1u << cnt) - 1) << ofs : UINT_MAX;
+}
+\f
+/* Tests setting all possible ranges of 1s into all possible range towers (up
+   to a small maximum number of bits). */
+static void
+test_set1 (void)
+{
+ const int positions = 9;
+ unsigned int init_pat;
+ int start, width;
+ int k;
+
+ for (k = 0; k < 2; k++)
+ for (init_pat = 0; init_pat < (1u << positions); init_pat++)
+ for (start = 0; start < positions; start++)
+ for (width = 0; width + start <= positions; width++)
+ {
+ unsigned long int offset = k ? ULONG_MAX - positions : 0;
+ struct range_tower *rt, *rt2;
+ unsigned int final_pat;
+
+ rt = make_pattern (init_pat, offset);
+ range_tower_set1 (rt, offset + start, width);
+ final_pat = init_pat | bit_range (start, width);
+ check_pattern (rt, final_pat, offset);
+ rt2 = range_tower_clone (rt, NULL);
+ check_pattern (rt2, final_pat, offset);
+ range_tower_destroy (rt);
+ range_tower_destroy (rt2);
+ }
+}
+
+/* Tests setting all possible ranges of 0s into all possible range towers (up
+   to a small maximum number of bits). */
+static void
+test_set0 (void)
+{
+ const int positions = 9;
+ unsigned int init_pat;
+ int start, width, k;
+
+ for (k = 0; k < 2; k++)
+ for (init_pat = 0; init_pat < (1u << positions); init_pat++)
+ for (start = 0; start < positions; start++)
+ for (width = 0; start + width <= positions; width++)
+ {
+ unsigned long int offset = k ? ULONG_MAX - positions : 0;
+ struct range_tower *rt;
+ unsigned int final_pat;
+
+ rt = make_pattern (init_pat, offset);
+ range_tower_set0 (rt, offset + start, width);
+ final_pat = init_pat & ~bit_range (start, width);
+ check_pattern (rt, final_pat, offset);
+ range_tower_destroy (rt);
+ }
+}
+
+/* Tests inserting all possible ranges of 0s into all possible range towers
+   (up to a small maximum number of bits). */
+static void
+test_insert0 (void)
+{
+ const int positions = 9;
+ unsigned int init_pat;
+ int start, width, k;
+
+ for (k = 0; k < 2; k++)
+ for (init_pat = 0; init_pat < (1u << positions); init_pat++)
+ for (start = 0; start < positions; start++)
+ for (width = 0; start + width <= positions; width++)
+ {
+ unsigned long int offset = k ? ULONG_MAX - positions : 0;
+ struct range_tower *rt;
+ unsigned int final_pat;
+
+ rt = make_pattern (init_pat, offset);
+ range_tower_insert0 (rt, offset + start, width);
+ final_pat = init_pat & bit_range (0, start);
+ final_pat |= (init_pat & bit_range (start, positions - start)) << width;
+ check_pattern (rt, final_pat, offset);
+ range_tower_destroy (rt);
+ }
+}
+
+/* Tests inserting all possible ranges of 1s into all possible range towers
+   (up to a small maximum number of bits). */
+static void
+test_insert1 (void)
+{
+ const int positions = 9;
+ unsigned int init_pat;
+ int start, width, k;
+
+ for (k = 0; k < 2; k++)
+ for (init_pat = 0; init_pat < (1u << positions); init_pat++)
+ for (start = 0; start < positions; start++)
+ for (width = 0; start + width <= positions; width++)
+ {
+ struct range_tower *rt;
+ unsigned int final_pat;
+
+ rt = make_pattern (init_pat, 0);
+ range_tower_insert1 (rt, start, width);
+ final_pat = init_pat & bit_range (0, start);
+ final_pat |= bit_range (start, width);
+ final_pat |= (init_pat & bit_range (start, positions - start)) << width;
+ check_pattern (rt, final_pat, 0);
+ range_tower_destroy (rt);
+ }
+}
+
+/* Tests deleting all possible ranges from all possible range towers (up to a
+   small maximum number of bits). */
+static void
+test_delete (void)
+{
+ const int positions = 9;
+ unsigned int init_pat;
+ int start, width, k;
+
+ for (k = 0; k < 2; k++)
+ for (init_pat = 0; init_pat < (1u << positions); init_pat++)
+ for (start = 0; start < positions; start++)
+ for (width = 0; start + width <= positions; width++)
+ {
+ unsigned long int offset = k ? ULONG_MAX - positions : 0;
+ struct range_tower *rt;
+ unsigned int final_pat;
+
+ rt = make_pattern (init_pat, offset);
+ range_tower_delete (rt, start + offset, width);
+ final_pat = init_pat & bit_range (0, start);
+ final_pat |= (init_pat & (UINT_MAX << (start + width))) >> width;
+ check_pattern (rt, final_pat, offset);
+ range_tower_destroy (rt);
+ }
+}
+
+/* Tests moving all possible ranges (up to a small maximum number of bits). */
+static void
+test_move (void)
+{
+ const int positions = 9;
+ unsigned int init_pat;
+ int new_start, old_start, width, k;
+
+ for (k = 0; k < 2; k++)
+ for (init_pat = 0; init_pat < (1u << positions); init_pat++)
+ for (width = 0; width <= positions; width++)
+ for (new_start = 0; new_start + width <= positions; new_start++)
+ for (old_start = 0; old_start + width <= positions; old_start++)
+ {
+ unsigned long int offset = k ? ULONG_MAX - positions : 0;
+ struct range_tower *rt;
+ unsigned int final_pat;
+
+ if (new_start == old_start || width == 0)
+ final_pat = init_pat;
+ else if (new_start < old_start)
+ {
+ final_pat = init_pat & bit_range (0, new_start);
+ final_pat |= (init_pat & bit_range (old_start, width)) >> (old_start - new_start);
+ final_pat |= (init_pat & bit_range (new_start, old_start - new_start)) << width;
+ final_pat |= init_pat & bit_range (old_start + width, positions - (old_start + width));
+ }
+ else
+ {
+ final_pat = init_pat & bit_range (0, old_start);
+ final_pat |= (init_pat & bit_range (old_start + width, new_start - old_start)) >> width;
+ final_pat |= (init_pat & bit_range (old_start, width)) << (new_start - old_start);
+ final_pat |= init_pat & bit_range (new_start + width, positions - (new_start + width));
+ }
+
+ rt = make_pattern (init_pat, offset);
+ range_tower_move (rt, old_start + offset, new_start + offset,
+ width);
+ check_pattern (rt, final_pat, offset);
+ range_tower_destroy (rt);
+ }
+}
+
+/* Tests freeing a range tower through a pool. */
+static void
+test_pool (void)
+{
+ struct pool *pool;
+ struct range_tower *rt;
+
+ /* Destroy the range tower, then the pool.
+ Makes sure that this doesn't cause a double-free. */
+ pool = pool_create ();
+ rt = range_tower_create_pool (pool);
+ range_tower_set1 (rt, 1, 10);
+ range_tower_destroy (rt);
+ pool_destroy (pool);
+
+ /* Just destroy the pool.
+ Makes sure that this doesn't cause a leak. */
+ pool = pool_create ();
+ rt = range_tower_create_pool (pool);
+ range_tower_set1 (rt, 1, 10);
+ pool_destroy (pool);
+}
+
+/* Tests range_tower_destroy(NULL). */
+static void
+test_destroy_null (void)
+{
+ range_tower_destroy (NULL);
+}
+\f
+/* Main program. */
+
+struct test
+ {
+ const char *name;
+ const char *description;
+ void (*function) (void);
+ };
+
+static const struct test tests[] =
+ {
+ {
+ "set1",
+ "set1",
+ test_set1
+ },
+ {
+ "set0",
+ "set0",
+ test_set0
+ },
+ {
+ "insert0",
+ "insert0",
+ test_insert0
+ },
+ {
+ "insert1",
+ "insert1",
+ test_insert1
+ },
+ {
+ "delete",
+ "delete",
+ test_delete
+ },
+ {
+ "move",
+ "move",
+ test_move
+ },
+ {
+ "pool",
+ "pool",
+ test_pool
+ },
+ {
+ "destroy-null",
+ "destroy null",
+ test_destroy_null
+ },
+ };
+
+enum { N_TESTS = sizeof tests / sizeof *tests };
+
+int
+main (int argc, char *argv[])
+{
+ int i;
+
+ if (argc != 2)
+ {
+ fprintf (stderr, "exactly one argument required; use --help for help\n");
+ return EXIT_FAILURE;
+ }
+ else if (!strcmp (argv[1], "--help"))
+ {
+ printf ("%s: test range tower library\n"
+ "usage: %s TEST-NAME\n"
+ "where TEST-NAME is one of the following:\n",
+ argv[0], argv[0]);
+ for (i = 0; i < N_TESTS; i++)
+ printf (" %s\n %s\n", tests[i].name, tests[i].description);
+ return 0;
+ }
+ else
+ {
+ for (i = 0; i < N_TESTS; i++)
+ if (!strcmp (argv[1], tests[i].name))
+ {
+ tests[i].function ();
+ return 0;
+ }
+
+ fprintf (stderr, "unknown test %s; use --help for help\n", argv[1]);
+ return EXIT_FAILURE;
+ }
+}
--- /dev/null
+AT_BANNER([range tower library])
+
+m4_define([CHECK_RANGE_TOWER],
+ [AT_SETUP([range-tower -- $1])
+ AT_CHECK([range-tower-test $1])
+ AT_CLEANUP])
+
+CHECK_RANGE_TOWER([set1])
+CHECK_RANGE_TOWER([set0])
+CHECK_RANGE_TOWER([insert0])
+CHECK_RANGE_TOWER([insert1])
+CHECK_RANGE_TOWER([delete])
+CHECK_RANGE_TOWER([move])
+CHECK_RANGE_TOWER([pool])
+CHECK_RANGE_TOWER([destroy-null])