}
}
}
+
+/* Returns the smallest position of a 1-bit greater than or
+ equal to START. Returns ULONG_MAX if there is no 1-bit with
+ position greater than or equal to START. */
+unsigned long int
+range_set_scan (const struct range_set *rs_, unsigned long int start)
+{
+  /* Cast away "const" only so that the lookup cache can be
+     updated; the set's contents are never modified. */
+  struct range_set *rs = (struct range_set *) rs_;
+  unsigned long int retval = ULONG_MAX;
+  struct bt_node *bt_node;
+
+  /* Fast path: the cached range contains START and the cache
+     says that range is present in the set. */
+  if (start < rs->cache_end && start >= rs->cache_start && rs->cache_value)
+    return start;
+  /* Descend the balanced tree of disjoint ranges.  RETVAL
+     tracks the start of the leftmost range seen so far that
+     lies entirely to the right of START. */
+  bt_node = rs->bt.root;
+  while (bt_node != NULL)
+    {
+      struct range_set_node *node = range_set_node_from_bt__ (bt_node);
+      if (start < node->start)
+        {
+          /* NODE is to the right of START: remember its start
+             as a candidate answer, then look for a closer range
+             in the left subtree. */
+          retval = node->start;
+          bt_node = node->bt_node.down[0];
+        }
+      else if (start >= node->end)
+        /* NODE is entirely to the left of START: look right. */
+        bt_node = node->bt_node.down[1];
+      else
+        {
+          /* START falls inside NODE: record the hit in the
+             cache and return START itself. */
+          rs->cache_start = node->start;
+          rs->cache_end = node->end;
+          rs->cache_value = true;
+          return start;
+        }
+    }
+  return retval;
+}
\f
/* Compares `range_set_node's A and B and returns a strcmp-like
return value. */
bool range_set_allocate_fully (struct range_set *, unsigned long int request,
unsigned long int *start);
bool range_set_contains (const struct range_set *, unsigned long int position);
+unsigned long int range_set_scan (const struct range_set *,
+ unsigned long int start);
static inline bool range_set_is_empty (const struct range_set *);
return false;
}
+/* Searches the bits in PATTERN from right to left starting from
+ bit OFFSET. Returns the bit index of the first 1-bit found,
+ or ULONG_MAX if none is found. */
+static unsigned long int
+next_1bit (unsigned int pattern, unsigned int offset)
+{
+  /* Scan upward from bit OFFSET toward the high-order bit,
+     returning the position of the first 1-bit found.  Acts as
+     the brute-force reference for range_set_scan(). */
+  for (; offset < UINT_BIT; offset++)
+    if (pattern & (1u << offset))
+      return offset;
+  /* No 1-bit at position OFFSET or above. */
+  return ULONG_MAX;
+}
+
/* Prints the regions in RS to stdout. */
static void UNUSED
print_regions (const struct range_set *rs)
{
const struct range_set_node *node;
unsigned long int start, width;
+ unsigned long int s1, s2;
int i;
for (node = rand () % 2 ? range_set_first (rs) : range_set_next (rs, NULL),
}
check (node == NULL);
+ /* Scan from all possible positions, resetting the cache each
+ time, to ensure that we get the correct answers without
+ caching. */
+ for (start = 0; start <= 32; start++)
+ {
+ struct range_set *nonconst_rs = (struct range_set *) rs;
+ nonconst_rs->cache_end = 0;
+ s1 = range_set_scan (rs, start);
+ s2 = next_1bit (pattern, start);
+ check (s1 == s2);
+ }
+
+ /* Scan in forward order to exercise expected cache behavior. */
+ for (s1 = range_set_scan (rs, 0), s2 = next_1bit (pattern, 0); ;
+ s1 = range_set_scan (rs, s1 + 1), s2 = next_1bit (pattern, s2 + 1))
+ {
+ check (s1 == s2);
+ if (s1 == ULONG_MAX)
+ break;
+ }
+
+ /* Scan in random order to frustrate cache. */
+ for (i = 0; i < 32; i++)
+ {
+ start = rand () % 32;
+ s1 = range_set_scan (rs, start);
+ s2 = next_1bit (pattern, start);
+ check (s1 == s2);
+ }
+
+ /* Test range_set_scan() with negative cache. */
+ check (!range_set_contains (rs, 999));
+ check (range_set_scan (rs, 1111) == ULONG_MAX);
+
for (i = 0; i < UINT_BIT; i++)
check (range_set_contains (rs, i) == ((pattern & (1u << i)) != 0));
check (!range_set_contains (rs,