+ /* Walk the range set in reverse, pairing each node with the
+    corresponding region of PATTERN located by prev_region().
+    The starting node is picked at random as either
+    range_set_last() or range_set_prev(rs, NULL), so both
+    (presumably equivalent) ways of obtaining the final node get
+    exercised over repeated runs.  start begins one past the
+    highest possible bit (UINT_BIT) so the first prev_region()
+    call finds the topmost region.  */
+ for (node = rand () % 2 ? range_set_last (rs) : range_set_prev (rs, NULL),
+ start = UINT_BIT;
+ prev_region (pattern, start, &start, &width);
+ node = range_set_prev (rs, node))
+ {
+ check (node != NULL);
+ check (range_set_node_get_start (node) == start);
+ /* end is exclusive: a node covering [start, start + width).  */
+ check (range_set_node_get_end (node) == start + width);
+ check (range_set_node_get_width (node) == width);
+ }
+ /* prev_region() found no more regions, so the node chain must be
+    exhausted at exactly the same point.  */
+ check (node == NULL);
+
+ /* Scan from all possible positions, resetting the cache each
+ time, to ensure that we get the correct answers without
+ caching. */
+ for (start = 0; start <= 32; start++)
+ {
+ /* Cast away const so the internal scan cache can be cleared.
+    NOTE(review): this is only well-defined if the underlying
+    range set object was not itself defined const — presumably
+    the test owns a mutable rs; confirm at the declaration.  */
+ struct range_set *nonconst_rs = (struct range_set *) rs;
+ nonconst_rs->cache_end = 0;
+ s1 = range_set_scan (rs, start);
+ s2 = next_1bit (pattern, start);
+ check (s1 == s2);
+ }
+
+ /* Scan in forward order to exercise expected cache behavior. */
+ for (s1 = range_set_scan (rs, 0), s2 = next_1bit (pattern, 0); ;
+ s1 = range_set_scan (rs, s1 + 1), s2 = next_1bit (pattern, s2 + 1))
+ {
+ check (s1 == s2);
+ /* Both scanners use ULONG_MAX as the "no further 1-bit"
+    sentinel; they must agree on where the set ends.  */
+ if (s1 == ULONG_MAX)
+ break;
+ }
+
+ /* Scan in random order to frustrate cache. */
+ for (i = 0; i < 32; i++)
+ {
+ /* 32 probes at random positions in [0, 31] — the full width of
+    the 32-bit test pattern.  */
+ start = rand () % 32;
+ s1 = range_set_scan (rs, start);
+ s2 = next_1bit (pattern, start);
+ check (s1 == s2);
+ }
+
+ /* Test range_set_scan() with negative cache. */
+ /* Positions 999 and 1111 lie beyond the 32-bit pattern, so they
+    cannot be in the set.  The contains() query presumably primes a
+    "not present" cache entry that the subsequent scan must not be
+    confused by — TODO confirm against range_set_scan()'s cache
+    implementation.  */
+ check (!range_set_contains (rs, 999));
+ check (range_set_scan (rs, 1111) == ULONG_MAX);
+