1 /* pspp - a program for statistical analysis.
2 Copyright (C) 2007, 2009, 2011 Free Software Foundation, Inc.
4 This program is free software: you can redistribute it and/or modify
5 it under the terms of the GNU General Public License as published by
6 the Free Software Foundation, either version 3 of the License, or
7 (at your option) any later version.
9 This program is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 GNU General Public License for more details.
14 You should have received a copy of the GNU General Public License
15 along with this program. If not, see <http://www.gnu.org/licenses/>. */
17 /* Bitmap, implemented as a balanced binary tree. */
19 /* If you add routines in this file, please add a corresponding
20 test to range-tower-test.c. This test program should achieve
21 100% coverage of lines and branches in this code, as reported
22 by "gcov -b". */
26 #include "libpspp/range-tower.h"
31 #include "libpspp/assertion.h"
32 #include "libpspp/compiler.h"
33 #include "libpspp/pool.h"
35 #include "gl/minmax.h"
36 #include "gl/xalloc.h"
38 static void reaugment_range_tower_node (struct abt_node *, const void *aux);
39 static void delete_node (struct range_tower *, struct range_tower_node *);
41 static void destroy_pool (void *);
/* Recursively prints the zeros/ones widths and AVL-ish level of each node in
   the subtree rooted at NODE_, for debugging.
   NOTE(review): extraction dropped lines here (the return-type line, braces,
   and presumably a null-NODE_ guard) -- compare with upstream before editing. */
44 print_structure (const struct abt_node *node_)
46 struct range_tower_node *node;
50 node = abt_data (node_, struct range_tower_node, abt_node);
51 printf ("%lu+%lu/%d", node->n_zeros, node->n_ones, node->abt_node.level);
52 if (node->abt_node.down[0] || node->abt_node.down[1])
55 print_structure (node->abt_node.down[0]);
57 print_structure (node->abt_node.down[1]);
62 /* Prints the regions in RT to stdout. */
/* Debug helper: dumps each node as "(n_zeros,n_ones)" in order, then the
   tree shape via print_structure().
   NOTE(review): return type, braces, and trailing newline printf()s appear
   to have been lost in extraction. */
64 print_regions (const char *title, const struct range_tower *rt)
66 const struct range_tower_node *node;
68 printf ("%s:", title);
69 for (node = range_tower_first__ (rt); node != NULL;
70 node = range_tower_next__ (rt, node))
71 printf (" (%lu,%lu)", node->n_zeros, node->n_ones);
73 printf ("structure:");
74 print_structure (rt->abt.root);
/* Returns the range_tower_node in which ABT_NODE is embedded. */
78 static struct range_tower_node *
79 range_tower_node_from_abt_node (const struct abt_node *abt_node)
81 return abt_data (abt_node, struct range_tower_node, abt_node);
84 /* Returns the total width (zeros and ones) of the nodes in the subtree rooted
85 at P, or 0 if P is null. */
86 static unsigned long int
87 subtree_width (const struct abt_node *p)
89 return p != NULL ? range_tower_node_from_abt_node (p)->subtree_width : 0;
92 /* Returns the position of the first 1-bit in NODE.
94 The performance of this function is O(lg n) in the number of nodes in the
95 range tower. It is often possible to avoid calling this function, either by
96 taking advantage of the NODE_START parameter to tower_lookup or by
97 incrementally keeping track of height while iterating through a tower. In
98 the former case the asymptotic performance is no different, since
99 tower_lookup is also O(lg n), but in the latter case performance improves
100 from O(lg n) to O(1). */
/* NOTE(review): the return-type line, the loop's p = p->up step, and the
   final "return start;" are missing from this extraction. */
102 range_tower_node_get_start (const struct range_tower_node *node)
104 const struct abt_node *p = &node->abt_node;
/* Start with the widths of everything left of NODE within its own subtree,
   plus NODE's leading zeros. */
105 unsigned long start = subtree_width (p->down[0]) + range_tower_node_from_abt_node (p)->n_zeros;
/* Walk to the root; whenever P is a right child, everything in the parent
   and the parent's left subtree also lies before NODE. */
106 while (p->up != NULL)
108 if (p == p->up->down[1])
110 const struct range_tower_node *up
111 = range_tower_node_from_abt_node (p->up);
112 start += subtree_width (p->up->down[0]) + up->n_zeros + up->n_ones;
119 /* Returns one past the position of the last 1-bit in NODE.
121 Like range_tower_node_get_start(), the performance of this function is O(lg
122 n) in the number of nodes in the range tower. */
124 range_tower_node_get_end (const struct range_tower_node *node)
126 return range_tower_node_get_start (node) + node->n_ones;
129 /* Creates and returns a new, empty range tower. */
/* NOTE(review): the "struct range_tower *" return-type line is missing here. */
131 range_tower_create (void)
133 return range_tower_create_pool (NULL);
/* Common allocation/initialization for a range tower, registered with POOL if
   POOL is nonnull.
   NOTE(review): extraction dropped lines (a "if (pool != NULL)" guard before
   pool_register, cache initialization, and "return rt;") -- verify upstream. */
136 static struct range_tower *
137 range_tower_create_pool__ (struct pool *pool)
139 struct range_tower *rt = xmalloc (sizeof *rt);
143 pool_register (pool, destroy_pool, rt);
145 abt_init (&rt->abt, NULL, reaugment_range_tower_node, NULL);
151 /* Creates and returns a new, empty range tower in the given POOL. */
/* An empty tower is represented by a single node of ULONG_MAX zeros.
   NOTE(review): "node->n_ones = 0;" and "return rt;" appear to be missing
   from this extraction. */
153 range_tower_create_pool (struct pool *pool)
155 struct range_tower_node *node;
156 struct range_tower *rt;
158 rt = range_tower_create_pool__ (pool);
160 node = xmalloc (sizeof *node);
161 node->n_zeros = ULONG_MAX;
163 abt_insert_after (&rt->abt, NULL, &node->abt_node);
168 /* Creates and returns a clone of OLD range tower in the given POOL
169 (which may be null). */
/* NOTE(review): "prev_node = NULL;" before the loop and the final
   "return new;" are missing from this extraction. */
171 range_tower_clone (const struct range_tower *old, struct pool *pool)
173 const struct range_tower_node *old_node;
174 struct abt_node *prev_node;
175 struct range_tower *new;
177 new = range_tower_create_pool__ (pool);
/* Copy nodes in order, always appending after the previous copy so the new
   tree is built left-to-right. */
179 for (old_node = range_tower_first__ (old); old_node != NULL;
180 old_node = range_tower_next__ (old, old_node))
182 struct range_tower_node *new_node;
184 new_node = xmalloc (sizeof *new_node);
185 new_node->n_zeros = old_node->n_zeros;
186 new_node->n_ones = old_node->n_ones;
188 abt_insert_after (&new->abt, prev_node, &new_node->abt_node);
189 prev_node = &new_node->abt_node;
194 /* Destroys range tower RT. */
/* NOTE(review): the usual "if (rt != NULL)" guard and the final "free (rt);"
   are not visible in this extraction. */
196 range_tower_destroy (struct range_tower *rt)
200 if (rt->pool != NULL)
201 pool_unregister (rt->pool, rt);
202 while (!abt_is_empty (&rt->abt))
203 delete_node (rt, range_tower_first__ (rt));
208 /* Sets the WIDTH bits starting at START in RT to 1-bits. */
/* Walks the tower from the node containing START, converting runs of zeros
   to ones, merging/splitting nodes as needed, looping until WIDTH bits are
   consumed.
   NOTE(review): many lines (braces, "return;" statements, the enclosing
   "while (width > 0)" style loop, cache invalidation body) were lost in
   extraction; the control flow below is NOT complete as shown. */
210 range_tower_set1 (struct range_tower *rt,
211 unsigned long int start, unsigned long int width)
213 struct range_tower_node *node;
214 unsigned long int node_start;
/* Reject ranges that would wrap past ULONG_MAX. */
216 assert (width == 0 || start + width - 1 >= start);
218 node = range_tower_lookup (rt, start, &node_start);
221 unsigned long int node_ofs = start - node_start;
223 if (node_ofs >= node->n_zeros)
225 /* There are already some 1-bits here, so skip them. */
226 unsigned long ones_left = (node->n_zeros + node->n_ones) - node_ofs;
227 if (width <= ones_left)
232 node_start += node->n_zeros + node->n_ones;
234 node = range_tower_next__ (rt, node);
237 /* Invalidate cache. */
244 struct range_tower_node *prev = range_tower_prev__ (rt, node);
245 if (width >= node->n_zeros)
247 /* All zeros in NODE are replaced by ones. Change NODE's
248 entire width into PREV's trailing ones, e.g. 00001111
249 00001111 becomes 0000111111111111. */
250 int node_width = node->n_zeros + node->n_ones;
251 abt_delete (&rt->abt, &node->abt_node);
252 prev->n_ones += node_width;
253 abt_reaugmented (&rt->abt, &prev->abt_node);
254 if (width <= node_width)
257 /* Go around again with NODE replaced by PREV's new
261 node = range_tower_next__ (rt, prev);
262 node_start += node_width;
266 /* Leading zeros in NODE change into trailing ones in PREV,
267 but trailing zeros in NODE remain, e.g. 00001111 00001111
268 becomes 0000111111 001111. */
269 node->n_zeros -= width;
270 abt_reaugmented (&rt->abt, &node->abt_node);
272 prev->n_ones += width;
273 abt_reaugmented (&rt->abt, &prev->abt_node);
279 if (width >= node->n_zeros)
281 /* All zeros in NODE are replaced by ones, e.g. 00001111
283 node->n_ones += node->n_zeros;
285 if (width <= node->n_ones)
288 start += node->n_ones;
289 node_start += node->n_ones;
290 width -= node->n_ones;
291 node = range_tower_next__ (rt, node);
295 /* Leading zeros in NODE (which starts at offset 0) are
296 replaced by ones, but some zeros remain. This requires a
297 node split, e.g. 00001111 becomes 11 001111. */
298 struct range_tower_node *new_node;
300 node->n_zeros -= width;
301 abt_reaugmented (&rt->abt, &node->abt_node);
303 new_node = xmalloc (sizeof *new_node);
304 new_node->n_zeros = 0;
305 new_node->n_ones = width;
306 abt_insert_before (&rt->abt, &node->abt_node,
307 &new_node->abt_node);
314 unsigned long int zeros_left = node->n_zeros - node_ofs;
315 if (width >= zeros_left)
317 /* Trailing zeros in NODE are replaced by ones, but leading
318 zeros remain, e.g. 00001111 becomes 00111111. */
319 node->n_zeros -= zeros_left;
320 node->n_ones += zeros_left;
321 if (width <= node->n_ones)
323 start += node->n_ones;
324 width -= node->n_ones;
325 node_start += node->n_zeros + node->n_ones;
326 node = range_tower_next__ (rt, node);
330 /* Zeros that are neither leading or trailing turn into ones.
331 Split the node into two nodes, e.g. 00001111 becomes 011
333 struct range_tower_node *new_node;
335 new_node = xmalloc (sizeof *new_node);
336 new_node->n_ones = node->n_ones;
337 new_node->n_zeros = zeros_left - width;
339 node->n_zeros = node_ofs;
340 node->n_ones = width;
341 abt_reaugmented (&rt->abt, &node->abt_node);
343 abt_insert_after (&rt->abt, &node->abt_node,
344 &new_node->abt_node);
351 /* Sets the WIDTH bits starting at START in RT to 0-bits. */
/* Mirror image of range_tower_set1(): converts runs of ones to zeros,
   merging with neighbors or splitting nodes as needed.
   NOTE(review): braces, "return;" lines, the outer loop, and cache
   invalidation were dropped by extraction -- the visible flow is partial. */
353 range_tower_set0 (struct range_tower *rt,
354 unsigned long int start, unsigned long int width)
356 struct range_tower_node *node;
357 unsigned long int node_start;
/* Reject ranges that would wrap past ULONG_MAX. */
359 assert (width == 0 || start + width - 1 >= start);
361 node = range_tower_lookup (rt, start, &node_start);
364 unsigned long int node_ofs = start - node_start;
366 if (node_ofs < node->n_zeros)
368 /* Deleting zeros is a no-op, so skip them. */
369 unsigned long zeros_left = node->n_zeros - node_ofs;
370 if (zeros_left >= width)
372 /* We are deleting only existing zeros. Nothing to do. */
378 node_ofs = node->n_zeros;
383 if (node_ofs == node->n_zeros)
385 if (node->n_ones > width)
387 /* DELTA leading ones within NODE turn into zeros, but some ones
388 remain, e.g. 00001111 becomes 00111111. No reaugmentation
389 because n_zeros + n_ones doesn't change. */
390 node->n_zeros += width;
391 node->n_ones -= width;
396 /* All ones in NODE turn into zeros, so merge NODE with the
397 following node, e.g. 00001111 00001111 becomes
398 0000000000001111, and then do it again with the merged
400 unsigned long int next_zeros, next_ones;
401 struct range_tower_node *next;
403 next = range_tower_next__ (rt, node);
406 node->n_zeros += node->n_ones;
411 next_zeros = next->n_zeros;
412 next_ones = next->n_ones;
413 abt_delete (&rt->abt, &next->abt_node);
415 node->n_zeros += node->n_ones + next_zeros;
416 node->n_ones = next_ones;
417 abt_reaugmented (&rt->abt, &node->abt_node);
420 else if (node_ofs + width >= node->n_zeros + node->n_ones)
422 /* Trailing ones in NODE turn into zeros, but leading ones remain,
423 e.g. 000011{11} 00001111 becomes 000011 {00}00001111. Give the
424 trailing ones to the next node as zeros and go around again with
426 struct range_tower_node *next;
427 unsigned long int delta;
429 delta = node->n_ones - (node_ofs - node->n_zeros);
430 node->n_ones -= delta;
431 abt_reaugmented (&rt->abt, &node->abt_node);
433 next = range_tower_next__ (rt, node);
436 struct range_tower_node *new_node;
438 new_node = xmalloc (sizeof *new_node);
439 new_node->n_zeros = delta;
440 new_node->n_ones = 0;
/* NOTE(review): inserting before NULL appends at the end of the tower;
   this branch presumably handles NODE being the last node -- confirm the
   missing "if (next == NULL)" style guard upstream. */
442 abt_insert_before (&rt->abt, NULL, &new_node->abt_node);
446 next->n_zeros += delta;
447 abt_reaugmented (&rt->abt, &next->abt_node);
449 node_start += node->n_zeros + node->n_ones;
455 /* Ones that are neither leading or trailing turn into zeros,
456 e.g. 00001111 becomes 00001 001. Split the node into two nodes
458 unsigned long int end = start + width;
459 struct range_tower_node *new_node;
461 new_node = xmalloc (sizeof *new_node);
462 new_node->n_zeros = width;
463 new_node->n_ones = (node_start + node->n_zeros + node->n_ones) - end;
465 node->n_ones = node_ofs - node->n_zeros;
466 abt_reaugmented (&rt->abt, &node->abt_node);
468 abt_insert_after (&rt->abt, &node->abt_node, &new_node->abt_node);
/* Internal helper: removes WIDTH bits starting at START from RT, shifting
   later bits down, without re-extending the tower to full width (callers such
   as range_tower_delete() restore the trailing zeros).
   NOTE(review): the static return-type line, the enclosing loop, braces, and
   several "return;"s were dropped in extraction. */
475 range_tower_delete__ (struct range_tower *rt,
476 unsigned long int start, unsigned long int width)
478 struct range_tower_node *node;
479 unsigned long int node_start;
482 node = range_tower_lookup (rt, start, &node_start);
485 unsigned long int node_ofs = start - node_start;
487 if (node_ofs < node->n_zeros)
489 if (node_ofs + width < node->n_zeros)
/* Deleted span lies entirely inside NODE's zeros: just shrink them. */
491 node->n_zeros -= width;
492 abt_reaugmented (&rt->abt, &node->abt_node);
495 else if (node_ofs > 0)
/* Deleted span covers NODE's trailing zeros (and possibly more). */
497 width -= node->n_zeros - node_ofs;
498 node->n_zeros = node_ofs;
499 abt_reaugmented (&rt->abt, &node->abt_node);
502 /* Continue with 1-bits. */
504 else if (width < node->n_zeros + node->n_ones)
/* Deleted span begins at NODE's start but ends inside NODE's ones:
   the surviving ones merge into the previous node. */
506 struct range_tower_node *prev = range_tower_prev__ (rt, node);
507 unsigned long int ones_left;
509 ones_left = (node->n_zeros + node->n_ones) - width;
512 abt_delete (&rt->abt, &node->abt_node);
513 prev->n_ones += ones_left;
514 abt_reaugmented (&rt->abt, &prev->abt_node);
519 node->n_ones = ones_left;
520 abt_reaugmented (&rt->abt, &node->abt_node);
526 /* Delete entire node. */
527 struct range_tower_node *next = range_tower_next__ (rt, node);
529 width -= node->n_zeros + node->n_ones;
530 abt_delete (&rt->abt, &node->abt_node);
539 if (node_ofs + width < node->n_zeros + node->n_ones)
/* Deleted span lies entirely inside NODE's ones. */
541 node->n_ones -= width;
542 abt_reaugmented (&rt->abt, &node->abt_node);
545 else if (node_ofs > node->n_zeros)
/* Deleted span covers NODE's trailing ones; keep the leading ones. */
547 unsigned long int ones_ofs = node_ofs - node->n_zeros;
548 width -= node->n_ones - ones_ofs;
549 node->n_ones = ones_ofs;
550 abt_reaugmented (&rt->abt, &node->abt_node);
553 /* continue with next node */
554 node_start += node->n_zeros + node->n_ones;
555 node = range_tower_next__ (rt, node);
559 /* Merge with next node */
560 struct range_tower_node *next = range_tower_next__ (rt, node);
563 unsigned long int next_zeros = next->n_zeros;
564 unsigned long int next_ones = next->n_ones;
566 abt_delete (&rt->abt, &next->abt_node);
568 width -= node->n_ones;
570 node->n_zeros += next_zeros;
571 node->n_ones = next_ones;
572 abt_reaugmented (&rt->abt, &node->abt_node);
580 abt_reaugmented (&rt->abt, &node->abt_node);
/* Removes WIDTH bits starting at START, shifting later bits down by WIDTH,
   then restores the tower to full (ULONG_MAX) width by appending WIDTH
   trailing zeros -- either extended onto the last node, or as a new node when
   the tower ends in ones.
   NOTE(review): the return-type line, a "if (width == 0) return;" guard, and
   closing braces are missing from this extraction. */
588 range_tower_delete (struct range_tower *rt,
589 unsigned long int start, unsigned long int width)
591 struct range_tower_node *node;
596 assert (start + width - 1 >= start);
598 range_tower_delete__ (rt, start, width);
600 node = range_tower_last__ (rt);
601 if (node != NULL && node->n_ones == 0)
/* Last node already ends in zeros: widen it. */
603 node->n_zeros += width;
604 abt_reaugmented (&rt->abt, &node->abt_node);
608 struct range_tower_node *new_node;
610 new_node = xmalloc (sizeof *new_node);
611 new_node->n_zeros = width;
612 new_node->n_ones = 0;
/* Inserting before NULL appends at the end of the tower. */
614 abt_insert_before (&rt->abt, NULL, &new_node->abt_node);
/* Inserts WIDTH 0-bits at position START, which must lie within or
   immediately after NODE (whose start offset is *NODE_STARTP).  Updates
   *NODE_STARTP and returns the node following the insertion point.
   NOTE(review): braces and at least one "return" line (probably
   "return node;" / "return range_tower_next__ (...)") are missing from this
   extraction -- confirm upstream. */
618 static struct range_tower_node *
619 range_tower_insert0__ (struct range_tower *rt, struct range_tower_node *node,
620 unsigned long int *node_startp,
621 unsigned long int start, unsigned long int width)
623 unsigned long int node_ofs = start - *node_startp;
625 if (node_ofs <= node->n_zeros)
627 /* 00+00+001111 => 0000001111. */
628 node->n_zeros += width;
629 abt_reaugmented (&rt->abt, &node->abt_node);
635 /* 000011+00+11 => 000011 0011. */
636 struct range_tower_node *new_node;
638 new_node = xmalloc (sizeof *new_node);
639 new_node->n_zeros = width;
640 new_node->n_ones = node->n_zeros + node->n_ones - node_ofs;
642 node->n_ones -= new_node->n_ones;
643 abt_reaugmented (&rt->abt, &node->abt_node);
644 abt_insert_after (&rt->abt, &node->abt_node, &new_node->abt_node);
646 *node_startp += node->n_zeros + node->n_ones;
/* Inserts WIDTH 0-bits at START in RT, shifting later bits up.  Bits shifted
   past ULONG_MAX are truncated first (via range_tower_delete__) so the tower
   keeps its fixed total width.
   NOTE(review): the return-type line and several braces/returns are missing;
   the "start + width == ULONG_MAX" special case at line 660 presumably reads
   differently upstream -- verify before relying on it. */
652 range_tower_insert0 (struct range_tower *rt,
653 unsigned long int start, unsigned long int width)
658 assert (width == 0 || start + width - 1 >= start);
660 if (start + width == ULONG_MAX)
661 range_tower_set0 (rt, start, width);
664 struct range_tower_node *node;
665 unsigned long int node_start;
/* Drop the top WIDTH bits so the insertion keeps total width constant. */
667 range_tower_delete__ (rt, ULONG_MAX - width, width);
669 node = range_tower_lookup (rt, start, &node_start);
670 range_tower_insert0__ (rt, node, &node_start, start, width);
/* Inserts WIDTH 1-bits at position START relative to NODE (whose start offset
   is *NODE_STARTP), updating *NODE_STARTP; returns a node adjacent to the
   insertion (see caller usage in range_tower_move).
   NOTE(review): braces and "return" statements were dropped in extraction. */
674 static struct range_tower_node *
675 range_tower_insert1__ (struct range_tower *rt, struct range_tower_node *node,
676 unsigned long int *node_startp,
677 unsigned long int start, unsigned long int width)
680 unsigned long int node_start = *node_startp;
681 unsigned long int node_ofs = start - node_start;
683 if (node_ofs >= node->n_zeros)
/* Insertion point is inside (or right after) NODE's ones: widen them. */
685 node->n_ones += width;
686 abt_reaugmented (&rt->abt, &node->abt_node);
689 else if (node_ofs == 0 && node_start > 0)
/* Insertion at NODE's start: append to the previous node's ones. */
691 struct range_tower_node *prev = range_tower_prev__ (rt, node);
693 prev->n_ones += width;
694 abt_reaugmented (&rt->abt, &prev->abt_node);
696 *node_startp += width;
701 /* 00001111 => 0+11+0001111 => 011 0001111 */
702 struct range_tower_node *new_node;
704 new_node = xmalloc (sizeof *new_node);
705 new_node->n_zeros = node->n_zeros - node_ofs;
706 new_node->n_ones = node->n_ones;
708 node->n_zeros = node_ofs;
709 node->n_ones = width;
710 abt_reaugmented (&rt->abt, &node->abt_node);
712 abt_insert_after (&rt->abt, &node->abt_node, &new_node->abt_node);
714 *node_startp += node->n_zeros + node->n_ones;
/* Inserts WIDTH 1-bits at START in RT, shifting later bits up; the top WIDTH
   bits are truncated first so total width stays constant.
   NOTE(review): the return-type line and braces are missing from this
   extraction. */
720 range_tower_insert1 (struct range_tower *rt,
721 unsigned long int start, unsigned long int width)
723 struct range_tower_node *node;
724 unsigned long int node_start;
/* Drop the top WIDTH bits so the insertion keeps total width constant. */
729 range_tower_delete__ (rt, ULONG_MAX - width, width);
731 assert (width == 0 || start + width - 1 >= start);
733 node = range_tower_lookup (rt, start, &node_start);
734 range_tower_insert1__ (rt, node, &node_start, start, width);
/* Moves the WIDTH bits starting at OLD_START so that they start at NEW_START
   instead, preserving their values; works chunk-by-chunk, extracting a run of
   zeros+ones at OLD_START and re-inserting it at NEW_START.
   NOTE(review): this extraction is missing the return-type line, the main
   "while (width > 0)" loop header, many braces/guards, and several argument
   lists (e.g. the insert0__/insert1__ calls at lines 820/828 are cut mid
   call) -- do NOT edit from this view; diff against upstream first. */
738 range_tower_move (struct range_tower *rt,
739 unsigned long int old_start,
740 unsigned long int new_start,
741 unsigned long int width)
743 unsigned long int node_start;
746 if (width == 0 || old_start == new_start)
/* Overflow guards for both source and destination ranges. */
749 assert (old_start + width - 1 >= old_start);
750 assert (new_start + width - 1 >= new_start);
755 struct range_tower_node *node;
756 unsigned long int node_ofs;
757 unsigned long int zeros, ones;
759 node = range_tower_lookup (rt, old_start, &node_start);
760 node_ofs = old_start - node_start;
762 if (node_ofs >= node->n_zeros)
/* Chunk begins inside NODE's ones: take only ones this round. */
764 unsigned long int max_ones;
767 max_ones = (node->n_zeros + node->n_ones) - node_ofs;
768 ones = MIN (width, max_ones);
772 unsigned long int max_zeros;
/* Chunk begins inside NODE's zeros: take zeros, then as many ones
   as still fit in WIDTH. */
774 max_zeros = node->n_zeros - node_ofs;
775 zeros = MIN (width, max_zeros);
777 ones = MIN (width - zeros, node->n_ones);
/* Remove the extracted chunk from NODE... */
782 node->n_zeros -= zeros;
783 node->n_ones -= ones;
784 abt_reaugmented (&rt->abt, &node->abt_node);
/* ...and clean up NODE if it became degenerate (empty, or mergeable
   with a neighbor). */
786 if (node->n_zeros == 0)
788 if (node->n_ones == 0)
789 abt_delete (&rt->abt, &node->abt_node);
790 else if (node_start > 0)
792 /* Merge with previous. */
793 unsigned long int n_ones = node->n_ones;
794 struct range_tower_node *prev = range_tower_prev__ (rt, node);
796 abt_delete (&rt->abt, &node->abt_node);
797 prev->n_ones += n_ones;
798 abt_reaugmented (&rt->abt, &prev->abt_node);
801 else if (node->n_ones == 0)
803 struct range_tower_node *next = range_tower_next__ (rt, node);
806 /* Merge with next. */
807 unsigned long int n_zeros = node->n_zeros;
809 abt_delete (&rt->abt, &node->abt_node);
810 next->n_zeros += n_zeros;
811 abt_reaugmented (&rt->abt, &next->abt_node);
/* Re-insert the extracted zeros+ones at the destination. */
815 if (new_start < old_start)
817 node = range_tower_lookup (rt, new_start, &node_start);
820 node = range_tower_insert0__ (rt, node, &node_start,
828 node = range_tower_insert1__ (rt, node, &node_start,
836 unsigned long int remaining = width - (zeros + ones);
838 if (new_start + remaining < ULONG_MAX - (zeros + ones))
840 node = range_tower_lookup (rt, new_start + remaining,
844 node = range_tower_insert0__ (rt, node, &node_start,
845 new_start + remaining, zeros);
851 node = range_tower_insert1__ (rt, node, &node_start,
852 new_start + remaining, ones);
858 node = range_tower_last__ (rt);
863 struct range_tower_node *new_node;
865 new_node = xmalloc (sizeof *new_node);
866 new_node->n_zeros = zeros;
867 new_node->n_ones = 0;
869 abt_insert_after (&rt->abt, &node->abt_node,
870 &new_node->abt_node);
872 node_start += node->n_zeros + node->n_ones;
877 node->n_zeros += zeros;
878 abt_reaugmented (&rt->abt, &node->abt_node);
883 node->n_ones += ones;
884 abt_reaugmented (&rt->abt, &node->abt_node);
887 new_start += zeros + ones;
890 width -= zeros + ones;
895 /* Returns true if there is a 1-bit at the given POSITION in RT,
/* ...false otherwise.  Caches the run containing POSITION (cache_start,
   cache_end, cache_value) so repeated nearby queries skip the O(lg n) lookup.
   NOTE(review): the "bool" return-type line and several braces/returns are
   missing from this extraction. */
898 range_tower_contains (const struct range_tower *rt_, unsigned long int position)
/* The cache fields are mutable even though RT_ is logically const. */
900 struct range_tower *rt = CONST_CAST (struct range_tower *, rt_);
901 if (position >= rt->cache_end || position < rt->cache_start)
903 struct range_tower_node *node;
904 unsigned long int node_start;
906 node = range_tower_lookup (rt, position, &node_start);
907 if (position < node_start + node->n_zeros)
/* POSITION falls in the node's run of zeros. */
909 rt->cache_start = node_start;
910 rt->cache_end = node_start + node->n_zeros;
911 rt->cache_value = false;
/* POSITION falls in the node's run of ones. */
915 rt->cache_start = node_start + node->n_zeros;
916 rt->cache_end = rt->cache_start + node->n_ones;
917 rt->cache_value = true;
920 return rt->cache_value;
923 /* Returns the smallest position of a 1-bit greater than or
924 equal to START. Returns ULONG_MAX if there is no 1-bit with
925 position greater than or equal to START. */
/* Also refreshes the lookup cache as a side effect.
   NOTE(review): the return-type line, braces, and the final
   "return ULONG_MAX;" are missing from this extraction. */
927 range_tower_scan (const struct range_tower *rt_, unsigned long int start)
929 struct range_tower *rt = CONST_CAST (struct range_tower *, rt_);
/* Fast path: START already lies inside a cached run of ones. */
931 if (start < rt->cache_end && start >= rt->cache_start && rt->cache_value)
934 if (start != ULONG_MAX)
936 struct range_tower_node *node;
937 unsigned long int node_start;
939 node = range_tower_lookup (rt, start, &node_start);
942 rt->cache_start = node_start + node->n_zeros;
943 rt->cache_end = rt->cache_start + node->n_ones;
944 rt->cache_value = true;
/* First 1-bit is either START itself (inside the ones) or the start
   of the node's ones run. */
945 return MAX (start, rt->cache_start);
/* No ones at or after START: cache the all-zeros tail. */
949 rt->cache_start = node_start;
950 rt->cache_end = ULONG_MAX;
951 rt->cache_value = false;
957 /* Recalculates the subtree_width of NODE based on its LEFT and
958 RIGHT children's "subtree_width"s. */
/* ABT augmentation callback: subtree_width = own width + children's widths.
   NOTE(review): the "static void" line and braces are missing here. */
960 reaugment_range_tower_node (struct abt_node *node_, const void *aux UNUSED)
962 struct range_tower_node *node = range_tower_node_from_abt_node (node_);
964 node->subtree_width = node->n_zeros + node->n_ones;
965 if (node->abt_node.down[0] != NULL)
967 struct range_tower_node *left;
969 left = range_tower_node_from_abt_node (node->abt_node.down[0]);
970 node->subtree_width += left->subtree_width;
972 if (node->abt_node.down[1] != NULL)
974 struct range_tower_node *right;
976 right = range_tower_node_from_abt_node (node->abt_node.down[1]);
977 node->subtree_width += right->subtree_width;
981 /* Deletes NODE from RT and frees it. */
/* NOTE(review): the "free (node);" line implied by the comment above is not
   visible in this extraction. */
983 delete_node (struct range_tower *rt, struct range_tower_node *node)
985 abt_delete (&rt->abt, &node->abt_node);
/* Returns the node containing bit POSITION in RT and stores the node's
   starting bit offset in *NODE_START.  Standard order-statistics descent:
   at each step compare POSITION against the left subtree's width and the
   current node's own width.
   NOTE(review): the enclosing descent loop header ("for (;;)" or similar)
   and braces are missing from this extraction. */
989 struct range_tower_node *
990 range_tower_lookup (const struct range_tower *rt, unsigned long int position,
991 unsigned long int *node_start)
993 const struct range_tower_node *node;
994 const struct abt_node *abt_node;
996 abt_node = rt->abt.root;
997 node = range_tower_node_from_abt_node (abt_node);
998 *node_start = subtree_width (abt_node->down[0]);
1001 unsigned long int left_width = subtree_width (abt_node->down[0]);
1003 if (position < left_width)
/* Descend left; *node_start shrinks by the width we skipped. */
1005 abt_node = abt_node->down[0];
1006 *node_start -= left_width - subtree_width (abt_node->down[0]);
1010 unsigned long int node_width = node->n_zeros + node->n_ones;
1012 position -= left_width;
1013 if (position < node_width)
1014 return CONST_CAST (struct range_tower_node *, node);
/* Descend right; skip this node's width plus its left subtree. */
1016 position -= node_width;
1017 abt_node = abt_node->down[1];
1018 left_width = subtree_width (abt_node->down[0]);
1019 *node_start += node_width + left_width;
1021 node = range_tower_node_from_abt_node (abt_node);
1025 /* Destroys range tower RT.
1026 Helper function for range_tower_create_pool. */
/* Pool destructor callback registered via pool_register(); RT_ is the
   range tower cast to void *.  The "static void" line is missing here. */
1028 destroy_pool (void *rt_)
1030 struct range_tower *rt = rt_;
1032 range_tower_destroy (rt);