// vers_vecs/trees/bp/mod.rs
1//! A succinct tree data structure backed by the balanced parenthesis representation.
2//! The tree supports navigation operations between parent, child, and sibling nodes in `O(log n)`
3//! time, as well as subtree size, level-order, and ancestor queries in `O(log n)` time.
4//! The tree is succinct (ideally sublinear space overhead) and pointer-less.
5
6use crate::bit_vec::fast_rs_vec::SelectIntoIter;
7use crate::trees::mmt::MinMaxTree;
8use crate::trees::{IsAncestor, LevelTree, SubtreeSize, Tree};
9use crate::{BitVec, RsVec};
10use std::cmp::{max, min};
11use std::iter::FusedIterator;
12
13/// The default block size for the tree, used in several const generics
const DEFAULT_BLOCK_SIZE: usize = 512;

/// Bit value representing an opening parenthesis in the parenthesis expression.
const OPEN_PAREN: u64 = 1;
/// Bit value representing a closing parenthesis in the parenthesis expression.
const CLOSE_PAREN: u64 = 0;
18
19mod builder;
20// re-export the builders toplevel
21pub use builder::BpBuilder;
22
23#[cfg(feature = "bp_u16_lookup")]
24mod lookup;
25#[cfg(feature = "bp_u16_lookup")]
26use lookup::{process_block_bwd, process_block_fwd, LOOKUP_BLOCK_SIZE};
27
28#[cfg(not(feature = "bp_u16_lookup"))]
29mod lookup_query;
30#[cfg(not(feature = "bp_u16_lookup"))]
31use lookup_query::{process_block_bwd, process_block_fwd, LOOKUP_BLOCK_SIZE};
32
33/// A succinct tree data structure based on balanced parenthesis expressions.
34/// A tree with `n` nodes is encoded in a bit vector using `2n` bits plus the rank/select overhead
35/// of the [`RsVec`] implementation.
36/// Additionally, a small pointerless heap data structure stores
37/// additional meta information required to perform most tree operations.
38///
39/// The tree is thus pointer-less and succinct.
40/// It supports tree navigation operations between parent, child, and sibling nodes, both in
41/// depth-first search order and in level order.
42/// All operations run in `O(log n)` time with small overheads.
43///
44/// ## Lookup Table
45/// The tree internally uses a lookup table for subqueries on blocks of bits.
46/// The lookup table requires 4 KiB of memory and is compiled into the binary.
47/// If the `bp_u16_lookup` feature is enabled, a larger lookup table is used, which requires 128 KiB of
48/// memory, but answers queries faster.
49///
50/// ## Block Size
51/// The tree has a block size of 512 bits by default, which can be changed by setting the
52/// `BLOCK_SIZE` generic parameter.
53/// This block size is expected to be a good choice for most applications,
54/// as it will fit a cache line.
55///
56/// If you want to tune the parameter,
57/// the block size should be chosen based on the expected size of the tree and the available memory.
58/// Smaller block sizes increase the size of the supporting data structure but reduce the time
59/// complexity of some operations by a constant amount.
60/// Larger block sizes are best combined with the `bp_u16_lookup` feature to keep the query time
61/// low.
62/// In any case, benchmarking for the specific use case is recommended for tuning.
63///
64/// ## Unbalanced Parentheses
65/// The tree is implemented in a way to theoretically support unbalanced parenthesis expressions
66/// (which encode invalid trees) without panicking.
67/// However, some operations may behave erratically if the parenthesis expression isn't balanced.
68/// Generally, operations specify if they require a balanced tree.
69///
70/// The results of the operations are unspecified,
71/// meaning no guarantees are made about the stability of the results across versions
72/// (except the operations not panicking).
73/// However, for research purposes, this behavior can be useful and should yield expected results
74/// in most cases.
75///
76/// Only the basic operations like [`fwd_search`] and [`bwd_search`],
77/// as well as the tree navigation operations
78/// (defined by the traits [`Tree`], [`IsAncestor`], [`LevelTree`], and [`SubtreeSize`]),
79/// are included in this guarantee.
80/// Additional operations like iterators may panic if the tree is unbalanced (this is documented per
81/// operation).
82///
83/// # Examples
84///
85/// The high-level approach to building a tree is to use the [`BpBuilder`] to construct the tree
86/// using depth-first traversal of all its nodes.
87/// ```rust
88/// # #![allow(long_running_const_eval)] // for some reason this is needed for test cases
89/// use vers_vecs::{BitVec, BpBuilder, BpTree, TreeBuilder, Tree};
90///
91/// let mut builder = BpBuilder::<512>::new();
92///
93/// // build the tree by depth-first traversal
94/// builder.enter_node();
95/// builder.enter_node();
96/// builder.enter_node();
97/// builder.leave_node();
98/// builder.enter_node();
99/// builder.leave_node();
100/// builder.leave_node();
101/// builder.enter_node();
102/// builder.leave_node();
103/// builder.leave_node();
104///
105/// let tree = builder.build().unwrap();
106/// let root = tree.root().unwrap();
107/// assert_eq!(root, 0);
108/// assert_eq!(tree.first_child(root), Some(1));
109/// assert_eq!(tree.next_sibling(1), Some(7));
110/// assert_eq!(tree.next_sibling(7), None);
111///
112/// assert_eq!(root, 0);
113/// assert_eq!(tree.depth(2), 2);
114/// assert_eq!(tree.depth(7), 1);
115/// ```
116///
117/// Alternatively, the tree can be constructed from a [`BitVec`] containing the parenthesis
118/// expression directly.
119/// This is also how trees with unbalanced parenthesis expressions can be constructed.
120///
121/// ```rust
122/// # #![allow(long_running_const_eval)]
123/// use vers_vecs::{BitVec, BpTree, Tree};
124/// let bv = BitVec::pack_sequence_u8(&[0b1101_0111, 0b0010_0100], 8);
125/// let tree = BpTree::<4>::from_bit_vector(bv);
126///
127/// let nodes = tree.dfs_iter().collect::<Vec<_>>();
128/// assert_eq!(nodes, vec![0, 1, 2, 4, 6, 7, 10, 13]);
129/// ```
130///
131/// [`RsVec`]: RsVec
132/// [`fwd_search`]: BpTree::fwd_search
133/// [`bwd_search`]: BpTree::bwd_search
134/// [`Tree`]: Tree
135/// [`IsAncestor`]: IsAncestor
136/// [`LevelTree`]: LevelTree
137/// [`SubtreeSize`]: SubtreeSize
138/// [`BpBuilder`]: BpBuilder
139/// [`BitVec`]: BitVec
#[derive(Clone, Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct BpTree<const BLOCK_SIZE: usize = DEFAULT_BLOCK_SIZE> {
    /// The parenthesis expression: 1-bits are opening and 0-bits are closing parentheses,
    /// stored with rank/select support.
    vec: RsVec,
    /// A min-max tree over the excess values of `BLOCK_SIZE`-sized blocks of `vec`,
    /// used to continue forward/backward searches across block boundaries.
    min_max_tree: MinMaxTree,
}
146
147impl<const BLOCK_SIZE: usize> BpTree<BLOCK_SIZE> {
148 /// Construct a new `BpTree` from a given bit vector.
149 #[must_use]
150 pub fn from_bit_vector(bv: BitVec) -> Self {
151 let min_max_tree = MinMaxTree::excess_tree(&bv, BLOCK_SIZE);
152 let vec = bv.into();
153 Self { vec, min_max_tree }
154 }
155
156 /// Search for a position where the excess relative to the starting `index` is `relative_excess`.
157 /// Returns `None` if no such position exists.
158 /// The initial position is never considered in the search.
159 /// Searches forward in the bit vector.
160 ///
161 /// # Arguments
162 /// - `index`: The starting index.
163 /// - `relative_excess`: The desired relative excess value.
164 pub fn fwd_search(&self, index: usize, mut relative_excess: i64) -> Option<usize> {
165 // check for greater than or equal length minus one, because the last element
166 // won't ever have a result from fwd_search
167 if index >= (self.vec.len() - 1) {
168 return None;
169 }
170
171 let block_index = (index + 1) / BLOCK_SIZE;
172 self.fwd_search_block(index, block_index, &mut relative_excess)
173 .map_or_else(
174 |()| {
175 // find the block that contains the desired relative excess
176 let block = self.min_max_tree.fwd_search(block_index, relative_excess);
177
178 // check the result block for the exact position
179 block.and_then(|(block, mut relative_excess)| {
180 self.fwd_search_block(block * BLOCK_SIZE - 1, block, &mut relative_excess)
181 .ok()
182 })
183 },
184 Some,
185 )
186 }
187
188 /// Perform the forward search within one block. If this doesn't yield a result, the caller must
189 /// continue the search in the min-max-tree.
190 ///
191 /// Returns Ok(index) if an index with the desired relative excess is found, or None(excess)
192 /// with the excess at the end of the current block if no index with the desired relative excess
193 /// is found.
194 #[inline(always)]
195 fn fwd_search_block(
196 &self,
197 start_index: usize,
198 block_index: usize,
199 relative_excess: &mut i64,
200 ) -> Result<usize, ()> {
201 let block_boundary = min((block_index + 1) * BLOCK_SIZE, self.vec.len());
202
203 // the boundary at which we can start with table lookups
204 let lookup_boundary = min(
205 (start_index + 1).div_ceil(LOOKUP_BLOCK_SIZE as usize) * LOOKUP_BLOCK_SIZE as usize,
206 block_boundary,
207 );
208 for i in start_index + 1..lookup_boundary {
209 let bit = self.vec.get_unchecked(i);
210 *relative_excess -= if bit == 1 { 1 } else { -1 };
211
212 if *relative_excess == 0 {
213 return Ok(i);
214 }
215 }
216
217 // the boundary up to which we can use table lookups
218 let upper_lookup_boundary = max(
219 lookup_boundary,
220 (block_boundary / LOOKUP_BLOCK_SIZE as usize) * LOOKUP_BLOCK_SIZE as usize,
221 );
222
223 for i in (lookup_boundary..upper_lookup_boundary).step_by(LOOKUP_BLOCK_SIZE as usize) {
224 if let Ok(idx) = process_block_fwd(
225 self.vec
226 .get_bits_unchecked(i, LOOKUP_BLOCK_SIZE as usize)
227 .try_into()
228 .unwrap(),
229 relative_excess,
230 ) {
231 return Ok(i + idx as usize);
232 }
233 }
234
235 // if the upper_lookup_boundary isn't the block_boundary (which happens in non-full blocks, i.e. the last
236 // block in the vector)
237 for i in upper_lookup_boundary..block_boundary {
238 let bit = self.vec.get_unchecked(i);
239 *relative_excess -= if bit == 1 { 1 } else { -1 };
240
241 if *relative_excess == 0 {
242 return Ok(i);
243 }
244 }
245
246 Err(())
247 }
248
249 /// Search for a position where the excess relative to the starting `index` is `relative_excess`.
250 /// Returns `None` if no such position exists.
251 /// The initial position is never considered in the search.
252 /// Searches backward in the bit vector.
253 ///
254 /// # Arguments
255 /// - `index`: The starting index.
256 /// - `relative_excess`: The desired relative excess value.
257 pub fn bwd_search(&self, index: usize, mut relative_excess: i64) -> Option<usize> {
258 if index >= self.vec.len() {
259 return None;
260 }
261
262 // if the index is 0, we cant have a valid result anyway, and this would overflow the
263 // subtraction below, so we report None
264 if index == 0 {
265 return None;
266 }
267
268 // calculate the block we start searching in. It starts at index - 1, so we don't accidentally
269 // search the mM tree and immediately find `index` as the position
270 let block_index = (index - 1) / BLOCK_SIZE;
271
272 // check the current block
273 self.bwd_search_block(index, block_index, &mut relative_excess)
274 .map_or_else(
275 |()| {
276 // find the block that contains the desired relative excess
277 let block = self.min_max_tree.bwd_search(block_index, relative_excess);
278
279 // check the result block for the exact position
280 block.and_then(|(block, mut relative_excess)| {
281 self.bwd_search_block((block + 1) * BLOCK_SIZE, block, &mut relative_excess)
282 .ok()
283 })
284 },
285 Some,
286 )
287 }
288
289 /// Perform the backward search within one block. If this doesn't yield a result, the caller must
290 /// continue the search in the min-max-tree.
291 ///
292 /// Returns Ok(index) if an index with the desired relative excess is found, or None(excess)
293 /// with the excess at the end of the current block if no index with the desired relative excess
294 /// is found.
295 #[inline(always)]
296 fn bwd_search_block(
297 &self,
298 start_index: usize,
299 block_index: usize,
300 relative_excess: &mut i64,
301 ) -> Result<usize, ()> {
302 let block_boundary = min(block_index * BLOCK_SIZE, self.vec.len());
303
304 // the boundary at which we can start with table lookups
305 let lookup_boundary = max(
306 ((start_index - 1) / LOOKUP_BLOCK_SIZE as usize) * LOOKUP_BLOCK_SIZE as usize,
307 block_boundary,
308 );
309 for i in (lookup_boundary..start_index).rev() {
310 let bit = self.vec.get_unchecked(i);
311 *relative_excess += if bit == 1 { 1 } else { -1 };
312
313 if *relative_excess == 0 {
314 return Ok(i);
315 }
316 }
317
318 for i in (block_boundary..lookup_boundary)
319 .step_by(LOOKUP_BLOCK_SIZE as usize)
320 .rev()
321 {
322 if let Ok(idx) = process_block_bwd(
323 self.vec
324 .get_bits_unchecked(i, LOOKUP_BLOCK_SIZE as usize)
325 .try_into()
326 .unwrap(),
327 relative_excess,
328 ) {
329 return Ok(i + idx as usize);
330 }
331 }
332
333 Err(())
334 }
335
336 /// Find the position of the matching closing parenthesis for the opening parenthesis at `index`.
337 /// If the bit at `index` is not an opening parenthesis, the result is meaningless.
338 /// If there is no matching closing parenthesis, `None` is returned.
339 #[must_use]
340 pub fn close(&self, index: usize) -> Option<usize> {
341 if index >= self.vec.len() {
342 return None;
343 }
344
345 self.fwd_search(index, -1)
346 }
347
348 /// Find the position of the matching opening parenthesis for the closing parenthesis at `index`.
349 /// If the bit at `index` is not a closing parenthesis, the result is meaningless.
350 /// If there is no matching opening parenthesis, `None` is returned.
351 #[must_use]
352 pub fn open(&self, index: usize) -> Option<usize> {
353 if index >= self.vec.len() {
354 return None;
355 }
356
357 self.bwd_search(index, -1)
358 }
359
360 /// Find the position of the opening parenthesis that encloses the position `index`.
361 /// This works regardless of whether the bit at `index` is an opening or closing parenthesis.
362 /// If there is no enclosing parenthesis, `None` is returned.
363 #[must_use]
364 pub fn enclose(&self, index: usize) -> Option<usize> {
365 if index >= self.vec.len() {
366 return None;
367 }
368
369 self.bwd_search(
370 index,
371 if self.vec.get_unchecked(index) == 1 {
372 -1
373 } else {
374 -2
375 },
376 )
377 }
378
379 /// Get the excess of open parentheses up to and including the position `index`.
380 /// The excess is the number of open parentheses minus the number of closing parentheses.
381 /// If `index` is out of bounds, the total excess of the parentheses expression is returned.
382 #[must_use]
383 pub fn excess(&self, index: usize) -> i64 {
384 debug_assert!(index < self.vec.len(), "Index out of bounds");
385 self.vec.rank1(index + 1) as i64 - self.vec.rank0(index + 1) as i64
386 }
387
388 /// Iterate over the nodes of the tree.
389 /// The iterator yields the nodes in depth-first (pre-)order.
390 /// This method is an alias for [`dfs_iter`].
391 ///
392 /// If the tree is unbalanced, the iterator returns the node handles in the order they appear in
393 /// the parenthesis expression, and it will return handles that don't have a matching closing
394 /// parenthesis.
395 ///
396 /// [`dfs_iter`]: BpTree::dfs_iter
397 pub fn iter(
398 &self,
399 ) -> impl Iterator<Item = <BpTree<BLOCK_SIZE> as Tree>::NodeHandle> + use<'_, BLOCK_SIZE> {
400 self.dfs_iter()
401 }
402
403 /// Iterate over the nodes of the tree in depth-first (pre-)order.
404 /// This is the most efficient way to iterate over all nodes of the tree.
405 ///
406 /// If the tree is unbalanced, the iterator returns the node handles in the order they appear in
407 /// the parenthesis expression, and it will return handles that don't have a matching closing
408 /// parenthesis.
409 pub fn dfs_iter(
410 &self,
411 ) -> impl Iterator<Item = <BpTree<BLOCK_SIZE> as Tree>::NodeHandle> + use<'_, BLOCK_SIZE> {
412 self.vec.iter1()
413 }
414
415 /// Iterate over the nodes of a valid tree in depth-first (post-)order.
416 /// This is slower than the pre-order iteration.
417 ///
418 /// # Panics
419 /// The iterator may panic at any point if the parenthesis expression is unbalanced.
420 pub fn dfs_post_iter(
421 &self,
422 ) -> impl Iterator<Item = <BpTree<BLOCK_SIZE> as Tree>::NodeHandle> + use<'_, BLOCK_SIZE> {
423 self.vec.iter0().map(|n| self.open(n).unwrap())
424 }
425
426 /// Iterate over a subtree rooted at `node` in depth-first (pre-)order.
427 /// The iteration starts with the node itself.
428 ///
429 /// Calling this method on an invalid node handle, or an unbalanced parenthesis expression,
430 /// will produce an iterator over an unspecified subset of nodes.
431 pub fn subtree_iter(
432 &self,
433 node: <BpTree<BLOCK_SIZE> as Tree>::NodeHandle,
434 ) -> impl Iterator<Item = <BpTree<BLOCK_SIZE> as Tree>::NodeHandle> + use<'_, BLOCK_SIZE> {
435 debug_assert!(
436 self.vec.get(node) == Some(OPEN_PAREN),
437 "Node handle is invalid"
438 );
439
440 let index = self.vec.rank1(node);
441 let close = self.close(node).unwrap_or(node);
442 let subtree_size = self.vec.rank1(close) - index;
443
444 self.vec.iter1().skip(index).take(subtree_size)
445 }
446
447 /// Iterate over a subtree rooted at `node` in depth-first (post-)order.
448 /// This is slower than the pre-order iteration.
449 /// The iteration ends with the node itself.
450 ///
451 /// # Panics
452 /// Calling this method on an invalid node handle, or an unbalanced parenthesis expression,
453 /// will produce an iterator over an unspecified subset of nodes, or panic either during
454 /// construction or iteration.
455 pub fn subtree_post_iter(
456 &self,
457 node: <BpTree<BLOCK_SIZE> as Tree>::NodeHandle,
458 ) -> impl Iterator<Item = <BpTree<BLOCK_SIZE> as Tree>::NodeHandle> + use<'_, BLOCK_SIZE> {
459 debug_assert!(
460 self.vec.get(node) == Some(OPEN_PAREN),
461 "Node handle is invalid"
462 );
463
464 let index = self.vec.rank0(node);
465 let close = self.close(node).unwrap_or(node);
466 let subtree_size = self.vec.rank0(close) + 1 - index;
467
468 self.vec
469 .iter0()
470 .skip(index)
471 .take(subtree_size)
472 .map(|n| self.open(n).unwrap())
473 }
474
475 /// Iterate over the children of a node in the tree.
476 /// The iterator yields the children in the order they appear in the parenthesis expression.
477 /// If the node is a leaf, the iterator is empty.
478 /// If the node is not a valid node handle, or the tree is unbalanced,
479 /// the iterator will produce an unspecified subset of the tree's nodes.
480 pub fn children(
481 &self,
482 node: <BpTree<BLOCK_SIZE> as Tree>::NodeHandle,
483 ) -> impl Iterator<Item = <BpTree<BLOCK_SIZE> as Tree>::NodeHandle> + use<'_, BLOCK_SIZE> {
484 debug_assert!(
485 self.vec.get(node) == Some(OPEN_PAREN),
486 "Node handle is invalid"
487 );
488
489 ChildrenIter::<BLOCK_SIZE, true>::new(self, node)
490 }
491
492 /// Iterate over the children of a node in the tree in reverse order.
493 /// The iterator yields the children in the reverse order they appear in the parenthesis expression.
494 /// If the node is a leaf, the iterator is empty.
495 /// If the node is not a valid node handle, or the tree is unbalanced,
496 /// the iterator will produce an unspecified subset of the tree's nodes.
497 pub fn rev_children(
498 &self,
499 node: <BpTree<BLOCK_SIZE> as Tree>::NodeHandle,
500 ) -> impl Iterator<Item = <BpTree<BLOCK_SIZE> as Tree>::NodeHandle> + use<'_, BLOCK_SIZE> {
501 debug_assert!(
502 self.vec.get(node) == Some(OPEN_PAREN),
503 "Node handle is invalid"
504 );
505
506 ChildrenIter::<BLOCK_SIZE, false>::new(self, node)
507 }
508
509 /// Returns the number of bytes used on the heap for this tree. This does not include
510 /// allocated space that is not used (e.g. by the allocation behavior of `Vec`).
511 #[must_use]
512 pub fn heap_size(&self) -> usize {
513 self.vec.heap_size() + self.min_max_tree.heap_size()
514 }
515}
516
517impl<const BLOCK_SIZE: usize> Tree for BpTree<BLOCK_SIZE> {
518 type NodeHandle = usize;
519
520 fn root(&self) -> Option<Self::NodeHandle> {
521 if self.vec.is_empty() {
522 None
523 } else {
524 Some(0)
525 }
526 }
527
528 fn parent(&self, node: Self::NodeHandle) -> Option<Self::NodeHandle> {
529 debug_assert!(
530 self.vec.get(node) == Some(OPEN_PAREN),
531 "Node handle is invalid"
532 );
533
534 self.enclose(node)
535 }
536
537 fn first_child(&self, node: Self::NodeHandle) -> Option<Self::NodeHandle> {
538 debug_assert!(
539 self.vec.get(node) == Some(OPEN_PAREN),
540 "Node handle is invalid"
541 );
542
543 if let Some(bit) = self.vec.get(node + 1) {
544 if bit == OPEN_PAREN {
545 return Some(node + 1);
546 }
547 }
548
549 None
550 }
551
552 fn next_sibling(&self, node: Self::NodeHandle) -> Option<Self::NodeHandle> {
553 debug_assert!(
554 self.vec.get(node) == Some(OPEN_PAREN),
555 "Node handle is invalid"
556 );
557 self.close(node).and_then(|i| {
558 self.vec
559 .get(i + 1)
560 .and_then(|bit| if bit == OPEN_PAREN { Some(i + 1) } else { None })
561 })
562 }
563
564 fn previous_sibling(&self, node: Self::NodeHandle) -> Option<Self::NodeHandle> {
565 debug_assert!(
566 self.vec.get(node) == Some(OPEN_PAREN),
567 "Node handle is invalid"
568 );
569 if node == 0 {
570 None
571 } else {
572 self.vec.get(node - 1).and_then(|bit| {
573 if bit == CLOSE_PAREN {
574 self.open(node - 1)
575 } else {
576 None
577 }
578 })
579 }
580 }
581
582 fn last_child(&self, node: Self::NodeHandle) -> Option<Self::NodeHandle> {
583 debug_assert!(
584 self.vec.get(node) == Some(OPEN_PAREN),
585 "Node handle is invalid"
586 );
587 self.vec.get(node + 1).and_then(|bit| {
588 if bit == OPEN_PAREN {
589 if let Some(i) = self.close(node) {
590 self.open(i - 1)
591 } else {
592 None
593 }
594 } else {
595 None
596 }
597 })
598 }
599
600 fn node_index(&self, node: Self::NodeHandle) -> usize {
601 debug_assert!(
602 self.vec.get(node) == Some(OPEN_PAREN),
603 "Node handle is invalid"
604 );
605 self.vec.rank1(node)
606 }
607
608 fn node_handle(&self, index: usize) -> Self::NodeHandle {
609 self.vec.select1(index)
610 }
611
612 fn is_leaf(&self, node: Self::NodeHandle) -> bool {
613 debug_assert!(
614 self.vec.get(node) == Some(OPEN_PAREN),
615 "Node handle is invalid"
616 );
617 self.vec.get(node + 1) == Some(CLOSE_PAREN)
618 }
619
620 fn depth(&self, node: Self::NodeHandle) -> u64 {
621 debug_assert!(
622 self.vec.get(node) == Some(OPEN_PAREN),
623 "Node handle is invalid"
624 );
625 let excess: u64 = self.excess(node).try_into().unwrap_or(0);
626 excess.saturating_sub(1)
627 }
628
629 fn size(&self) -> usize {
630 self.vec.rank1(self.vec.len())
631 }
632
633 fn is_empty(&self) -> bool {
634 self.vec.is_empty()
635 }
636}
637
638impl<const BLOCK_SIZE: usize> IsAncestor for BpTree<BLOCK_SIZE> {
639 fn is_ancestor(
640 &self,
641 ancestor: Self::NodeHandle,
642 descendant: Self::NodeHandle,
643 ) -> Option<bool> {
644 debug_assert!(
645 self.vec.get(ancestor) == Some(OPEN_PAREN),
646 "Node handle is invalid"
647 );
648 debug_assert!(
649 self.vec.get(descendant) == Some(OPEN_PAREN),
650 "Node handle is invalid"
651 );
652
653 self.close(ancestor)
654 .map(|closing| ancestor <= descendant && descendant < closing)
655 }
656}
657
658impl<const BLOCK_SIZE: usize> LevelTree for BpTree<BLOCK_SIZE> {
659 fn level_ancestor(&self, node: Self::NodeHandle, level: u64) -> Option<Self::NodeHandle> {
660 if level == 0 {
661 return Some(node);
662 }
663
664 #[allow(clippy::cast_possible_wrap)]
665 // if the level exceeds 2^63, we accept that the result is wrong
666 self.bwd_search(node, -(level as i64))
667 }
668
669 fn level_next(&self, node: Self::NodeHandle) -> Option<Self::NodeHandle> {
670 self.fwd_search(self.close(node)?, 1)
671 }
672
673 fn level_prev(&self, node: Self::NodeHandle) -> Option<Self::NodeHandle> {
674 self.open(self.bwd_search(node, 1)?)
675 }
676
677 fn level_leftmost(&self, level: u64) -> Option<Self::NodeHandle> {
678 // fwd_search doesn't support returning the input position
679 if level == 0 {
680 return Some(0);
681 }
682
683 #[allow(clippy::cast_possible_wrap)]
684 // if the level exceeds 2^63, we accept that the result is wrong
685 self.fwd_search(0, level as i64)
686 }
687
688 fn level_rightmost(&self, level: u64) -> Option<Self::NodeHandle> {
689 #[allow(clippy::cast_possible_wrap)]
690 // if the level exceeds 2^63, we accept that the result is wrong
691 self.open(self.bwd_search(self.size() * 2 - 1, level as i64)?)
692 }
693}
694
695impl<const BLOCK_SIZE: usize> SubtreeSize for BpTree<BLOCK_SIZE> {
696 fn subtree_size(&self, node: Self::NodeHandle) -> Option<usize> {
697 debug_assert!(
698 self.vec.get(node) == Some(OPEN_PAREN),
699 "Node handle is invalid"
700 );
701
702 self.close(node)
703 .map(|c| self.vec.rank1(c) - self.vec.rank1(node))
704 }
705}
706
707impl<const BLOCK_SIZE: usize> IntoIterator for BpTree<BLOCK_SIZE> {
708 type Item = <BpTree<BLOCK_SIZE> as Tree>::NodeHandle;
709 type IntoIter = SelectIntoIter<false>;
710
711 fn into_iter(self) -> Self::IntoIter {
712 self.vec.into_iter1()
713 }
714}
715
716impl<const BLOCK_SIZE: usize> From<BitVec> for BpTree<BLOCK_SIZE> {
717 fn from(bv: BitVec) -> Self {
718 Self::from_bit_vector(bv)
719 }
720}
721
/// An iterator over the children of a node.
/// Calls to `next` return the next child node handle in the order they appear in the parenthesis
/// expression (or in reverse order when `FORWARD` is `false`).
struct ChildrenIter<'a, const BLOCK_SIZE: usize, const FORWARD: bool> {
    /// The tree whose child nodes are iterated over.
    tree: &'a BpTree<BLOCK_SIZE>,
    /// The child to yield on the next call to `next`, or `None` once exhausted.
    current_sibling: Option<usize>,
}
729
730impl<'a, const BLOCK_SIZE: usize, const FORWARD: bool> ChildrenIter<'a, BLOCK_SIZE, FORWARD> {
731 fn new(tree: &'a BpTree<BLOCK_SIZE>, node: usize) -> Self {
732 Self {
733 tree,
734 current_sibling: if FORWARD {
735 tree.first_child(node)
736 } else {
737 tree.last_child(node)
738 },
739 }
740 }
741}
742
743impl<const BLOCK_SIZE: usize, const FORWARD: bool> Iterator
744 for ChildrenIter<'_, BLOCK_SIZE, FORWARD>
745{
746 type Item = usize;
747
748 fn next(&mut self) -> Option<Self::Item> {
749 let current = self.current_sibling?;
750 let next = if FORWARD {
751 self.tree.next_sibling(current)
752 } else {
753 self.tree.previous_sibling(current)
754 };
755 self.current_sibling = next;
756 Some(current)
757 }
758}
759
// Once `next` returns `None`, `current_sibling` stays `None`, so every subsequent call
// also returns `None`; the iterator is therefore trivially fused.
impl<const BLOCK_SIZE: usize, const FORWARD: bool> FusedIterator
    for ChildrenIter<'_, BLOCK_SIZE, FORWARD>
{
}
764
765#[cfg(test)]
766mod tests;