// Copyright 2019 Eric Izoita (nytopop)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
// of the Software, and to permit persons to whom the Software is furnished to
// do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.

//! An implementation of rational buckets for lexically ordered
//! collections.
//!
//! References
//! - [0] Dan Hazel
//!   [Using rational numbers to key nested sets](https://arxiv.org/abs/0806.3115)
//! - [1] David W. Matula, Peter Kornerup
//!   [An order preserving finite binary encoding of the rationals](https://www.researchgate.net/publication/261204300_An_order_preserving_finite_binary_encoding_of_the_rationals)

#![feature(test)]
#![feature(specialization)]

use self::bitter::*;
use std::{cmp, cmp::Ordering, iter};

pub mod bitter;

/// Represents a location in the treeid hierarchy, and an arbitrary key.
///
/// Taken together, the primary use case is to allow for arbitrarily nested
/// ranges of keys in a flat, ordered collection like [BTreeMap](std::collections::BTreeMap).
///
/// Crucially, the sort order of treeid nodes remains stable even when serialized,
/// allowing them to be used efficiently with on-disk collections that do not
/// support custom comparison operators. Even in collections that do, the
/// lexicographic sort of a serialized treeid node is typically faster (and
/// simpler) than deserializing keys for every comparison.
///
/// # Sort order
/// The location of each node in the hierarchy is represented as a sequence of
/// nonzero unsigned integers:
///
/// ```
/// // Hierarchical Structure
/// //
/// //                /------------[root]-------------\
/// //                |                               |
/// //       /-------[1]-------\             /-------[2]-------\
/// //       |                 |             |                 |
/// //  /---[1,1]---\   /---[1,2]---\   /---[2,1]---\   /---[2,2]---\
/// //  |           |   |           |   |           |   |           |
/// // [1,1,1] [1,1,2] [1,2,1] [1,2,2] [2,1,1] [2,1,2] [2,2,1] [2,2,2]
///
/// // Ascending Sort Order
/// //
/// // [root]
/// // [1]
/// // [1,1]
/// // [1,1,1]
/// // [1,1,2]
/// // [1,2]
/// // [1,2,1]
/// // [1,2,2]
/// // [2]
/// // [2,1]
/// // [2,1,1]
/// // [2,1,2]
/// // [2,2]
/// // [2,2,1]
/// // [2,2,2]
/// ```
///
/// Nodes at the same position, but with different keys, will be ordered by
/// their keys.
///
/// ```
/// use treeid::Node;
///
/// let a = Node::from_parts(&[1, 2], b"hello world");
/// let b = Node::from_parts(&[1, 2, 1], b"1st key");
/// let c = Node::from_parts(&[1, 2, 1], b"2nd key");
/// let d = Node::from_parts(&[1, 3], b"some other key");
///
/// assert!(a < b && b < c && c < d);
/// assert!(a.to_binary() < b.to_binary()
///      && b.to_binary() < c.to_binary()
///      && c.to_binary() < d.to_binary());
/// ```
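///
/// # Nested ranges
/// As a sketch of the primary use case (the map contents below are purely
/// illustrative), the encoded form can key a flat ordered collection such as
/// [BTreeMap](std::collections::BTreeMap), and a half-open range over encoded
/// bounds scans exactly one subtree:
///
/// ```
/// use std::collections::BTreeMap;
/// use treeid::Node;
///
/// let mut map: BTreeMap<Vec<u8>, &str> = BTreeMap::new();
/// map.insert(Node::from(&[1]).to_binary(), "first subtree");
/// map.insert(Node::from(&[1, 1]).to_binary(), "inside [1]");
/// map.insert(Node::from(&[2]).to_binary(), "second subtree");
///
/// // everything at or under [1] sorts before [2], so a half-open range
/// // over the encoded bounds covers that subtree and nothing else.
/// let lo = Node::from(&[1]).to_binary();
/// let hi = Node::from(&[2]).to_binary();
/// let under_one: Vec<&str> = map.range(lo..hi).map(|(_, &v)| v).collect();
/// assert_eq!(vec!["first subtree", "inside [1]"], under_one);
/// ```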
///
/// # Encoding format
/// Nodes are encoded to binary in a modified form of LCF[1](https://www.researchgate.net/publication/261204300_An_order_preserving_finite_binary_encoding_of_the_rationals) (mLCF).
///
/// Technical deviations from LCF encoding as described by Matula et al:
///
/// - only suitable for rationals p/q where one of the two continued
///   fraction forms has both of the following properties:
///   - composed of an odd number of natural terms
///   - terms at odd indices are always 1
///
/// - leading high bit / low bit is elided because (p >= q >= 1)
///   and we don't need to differentiate from (1 <= p <= q).
///
/// - a trailing zero byte is appended to allow for a suffix key
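///
/// For instance, the position `[2, 4, 1]` expands to the continued fraction
/// terms `[2, 1, 4, 1, 1]` (terms at odd indices pinned to 1), i.e. the
/// rational `2 + 1/(1 + 1/(4 + 1/(1 + 1/1)))`.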
///
/// # Size
/// There is no limit to the length of a treeid position, other than practical
/// concerns w.r.t. space consumption. The bitsize of the positional portion of
/// an encoded treeid node can be found by taking each term's minimum binary
/// size, subtracting one, doubling, and adding one; summing those values;
/// adding the number of terms minus one (for the separator bits); and rounding
/// the result up to the next byte boundary.
///
/// A single zero byte follows to delineate the position from the key portion,
/// which is appended unchanged.
///
/// For example, to find the encoded size of the position `&[7, 4, 2]`, we perform:
///
/// - minimum size: `[3 (111), 3 (100), 2 (10)]`
/// - subtract one: `[2, 2, 1]`
/// - double      : `[4, 4, 2]`
/// - add one     : `[5, 5, 3]`
/// - summate     : `13`
/// - add terms-1 : `15`
/// - round to 8  : `16`
/// - add a byte  : `24`
///
/// Which corresponds to the encoded form of:
///
/// `0b11011111 0b00011000 0x0`
///
/// ```
/// use treeid::Node;
///
/// let node = Node::from(&[7, 4, 2]);
/// assert_eq!(
///     // 7    |sep|4    |sep|2  |pad|key separator|key
///     // 11011|1  |11000|1  |100|0  |00000000     |
///
///     &[0b11011_1_11, 0b000_1_100_0, 0],
///     &*node.to_binary(),
/// );
/// ```
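///
/// The arithmetic above can also be checked in code; `encoded_len` below is an
/// illustrative helper (not part of this crate's API), assuming a nonempty
/// position:
///
/// ```
/// use treeid::Node;
///
/// // expected encoded length in bytes for a nonempty position
/// fn encoded_len(loc: &[u64]) -> usize {
///     let data_bits: u64 = loc
///         .iter()
///         // minimum binary size, minus one, doubled, plus one
///         .map(|&x| 2 * (63 - u64::from(x.leading_zeros())) + 1)
///         .sum();
///     let total_bits = data_bits + loc.len() as u64 - 1; // separator bits
///     let rounded_bytes = (total_bits + 7) / 8; // round up to byte boundary
///     rounded_bytes as usize + 1 // trailing zero byte before the key
/// }
///
/// assert_eq!(3, encoded_len(&[7, 4, 2]));
/// assert_eq!(
///     encoded_len(&[7, 4, 2]),
///     Node::from(&[7, 4, 2]).to_binary().len(),
/// );
/// ```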
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Node {
    loc: Vec<u64>, // location in the tree
    key: Vec<u8>,  // arbitrary key
}

impl Default for Node {
    fn default() -> Self {
        Self::root()
    }
}

impl Ord for Node {
    fn cmp(&self, other: &Node) -> Ordering {
        match self.loc.cmp(&other.loc) {
            Ordering::Equal => self.key.cmp(&other.key),
            o => o,
        }
    }
}

impl PartialOrd for Node {
    fn partial_cmp(&self, other: &Node) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl<A: AsRef<[u64]>> From<A> for Node {
    default fn from(loc: A) -> Self {
        assert!(!loc.as_ref().contains(&0));
        Node {
            loc: loc.as_ref().iter().map(|&x| x).collect(),
            key: Vec::new(),
        }
    }
}

impl From<Vec<u64>> for Node {
    fn from(loc: Vec<u64>) -> Self {
        Self::from_vec(loc)
    }
}

impl AsRef<[u8]> for Node {
    fn as_ref(&self) -> &[u8] {
        &self.key
    }
}

impl Node {
    /// Returns the root node.
    ///
    /// ```rust
    /// use treeid::*;
    /// assert_eq!(Node::from(&[]), Node::root());
    /// ```
    pub fn root() -> Self {
        Node {
            loc: Vec::new(),
            key: Vec::new(),
        }
    }

    /// Returns a reference to the tree position of this node.
    pub fn position(&self) -> &[u64] {
        &self.loc
    }

    /// Returns a reference to the key of this node.
    pub fn key(&self) -> &[u8] {
        &self.key
    }

    /// Constructs a node from its tree position as a series of natural
    /// numbers.
    ///
    /// Panics if the input contains any zeros.
    pub fn from_vec(loc: Vec<u64>) -> Self {
        Self::from_vec_parts(loc, Vec::new())
    }

    /// Constructs a node from its tree position and key.
    ///
    /// Panics if the position contains any zeros.
    pub fn from_parts<A: AsRef<[u64]>, B: AsRef<[u8]>>(loc: A, key: B) -> Self {
        assert!(!loc.as_ref().contains(&0));
        Node {
            loc: loc.as_ref().iter().map(|&x| x).collect(),
            key: key.as_ref().iter().map(|&x| x).collect(),
        }
    }

    /// Constructs a node from its (owned) tree position and key.
    ///
    /// Panics if the position contains any zeros.
    pub fn from_vec_parts(loc: Vec<u64>, key: Vec<u8>) -> Self {
        assert!(!loc.contains(&0));
        Node { loc, key }
    }

    /// Returns a node at the same location as the current node, but using
    /// the provided key.
    pub fn with_key<K: AsRef<[u8]>>(&self, key: K) -> Self {
        Node {
            loc: self.loc.clone(),
            key: key.as_ref().iter().map(|&x| x).collect(),
        }
    }

    /// Returns a node at the same location as the current node, but using
    /// the provided (owned) key.
    pub fn with_vec_key(&self, key: Vec<u8>) -> Self {
        Node {
            loc: self.loc.clone(),
            key,
        }
    }

    /// Sets the key for this node.
    pub fn set_key<K: AsRef<[u8]>>(&mut self, key: K) {
        self.key = key.as_ref().iter().map(|&x| x).collect();
    }

    /// Sets the (owned) key for this node.
    pub fn set_vec_key(&mut self, key: Vec<u8>) {
        self.key = key
    }

    /// Get the parent of this node. Sorts before this node and any of its
    /// siblings/children.
    ///
    /// The parent of the root is the root.
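    ///
    /// A quick illustration:
    ///
    /// ```
    /// use treeid::Node;
    ///
    /// let node = Node::from(&[1, 2, 3]);
    /// assert_eq!(Node::from(&[1, 2]), node.parent());
    /// assert_eq!(Node::root(), Node::root().parent());
    /// ```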
    pub fn parent(&self) -> Self {
        let mut parent = self.clone();
        parent.parent_mut();
        parent
    }

    /// In-place variant of `parent`.
    pub fn parent_mut(&mut self) {
        self.loc.pop();
    }

    /// Get the specified child of this node. Sorts after this node, but
    /// before any higher siblings.
    ///
    /// Panics if `id` is zero.
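    ///
    /// For example, a child sorts after its parent but before any higher
    /// siblings of the parent, both as `Node`s and in encoded form:
    ///
    /// ```
    /// use treeid::Node;
    ///
    /// let node = Node::from(&[1, 2]);
    /// let child = node.child(1);
    ///
    /// assert!(node < child);
    /// assert!(child < node.sibling(3).unwrap());
    /// assert!(child.to_binary() < node.sibling(3).unwrap().to_binary());
    /// ```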
    pub fn child(&self, id: u64) -> Self {
        let mut child = self.clone();
        child.child_mut(id);
        child
    }

    /// In-place variant of `child`.
    ///
    /// Panics if `id` is zero.
    pub fn child_mut(&mut self, id: u64) {
        assert!(id != 0);
        self.loc.push(id);
    }

    /// Get the specified sibling of this node. Sort order is dependent on
    /// the value of `id`, relative to the current node's last term.
    ///
    /// Panics if `id` is zero, and returns None for the root.
    pub fn sibling(&self, id: u64) -> Option<Self> {
        if self.is_root() {
            return None;
        }

        let mut sibling = self.clone();
        sibling.sibling_mut(id);
        Some(sibling)
    }

    /// In-place variant of `sibling`; does nothing if this is the root.
    ///
    /// Panics if `id` is zero.
    pub fn sibling_mut(&mut self, id: u64) {
        assert!(id != 0);
        if let Some(c) = self.loc.last_mut() {
            *c = id;
        }
    }

    /// Get the previous sibling of this node. Sorts before this node.
    ///
    /// Returns None if this is a first child or the root.
    pub fn pred(&self) -> Option<Self> {
        let mut pred = self.clone();
        let x = pred.loc.last_mut()?;
        if *x < 2 {
            return None;
        }
        *x -= 1;
        Some(pred)
    }

    /// Get the next sibling of this node. Sorts after this node.
    ///
    /// Returns None if this is the root.
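    ///
    /// For example (`pred` shown alongside for contrast):
    ///
    /// ```
    /// use treeid::Node;
    ///
    /// let node = Node::from(&[1, 2]);
    /// assert_eq!(Some(Node::from(&[1, 3])), node.succ());
    /// assert_eq!(Some(Node::from(&[1, 1])), node.pred());
    /// assert_eq!(None, Node::root().succ());
    /// ```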
    pub fn succ(&self) -> Option<Self> {
        let mut succ = self.clone();
        (*succ.loc.last_mut()?) += 1;
        Some(succ)
    }

    /// Returns `true` if this is the root.
    pub fn is_root(&self) -> bool {
        self.loc.is_empty()
    }

    /// Decode a node from its mLCF encoded form.
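    ///
    /// Round-trips with `Node::to_binary`; the values here are illustrative:
    ///
    /// ```
    /// use treeid::Node;
    ///
    /// let node = Node::from_parts(&[2, 5], b"suffix");
    /// let bytes = node.to_binary();
    /// assert_eq!(Some(node), Node::from_binary(&bytes));
    /// ```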
    pub fn from_binary(mlcf_encoded: &[u8]) -> Option<Self> {
        let mut loc: Vec<u64> = Vec::new();

        let mut it = mlcf_encoded.iter().peekable();
        let mut cursor: u8 = 0;
        'chunker: loop {
            let mut nz_tot: u8 = 0;
            'prefixer: while let Some(&&seg) = it.peek() {
                let nz = (!(seg << cursor)).leading_zeros();
                nz_tot += nz as u8;

                // if cursor has rotated, we must at least attempt to
                // read some prefix from the next byte. it may or may
                // not actually contain any prefix.
                if rotate_consume(&mut it, &mut cursor, nz as u8)? {
                    continue 'prefixer;
                }

                guard(!kth_bit(seg, cursor))?;
                break 'prefixer;
            }

            // if we are here, we have read the entirety of a unit
            // prefix, and cursor points to the first low bit in the
            // next byte of 'it'.

            // advance the cursor by 1 bit to consume a zero bit
            // indicating a partition between the prefix and data
            // carrying component.
            rotate_incr(&mut it, &mut cursor)?;

            // initialize the term as 1 because we already consumed
            // the (inverted) leading payload bit.
            let mut term: u64 = 1;
            'payloader: while let Some(&&seg) = it.peek() {
                // extract only the bits in the current byte that
                // are part of the term we're reading.
                let until_end: u8 = U8_WIDTH - cursor;
                let mut data_mask = (seg << cursor) >> cursor;
                data_mask >>= until_end.saturating_sub(nz_tot);

                // push them into term. repeated application of
                // this push-copy produces the final value.
                let safe_bits: u8 = cmp::min(nz_tot, until_end);
                term <<= safe_bits;
                term |= data_mask as u64;
                nz_tot -= safe_bits;

                rotate_consume(&mut it, &mut cursor, safe_bits)?;
                if nz_tot == 0 {
                    break 'payloader;
                }
            }

            // if we have gotten here, we have successfully decoded a
            // term. the bit at cursor is set high if there are any
            // more terms to decode.
            loc.push(term);
            if !kth_bit_iter(&mut it, cursor) {
                it.next()?;
                break 'chunker;
            }

            // advance the cursor to consume the high bit we just
            // checked for.
            rotate_incr(&mut it, &mut cursor)?;
        }

        guard(it.next()? == &0)?; // consume key separator byte
        let key = it.map(|&x| x).collect(); // key is the rest

        Some(Self::from_vec_parts(loc, key))
    }

    /// Writes this id into a `Vec<u8>` using mLCF encoding.
    ///
    /// ```rust
    /// use treeid::*;
    /// assert_eq!(&[0b00000000, 0], &*Node::from(&[1]).to_binary());
    /// assert_eq!(&[0b10000000, 0], &*Node::from(&[2]).to_binary());
    /// assert_eq!(&[0b10011000, 0], &*Node::from(&[2, 2]).to_binary());
    /// assert_eq!(
    ///     &[0b11000110, 0b11100111, 0b00100000, 0],
    ///     &*Node::from(&[4, 3, 2, 5]).to_binary(),
    /// );
    /// ```
    pub fn to_binary(&self) -> Vec<u8> {
        let evens = self.loc.iter();
        // saturating_sub keeps the root (empty position) from underflowing
        let odds = iter::repeat(&1).take(self.loc.len().saturating_sub(1));
        let it = itertools::interleave(evens, odds);

        let mut stack = BitWriter::new();
        for (i, &x) in it.enumerate() {
            if i % 2 != 0 {
                stack.push_bit(true);
                continue;
            }

            let nz = x.leading_zeros() as u8;
            let nd = 63u8.saturating_sub(nz);
            stack.push_bits(std::u64::MAX, nd);
            stack.push_bit(false);
            stack.push_bits(x, nd);
        }

        stack.align();
        stack.push(0x00);
        stack.push_bytes(&self.key);
        stack.to_vec()
    }
}

fn guard(x: bool) -> Option<()> {
    if x {
        return Some(());
    }
    None
}

#[cfg(test)]
mod tests {
    extern crate rand;
    extern crate test;

    use self::test::Bencher;
    use super::*;
    use num_bigint::BigUint;
    use num_rational::Ratio;

    impl Node {
        fn to_ratio(&self) -> Ratio<BigUint> {
            Self::as_ratio(&self.cf_expansion())
        }

        fn as_ratio(ex: &[u64]) -> Ratio<BigUint> {
            let one = Ratio::new(BigUint::from(1usize), BigUint::from(1usize));
            let mut last = Ratio::from_integer(BigUint::from(0usize));
            for i in (0..ex.len()).rev() {
                let term = &one / (Ratio::new(BigUint::from(ex[i]), BigUint::from(1usize)) + last);
                last = term;
            }
            last.recip()
        }

        fn cf_expansion(&self) -> Vec<u64> {
            let evens = self.loc.iter();
            let odds = iter::repeat(&1).take(self.loc.len().saturating_sub(1));
            itertools::interleave(evens, odds).map(|&x| x).collect()
        }
    }

    #[test]
    fn child_parent_eq() {
        let b = Node::from(&[1, 2, 3]);
        assert_eq!(b, b.child(4).parent());
    }

    #[bench]
    fn binary_lo_2(b: &mut Bencher) {
        let v: Vec<u64> = (1..=2).collect();
        let node = Node::from(&v);
        b.iter(|| Node::from_binary(&*node.to_binary()).unwrap());
    }
    #[bench]
    fn binary_lo_4(b: &mut Bencher) {
        let v: Vec<u64> = (1..=4).collect();
        let node = Node::from(&v);
        b.iter(|| Node::from_binary(&*node.to_binary()).unwrap());
    }
    #[bench]
    fn binary_lo_8(b: &mut Bencher) {
        let v: Vec<u64> = (1..=8).collect();
        let node = Node::from(&v);
        b.iter(|| Node::from_binary(&*node.to_binary()).unwrap());
    }
    #[bench]
    fn binary_lo_16(b: &mut Bencher) {
        let v: Vec<u64> = (1..=16).collect();
        let node = Node::from(&v);
        b.iter(|| Node::from_binary(&*node.to_binary()).unwrap());
    }
    #[bench]
    fn binary_lo_32(b: &mut Bencher) {
        let v: Vec<u64> = (1..=32).collect();
        let node = Node::from(&v);
        b.iter(|| Node::from_binary(&*node.to_binary()).unwrap());
    }
    #[bench]
    fn binary_lo_64(b: &mut Bencher) {
        let v: Vec<u64> = (1..=64).collect();
        let node = Node::from(&v);
        b.iter(|| Node::from_binary(&*node.to_binary()).unwrap());
    }

    #[bench]
    fn binary_hi_2(b: &mut Bencher) {
        let v: Vec<u64> = (1..=2).map(|_| rand::random()).collect();
        let node = Node::from(&v);
        b.iter(|| Node::from_binary(&*node.to_binary()).unwrap());
    }
    #[bench]
    fn binary_hi_4(b: &mut Bencher) {
        let v: Vec<u64> = (1..=4).map(|_| rand::random()).collect();
        let node = Node::from(&v);
        b.iter(|| Node::from_binary(&*node.to_binary()).unwrap());
    }
    #[bench]
    fn binary_hi_8(b: &mut Bencher) {
        let v: Vec<u64> = (1..=8).map(|_| rand::random()).collect();
        let node = Node::from(&v);
        b.iter(|| Node::from_binary(&*node.to_binary()).unwrap());
    }
    #[bench]
    fn binary_hi_16(b: &mut Bencher) {
        let v: Vec<u64> = (1..=16).map(|_| rand::random()).collect();
        let node = Node::from(&v);
        b.iter(|| Node::from_binary(&*node.to_binary()).unwrap());
    }
    #[bench]
    fn binary_hi_32(b: &mut Bencher) {
        let v: Vec<u64> = (1..=32).map(|_| rand::random()).collect();
        let node = Node::from(&v);
        b.iter(|| Node::from_binary(&*node.to_binary()).unwrap());
    }
    #[bench]
    fn binary_hi_64(b: &mut Bencher) {
        let v: Vec<u64> = (1..=64).map(|_| rand::random()).collect();
        let node = Node::from(&v);
        b.iter(|| Node::from_binary(&*node.to_binary()).unwrap());
    }

    #[test]
    fn edge_case() {
        let n1 = Node::from(&[2, 4, 1]); // 2, 1, 4, 1, 1
        let n2 = Node::from(&[2, 5]); //    2, 1, 4, 1
        println!("2 . 4 . 1 : {:?}", Node::from_binary(&*n1.to_binary()),);
        println!();
        println!("2 . 5     : {:?}", Node::from_binary(&*n2.to_binary()),);
        println!();
        assert!(n1.to_ratio() < n2.to_ratio());
        assert!(n2.to_binary().gt(&n1.to_binary()));
    }

    struct BfsIter {
        stack: BigUint,
        radix: u32,
    }

    impl BfsIter {
        fn new(rdx: u32) -> BfsIter {
            BfsIter {
                stack: BigUint::new(vec![]),
                radix: rdx,
            }
        }
    }

    impl Iterator for BfsIter {
        type Item = Vec<u64>;

        fn next(&mut self) -> Option<Self::Item> {
            let item = self
                .stack
                .to_radix_le(self.radix)
                .iter()
                .map(|&x| (x + 1) as u64)
                .collect();
            self.stack += BigUint::from(1u8);
            Some(item)
        }
    }

    #[test]
    fn bfs_iter_round_trip() {
        let mut it = BfsIter::new(15);
        it.next().unwrap();

        for i in 0..2u64.pow(14) {
            let v = it.next().unwrap();
            println!("raw input is: {:?}", v);

            let nd = Node::from_vec_parts(v, (1..=24).map(|_| rand::random()).collect());
            println!("roundtripping: #{} {:?}", i, nd);

            let bin = nd.to_binary();
            println!("binary: {:?}", bin);

            assert_eq!(nd, Node::from_binary(&*bin).unwrap());
        }
    }

    #[test]
    fn lcf_enc() {
        let mut node = Node::from_parts(&[1], b"hello worldo");
        let mut last = node.clone();

        // parent < children
        for i in 0..250 {
            node = node.succ().unwrap();
            if i % 100 == 0 {
                node.child_mut(rand::random());
            }

            // num_gt must be true as per proof in [0]
            // lex_gt must be true as per proof in [1]
            let num_gt = node.to_ratio() > last.to_ratio();
            let lex_gt = node.to_binary().gt(&last.to_binary());
            assert_eq!(num_gt, lex_gt, "forward");
            assert!(num_gt || lex_gt, "forward");
            last = node.clone();
        }

        // children < parent.succ()
        while node.loc.len() > 0 {
            for _ in 0..16 {
                node = node.succ().unwrap();

                // num_gt must be true as per proof in [0]
                // lex_gt must be true as per proof in [1]
                let num_gt = node.to_ratio() > last.to_ratio();
                let lex_gt = node.to_binary().gt(&last.to_binary());
                assert_eq!(num_gt, lex_gt, "backward");
                assert!(num_gt || lex_gt, "backward");

                last = node.clone();
            }

            node.parent_mut();
        }
    }
}