// dbsp/trace/layers/layer.rs

1//! Implementation using ordered keys and exponential search.
2
3//mod consumer;
4//mod tests;
5
6use rand::Rng;
7use rkyv::{Archive, Serialize};
8
9use crate::{
10    DBData, NumEntries,
11    algebra::{AddAssignByRef, AddByRef, NegByRef},
12    dynamic::{DataTrait, DynVec, Erase, Factory, LeanVec, WithFactory},
13    utils::{advance, assume},
14};
15use size_of::SizeOf;
16use std::{
17    cmp::{Ordering, min},
18    fmt::{self, Debug, Display, Formatter},
19    ops::Neg,
20};
21use textwrap::indent;
22
23use super::{Builder, Cursor, MergeBuilder, OrdOffset, Trie, TupleBuilder};
24
/// Factory objects used to allocate the components of a [`Layer`].
///
/// `C` is the factory type of the child layer (`<L as Trie>::Factories` at
/// the use sites in this file).
pub struct LayerFactories<K: DataTrait + ?Sized, C> {
    /// Factory for individual keys of type `K`.
    pub key: &'static dyn Factory<K>,
    /// Factory for vectors of keys.
    pub keys: &'static dyn Factory<DynVec<K>>,
    /// Factories for the child (next lower) layer.
    pub child: C,
}
30
impl<K: DataTrait + ?Sized, C> LayerFactories<K, C> {
    /// Creates a `LayerFactories` whose key factories are derived from the
    /// concrete key type `KType`, combined with the given `child` factories.
    pub fn new<KType>(child: C) -> Self
    where
        KType: DBData + Erase<K>,
    {
        Self {
            key: WithFactory::<KType>::FACTORY,
            keys: WithFactory::<LeanVec<KType>>::FACTORY,
            child,
        }
    }
}
43
44impl<K: DataTrait + ?Sized, C: Clone> Clone for LayerFactories<K, C> {
45    fn clone(&self) -> Self {
46        Self {
47            key: self.key,
48            keys: self.keys,
49            child: self.child.clone(),
50        }
51    }
52}
53
/// A level of the trie, with keys and offsets into a lower layer.
///
/// In this representation, the values for `keys[i]` are found at `vals[offs[i]
/// .. offs[i+1]]`.
#[derive(SizeOf, Archive, Serialize)]
pub struct Layer<K, L, O = usize>
where
    K: DataTrait + ?Sized,
    O: 'static,
    L: Trie,
{
    /// Factories for keys and key vectors; excluded from `SizeOf` accounting.
    #[size_of(skip)]
    pub(crate) factories: LayerFactories<K, <L as Trie>::Factories>,

    /// The keys of the layer.
    pub(crate) keys: Box<DynVec<K>>,
    /// The offsets associated with each key.
    ///
    /// The bounds for `keys[i]` are `(offs[i], offs[i+1])`. The offset array is
    /// guaranteed to be one element longer than the keys array, ensuring
    /// that these accesses do not panic.
    pub(crate) offs: Vec<O>,
    /// The ranges of values associated with the keys.
    pub(crate) vals: L,
}
79
80impl<K, L, O> PartialEq for Layer<K, L, O>
81where
82    K: DataTrait + ?Sized,
83    O: OrdOffset,
84    L: Trie + PartialEq,
85{
86    fn eq(&self, other: &Self) -> bool {
87        self.keys.eq(&other.keys) && self.offs == other.offs && self.vals == other.vals
88    }
89}
90
impl<K, L, O> Eq for Layer<K, L, O>
where
    K: DataTrait + ?Sized,
    O: OrdOffset,
    L: Trie + Eq,
{
    // Marker impl: the `PartialEq` comparison above is a total equivalence
    // relation whenever the child trie is itself `Eq`.
}
98
99impl<K, L, O> Clone for Layer<K, L, O>
100where
101    K: DataTrait + ?Sized,
102    L: Trie + Clone,
103    O: Clone,
104{
105    fn clone(&self) -> Self {
106        Self {
107            factories: self.factories.clone(),
108            keys: self.keys.clone(),
109            offs: self.offs.clone(),
110            vals: self.vals.clone(),
111        }
112    }
113}
114
115impl<K, L, O> Debug for Layer<K, L, O>
116where
117    K: DataTrait + ?Sized,
118    L: Trie + Debug,
119    O: 'static + Debug,
120{
121    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
122        f.debug_struct("Layer")
123            .field("keys", &self.keys)
124            .field("offs", &self.offs)
125            .field("vals", &self.vals)
126            .finish()
127    }
128}
129
impl<K: DataTrait + ?Sized, L, O: 'static> Layer<K, L, O>
where
    L: Trie,
{
    /// Create a new `Layer` from its component parts.
    ///
    /// `offs` must be exactly one element longer than `keys`, and its last
    /// element must equal the number of keys in `vals`; both conditions are
    /// checked in debug builds only.
    pub fn from_parts(
        factories: &LayerFactories<K, <L as Trie>::Factories>,
        mut keys: Box<DynVec<K>>,
        mut offs: Vec<O>,
        vals: L,
    ) -> Self
    where
        O: OrdOffset,
    {
        debug_assert_eq!(keys.len() + 1, offs.len());
        debug_assert_eq!(offs.last().unwrap().into_usize(), vals.keys());

        // Reclaim memory when the spare capacity is at least 10% of the
        // occupied length.
        if keys.spare_capacity() >= keys.len() / 10 {
            keys.shrink_to_fit();
        }
        if offs.capacity() - offs.len() >= offs.len() / 10 {
            offs.shrink_to_fit();
        }

        Self {
            factories: factories.clone(),
            keys,
            offs,
            vals,
        }
    }

    /// Assume the invariants of the current builder
    ///
    /// # Safety
    ///
    /// Requires that `offs` has a length of `keys + 1`
    unsafe fn assume_invariants(&self) {
        // SAFETY: the caller guarantees the length invariant stated above.
        unsafe {
            assume(self.offs.len() == self.keys.len() + 1);
        }
    }

    /// Compute a random sample of size `sample_size` of keys in `self.keys`.
    ///
    /// Pushes the random sample of keys to the `output` vector in ascending
    /// order.
    pub fn sample_keys<RG>(&self, rng: &mut RG, sample_size: usize, output: &mut DynVec<K>)
    where
        RG: Rng,
    {
        output.reserve(sample_size);
        // Sample over the full key range `[0, len)`, appending each selected
        // key by reference into `output`.
        self.keys
            .sample_slice(0, self.keys.len(), rng, sample_size, &mut |x: &K| {
                output.push_ref(x)
            });
    }
}
188
impl<K, L, O> NumEntries for Layer<K, L, O>
where
    K: DataTrait + ?Sized,
    L: Trie + NumEntries,
    O: OrdOffset,
{
    // Entry counts are data-dependent, never a compile-time constant.
    const CONST_NUM_ENTRIES: Option<usize> = None;

    /// Number of keys stored directly at this level of the trie.
    fn num_entries_shallow(&self) -> usize {
        self.keys.len()
    }

    /// Total number of entries, delegated to the child layer.
    fn num_entries_deep(&self) -> usize {
        self.vals.num_entries_deep()
    }
}
205
206impl<K, L, O> NegByRef for Layer<K, L, O>
207where
208    K: DataTrait + ?Sized,
209    L: Trie + NegByRef,
210    O: OrdOffset,
211{
212    fn neg_by_ref(&self) -> Self {
213        Self {
214            factories: self.factories.clone(),
215            keys: self.keys.clone(),
216            offs: self.offs.clone(),
217            // We assume that offsets in `vals` don't change after negation;
218            // otherwise `self.offs` will be invalid.
219            vals: self.vals.neg_by_ref(),
220        }
221    }
222}
223
224impl<K, L, O> Neg for Layer<K, L, O>
225where
226    K: DataTrait + ?Sized,
227    L: Trie + Neg<Output = L>,
228    O: OrdOffset,
229{
230    type Output = Self;
231
232    fn neg(self) -> Self {
233        Self {
234            factories: self.factories,
235            keys: self.keys,
236            offs: self.offs,
237            // We assume that offsets in `vals` don't change after negation;
238            // otherwise `self.offs` will be invalid.
239            vals: self.vals.neg(),
240        }
241    }
242}
243
244impl<K, L, O> AddAssignByRef for Layer<K, L, O>
245where
246    K: DataTrait + ?Sized,
247    L: Trie,
248    O: OrdOffset,
249{
250    fn add_assign_by_ref(&mut self, other: &Self) {
251        if !other.is_empty() {
252            *self = self.merge(other);
253        }
254    }
255}
256
impl<K, L, O> AddByRef for Layer<K, L, O>
where
    K: DataTrait + ?Sized,
    L: Trie,
    O: OrdOffset,
{
    /// Addition of layers is a merge of their sorted contents.
    fn add_by_ref(&self, rhs: &Self) -> Self {
        self.merge(rhs)
    }
}
267
268impl<K, L, O> Trie for Layer<K, L, O>
269where
270    K: DataTrait + ?Sized,
271    L: Trie,
272    O: OrdOffset,
273{
274    type Item<'a> = (&'a mut K, L::Item<'a>);
275    type ItemRef<'a> = (&'a K, L::ItemRef<'a>);
276    type Factories = LayerFactories<K, L::Factories>;
277    type Cursor<'s>
278        = LayerCursor<'s, K, L, O>
279    where
280        K: 's,
281        O: 's,
282        L: 's;
283    type MergeBuilder = LayerBuilder<K, L::MergeBuilder, O>;
284    type TupleBuilder = LayerBuilder<K, L::TupleBuilder, O>;
285    type LeafKey = L::LeafKey;
286
287    fn keys(&self) -> usize {
288        unsafe { self.assume_invariants() }
289        self.keys.len()
290    }
291
292    fn tuples(&self) -> usize {
293        unsafe { self.assume_invariants() }
294        self.vals.tuples()
295    }
296
297    fn cursor_from(&self, lower: usize, upper: usize) -> Self::Cursor<'_> {
298        unsafe { self.assume_invariants() }
299
300        if lower < upper {
301            let child_lower = self.offs[lower];
302            let child_upper = self.offs[lower + 1];
303
304            LayerCursor {
305                bounds: (lower, upper),
306                storage: self,
307                child: self
308                    .vals
309                    .cursor_from(child_lower.into_usize(), child_upper.into_usize()),
310                pos: lower as isize,
311            }
312        } else {
313            LayerCursor {
314                bounds: (0, 0),
315                storage: self,
316                child: self.vals.cursor_from(0, 0),
317                pos: 0,
318            }
319        }
320    }
321
322    fn approximate_byte_size(&self) -> usize {
323        self.keys.approximate_byte_size()
324            + self.offs.len() * std::mem::size_of::<O>()
325            + self.vals.approximate_byte_size()
326    }
327}
328
329impl<K, L, O> Display for Layer<K, L, O>
330where
331    K: DataTrait + ?Sized,
332    L: Trie,
333    for<'a> L::Cursor<'a>: Clone + Display,
334    O: OrdOffset,
335{
336    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
337        self.cursor().fmt(f)
338    }
339}
340
/// Assembles a layer of this trie from keys, offsets, and a builder for the
/// layer below.
#[derive(SizeOf, Clone)]
pub struct LayerBuilder<K, L, O = usize>
where
    K: DataTrait + ?Sized,
    L: Builder,
{
    /// Factories for keys and key vectors; excluded from `SizeOf` accounting.
    #[size_of(skip)]
    factories: LayerFactories<K, <L::Trie as Trie>::Factories>,
    /// Keys
    pub keys: Box<DynVec<K>>,
    /// Offsets
    pub offs: Vec<O>,
    /// The next layer down
    pub vals: L,
}
357
358impl<K, L, O> Debug for LayerBuilder<K, L, O>
359where
360    K: DataTrait + ?Sized,
361    L: Builder + Debug,
362    O: 'static + Debug,
363{
364    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
365        f.debug_struct("LayerBuilder")
366            .field("keys", &self.keys)
367            .field("offs", &self.offs)
368            .field("vals", &self.vals)
369            .finish()
370    }
371}
372
impl<K, L, O> LayerBuilder<K, L, O>
where
    K: DataTrait + ?Sized,
    L: Builder,
{
    /// Performs one step of merging.
    ///
    /// Requires that both cursors are non-exhausted, i.e. `*lower1 < upper1`
    /// and `*lower2 < upper2`. Advances `lower1` and/or `lower2` past the
    /// keys processed in this step.
    pub fn merge_step(
        &mut self,
        (trie1, lower1, upper1): (&Layer<K, L::Trie, O>, &mut usize, usize),
        (trie2, lower2, upper2): (&Layer<K, L::Trie, O>, &mut usize, usize),
        map_func: Option<&dyn Fn(&mut <<Self as Builder>::Trie as Trie>::LeafKey)>,
    ) where
        L: MergeBuilder,
        O: OrdOffset,
    {
        match trie1.keys[*lower1].cmp(&trie2.keys[*lower2]) {
            Ordering::Less => {
                // determine how far we can advance lower1 until we reach/pass lower2
                let step = 1 + trie1
                    .keys
                    .advance_to(1 + *lower1, upper1, &trie2.keys[*lower2]);
                // Cap the copy at 1,000 keys to bound the work per step.
                let step = min(step, 1_000);
                self.copy_range(trie1, *lower1, *lower1 + step, map_func);
                *lower1 += step;
            }

            Ordering::Equal => {
                let lower = self.vals.boundary();
                // record vals_length so we can tell if anything was pushed.
                self.vals.push_merge(
                    trie1.vals.cursor_from(
                        trie1.offs[*lower1].into_usize(),
                        trie1.offs[*lower1 + 1].into_usize(),
                    ),
                    trie2.vals.cursor_from(
                        trie2.offs[*lower2].into_usize(),
                        trie2.offs[*lower2 + 1].into_usize(),
                    ),
                    map_func,
                );
                // Only emit the key if merging its values produced output
                // (the merged values may consolidate to nothing).
                if self.vals.keys() > lower {
                    self.keys.push_ref(&trie1.keys[*lower1]);
                    self.offs.push(O::from_usize(self.vals.keys()));
                }

                *lower1 += 1;
                *lower2 += 1;
            }

            Ordering::Greater => {
                // determine how far we can advance lower2 until we reach/pass lower1
                let step = 1 + trie2
                    .keys
                    .advance_to(1 + *lower2, upper2, &trie1.keys[*lower1]);
                let step = min(step, 1_000);
                self.copy_range(trie2, *lower2, *lower2 + step, map_func);
                *lower2 += step;
            }
        }
    }

    /// Like [`Self::merge_step`], but drops keys for which `filter` returns
    /// `false`.
    pub fn merge_step_retain_keys<F>(
        &mut self,
        (trie1, lower1, upper1): (&<Self as Builder>::Trie, &mut usize, usize),
        (trie2, lower2, upper2): (&<Self as Builder>::Trie, &mut usize, usize),
        filter: &F,
        map_func: Option<&dyn Fn(&mut <<Self as Builder>::Trie as Trie>::LeafKey)>,
    ) where
        L: MergeBuilder,
        O: OrdOffset,
        F: Fn(&K) -> bool,
    {
        match trie1.keys[*lower1].cmp(&trie2.keys[*lower2]) {
            Ordering::Less => {
                // determine how far we can advance lower1 until we reach/pass lower2
                let step = 1 + trie1
                    .keys
                    .advance_to(1 + *lower1, upper1, &trie2.keys[*lower2]);
                // Cap the copy at 1,000 keys to bound the work per step.
                let step = min(step, 1_000);
                self.copy_range_retain_keys(trie1, *lower1, *lower1 + step, filter, map_func);
                *lower1 += step;
            }

            Ordering::Equal => {
                let lower = self.vals.boundary();
                if filter(&trie1.keys[*lower1]) {
                    // record vals_length so we can tell if anything was pushed.
                    self.vals.push_merge(
                        trie1.vals.cursor_from(
                            trie1.offs[*lower1].into_usize(),
                            trie1.offs[*lower1 + 1].into_usize(),
                        ),
                        trie2.vals.cursor_from(
                            trie2.offs[*lower2].into_usize(),
                            trie2.offs[*lower2 + 1].into_usize(),
                        ),
                        map_func,
                    );
                    // Only emit the key if merging its values produced output.
                    if self.vals.keys() > lower {
                        self.keys.push_ref(&trie1.keys[*lower1]);
                        self.offs.push(O::from_usize(self.vals.keys()));
                    }
                }

                *lower1 += 1;
                *lower2 += 1;
            }

            Ordering::Greater => {
                // determine how far we can advance lower2 until we reach/pass lower1
                let step = 1 + trie2
                    .keys
                    .advance_to(1 + *lower2, upper2, &trie1.keys[*lower1]);
                let step = min(step, 1_000);
                self.copy_range_retain_keys(trie2, *lower2, *lower2 + step, filter, map_func);
                *lower2 += step;
            }
        }
    }

    /*
    /// Push a key and all of its associated values to the current builder
    ///
    /// Can be more efficient than repeatedly calling `.push_tuple()` because it
    /// doesn't require an owned key for every value+diff pair which can allow
    /// eliding unnecessary clones
    pub fn with_key<F>(&mut self, mut key: K, with: F)
    where
        L: DynTupleBuilder,
        O: OrdOffset,
        F: for<'a> FnOnce(ErasedOrderedBuilderVals<'a, K, I, L, O>),
    {
        let mut pushes = 0;
        let vals = ErasedOrderedBuilderVals {
            builder: self,
            pushes: &mut pushes,
        };
        with(vals);

        // If the user's closure actually added any elements, push the key
        if pushes != 0
            && (self.keys.is_empty()
                || !self.offs[self.keys.len()].is_zero()
                || !(self.vtables.key.eq)(&self.keys[self.keys.len() - 1], &key))
        {
            if !self.keys.is_empty() && self.offs[self.keys.len()].is_zero() {
                self.offs[self.keys.len()] = O::from_usize(self.vals.boundary());
            }
            // Safety: we own `key`, and we forget it, so `drop` won't get called.
            unsafe { self.keys.push(&mut key) };
            forget(key);
            self.offs.push(O::zero());
        }
    }*/
}
528
529/*
530pub struct ErasedOrderedBuilderVals<'a, K, I, L, O>
531where
532    K: 'static,
533    I: 'static,
534    O: 'static,
535    L: DynBuilder,
536{
537    builder: &'a mut ErasedOrderedBuilder<K, I, L, O>,
538    pushes: &'a mut usize,
539}
540*/
541
542/*
543impl<'a, K, I, L, O> ErasedOrderedBuilderVals<'a, K, I, L, O>
544where
545    K: 'static,
546    L: DynTupleBuilder,
547{
548    pub fn push(&mut self, value: <L as DynTupleBuilder>::Item) {
549        *self.pushes += 1;
550        unsafe { self.builder.vals.push_tuple(value); }
551    }
552}
553*/
554
impl<K, L, O> Builder for LayerBuilder<K, L, O>
where
    K: DataTrait + ?Sized,
    L: Builder,
    O: OrdOffset,
{
    type Trie = Layer<K, L::Trie, O>;

    /// Seals the value range of the current key by recording the child
    /// builder's boundary in the offset slot after the last key, and returns
    /// the number of keys pushed so far.
    fn boundary(&mut self) -> usize {
        self.offs[self.keys.len()] = O::from_usize(self.vals.boundary());
        self.keys.len()
    }

    /// Finalizes the builder into a [`Layer`].
    fn done(mut self) -> Self::Trie {
        // Seal the final offset if `boundary` was never called for the last
        // key (the slot is still zero).
        if !self.keys.is_empty() && self.offs[self.keys.len()].is_zero() {
            self.offs[self.keys.len()] = O::from_usize(self.vals.boundary());
        }

        // Reclaim memory when the spare key capacity is at least 10% of the
        // occupied length.
        if self.keys.spare_capacity() >= self.keys.len() / 10 {
            self.keys.shrink_to_fit();
            self.offs.shrink_to_fit();
        }

        Layer {
            factories: self.factories,
            keys: self.keys,
            offs: self.offs,
            vals: self.vals.done(),
        }
    }
}
586
impl<K, L, O> LayerBuilder<K, L, O>
where
    K: DataTrait + ?Sized,
    L: MergeBuilder,
    O: OrdOffset,
{
    /// Like `push_merge`, but uses `fuel` to bound the amount of work.
    ///
    /// Builds at most `fuel` values plus values for one extra key.
    /// If `fuel > 0` when the method returns, this means that the merge is
    /// complete.
    pub fn push_merge_fueled(
        &mut self,
        (source1, lower1, upper1): (&<Self as Builder>::Trie, &mut usize, usize),
        (source2, lower2, upper2): (&<Self as Builder>::Trie, &mut usize, usize),
        map_func: Option<&dyn Fn(&mut <<Self as Builder>::Trie as Trie>::LeafKey)>,
        fuel: &mut isize,
    ) {
        // Effort is measured in input values consumed, relative to this
        // starting offset sum.
        let starting_updates = source1.offs[*lower1] + source2.offs[*lower2];
        let mut effort = 0isize;

        // while both mergees are still active
        while *lower1 < upper1 && *lower2 < upper2 && effort < *fuel {
            self.merge_step(
                (source1, lower1, upper1),
                (source2, lower2, upper2),
                map_func,
            );
            effort = (source1.offs[*lower1] + source2.offs[*lower2] - starting_updates).into_usize()
                as isize;
        }

        // Merging is complete; only copying remains.
        if *lower1 == upper1 || *lower2 == upper2 {
            // Limit merging by remaining fuel.
            let mut remaining_fuel = *fuel - effort;
            if remaining_fuel > 0 {
                if *lower1 < upper1 {
                    // Spend at least 1,000 units here so the copy makes
                    // progress even when fuel is nearly exhausted.
                    if remaining_fuel < 1_000 {
                        remaining_fuel = 1_000;
                    }
                    *lower1 = self.copy_range_fueled(
                        source1,
                        *lower1,
                        upper1,
                        map_func,
                        remaining_fuel as usize,
                    );
                }
                if *lower2 < upper2 {
                    if remaining_fuel < 1_000 {
                        remaining_fuel = 1_000;
                    }
                    *lower2 = self.copy_range_fueled(
                        source2,
                        *lower2,
                        upper2,
                        map_func,
                        remaining_fuel as usize,
                    );
                }
            }
        }

        // Charge the total consumed input values against `fuel`.
        effort = (source1.offs[*lower1] + source2.offs[*lower2] - starting_updates).into_usize()
            as isize;
        *fuel -= effort;
    }

    /// Like [`Self::push_merge_fueled`], but also drops keys that don't pass
    /// `filter`.
    pub fn push_merge_retain_keys_fueled<F>(
        &mut self,
        (source1, lower1, upper1): (&<Self as Builder>::Trie, &mut usize, usize),
        (source2, lower2, upper2): (&<Self as Builder>::Trie, &mut usize, usize),
        filter: &F,
        map_func: Option<&dyn Fn(&mut <<Self as Builder>::Trie as Trie>::LeafKey)>,
        fuel: &mut isize,
    ) where
        F: Fn(&K) -> bool,
    {
        // We measure effort exerted by this function in terms of the number of input
        // values processed rather than the number of produced outputs.
        let starting_updates = source1.offs[*lower1] + source2.offs[*lower2];
        let mut effort = 0isize;

        // while both mergees are still active
        while *lower1 < upper1 && *lower2 < upper2 && effort < *fuel {
            self.merge_step_retain_keys(
                (source1, lower1, upper1),
                (source2, lower2, upper2),
                filter,
                map_func,
            );
            effort = (source1.offs[*lower1] + source2.offs[*lower2] - starting_updates).into_usize()
                as isize;
        }

        // Merging is complete; only copying remains.
        if *lower1 == upper1 || *lower2 == upper2 {
            // Limit merging by remaining fuel.
            let mut remaining_fuel = *fuel - effort;
            if remaining_fuel > 0 {
                if *lower1 < upper1 {
                    // Spend at least 1,000 units (mirrors `push_merge_fueled`).
                    if remaining_fuel < 1_000 {
                        remaining_fuel = 1_000;
                    }
                    *lower1 = self.copy_range_retain_keys_fueled(
                        source1,
                        *lower1,
                        upper1,
                        filter,
                        map_func,
                        remaining_fuel as usize,
                    );
                }
                if *lower2 < upper2 {
                    if remaining_fuel < 1_000 {
                        remaining_fuel = 1_000;
                    }
                    *lower2 = self.copy_range_retain_keys_fueled(
                        source2,
                        *lower2,
                        upper2,
                        filter,
                        map_func,
                        remaining_fuel as usize,
                    );
                }
            }
        }

        effort = (source1.offs[*lower1] + source2.offs[*lower2] - starting_updates).into_usize()
            as isize;

        *fuel -= effort;
    }

    /// Copy the key at `index` and its associated values from `other` to `self`, applying
    /// `map_func` to times along the way. If all values get consolidated away, the
    /// key is not copied.
    fn map_times_and_copy_key(
        &mut self,
        other: &<Self as Builder>::Trie,
        index: usize,
        map_func: &dyn Fn(&mut <<Self as Builder>::Trie as Trie>::LeafKey),
    ) {
        // Record the child key count so we can tell if anything was copied.
        let val_start = self.vals.keys();

        self.vals.copy_range(
            &other.vals,
            other.offs[index].into_usize(),
            other.offs[index + 1].into_usize(),
            Some(map_func),
        );

        let val_end = self.vals.keys();

        // Emit the key only if copying its values produced output.
        if val_end > val_start {
            self.keys.push_ref(&other.keys[index]);
            self.offs.push(O::from_usize(val_end));
        }
    }

    /// Like `copy_range`, but uses `fuel` to bound the amount of work.
    ///
    /// Invariants:
    /// - Copies at most `fuel` values plus values for one extra key.
    /// - If `fuel` is greater than or equal to the number of values, in the
    ///   `lower..upper` key range, copies the entire range.
    ///
    /// Returns the index one past the last key copied.
    pub fn copy_range_fueled(
        &mut self,
        other: &<Self as Builder>::Trie,
        lower: usize,
        mut upper: usize,
        map_func: Option<&dyn Fn(&mut <<Self as Builder>::Trie as Trie>::LeafKey)>,
        fuel: usize,
    ) -> usize {
        assert!(lower < upper && lower < other.offs.len() && upper < other.offs.len());

        let other_basis = other.offs[lower];
        let self_basis = self.offs.last().copied().unwrap_or_else(|| O::zero());

        // Number of keys in the `[lower..upper]` range that can be copied without
        // exceeding `fuel`.
        let keys = advance(&other.offs[lower..upper], |offset| {
            offset.into_usize() - other_basis.into_usize() <= fuel
        });

        upper = lower + keys;

        if let Some(map_func) = map_func {
            // Process keys one by one only keeping those keys whose values don't
            // get compacted away.
            for i in lower..upper {
                self.map_times_and_copy_key(other, i, map_func);
            }
        } else {
            self.keys
                .extend_from_range(other.keys.as_ref(), lower, upper);
            // Rebase the copied offsets from `other`'s coordinates onto the
            // end of `self`'s offset array.
            for index in lower..upper {
                self.offs
                    .push((other.offs[index + 1] + self_basis) - other_basis);
            }

            self.vals.copy_range(
                &other.vals,
                other_basis.into_usize(),
                other.offs[upper].into_usize(),
                None,
            );
        }
        upper
    }

    /// Like [`Self::copy_range_fueled`], but drops keys that don't pass
    /// `filter`. Returns the index one past the last key considered.
    fn copy_range_retain_keys_fueled<F>(
        &mut self,
        other: &<Self as Builder>::Trie,
        lower: usize,
        mut upper: usize,
        filter: &F,
        map_func: Option<&dyn Fn(&mut <<Self as Builder>::Trie as Trie>::LeafKey)>,
        fuel: usize,
    ) -> usize
    where
        F: Fn(&K) -> bool,
    {
        assert!(lower < upper && lower < other.offs.len() && upper < other.offs.len());

        let other_basis = other.offs[lower];

        // Number of keys in the `[lower..upper]` range that can be copied without
        // exceeding `fuel`.
        let keys = advance(&other.offs[lower..upper], |offset| {
            offset.into_usize() - other_basis.into_usize() <= fuel
        });

        upper = lower + keys;

        self.copy_range_retain_keys(other, lower, upper, filter, map_func);

        upper
    }
}
829
830impl<K, V, L, O> LayerBuilder<K, L, O>
831where
832    K: DataTrait + ?Sized,
833    V: DataTrait + ?Sized,
834    O: OrdOffset,
835    L: MergeBuilder,
836    <L as Builder>::Trie: 'static,
837    for<'a, 'b> <<L as Builder>::Trie as Trie>::Cursor<'a>: Cursor<'a, Key = V>,
838{
    /// Like `push_merge_fueled`, but also removes values that don't pass
    /// `filter` in both inputs.
    ///
    /// `key_filter` drops whole keys; `value_filter` drops individual values.
    /// `fuel` bounds the amount of work as in `push_merge_fueled`.
    pub fn push_merge_retain_values_fueled<KF, VF>(
        &mut self,
        (source1, lower1, upper1): (&<Self as Builder>::Trie, &mut usize, usize),
        (source2, lower2, upper2): (&<Self as Builder>::Trie, &mut usize, usize),
        key_filter: &KF,
        value_filter: &VF,
        map_func: Option<&dyn Fn(&mut <<Self as Builder>::Trie as Trie>::LeafKey)>,
        fuel: &mut isize,
    ) where
        KF: Fn(&K) -> bool,
        VF: Fn(&V) -> bool,
    {
        // Effort is measured in input values consumed, relative to this
        // starting offset sum.
        let starting_updates = source1.offs[*lower1] + source2.offs[*lower2];

        let mut effort = 0isize;

        // while both mergees are still active
        while *lower1 < upper1 && *lower2 < upper2 && effort < *fuel {
            self.merge_step_retain_values_fueled(
                (source1, lower1, upper1),
                (source2, lower2, upper2),
                key_filter,
                value_filter,
                map_func,
                usize::MAX,
            );
            effort = (source1.offs[*lower1] + source2.offs[*lower2] - starting_updates).into_usize()
                as isize;
        }

        // Merging is complete; only copying remains.
        if *lower1 == upper1 || *lower2 == upper2 {
            // Limit merging by remaining fuel.
            let mut remaining_fuel = *fuel - effort;
            if remaining_fuel > 0 {
                if *lower1 < upper1 {
                    // Spend at least 1,000 units (mirrors `push_merge_fueled`).
                    if remaining_fuel < 1_000 {
                        remaining_fuel = 1_000;
                    }
                    *lower1 = self.copy_range_retain_values_fueled(
                        source1,
                        *lower1,
                        upper1,
                        key_filter,
                        value_filter,
                        map_func,
                        remaining_fuel as usize,
                    );
                }
                if *lower2 < upper2 {
                    if remaining_fuel < 1_000 {
                        remaining_fuel = 1_000;
                    }
                    *lower2 = self.copy_range_retain_values_fueled(
                        source2,
                        *lower2,
                        upper2,
                        key_filter,
                        value_filter,
                        map_func,
                        remaining_fuel as usize,
                    );
                }
            }
        }

        // Charge the total consumed input values against `fuel`.
        effort = (source1.offs[*lower1] + source2.offs[*lower2] - starting_updates).into_usize()
            as isize;
        *fuel -= effort;
    }
911
    /// One merge step that drops keys failing `key_filter` and values failing
    /// `value_filter`.
    ///
    /// Requires `*lower1 < upper1` and `*lower2 < upper2`; advances `lower1`
    /// and/or `lower2` past the processed keys. `fuel` bounds the copy work on
    /// the non-matching (`Less`/`Greater`) branches.
    fn merge_step_retain_values_fueled<KF, VF>(
        &mut self,
        (trie1, lower1, upper1): (&<Self as Builder>::Trie, &mut usize, usize),
        (trie2, lower2, upper2): (&<Self as Builder>::Trie, &mut usize, usize),
        key_filter: &KF,
        value_filter: &VF,
        map_func: Option<&dyn Fn(&mut <<Self as Builder>::Trie as Trie>::LeafKey)>,
        fuel: usize,
    ) where
        KF: Fn(&K) -> bool,
        VF: Fn(&V) -> bool,
    {
        match trie1.keys[*lower1].cmp(&trie2.keys[*lower2]) {
            Ordering::Less => {
                // determine how far we can advance lower1 until we reach/pass lower2
                let step = 1 + trie1
                    .keys
                    .advance_to(1 + *lower1, upper1, &trie2.keys[*lower2]);
                // Cap the copy at 1,000 keys to bound the work per step.
                let step = min(step, 1_000);
                *lower1 = self.copy_range_retain_values_fueled(
                    trie1,
                    *lower1,
                    *lower1 + step,
                    key_filter,
                    value_filter,
                    map_func,
                    fuel,
                );
            }

            Ordering::Equal => {
                let lower = self.vals.boundary();
                let cursor1 = trie1.vals.cursor_from(
                    trie1.offs[*lower1].into_usize(),
                    trie1.offs[*lower1 + 1].into_usize(),
                );

                let cursor2 = trie2.vals.cursor_from(
                    trie2.offs[*lower2].into_usize(),
                    trie2.offs[*lower2 + 1].into_usize(),
                );

                if key_filter(&trie1.keys[*lower1]) {
                    // record vals_length so we can tell if anything was pushed.
                    self.vals
                        .push_merge_retain_keys(cursor1, cursor2, value_filter, map_func);

                    // Only emit the key if merging its values produced output.
                    if self.vals.keys() > lower {
                        self.keys.push_ref(&trie1.keys[*lower1]);
                        self.offs.push(O::from_usize(self.vals.keys()));
                    }
                }
                *lower1 += 1;
                *lower2 += 1;
            }

            Ordering::Greater => {
                // determine how far we can advance lower2 until we reach/pass lower1
                let step = 1 + trie2
                    .keys
                    .advance_to(1 + *lower2, upper2, &trie1.keys[*lower1]);
                let step = min(step, 1_000);
                *lower2 = self.copy_range_retain_values_fueled(
                    trie2,
                    *lower2,
                    *lower2 + step,
                    key_filter,
                    value_filter,
                    map_func,
                    fuel,
                );
            }
        }
    }
986
    /// Copies keys in `other[lower..upper]` (and their values) into `self`,
    /// keeping only keys passing `key_filter` and values passing
    /// `value_filter`, and stopping early once roughly `fuel` source values
    /// have been scanned.
    ///
    /// Returns the index one past the last key processed: `upper` if the
    /// whole range was consumed, or an earlier index if fuel ran out. The
    /// caller resumes from the returned index on the next call.
    #[allow(clippy::too_many_arguments)]
    fn copy_range_retain_values_fueled<KF, VF>(
        &mut self,
        other: &<Self as Builder>::Trie,
        lower: usize,
        upper: usize,
        key_filter: &KF,
        value_filter: &VF,
        map_func: Option<&dyn Fn(&mut <<Self as Builder>::Trie as Trie>::LeafKey)>,
        fuel: usize,
    ) -> usize
    where
        KF: Fn(&K) -> bool,
        VF: Fn(&V) -> bool,
    {
        assert!(lower < upper && lower < other.offs.len() && upper < other.offs.len());

        // Start of the first key's value range; used to measure total fuel
        // consumed across the whole call.
        let other_start = other.offs[lower].into_usize();

        for index in lower..upper {
            if key_filter(&other.keys[index]) {
                // Value boundary before the copy, to detect whether any
                // values for this key survive the filter.
                let self_basis = self.vals.boundary();
                let other_end = other.offs[index + 1].into_usize();

                let cursor = other
                    .vals
                    .cursor_from(other.offs[index].into_usize(), other_end);

                let other_basis = cursor.position();

                if other_end > other_basis {
                    self.vals.copy_range_retain_keys(
                        &other.vals,
                        other_basis,
                        other_end,
                        value_filter,
                        map_func,
                    );
                    // Only record the key if at least one value was kept.
                    if self.vals.keys() > self_basis {
                        self.keys.push_ref(&other.keys[index]);
                        self.offs.push(O::from_usize(self.vals.keys()));
                    }
                }

                // NOTE(review): fuel is measured from `other_start` and only
                // checked for keys that pass `key_filter`, so runs of
                // filtered-out keys never trigger an early return —
                // presumably intentional since they copy no values; confirm.
                if other_end - other_start >= fuel {
                    return index + 1;
                }
            }
        }

        upper
    }
1039}
1040
impl<K, L, O> MergeBuilder for LayerBuilder<K, L, O>
where
    K: DataTrait + ?Sized,
    L: MergeBuilder,
    O: OrdOffset,
{
    /// Creates a builder sized to hold a full merge of `other1` and `other2`.
    ///
    /// The offset vector holds one more entry than the key vector (the
    /// leading `O::zero()`), mirroring the `Layer` offset invariant.
    fn with_capacity(other1: &Self::Trie, other2: &Self::Trie) -> Self {
        let mut offs = Vec::with_capacity(other1.keys() + other2.keys() + 1);
        offs.push(O::zero());

        let mut keys = other1.factories.keys.default_box();
        keys.reserve_exact(other1.keys() + other2.keys());

        Self {
            factories: other1.factories.clone(),
            keys,
            offs,
            vals: L::with_capacity(&other1.vals, &other2.vals),
        }
    }

    /*fn with_key_capacity(capacity: usize) -> Self {
        let mut offs = Vec::with_capacity(capacity + 1);
        offs.push(O::zero());

        Self {
            keys: Vec::with_capacity(capacity),
            offs,
            vals: L::with_key_capacity(capacity),
        }
    }*/

    /// Reserves space for `additional` keys at this level and below.
    fn reserve(&mut self, additional: usize) {
        self.keys.reserve(additional);
        self.offs.reserve(additional);
        self.vals.reserve(additional);
    }

    /// Number of keys pushed into this builder so far.
    fn keys(&self) -> usize {
        self.keys.len()
    }

    /// Copies keys `other[lower..upper]` and their value ranges into `self`,
    /// optionally applying `map_func` to leaf values.
    fn copy_range(
        &mut self,
        other: &Self::Trie,
        lower: usize,
        upper: usize,
        map_func: Option<&dyn Fn(&mut <<Self as Builder>::Trie as Trie>::LeafKey)>,
    ) {
        assert!(lower < upper && lower < other.offs.len() && upper < other.offs.len());

        let other_basis = other.offs[lower];
        let self_basis = self.offs.last().copied().unwrap_or_else(|| O::zero());

        if let Some(map_func) = map_func {
            // Mapping may change leaf values, so copy key-by-key.
            for i in lower..upper {
                self.map_times_and_copy_key(other, i, map_func);
            }
        } else {
            // Bulk copy: keys verbatim, offsets rebased from `other`'s
            // coordinate system into `self`'s.
            self.keys
                .extend_from_range(other.keys.as_ref(), lower, upper);
            for index in lower..upper {
                self.offs
                    .push((other.offs[index + 1] + self_basis) - other_basis);
            }

            self.vals.copy_range(
                &other.vals,
                other_basis.into_usize(),
                other.offs[upper].into_usize(),
                None,
            );
        }
    }

    /// Like [`Self::copy_range`], but only copies keys for which `filter`
    /// returns `true`.
    fn copy_range_retain_keys<'a, F>(
        &mut self,
        other: &'a Self::Trie,
        lower: usize,
        upper: usize,
        filter: &F,
        map_func: Option<&dyn Fn(&mut <<Self as Builder>::Trie as Trie>::LeafKey)>,
    ) where
        F: Fn(&<<Self::Trie as Trie>::Cursor<'a> as Cursor<'a>>::Key) -> bool,
    {
        assert!(lower < upper && lower < other.offs.len() && upper < other.offs.len());

        for index in lower..upper {
            if filter(&other.keys[index]) {
                if let Some(map_func) = map_func {
                    self.map_times_and_copy_key(other, index, map_func);
                } else {
                    self.keys.push_ref(&other.keys[index]);

                    self.vals.copy_range(
                        &other.vals,
                        other.offs[index].into_usize(),
                        other.offs[index + 1].into_usize(),
                        None,
                    );

                    self.offs.push(O::from_usize(self.vals.keys()));
                }
            }
        }
    }

    /// Fully merges the key ranges behind `cursor1` and `cursor2` into
    /// `self`, stepping through overlapping portions and bulk-copying the
    /// remainder of whichever side is left.
    fn push_merge<'a>(
        &'a mut self,
        cursor1: <Self::Trie as Trie>::Cursor<'a>,
        cursor2: <Self::Trie as Trie>::Cursor<'a>,
        map_func: Option<&dyn Fn(&mut <<Self as Builder>::Trie as Trie>::LeafKey)>,
    ) {
        let (mut lower1, upper1) = cursor1.bounds;
        let (mut lower2, upper2) = cursor2.bounds;

        let capacity = (upper1 - lower1) + (upper2 - lower2);
        self.keys.reserve(capacity);
        self.offs.reserve(capacity);

        // while both mergees are still active
        while lower1 < upper1 && lower2 < upper2 {
            self.merge_step(
                (cursor1.storage, &mut lower1, upper1),
                (cursor2.storage, &mut lower2, upper2),
                map_func,
            );
        }

        // At most one of the two tails is non-empty at this point.
        if lower1 < upper1 {
            self.copy_range(cursor1.storage, lower1, upper1, map_func);
        }
        if lower2 < upper2 {
            self.copy_range(cursor2.storage, lower2, upper2, map_func);
        }
    }

    /// Like [`Self::push_merge`], but only keys satisfying `filter` are
    /// retained in the output.
    fn push_merge_retain_keys<'a, F>(
        &'a mut self,
        cursor1: <Self::Trie as Trie>::Cursor<'a>,
        cursor2: <Self::Trie as Trie>::Cursor<'a>,
        filter: &F,
        map_func: Option<&dyn Fn(&mut <<Self as Builder>::Trie as Trie>::LeafKey)>,
    ) where
        F: Fn(&<<Self::Trie as Trie>::Cursor<'a> as Cursor<'a>>::Key) -> bool,
    {
        let (mut lower1, upper1) = cursor1.bounds;
        let (mut lower2, upper2) = cursor2.bounds;

        let capacity = (upper1 - lower1) + (upper2 - lower2);
        self.keys.reserve(capacity);
        self.offs.reserve(capacity);

        // while both mergees are still active
        while lower1 < upper1 && lower2 < upper2 {
            self.merge_step_retain_keys(
                (cursor1.storage, &mut lower1, upper1),
                (cursor2.storage, &mut lower2, upper2),
                filter,
                map_func,
            );
        }

        // At most one of the two tails is non-empty at this point.
        if lower1 < upper1 {
            self.copy_range_retain_keys(cursor1.storage, lower1, upper1, filter, map_func);
        }
        if lower2 < upper2 {
            self.copy_range_retain_keys(cursor2.storage, lower2, upper2, filter, map_func);
        }
    }
}
1212
impl<K, L, O> TupleBuilder for LayerBuilder<K, L, O>
where
    K: DataTrait + ?Sized,
    L: TupleBuilder,
    O: OrdOffset,
{
    /// Creates an empty builder. The offset vector starts with the leading
    /// `O::zero()` required by the `Layer` offset invariant.
    fn new(factories: &<Self::Trie as Trie>::Factories) -> Self {
        Self {
            factories: factories.clone(),
            keys: factories.keys.default_box(),
            offs: vec![O::zero()],
            vals: L::new(&factories.child),
        }
    }

    /// Creates an empty builder with room for `cap` tuples at every level.
    fn with_capacity(factories: &<Self::Trie as Trie>::Factories, cap: usize) -> Self {
        let mut offs = Vec::with_capacity(cap + 1);
        offs.push(O::zero());

        let mut keys = factories.keys.default_box();
        keys.reserve_exact(cap);

        Self {
            factories: factories.clone(),
            keys,
            offs,
            vals: L::with_capacity(&factories.child, cap),
        }
    }

    /// Reserves space for `additional` tuples at this level and below.
    fn reserve_tuples(&mut self, additional: usize) {
        self.keys.reserve(additional);
        self.offs.reserve(additional);
        self.vals.reserve_tuples(additional);
    }

    /// Total number of leaf tuples pushed so far (counted at the leaf level,
    /// not the number of distinct keys at this level).
    fn tuples(&self) -> usize {
        self.vals.tuples()
    }

    /// Pushes one `(key, value)` tuple, consuming the key.
    ///
    /// A trailing `O::zero()` offset marks the current key as "unfinished";
    /// it is patched to the real value boundary when the next distinct key
    /// arrives (or, presumably, when the builder is finalized elsewhere).
    fn push_tuple<'a>(&mut self, (key, val): (&mut K, <L::Trie as Trie>::Item<'_>)) {
        // if first element, prior element finish, or different element, need to push
        // and maybe punctuate.
        if self.keys.is_empty()
            || !self.offs[self.keys.len()].is_zero()
            || &self.keys[self.keys.len() - 1] != key
        {
            // Seal the previous key's value range before starting a new key.
            if !self.keys.is_empty() && self.offs[self.keys.len()].is_zero() {
                self.offs[self.keys.len()] = O::from_usize(self.vals.boundary());
            }
            self.keys.push_val(key);
            self.offs.push(O::zero()); // <-- indicates "unfinished".
        }
        self.vals.push_tuple(val);
    }

    /// Same as [`Self::push_tuple`], but borrows the key instead of
    /// consuming it.
    fn push_refs<'a>(&mut self, (key, val): (&K, <L::Trie as Trie>::ItemRef<'_>)) {
        // if first element, prior element finish, or different element, need to push
        // and maybe punctuate.
        if self.keys.is_empty()
            || !self.offs[self.keys.len()].is_zero()
            || &self.keys[self.keys.len() - 1] != (key)
        {
            // Seal the previous key's value range before starting a new key.
            if !self.keys.is_empty() && self.offs[self.keys.len()].is_zero() {
                self.offs[self.keys.len()] = O::from_usize(self.vals.boundary());
            }
            self.keys.push_ref(key);
            self.offs.push(O::zero()); // <-- indicates "unfinished".
        }
        self.vals.push_refs(val);
    }
}
1285
1286/// A cursor with a child cursor that is updated as we move.
/// A cursor with a child cursor that is updated as we move.
#[derive(Debug)]
pub struct LayerCursor<'s, K, L, O>
where
    K: DataTrait + ?Sized,
    O: 'static,
    L: Trie,
{
    /// The layer this cursor walks over.
    pub storage: &'s Layer<K, L, O>,
    // Current key index; `isize` so reverse iteration can step one position
    // before `bounds.0` to signal "before the start".
    pos: isize,
    // Half-open `[lower, upper)` key range this cursor is restricted to.
    bounds: (usize, usize),
    /// The cursor for the trie layer below this one.
    pub child: L::Cursor<'s>,
}
1300
1301impl<'s, K, L, O> Clone for LayerCursor<'s, K, L, O>
1302where
1303    K: DataTrait + ?Sized,
1304    L: Trie,
1305    O: OrdOffset,
1306    L::Cursor<'s>: Clone,
1307{
1308    fn clone(&self) -> Self {
1309        Self {
1310            storage: self.storage,
1311            pos: self.pos,
1312            bounds: self.bounds,
1313            child: self.child.clone(),
1314        }
1315    }
1316
1317    fn clone_from(&mut self, source: &Self) {
1318        self.storage.clone_from(&source.storage);
1319        self.pos.clone_from(&source.pos);
1320        self.bounds.clone_from(&source.bounds);
1321        self.child.clone_from(&source.child);
1322    }
1323}
1324
1325impl<K, L, O> LayerCursor<'_, K, L, O>
1326where
1327    K: DataTrait + ?Sized,
1328    L: Trie,
1329    O: OrdOffset,
1330{
1331    pub fn pos(&self) -> isize {
1332        self.pos
1333    }
1334
1335    pub fn seek_with<P>(&mut self, predicate: P)
1336    where
1337        P: Fn(&K) -> bool,
1338    {
1339        if self.valid() {
1340            self.pos +=
1341                self.storage
1342                    .keys
1343                    .advance_until(self.pos as usize, self.bounds.1, &predicate)
1344                    as isize;
1345        }
1346
1347        if self.valid() {
1348            self.child.reposition(
1349                self.storage.offs[self.pos as usize].into_usize(),
1350                self.storage.offs[self.pos as usize + 1].into_usize(),
1351            );
1352        }
1353    }
1354
1355    pub fn seek_with_reverse<P>(&mut self, predicate: P)
1356    where
1357        P: Fn(&K) -> bool,
1358    {
1359        if self.valid() {
1360            self.pos -=
1361                self.storage
1362                    .keys
1363                    .retreat_until(self.bounds.0, self.pos as usize, &predicate)
1364                    as isize;
1365        }
1366
1367        if self.valid() {
1368            self.child.reposition(
1369                self.storage.offs[self.pos as usize].into_usize(),
1370                self.storage.offs[self.pos as usize + 1].into_usize(),
1371            );
1372        }
1373    }
1374}
1375
impl<'s, K, L, O> Cursor<'s> for LayerCursor<'s, K, L, O>
where
    K: DataTrait + ?Sized,
    L: Trie,
    O: OrdOffset,
{
    type Key = K;

    type Item<'k>
        = &'k K
    where
        Self: 'k;

    type ValueCursor = L::Cursor<'s>;

    /// Number of keys within this cursor's bounds.
    #[inline]
    fn keys(&self) -> usize {
        self.bounds.1 - self.bounds.0
    }

    /// Current key. Callers must ensure the cursor is valid; an invalid
    /// position indexes out of range (or with a negative `pos`, wraps via
    /// the `as usize` cast) and panics.
    #[inline]
    fn item(&self) -> Self::Item<'s> {
        &self.storage.keys[self.pos as usize]
    }

    /// A child cursor over the current key's value range, or an empty cursor
    /// when this cursor is invalid.
    fn values(&self) -> L::Cursor<'s> {
        if self.valid() {
            self.storage.vals.cursor_from(
                self.storage.offs[self.pos as usize].into_usize(),
                self.storage.offs[self.pos as usize + 1].into_usize(),
            )
        } else {
            self.storage.vals.cursor_from(0, 0)
        }
    }

    /// Advances to the next key and repositions the child cursor; clamps
    /// `pos` at `bounds.1` when the range is exhausted.
    fn step(&mut self) {
        self.pos += 1;

        if self.pos < self.bounds.1 as isize {
            self.child.reposition(
                self.storage.offs[self.pos as usize].into_usize(),
                self.storage.offs[self.pos as usize + 1].into_usize(),
            );
        } else {
            self.pos = self.bounds.1 as isize;
        }
    }

    /// Advances forward to the first key `>= key` (via exponential search)
    /// and repositions the child cursor if still valid.
    fn seek(&mut self, key: &Self::Key) {
        if self.valid() {
            self.pos += self
                .storage
                .keys
                .advance_to(self.pos as usize, self.bounds.1, key) as isize;
        }

        if self.valid() {
            self.child.reposition(
                self.storage.offs[self.pos as usize].into_usize(),
                self.storage.offs[self.pos as usize + 1].into_usize(),
            );
        }
    }

    /// `true` while `pos` lies inside `[bounds.0, bounds.1)`.
    fn valid(&self) -> bool {
        self.pos >= self.bounds.0 as isize && self.pos < self.bounds.1 as isize
    }

    /// Resets to the first key in bounds and repositions the child cursor.
    fn rewind(&mut self) {
        self.pos = self.bounds.0 as isize;

        if self.valid() {
            self.child.reposition(
                self.storage.offs[self.pos as usize].into_usize(),
                self.storage.offs[self.pos as usize + 1].into_usize(),
            );
        }
    }

    /// Current position as an index; callers must ensure the cursor is
    /// valid, since a negative `pos` wraps in the cast.
    fn position(&self) -> usize {
        self.pos as usize
    }

    /// Restricts the cursor to `[lower, upper)` and moves to `lower`,
    /// repositioning the child cursor if the new range is non-empty.
    fn reposition(&mut self, lower: usize, upper: usize) {
        self.pos = lower as isize;
        self.bounds = (lower, upper);

        if self.valid() {
            self.child.reposition(
                self.storage.offs[self.pos as usize].into_usize(),
                self.storage.offs[self.pos as usize + 1].into_usize(),
            );
        }
    }

    /// Moves to the previous key; clamps `pos` at `bounds.0 - 1` (one before
    /// the start) when the range is exhausted in reverse.
    fn step_reverse(&mut self) {
        self.pos -= 1;

        if self.pos >= self.bounds.0 as isize {
            self.child.reposition(
                self.storage.offs[self.pos as usize].into_usize(),
                self.storage.offs[self.pos as usize + 1].into_usize(),
            );
        } else {
            self.pos = self.bounds.0 as isize - 1;
        }
    }

    /// Moves backward to the last key `<= key` (via `retreat_to`) and
    /// repositions the child cursor if still valid.
    fn seek_reverse(&mut self, key: &Self::Key) {
        if self.valid() {
            self.pos -= self
                .storage
                .keys
                .retreat_to(self.bounds.0, self.pos as usize, key) as isize;
        }

        if self.valid() {
            self.child.reposition(
                self.storage.offs[self.pos as usize].into_usize(),
                self.storage.offs[self.pos as usize + 1].into_usize(),
            );
        }
    }

    /// Jumps to the last key in bounds and repositions the child cursor
    /// (no-op repositioning when the range is empty).
    fn fast_forward(&mut self) {
        self.pos = self.bounds.1 as isize - 1;

        if self.valid() {
            self.child.reposition(
                self.storage.offs[self.pos as usize].into_usize(),
                self.storage.offs[self.pos as usize + 1].into_usize(),
            );
        }
    }
}
1512
1513impl<'a, K, L, O> Display for LayerCursor<'a, K, L, O>
1514where
1515    K: DataTrait + ?Sized,
1516    L: Trie,
1517    L::Cursor<'a>: Clone + Display,
1518    O: OrdOffset,
1519{
1520    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
1521        let mut cursor: LayerCursor<'_, K, L, O> = self.clone();
1522
1523        while cursor.valid() {
1524            let key = cursor.item();
1525            key.fmt(f)?;
1526            let val_str = cursor.values().to_string();
1527
1528            f.write_str(&indent(&val_str, "    "))?;
1529            cursor.step();
1530        }
1531
1532        Ok(())
1533    }
1534}