// ph_temp/fmph/keyset.rs
1//! Managing sets of keys during construction of minimal perfect hash functions.
2
3use std::mem;
4use std::mem::MaybeUninit;
5use rayon::join;
6use rayon::prelude::*;
7use bitm::{BitAccess, ceiling_div};
8
/// A trait for accessing and managing sets of keys (of the type `K`) during construction of
/// [`fmph::Function`](super::Function) or [`fmph::GOFunction`](super::GOFunction).
///
/// All `par_*` (multi-threaded) and `*_with_indices` (index-aware) methods have default
/// implementations that fall back to the single-threaded / key-value-based variants,
/// so implementors only need to override them when they can do better.
pub trait KeySet<K> {
    /// Returns number of retained keys. Guarantee to be very fast.
    fn keys_len(&self) -> usize;

    /// Returns `true` only if [`Self::par_for_each_key`] can use multiple threads.
    #[inline(always)] fn has_par_for_each_key(&self) -> bool { false }

    /// Call `f` for each key in the set, using single thread.
    ///
    /// If `self` doesn't remember which keys are retained it uses `retained_hint` to check this.
    fn for_each_key<F, P>(&self, f: F, retained_hint: P) where F: FnMut(&K), P: FnMut(&K) -> bool;

    /// Multi-threaded version of `for_each_key`.
    ///
    /// Default implementation just delegates to single-threaded [`Self::for_each_key`].
    #[inline(always)]
    fn par_for_each_key<F, P>(&self, f: F, retained_hint: P)
        where F: Fn(&K) + Sync + Send, P: Fn(&K) -> bool + Sync + Send
    {
        self.for_each_key(f, retained_hint);
    }

    /// Calls `map` for each key in the set, and returns outputs of these calls. Uses single thread.
    ///
    /// If `self` doesn't remember which keys are retained it uses `retained_hint` to check this.
    fn map_each_key<R, M, P>(&self, mut map: M, retained_hint: P) -> Vec<R>
        where M: FnMut(&K) -> R, P: FnMut(&K) -> bool
    {
        // `keys_len` is guaranteed to be fast, so exact pre-allocation is cheap here.
        let mut result = Vec::with_capacity(self.keys_len());
        self.for_each_key(|k| result.push(map(k)), retained_hint);
        result
    }

    /// Multi-threaded version of `map_each_key`.
    ///
    /// Default implementation just delegates to single-threaded [`Self::map_each_key`].
    #[inline(always)]
    fn par_map_each_key<R, M, P>(&self, map: M, retained_hint: P) -> Vec<R>
        where M: Fn(&K)->R + Sync + Send, R: Send, P: Fn(&K) -> bool + Sync + Send
    { self.map_each_key(map, retained_hint) }

    /// Calls either `map_each_key` (if `use_mt` is `false`) or `par_map_each_key` (if `use_mt` is `true`).
    #[inline(always)]
    fn maybe_par_map_each_key<R, M, P>(&self, map: M, retained_hint: P, use_mt: bool) -> Vec<R>
        where M: Fn(&K)->R + Sync + Send, R: Send, P: Fn(&K) -> bool + Sync + Send
    {
        if use_mt { self.par_map_each_key(map, retained_hint) }
            else { self.map_each_key(map, retained_hint) }
    }

    /// Retains in `self` keys pointed by the `filter` and remove the rest, using single thread.
    /// - `filter` shows the keys to be retained (the result of the function can be unspecified for keys removed earlier),
    /// - `retained_earlier` shows the keys that have not been removed earlier,
    /// - `remove_count` returns number of keys to remove.
    fn retain_keys<F, P, R>(&mut self, filter: F, retained_earlier: P, remove_count: R)
        where F: FnMut(&K) -> bool, P: FnMut(&K) -> bool, R: FnMut() -> usize;

    /// Multi-threaded version of `retain_keys`.
    ///
    /// Default implementation just delegates to single-threaded [`Self::retain_keys`].
    #[inline(always)]
    fn par_retain_keys<F, P, R>(&mut self, filter: F, retained_earlier: P, remove_count: R)
        where F: Fn(&K) -> bool + Sync + Send, P: Fn(&K) -> bool + Sync + Send, R: Fn() -> usize
    {
        self.retain_keys(filter, retained_earlier, remove_count)
    }

    /// Calls either `retain_keys` (if `use_mt` is `false`) or `par_retain_keys` (if `use_mt` is `true`).
    #[inline(always)]
    fn maybe_par_retain_keys<F, P, R>(&mut self, filter: F, retained_earlier: P, remove_count: R, use_mt: bool)
        where F: Fn(&K) -> bool + Sync + Send, P: Fn(&K) -> bool + Sync + Send, R: Fn() -> usize
    {
        if use_mt /*&& self.has_par_retain_keys()*/ {
            self.par_retain_keys(filter, retained_earlier, remove_count)
        } else {
            self.retain_keys(filter, retained_earlier, remove_count)
        }
    }

    /// Retains in `self` keys pointed by the `index_filter`
    /// (or `filter` if `self` does not support `index_filter`)
    /// and remove the rest.
    /// Uses single thread.
    /// - `index_filter` shows indices (consistent with `par_map_each_key`) of keys to be retained,
    /// - `filter` shows the keys to be retained,
    /// - `retained_earlier` shows the keys that have not been removed earlier,
    /// - `remove_count` returns number of keys to remove.
    ///
    /// The results of `index_filter` and `filter` are unspecified for keys removed earlier.
    #[inline(always)]
    fn retain_keys_with_indices<IF, F, P, R>(&mut self, _index_filter: IF, filter: F, retained_earlier: P, remove_count: R)
        where IF: FnMut(usize) -> bool, F: FnMut(&K) -> bool, P: FnMut(&K) -> bool, R: FnMut() -> usize
    {
        // Default: indices are not supported, fall back to filtering by key value.
        self.retain_keys(filter, retained_earlier, remove_count)
    }

    /// Multi-threaded version of `retain_keys_with_indices`.
    #[inline(always)]
    fn par_retain_keys_with_indices<IF, F, P, R>(&mut self, _index_filter: IF, filter: F, retained_earlier: P, remove_count: R)
        where IF: Fn(usize) -> bool + Sync + Send,  F: Fn(&K) -> bool + Sync + Send, P: Fn(&K) -> bool + Sync + Send, R: Fn() -> usize
    {
        self.par_retain_keys(filter, retained_earlier, remove_count)
    }

    /// Calls either `retain_keys_with_indices` (if `use_mt` is `false`) or `par_retain_keys_with_indices` (if `use_mt` is `true`).
    #[inline(always)]
    fn maybe_par_retain_keys_with_indices<IF, F, P, R>(&mut self, index_filter: IF, filter: F, retained_earlier: P, remove_count: R, use_mt: bool)
        where IF: Fn(usize) -> bool + Sync + Send, F: Fn(&K) -> bool + Sync + Send, P: Fn(&K) -> bool + Sync + Send, R: Fn() -> usize
    {
        if use_mt /*&& self.has_par_retain_keys()*/ {
            self.par_retain_keys_with_indices(index_filter, filter, retained_earlier, remove_count)
        } else {
            self.retain_keys_with_indices(index_filter, filter, retained_earlier, remove_count)
        }
    }

    /// Convert `self` into the vector of retained keys.
    /// 
    /// If `self` doesn't remember which keys are retained it uses `retained_hint` to check this.
    #[inline] fn into_vec<P>(self, retained_hint: P) -> Vec<K>
        where P: FnMut(&K) -> bool, K: Clone, Self: Sized
    {
        self.map_each_key(|k| (*k).clone(), retained_hint)
    }

    /// Multi-threaded version of `into_vec`.
    #[inline] fn par_into_vec<P>(self, retained_hint: P) -> Vec<K>
        where P: Fn(&K) -> bool + Sync + Send, Self: Sized, K: Clone + Send
    {
        self.par_map_each_key(|k| (*k).clone(), retained_hint)
    }
}
137
/// A vector stores exactly the retained keys, so all hints can be ignored
/// and removal physically deletes keys.
impl<K: Sync + Send> KeySet<K> for Vec<K> {
    #[inline(always)] fn keys_len(&self) -> usize {
        self.len()
    }

    // Real multi-threading is available via rayon.
    #[inline(always)] fn has_par_for_each_key(&self) -> bool { true }

    #[inline(always)] fn for_each_key<F, P>(&self, f: F, _retained_hint: P)
        where F: FnMut(&K)
    {
        self.iter().for_each(f)
    }

    #[inline(always)] fn map_each_key<R, M, P>(&self, map: M, _retained_hint: P) -> Vec<R>
        where M: FnMut(&K) -> R { self.iter().map(map).collect() }

    #[inline(always)] fn par_for_each_key<F, P>(&self, f: F, _retained_hint: P)
        where F: Fn(&K) + Sync + Send
    {
        self.into_par_iter().for_each(f)
    }

    #[inline(always)] fn par_map_each_key<R, M, P>(&self, map: M, _retained_hint: P) -> Vec<R>
        where M: Fn(&K)->R + Sync + Send, R: Send
    {
        // rayon's collect on an indexed iterator preserves the original key order.
        self.into_par_iter().map(map).collect()
    }

    #[inline(always)] fn retain_keys<F, P, R>(&mut self, filter: F, _retained_earlier: P, _remove_count: R)
        where F: FnMut(&K) -> bool
    {
        self.retain(filter)
    }

    #[inline(always)] fn par_retain_keys<F, P, R>(&mut self, filter: F, _retained_earlier: P, remove_count: R)
        where F: Fn(&K) -> bool + Sync + Send, R: Fn() -> usize
    {
        // Move the current content into `result` and parallel-filter it back into `self`,
        // which was pre-sized to the exact final length via `remove_count`.
        let mut result = Vec::with_capacity(self.len() - remove_count());
        std::mem::swap(self, &mut result);
        self.par_extend(result.into_par_iter().filter(filter));
        //*self = (std::mem::take(self)).into_par_iter().filter(filter).collect();
    }

    #[inline(always)] fn retain_keys_with_indices<IF, F, P, R>(&mut self, mut index_filter: IF, _filter: F, _retained_earlier: P, _remove_count: R)
        where IF: FnMut(usize) -> bool
    {
        // The tuple expression evaluates `index_filter(index)` first, then increments
        // `index`, and yields the filter result — i.e. retain by position in the vector.
        let mut index = 0;
        self.retain(|_| (index_filter(index), index += 1).0)
    }

    fn par_retain_keys_with_indices<IF, F, P, R>(&mut self, index_filter: IF, _filter: F, _retained_earlier: P, remove_count: R)
        where IF: Fn(usize) -> bool + Sync + Send, R: Fn() -> usize
    {
        // Same move-and-refill scheme as `par_retain_keys`, but filtering by index.
        let mut result = Vec::with_capacity(self.len() - remove_count());
        std::mem::swap(self, &mut result);
        self.par_extend(result.into_par_iter().enumerate().filter_map(|(i, k)| index_filter(i).then_some(k)));
        //*self = (std::mem::take(self)).into_par_iter().enumerate().filter_map(|(i, k)| index_filter(i).then_some(k)).collect();
    }

    // The vector already contains exactly the retained keys; no copying needed.
    #[inline(always)] fn into_vec<P>(self, _retained_hint: P) -> Vec<K>
    {
        self
    }

    #[inline(always)] fn par_into_vec<P>(self, _retained_hint: P) -> Vec<K>
    {
        self
    }
}
207
/// Implements [`KeySet`], storing keys in the mutable slice.
///
/// Retain operations reorder the slice so that the retained keys
/// occupy its prefix.
pub struct SliceMutSource<'k, K> {
    /// All keys; the retained ones occupy the first `len` positions.
    pub slice: &'k mut [K],
    /// Number of retained keys at the beginning of `slice`.
    pub len: usize
}

impl<'k, K> SliceMutSource<'k, K> {
    /// Wraps `slice`, initially treating all of its elements as retained keys.
    #[inline(always)] pub fn new(slice: &'k mut [K]) -> Self {
        Self { len: slice.len(), slice }
    }
}

impl<'k, K> From<&'k mut [K]> for SliceMutSource<'k, K> {
    /// Equivalent to [`SliceMutSource::new`].
    #[inline(always)] fn from(slice: &'k mut [K]) -> Self {
        SliceMutSource::new(slice)
    }
}
228
/// Retained keys are always `slice[0..len]`, so all hints can be ignored.
impl<'k, K: Sync> KeySet<K> for SliceMutSource<'k, K> {
    #[inline(always)] fn keys_len(&self) -> usize { self.len }

    #[inline(always)] fn has_par_for_each_key(&self) -> bool { true }

    #[inline(always)] fn for_each_key<F, P>(&self, f: F, _retained_hint: P) where F: FnMut(&K) {
        self.slice[0..self.len].iter().for_each(f)
    }

    #[inline(always)] fn par_for_each_key<F, P>(&self, f: F, _retained_hint: P)
        where F: Fn(&K) + Sync + Send
    {
        self.slice[0..self.len].into_par_iter().for_each(f)
    }

    #[inline(always)] fn map_each_key<R, M, P>(&self, map: M, _retained_hint: P) -> Vec<R>
        where M: FnMut(&K) -> R
    {
        self.slice[0..self.len].into_iter().map(map).collect()
    }

    #[inline(always)] fn par_map_each_key<R, M, P>(&self, map: M, _retained_hint: P) -> Vec<R>
        where M: Fn(&K)->R + Sync + Send, R: Send
    {
        self.slice[0..self.len].into_par_iter().map(map).collect()
    }

    /// Removes rejected keys by swapping them with the last retained key,
    /// which does NOT preserve the relative order of the retained keys.
    fn retain_keys<F, P, R>(&mut self, mut filter: F, _retained_hint: P, _remove_count: R)
        where F: FnMut(&K) -> bool
    {
        let mut i = 0usize;
        while i < self.len {
            if filter(&self.slice[i]) {
                i += 1;
            } else {
                // remove i-th element by replacing it with the last one;
                // `i` is not advanced, so the swapped-in key is examined next
                self.len -= 1;
                self.slice.swap(i, self.len);
            }
        }
    }
}
271
/// Implements [`KeySet`] that use immutable slice.
///
/// It does not use any index for pointing remain keys.
/// That is why it is recommended to use it with [`CachedKeySet`].
pub struct ImmutableSlice<'k, K> {
    /// All keys; removal only decreases `len`, the slice itself is never modified.
    slice: &'k [K],
    len: usize  // how many elements are in use
}
280
impl<'k, K: Sync> ImmutableSlice<'k, K> {
    /// Wraps `slice`, initially treating all of its elements as retained keys.
    pub fn new(slice: &'k [K]) -> Self {
        Self { slice, len: slice.len() }
    }

    /// Like `new`, but wraps the result in a [`CachedKeySet`] constructed
    /// with the given `clone_threshold` (see `CachedKeySet::new` for its meaning).
    pub fn cached(slice: &[K], clone_threshold: usize) -> CachedKeySet<K, ImmutableSlice<'_, K>> {
        CachedKeySet::new(ImmutableSlice::new(slice), clone_threshold)
    }
}
290
/// This implementation does not remember which keys are retained: it only tracks
/// their count, and relies on `retained_hint` to identify them during iteration.
impl<'k, K: Sync + Send + Clone> KeySet<K> for ImmutableSlice<'k, K> {
    #[inline(always)] fn keys_len(&self) -> usize { self.len }

    #[inline(always)] fn has_par_for_each_key(&self) -> bool { true }

    #[inline(always)] fn for_each_key<F, P>(&self, mut f: F, mut retained_hint: P)
    where F: FnMut(&K), P: FnMut(&K) -> bool
    {
        // If nothing has been removed yet, the hint check can be skipped.
        if self.slice.len() == self.len {
            self.slice.into_iter().for_each(f)
        } else {
            for k in self.slice { if retained_hint(k) { f(k) } }
        }
    }

    #[inline(always)] fn map_each_key<R, M, P>(&self, mut map: M, mut retained_hint: P) -> Vec<R>
        where M: FnMut(&K) -> R, P: FnMut(&K) -> bool
    {
        if self.slice.len() == self.len {
            self.slice.into_iter().map(map).collect()
        } else {
            self.slice.into_iter().filter_map(|k| retained_hint(k).then(|| map(k))).collect()
        }
    }

    #[inline(always)] fn par_for_each_key<F, P>(&self, f: F, retained_hint: P)
        where F: Fn(&K) + Sync + Send, P: Fn(&K) -> bool + Sync + Send
    {
        if self.slice.len() == self.len {
            (*self.slice).into_par_iter().for_each(f)
        } else {
            (*self.slice).into_par_iter().filter(|k| retained_hint(*k)).for_each(f)
        }
    }

    /// Only updates the number of retained keys; which keys were removed
    /// is recovered later via `retained_hint` in the iteration methods.
    #[inline(always)] fn retain_keys<F, P, R>(&mut self, _filter: F, _retained_earlier: P, mut remove_count: R)
        where R: FnMut() -> usize
    {
        self.len -= remove_count();
    }

    /*fn par_retain_keys<F, P, R>(&mut self, filter: F, retained_earlier: P, remove_count: R)
        where F: Fn(&K) -> bool + Sync + Send, P: Fn(&K) -> bool + Sync + Send, R: Fn() -> usize
    {
        if let Some(ref mut retained) = self.retained {
            retained.par_retain_keys(filter, retained_earlier, remove_count)
        } else {
            self.retained = Some(self.slice.into_par_iter().filter_map(|k|filter(k).then(|| k.clone())).collect())
        }
    }

    #[inline(always)] fn retain_keys_with_indices<IF, F, P, R>(&mut self, mut index_filter: IF, _filter: F, retained_earlier: P, remove_count: R)
        where IF: FnMut(usize) -> bool, F: FnMut(&K) -> bool, P: FnMut(&K) -> bool, R: FnMut() -> usize
    {
        let mut index = 0;
        self.retain_keys(|_| (index_filter(index), index += 1).0, retained_earlier, remove_count)
    }

    fn par_retain_keys_with_indices<IF, F, P, R>(&mut self, index_filter: IF, filter: F, retained_earlier: P, remove_count: R)
        where IF: Fn(usize) -> bool + Sync + Send,  F: Fn(&K) -> bool + Sync + Send, P: Fn(&K) -> bool + Sync + Send, R: Fn() -> usize
    {
        if let Some(ref mut retained) = self.retained {
            retained.par_retain_keys_with_indices(index_filter, filter, retained_earlier, remove_count)
        } else {
            self.retained = Some(self.slice.into_par_iter().enumerate().filter_map(|(i, k)| index_filter(i).then_some(k.clone())).collect())
        }
    }*/
}
359
/// Metadata of a single segment of [`SliceSourceWithRefs`].
///
/// A segment owns `indices[first_index..next.first_index]`, where `next` is the
/// following element of the segments vector (the vector ends with a sentinel entry
/// whose `first_index` equals the total number of indices).
struct SegmentMetadata {
    first_index: usize,   // first index described by the segment
    first_key: usize,   // first key described by the segment
}
364
/// An unsigned integer type used by [`SliceSourceWithRefs`] to store
/// in-segment key indices.
pub trait RefsIndex: Copy {
    /// Number of consecutive keys covered by one segment
    /// (one more than the largest value representable by `Self`).
    const SEGMENT_SIZE: usize;
    /// Converts `u` to `Self`, keeping only the low-order bits that fit.
    fn from_usize(u: usize) -> Self;
    /// Converts `self` to `usize` (always lossless).
    fn as_usize(self) -> usize;
}

impl RefsIndex for u8 {
    /// A `u8` addresses 2^8 = 256 keys per segment.
    const SEGMENT_SIZE: usize = 256;
    #[inline(always)] fn from_usize(u: usize) -> Self { u as u8 }
    #[inline(always)] fn as_usize(self) -> usize { usize::from(self) }
}

impl RefsIndex for u16 {
    /// A `u16` addresses 2^16 = 65536 keys per segment.
    const SEGMENT_SIZE: usize = 65536;
    #[inline(always)] fn from_usize(u: usize) -> Self { u as u16 }
    #[inline(always)] fn as_usize(self) -> usize { usize::from(self) }
}
382
/// [`KeySet`] implementation that stores reference to slice with keys,
/// and indices of this slice that points retained keys.
/// Indices are stored partitioned to segments and stored as 8 (if `I=u8`) or 16-bit (if `I=u16`) integers.
/// Each segment covers $2^8$ or $2^{16}$ consecutive keys.
/// Empty segments are not stored.
pub struct SliceSourceWithRefs<'k, K, I: RefsIndex = u8> {
    /// All keys; never modified, retained keys are identified by `indices`.
    keys: &'k [K],
    indices: Vec<I>,  // lowest 16 bits of each key index retained so far
    /// Segments metadata; each element points into both `indices` (`first_index`)
    /// and `keys` (`first_key`); terminated by a sentinel entry.
    segments: Vec<SegmentMetadata>,   // segments metadata: each element of the vector is (index in indices, index in keys)
}
393
impl<'k, K: Sync + 'k, I: RefsIndex + Send + Sync> SliceSourceWithRefs<'k, K, I> {
    /// Constructs a source over `slice`. The segment index is empty and is
    /// built lazily by the first retain operation.
    pub fn new(slice: &'k [K]) -> Self {
        Self { keys: slice, indices: Vec::new(), segments: Vec::new() }
    }

    /// Calls `f` for every retained key of segment `seg_i`.
    /// Requires the entry `segments[seg_i+1]` (possibly the sentinel) to exist.
    #[inline(always)] fn for_each_in_segment<F: FnMut(&K)>(&self, seg_i: usize, mut f: F) {
        let slice = &self.keys[self.segments[seg_i].first_key..];
        for d in &self.indices[self.segments[seg_i].first_index..self.segments[seg_i+1].first_index] {
            // SAFETY: each stored index was recorded from a position inside this
            // segment's chunk of `self.keys`, so `first_key + d` is in bounds.
            f(unsafe{slice.get_unchecked(d.as_usize())});
        }
    }

    /*fn par_for_each<F: Fn(&K) + Sync>(&self, f: &F, seg_beg: usize, seg_end: usize) {
        let len = seg_end - seg_beg;
        if len > 1 && self.segments[seg_end].first_index - self.segments[seg_beg].first_index > 1024 {
            let mid = seg_beg + len/2;
            join(
                || self.par_for_each(f, seg_beg, mid),
                || self.par_for_each(f, mid, seg_end)
            );
        } else {
            for s in seg_beg..seg_end {
                self.for_each_in_segment(s, f);
            }
        }
    }*/

    /// Maps (with `map`) the retained keys described by `segments` into `dst`,
    /// recursively splitting the segment range so halves run in parallel via rayon.
    /// `dst` must have exactly one slot per covered index; `segments` must end with
    /// a terminating (sentinel) entry.
    fn par_map<R, M>(&self, dst: &mut [MaybeUninit<R>], map: &M, segments: &[SegmentMetadata])
        where R: Send, M: Fn(&K) -> R + Sync + Send
    {
        if segments.len() > 2 /*&& dst.len() < 1024*/ {
            // Split `dst` at the boundary between the two halves of the segment list;
            // the left recursion keeps segment `mid` as its terminator.
            let mid = segments.len()/2;
            let (dst0, dst1) = dst.split_at_mut(segments[mid].first_index - segments[0].first_index);
            join(
                || self.par_map(dst0, map, &segments[..=mid]),
                || self.par_map(dst1, map, &segments[mid..])
            );
        } else {
            // One real segment left (segments[1] is its terminator): map it sequentially.
            let keys = &self.keys[segments[0].first_key..];
            for (i, d) in self.indices[segments[0].first_index..segments[1].first_index].into_iter().zip(dst) {
                // SAFETY: stored in-segment indices are in bounds of this segment's keys.
                d.write(map(unsafe { keys.get_unchecked(I::as_usize(*i)) }));
            }

            /*let mut di = 0;
            for segments in segments.windows(2) {
                let keys = &self.keys[segments[0].first_key..];
                for i in self.indices[segments[0].first_index..segments[1].first_index].into_iter() {
                    dst[di].write(map(unsafe { keys.get_unchecked(I::as_usize(*i)) }));
                    di += 1;
                }
            }*/
        }
    }

    /// Copy `indices` accepted by `filter` to the beginning of each segment and stores new lengths of each segment in `new_lengths`.
    /// `filter` receives the global key index and the global position in `self.indices`.
    /// Recursively splits the segment range so halves run in parallel via rayon.
    /// Unlike `par_map`, `segments` here does NOT include the sentinel entry.
    fn par_pre_retain<F>(filter: &F, indices: &mut [I], segments: &[SegmentMetadata], new_lengths: &mut [u32])
        where F: Fn(usize, usize) -> bool + Sync    // filter is called with indices of: keys and indices
    {
        if segments.len() > 1 && indices.len() > 1024 { // TODO check if it is not better to comment indices.len() > 1024 and then use commented out code below
            let mid = segments.len()/2;
            let segments = segments.split_at(mid);
            let new_lens = new_lengths.split_at_mut(mid);
            let indices = indices.split_at_mut(segments.1[0].first_index - segments.0[0].first_index);
            join(
                || Self::par_pre_retain(filter, indices.0, segments.0, new_lens.0),
                || Self::par_pre_retain(filter, indices.1, segments.1, new_lens.1)
            );
        } else {
            /*let seg = &segments[0];
            let mut len = 0;
            let mut i = 0;
            while i < indices.len() {
                if filter(seg.first_key + indices[i].as_usize(), seg.first_index + i) {
                    indices[len as usize] = indices[i];
                    len += 1;
                }
                i += 1;
            }
            new_lengths[0] = len;*/
            for seg_i in 0..segments.len() {
                let seg = &segments[seg_i];
                // Bounds of this segment's indices, relative to the `indices` sub-slice.
                let first_i = seg.first_index - segments[0].first_index;
                let last_i = segments.get(seg_i+1).map_or_else(|| indices.len(), |s| s.first_index - segments[0].first_index);
                let indices = &mut indices[first_i..last_i];
                let mut len = 0;
                let mut i = 0;
                // In-place compaction: accepted entries are moved to the segment's front.
                while i < indices.len() {
                    if filter(seg.first_key + indices[i].as_usize(), seg.first_index + i) {   // index in self.indices is seg.first_index + i
                        indices[len] = indices[i];
                        len += 1;
                    }
                    i += 1;
                }
                new_lengths[seg_i] = len as u32;
            }
        }
    }

    /// Retains in `indices` only entries accepted by `filter` (called with the global
    /// key index and the global position in `indices`), compacting `indices` and
    /// `segments` in place and dropping segments that become empty.
    fn par_retain_index<F>(indices: &mut Vec<I>, segments: &mut Vec<SegmentMetadata>, filter: F)
        where F: Fn(usize, usize) -> bool + Sync
    {
        let real_seg_len = segments.len()-1;    // number of segments without the sentinel
        let mut new_lenghts = vec![0; real_seg_len];
        Self::par_pre_retain(&filter, indices, &mut segments[0..real_seg_len], &mut new_lenghts);
        let mut new_seg_len = 0;    // where to copy segment[seg_i]
        let mut new_indices_len = 0;    // where to copy segment[seg_i]
        for seg_i in 0..real_seg_len {
            let new_seg_i_len = new_lenghts[seg_i] as usize;
            if new_seg_i_len > 0 {  // empty segments are dropped
                // Accepted entries are already at the segment's front (par_pre_retain),
                // so a single copy_within moves them to their final position.
                indices.copy_within(
                    segments[seg_i].first_index .. segments[seg_i].first_index + new_seg_i_len,
                    new_indices_len
                );
                segments[new_seg_len].first_index = new_indices_len;
                segments[new_seg_len].first_key = segments[seg_i].first_key;
                new_indices_len += new_seg_i_len;
                new_seg_len += 1;
            }
        }
        segments[new_seg_len].first_index = new_indices_len;    // the last indices index of the last segment
        // note self.segments[new_seg_len].1 is not used any more and we do not need update it
        // Both resize_with calls only shrink, so the `unreachable!` closures never run.
        segments.resize_with(new_seg_len+1, || unreachable!());
        indices.resize_with(new_indices_len, || unreachable!());
    }
}
520
impl<'k, K, I: RefsIndex> SliceSourceWithRefs<'k, K, I> {
    /// Appends to `self.indices` the positions of set bits in the bitmap `accepted_keys`
    /// (one bit per key of the current chunk), partitioned into segments of
    /// `SEGMENT_SIZE` keys, and pushes a terminating segment entry after each one.
    /// Assumes the caller has already pushed the segment entry that opens the chunk.
    /// `slice_index` is advanced by a whole `SEGMENT_SIZE` per segment, even for a
    /// final, partially filled segment (the overshoot lands only in the unused
    /// `first_key` of the sentinel).
    fn append_segments_from_bitmap(&mut self, slice_index: &mut usize, accepted_keys: &Vec<u64>) {
        // Each u64 word covers 64 keys, so SEGMENT_SIZE>>6 words form one segment.
        for accepted in accepted_keys.chunks( I::SEGMENT_SIZE >> 6) {
            // `bit_ones` (from bitm's BitAccess) yields positions of set bits.
            self.indices.extend(accepted.bit_ones().map(|b| I::from_usize(b)));
            *slice_index += I::SEGMENT_SIZE;
            self.segments.push(SegmentMetadata { first_index: self.indices.len(), first_key: *slice_index });
        }
    }
}
530
impl<'k, K: Sync, I: RefsIndex + Sync + Send> KeySet<K> for SliceSourceWithRefs<'k, K, I> {
    #[inline(always)] fn keys_len(&self) -> usize {
        // Empty `segments` means the index has not been built yet: all keys are retained.
        if self.segments.is_empty() { self.keys.len() } else { self.indices.len() }
    }

    #[inline(always)] fn has_par_for_each_key(&self) -> bool { true }

    #[inline(always)] fn for_each_key<F, P>(&self, mut f: F, _retained_hint: P)
        where F: FnMut(&K), P: FnMut(&K) -> bool
    {
        if self.segments.is_empty() {
            self.keys.into_iter().for_each(f);
        } else {
            // The last entry of `segments` is a sentinel, hence `len()-1`.
            for seg_i in 0..self.segments.len()-1 {
                self.for_each_in_segment(seg_i, &mut f);
            };
        }
    }

    #[inline(always)] fn par_for_each_key<F, P>(&self, f: F, _retained_hint: P)
        where F: Fn(&K) + Sync + Send, P: Fn(&K) -> bool + Sync + Send
    {
        if self.segments.is_empty() {
            (*self.keys).into_par_iter().for_each(f);
        } else {
            // Parallelize over whole segments; each segment is processed sequentially.
            (0..self.segments.len()-1).into_par_iter().for_each(|seg_i| {
                self.for_each_in_segment(seg_i, &f);
            });
            //self.par_for_each(&f, 0, self.segments.len()-1);
        }
    }

    fn par_map_each_key<R, M, P>(&self, map: M, _retained_hint: P) -> Vec<R>
        where M: Fn(&K) -> R + Sync + Send, R: Send, P: Fn(&K) -> bool
    {
        if self.segments.is_empty() {
            (*self.keys).into_par_iter().map(map).collect()
        } else {
            // Write mapped values directly into the uninitialized spare capacity.
            let len = self.indices.len();
            let mut result = Vec::with_capacity(len);
            self.par_map(result.spare_capacity_mut(), &map, &self.segments);
            // SAFETY: `par_map` initialized exactly `len` = indices.len() elements.
            unsafe { result.set_len(len); }
            result
        }
    }

    fn retain_keys<F, P, R>(&mut self, mut filter: F, _retained_earlier: P, mut remove_count: R)
        where F: FnMut(&K) -> bool, P: FnMut(&K) -> bool, R: FnMut() -> usize
    {
        if self.segments.is_empty() {
            // First retain: build the index. Each chunk of SEGMENT_SIZE keys becomes
            // one segment; a terminating entry is pushed after each one.
            self.indices.reserve(self.keys.len() - remove_count());
            self.segments.reserve(ceiling_div(self.keys.len(), I::SEGMENT_SIZE) + 1);
            let mut slice_index = 0;
            self.segments.push(SegmentMetadata { first_index: 0, first_key: slice_index });
            for keys in self.keys.chunks(I::SEGMENT_SIZE) {
                self.indices.extend(keys.into_iter().enumerate().filter_map(|(i,k)| filter(k).then_some(I::from_usize(i))));
                slice_index += I::SEGMENT_SIZE;
                self.segments.push(SegmentMetadata { first_index: self.indices.len(), first_key: slice_index });
            }
        } else {
            // Subsequent retains: compact `indices` in place, dropping empty segments.
            let mut new_indices_len = 0;
            let mut new_seg_len = 0;    // where to copy segment[seg_i]
            for seg_i in 0..self.segments.len()-1 {
                let new_delta_index = new_indices_len;
                let si = &self.segments[seg_i];
                let keys = &self.keys[si.first_key..];
                for i in si.first_index..self.segments[seg_i+1].first_index {
                    // SAFETY: `i` iterates within `indices` bounds recorded in `segments`;
                    // the stored in-segment index is in bounds of this segment's keys.
                    let i = *unsafe { self.indices.get_unchecked(i) };
                    if filter(unsafe { keys.get_unchecked(i.as_usize()) }) {
                        self.indices[new_indices_len] = i;
                        new_indices_len += 1;
                    }
                }
                if new_delta_index != new_indices_len {    // segment seg_i is not empty and have to be preserved
                    self.segments[new_seg_len].first_index = new_delta_index;
                    self.segments[new_seg_len].first_key = self.segments[seg_i].first_key;
                    new_seg_len += 1;
                }
            }
            self.segments[new_seg_len].first_index = new_indices_len;    // the last delta index of the last segment
            // note self.segments[new_seg_len].1 is not used any more and we do not need update it
            // Both resize_with calls only shrink, so the `unreachable!` closures never run.
            self.segments.resize_with(new_seg_len+1, || unreachable!());
            self.indices.resize_with(new_indices_len, || unreachable!());
        }
    }

    fn par_retain_keys<F, P, R>(&mut self, filter: F, _retained_earlier: P, remove_count: R)
        where F: Fn(&K) -> bool + Sync + Send, P: Fn(&K) -> bool + Sync + Send, R: Fn() -> usize
    {
        if self.segments.is_empty() {
            // First retain: build the index in parallel. Keys are processed in chunks of
            // 2^18 (a multiple of SEGMENT_SIZE for both u8 and u16): the filter results
            // are collected into a bitmap (64 keys per u64 word) in parallel, then the
            // bitmap is converted into segments sequentially.
            self.indices.reserve(self.keys.len() - remove_count());
            self.segments.reserve(ceiling_div(self.keys.len(), I::SEGMENT_SIZE) + 1);
            let mut slice_index = 0;
            let mut accepted_keys = Vec::<u64>::new();  // first par_extend should set proper capacity
            self.segments.push(SegmentMetadata { first_index: 0, first_key: slice_index });
            for keys in self.keys.chunks(1<<18) {
                accepted_keys.clear();
                accepted_keys.par_extend(keys.par_chunks(64).map(|keys| {
                    let mut r = 0;
                    for (i, k) in keys.iter().enumerate() {
                        if filter(k) { r |= 1 << i; }
                    }
                    r
                }));
                self.append_segments_from_bitmap(&mut slice_index, &accepted_keys);
            }

            /*let mut accepted = [false; 1<<16];
            self.build_index(remove_count, |indices, keys, _| {
                accepted.par_iter_mut().zip(keys.into_par_iter()).for_each(|(v, k)| {
                    *v = filter(k);
                });
                for i in 0..keys.len() { if accepted[i] { indices.push(i as u16); } }
                indices.extend((0..keys.len()).filter(|i| buff[*i]));
            });*/
        } else {
            // Subsequent retains: filter the existing index by key value.
            Self::par_retain_index(&mut self.indices, &mut self.segments, |k, _| filter(&self.keys[k]));
        }
    }

    fn retain_keys_with_indices<IF, F, P, R>(&mut self, mut index_filter: IF, _filter: F, retained_earlier: P, remove_count: R)
        where IF: FnMut(usize) -> bool, F: FnMut(&K) -> bool, P: FnMut(&K) -> bool, R: FnMut() -> usize
    {
        // The tuple expression evaluates `index_filter(index)` first, then increments
        // `index`, and yields the filter result — i.e. retain by position.
        let mut index = 0;
        self.retain_keys(|_| (index_filter(index), index += 1).0, retained_earlier, remove_count)
    }

    fn par_retain_keys_with_indices<IF, F, P, R>(&mut self, index_filter: IF, _filter: F, _retained_earlier: P, remove_count: R)
        where IF: Fn(usize) -> bool + Sync + Send, F: Fn(&K) -> bool + Sync + Send, P: Fn(&K) -> bool + Sync + Send, R: Fn() -> usize
    {
        if self.segments.is_empty() {
            // Same bitmap-building scheme as in `par_retain_keys`, but the filter
            // is applied to global key positions instead of key values.
            self.indices.reserve(self.keys.len() - remove_count());
            self.segments.reserve(ceiling_div(self.keys.len(), I::SEGMENT_SIZE) + 1);
            let mut slice_index = 0;
            let mut accepted_keys = Vec::<u64>::new();  // first par_extend should set proper capacity
            self.segments.push(SegmentMetadata { first_index: 0, first_key: slice_index });
            for keys_begin in (0..self.keys.len()).step_by(1<<18) {
                let keys_end = self.keys.len().min(keys_begin + (1<<18));
                accepted_keys.clear();
                accepted_keys.par_extend((keys_begin..keys_end).into_par_iter().step_by(64).map(|first_key| {
                    let mut r = 0;
                    for i in first_key..keys_end.min(first_key+64) {
                        if index_filter(i) { r |= 1u64 << (i-first_key); }
                    }
                    r
                }));
                self.append_segments_from_bitmap(&mut slice_index, &accepted_keys);
            }

            /*self.build_index(remove_count, |indices, keys, shift| {
                indices.par_extend(
                    (0..keys.len()).into_par_iter()
                        .filter_map(|key_nr| index_filter(shift + key_nr).then_some(I::from_usize(key_nr)))
                );
            })*/
        } else {
            // Subsequent retains: filter the existing index by position in `indices`.
            Self::par_retain_index(&mut self.indices, &mut self.segments, |_, ii| index_filter(ii));
        }
    }
}
691
/// Trait for returning iterator and (optionally) parallel iterator over keys.
pub trait GetIterator {
    /// Key type.
    type Item;
    /// Type of the iterator returned by [`Self::iter`].
    type Iterator: Iterator<Item = Self::Item>;
    /// Type of the parallel iterator returned by [`Self::par_iter`].
    type ParallelIterator: ParallelIterator<Item = Self::Item> where Self::Item: Send;

    /// Returns iterator over keys.
    fn iter(&self) -> Self::Iterator;

    /// If possible, returns parallel iterator over keys.
    /// The default implementation always returns `None`.
    #[inline(always)] fn par_iter(&self) -> Option<Self::ParallelIterator> where Self::Item: Send { None }

    /// Returns `true` only if `par_iter` returns `Some`.
    /// The default implementation always returns `false`.
    #[inline(always)] fn has_par_iter(&self) -> bool { false /*self.par_iter().is_some()*/ }

    /// Returns the number of items produced by iterators returned by `iter` and `par_iter`.
    ///
    /// The default implementation exhausts a fresh iterator, which is O(n);
    /// implementors that know the length should override it.
    fn len(&self) -> usize {
        self.iter().count()   // TODO faster alternative
    }
}
715
716impl<I: Iterator, F: Fn() -> I> GetIterator for F {
717    type Item = I::Item;
718
719    type Iterator = I;
720
721    type ParallelIterator = rayon::iter::Empty<Self::Item> where Self::Item: Send;  // never used
722
723    #[inline(always)] fn iter(&self) -> Self::Iterator {
724        self()
725    }
726}
727
728impl<I: Iterator, PI: ParallelIterator<Item = I::Item>, F: Fn() -> I, PF: Fn() -> PI> GetIterator for (F, PF) {
729    type Item = I::Item;
730
731    type Iterator = I;
732
733    type ParallelIterator = PI where Self::Item: Send;
734
735    #[inline(always)] fn iter(&self) -> Self::Iterator {
736        self.0()
737    }
738
739    #[inline(always)] fn par_iter(&self) -> Option<Self::ParallelIterator> where Self::Item: Send {
740        Some(self.1())
741    }
742
743    #[inline(always)] fn has_par_iter(&self) -> bool { true }
744}
745
/// Implementation of [`KeySet`] that stores only the object that returns iterator over all keys
/// (the iterator can even expose the keys that have been removed earlier by `retain` methods).
/// It is usually a good idea to use it within [`CachedKeySet`], see [`CachedKeySet::dynamic`].
pub struct DynamicKeySet<GetKeyIter: GetIterator> {
    /// Object that returns a (fresh) iterator over all keys.
    pub keys: GetKeyIter,
    /// Number of retained keys; decreased by the `retain` methods.
    pub len: usize,
}
753
754impl<GetKeyIter: GetIterator> DynamicKeySet<GetKeyIter> {
755    /// Constructs a [`DynamicKeySet`] that obtains the keys by `keys`.
756    pub fn new(keys: GetKeyIter) -> Self {
757        let len = keys.len();
758        Self { keys, len }
759    }
760
761    /// Constructs a [`DynamicKeySet`] that obtains the keys by `keys` (which should produce `len` keys).
762    pub fn with_len(keys: GetKeyIter, len: usize) -> Self {
763        Self { keys, len }
764    }
765}
766
/// [`DynamicKeySet`] delegates iteration to its `keys` object and tracks only the
/// number of retained keys; keys removed earlier are filtered out on the fly
/// by the `retained_hint` predicates supplied by the callers.
impl<GetKeyIter: GetIterator> KeySet<GetKeyIter::Item> for DynamicKeySet<GetKeyIter> where GetKeyIter::Item: Send {
    // O(1): `len` is kept up to date by `retain_keys`.
    #[inline(always)] fn keys_len(&self) -> usize {
        self.len
    }

    #[inline(always)] fn has_par_for_each_key(&self) -> bool {
        self.keys.has_par_iter()
    }

    // Iterates over all keys the source exposes and skips those no longer retained.
    #[inline(always)] fn for_each_key<F, P>(&self, mut f: F, retained_hint: P)
        where F: FnMut(&GetKeyIter::Item), P: FnMut(&GetKeyIter::Item) -> bool
    {
        self.keys.iter().filter(retained_hint).for_each(|k| f(&k))
    }

    // Uses the parallel iterator when the source provides one,
    // otherwise falls back to the single-threaded version.
    #[inline(always)] fn par_for_each_key<F, P>(&self, f: F, retained_hint: P)
        where F: Fn(&GetKeyIter::Item) + Sync + Send, P: Fn(&GetKeyIter::Item) -> bool + Sync + Send
    {
        if let Some(par_iter) = self.keys.par_iter() {
            par_iter.filter(retained_hint).for_each(|k| f(&k))
        } else {
            self.for_each_key(f, retained_hint)
        }
    }

    /*#[inline(always)] fn map_each_key<R, M, P>(&self, mut map: M, retained_hint: P) -> Vec<R>
            where M: FnMut(&GetKeyIter::Item) -> R, P: FnMut(&GetKeyIter::Item) -> bool
    {
        let mut result = Vec::with_capacity(self.len);
        self.keys.iter().filter(retained_hint).map(|k| map(&k)).collect_into(&mut result);
        result
    }*/

    #[inline(always)] fn par_map_each_key<R, M, P>(&self, map: M, retained_hint: P) -> Vec<R>
            where M: Fn(&GetKeyIter::Item)->R + Sync + Send, R: Send, P: Fn(&GetKeyIter::Item) -> bool + Sync + Send
    {
        if let Some(par_iter) = self.keys.par_iter() {
            // TODO somehow use information about len
            par_iter.filter(retained_hint).map(|k| map(&k)).collect()
        } else {
            self.map_each_key(map, retained_hint)
        }
    }

    // The key source itself cannot be modified; only the retained-key counter
    // is updated. The filters are ignored because removed keys are re-filtered
    // (via `retained_hint`) on every subsequent iteration.
    #[inline(always)] fn retain_keys<F, P, R>(&mut self, _filter: F, _retained_earlier: P, mut remove_count: R)
        where F: FnMut(&GetKeyIter::Item) -> bool, P: FnMut(&GetKeyIter::Item) -> bool, R: FnMut() -> usize
    {
        self.len -= remove_count();
    }

    // Collects the retained keys into a vector, single-threaded.
    #[inline] fn into_vec<P>(self, retained_hint: P) -> Vec<GetKeyIter::Item>
    where P: FnMut(&GetKeyIter::Item) -> bool, Self: Sized
    {
        self.keys.iter().filter(retained_hint).collect()
    }

    // Collects the retained keys into a vector, in parallel when the source allows it.
    #[inline] fn par_into_vec<P>(self, retained_hint: P) -> Vec<GetKeyIter::Item>
        where P: Fn(&GetKeyIter::Item) -> bool + Sync + Send, Self: Sized, GetKeyIter::Item: Send
    {
        if let Some(par_iter) = self.keys.par_iter() {
            par_iter.filter(retained_hint).collect()
        } else {
            self.keys.iter().filter(retained_hint).collect()
        }
    }
}
833
/// Implementation of [`KeySet`] that initially stores another [`KeySet`] 
/// (which is usually succinct but slow, such as [`DynamicKeySet`]),
/// but when number of keys drops below given threshold,
/// the remaining keys are cached (cloned into the vector),
/// and later only the cache is used.
pub enum CachedKeySet<K, KS> {
    /// The wrapped key set together with the caching threshold
    /// (the keys are cached once their number drops below it).
    Dynamic(KS, usize),
    /// The remaining keys, cloned into a vector.
    Cached(Vec<K>)
}
843
844impl<K, KS> Default for CachedKeySet<K, KS> {
845    #[inline] fn default() -> Self { Self::Cached(Default::default()) }   // construct an empty key set, needed for mem::take(self)
846}
847
848impl<K, KS> CachedKeySet<K, KS> {
849    /// Constructs cached `key_set`. The keys are cloned and cached as soon as their number drops below `clone_threshold`.
850    pub fn new(key_set: KS, clone_threshold: usize) -> Self {
851        Self::Dynamic(key_set, clone_threshold)
852    }
853}
854
855impl<K, PI: GetIterator> CachedKeySet<K, DynamicKeySet<PI>> {
856    /// Constructs cached [`DynamicKeySet`] that obtains the keys by `keys` that returns iterator over keys.
857    /// The keys are cloned and cached as soon as their number drops below `clone_threshold`.
858    ///
859    /// If the number of keys is known, it is more efficient to call [`CachedKeySet::dynamic_with_len`] instead.
860    ///
861    /// # Example
862    ///
863    /// ```
864    /// use {ph::fmph::keyset::{KeySet, CachedKeySet}, rayon::iter::{ParallelIterator, IntoParallelIterator}};
865    /// // Constructing a dynamic key set consisting of the squares of the integers from 1 to 100,
866    /// // part of which will be cached by the first call of any of the retain methods:
867    /// let ks = CachedKeySet::dynamic(|| (1..=100).map(|v| v*v), usize::MAX);
868    /// assert_eq!(ks.keys_len(), 100);
869    /// // Same as above but using multiple threads to generate keys:
870    /// let ks = CachedKeySet::dynamic((|| (1..=100).map(|v| v*v), || (1..=100).into_par_iter().map(|v| v*v)), usize::MAX);
871    /// assert_eq!(ks.keys_len(), 100);
872    /// ```
873    pub fn dynamic(keys: PI, clone_threshold: usize) -> Self {
874        Self::new(DynamicKeySet::new(keys), clone_threshold)
875    }
876
877    /// Constructs cached [`DynamicKeySet`] that obtains the keys by `keys` that returns iterator over exactly `len` keys.
878    /// The keys are cloned and cached as soon as their number drops below `clone_threshold`.
879    /// 
880    /// # Example
881    /// 
882    /// ```
883    /// use {ph::fmph::keyset::{KeySet, CachedKeySet}, rayon::iter::{ParallelIterator, IntoParallelIterator}};
884    /// // Constructing a dynamic key set consisting of the squares of the integers from 1 to 100,
885    /// // part of which will be cached by the first call of any of the retain methods:
886    /// let ks = CachedKeySet::dynamic_with_len(|| (1..=100).map(|v| v*v), 100, usize::MAX);
887    /// assert_eq!(ks.keys_len(), 100);
888    /// // Same as above but using multiple threads to generate keys:
889    /// let ks = CachedKeySet::dynamic_with_len((|| (1..=100).map(|v| v*v), || (1..=100).into_par_iter().map(|v| v*v)), 100, usize::MAX);
890    /// assert_eq!(ks.keys_len(), 100);
891    /// ```
892    pub fn dynamic_with_len(keys: PI, len: usize, clone_threshold: usize) -> Self {
893        Self::new(DynamicKeySet::with_len(keys, len), clone_threshold)
894    }
895}
896
897impl<'k, K: Sync> CachedKeySet<K, SliceSourceWithRefs<'k, K>> {
898    /// Constructs cached [`SliceSourceWithRefs`] that wraps given `keys`.
899    /// The keys are cloned and cached as soon as their number drops below `clone_threshold`.
900    /// After cloning, the keys are placed in a continuous memory area which is friendly to the CPU cache.
901    pub fn slice(keys: &'k [K], clone_threshold: usize) -> Self {
902        Self::new(SliceSourceWithRefs::new(keys), clone_threshold)
903    }
904}
905
/// For the `Dynamic` variant, all calls are delegated to the wrapped key set;
/// the retain methods additionally switch `self` to the `Cached` variant
/// (cloning the retained keys into a vector) once the number of keys drops
/// below the stored threshold. For the `Cached` variant, the vector's own
/// [`KeySet`] implementation is used.
impl<K: Clone + Sync + Send, KS: KeySet<K>> KeySet<K> for CachedKeySet<K, KS>
{
    fn keys_len(&self) -> usize {
        match self {
            Self::Dynamic(dynamic_key_set, _) => dynamic_key_set.keys_len(),
            Self::Cached(v) => v.keys_len()
        }
    }

    fn has_par_for_each_key(&self) -> bool {
        match self {
            Self::Dynamic(dynamic_key_set, _) => dynamic_key_set.has_par_for_each_key(),
            Self::Cached(v) => v.has_par_for_each_key()
        }
    }

    #[inline]
    fn for_each_key<F, P>(&self, f: F, retained_hint: P)
        where F: FnMut(&K), P: FnMut(&K) -> bool
    {
        match self {
            Self::Dynamic(dynamic_key_set, _) => dynamic_key_set.for_each_key(f, retained_hint),
            Self::Cached(v) => v.for_each_key(f, retained_hint)
        }
    }

    #[inline]
    fn par_for_each_key<F, P>(&self, f: F, retained_hint: P)
        where F: Fn(&K) + Sync + Send, P: Fn(&K) -> bool + Sync + Send
    {
        match self {
            Self::Dynamic(dynamic_key_set, _) => dynamic_key_set.par_for_each_key(f, retained_hint),
            Self::Cached(v) => v.par_for_each_key(f, retained_hint)
        }
    }

    #[inline]
    fn map_each_key<R, M, P>(&self, map: M, retained_hint: P) -> Vec<R>
        where M: FnMut(&K) -> R, P: FnMut(&K) -> bool
    {
        match self {
            Self::Dynamic(dynamic_key_set, _) => dynamic_key_set.map_each_key(map, retained_hint),
            Self::Cached(v) => v.map_each_key(map, retained_hint)
        }
    }

    #[inline]
    fn par_map_each_key<R, M, P>(&self, map: M, retained_hint: P) -> Vec<R>
        where M: Fn(&K)->R + Sync + Send, R: Send, P: Fn(&K) -> bool + Sync + Send
    {
        match self {
            Self::Dynamic(dynamic_key_set, _) => dynamic_key_set.par_map_each_key(map, retained_hint),
            Self::Cached(v) => v.par_map_each_key(map, retained_hint)
        }
    }

    // Delegates to the wrapped set; if the number of keys then drops below the
    // threshold, clones the retained keys into a vector and switches to `Cached`.
    #[inline] fn retain_keys<F, P, R>(&mut self, mut filter: F, mut retained_earlier: P, remove_count: R)
        where F: FnMut(&K) -> bool, P: FnMut(&K) -> bool, R: FnMut() -> usize
    {
        match self {
            Self::Dynamic(key_set, clone_threshold) => {
                key_set.retain_keys(&mut filter, &mut retained_earlier, remove_count);
                if key_set.keys_len() < *clone_threshold {
                    // `mem::take` moves the key set out of `self` (leaving an empty
                    // cache) so it can be consumed by `into_vec`; the `if let`
                    // always matches since we are in the `Dynamic` branch.
                    if let Self::Dynamic(key_set, _) = mem::take(self) {
                        *self = Self::Cached(key_set.into_vec(|k| retained_earlier(k) && filter(k)))
                    }
                }
            },
            Self::Cached(v) => v.retain_keys(filter, retained_earlier, remove_count)
        }
    }

    // Parallel variant of `retain_keys`; the cache is built with `par_into_vec`.
    #[inline] fn par_retain_keys<F, P, R>(&mut self, filter: F, retained_earlier: P, remove_count: R)
        where F: Fn(&K) -> bool + Sync + Send, P: Fn(&K) -> bool + Sync + Send, R: Fn() -> usize
    {
        match self {
            Self::Dynamic(key_set, clone_threshold) => {
                key_set.par_retain_keys(&filter, &retained_earlier, remove_count);
                if key_set.keys_len() < *clone_threshold {
                    if let Self::Dynamic(key_set, _) = mem::take(self) {
                        *self = Self::Cached(key_set.par_into_vec(|k| retained_earlier(k) && filter(k)))
                    }
                }
            },
            Self::Cached(v) => v.par_retain_keys(filter, retained_earlier, remove_count)
        }
    }

    // Index-based variant; `filter`/`retained_earlier` are still needed to build
    // the cache, because `into_vec` identifies keys by value, not by index.
    #[inline] fn retain_keys_with_indices<IF, F, P, R>(&mut self, index_filter: IF, mut filter: F, mut retained_earlier: P, remove_count: R)
        where IF: FnMut(usize) -> bool, F: FnMut(&K) -> bool, P: FnMut(&K) -> bool, R: FnMut() -> usize
    {
        match self {
            Self::Dynamic(key_set, clone_threshold) => {
                key_set.retain_keys_with_indices(index_filter, &mut filter, &mut retained_earlier, remove_count);
                if key_set.keys_len() < *clone_threshold {
                    if let Self::Dynamic(key_set, _) = mem::take(self) {
                        *self = Self::Cached(key_set.into_vec(|k| retained_earlier(k) && filter(k)))
                    }
                }
            },
            Self::Cached(v) => v.retain_keys_with_indices(index_filter, filter, retained_earlier, remove_count)
        }
    }

    // Parallel, index-based variant; see `retain_keys_with_indices`.
    #[inline] fn par_retain_keys_with_indices<IF, F, P, R>(&mut self, index_filter: IF, filter: F, retained_earlier: P, remove_count: R)
        where IF: Fn(usize) -> bool + Sync + Send, F: Fn(&K) -> bool + Sync + Send,
              P: Fn(&K) -> bool + Sync + Send, R: Fn() -> usize
    {
        match self {
            Self::Dynamic(key_set, clone_threshold) => {
                key_set.par_retain_keys_with_indices(index_filter, &filter, &retained_earlier, remove_count);
                if key_set.keys_len() < *clone_threshold {
                    if let Self::Dynamic(key_set, _) = mem::take(self) {
                        *self = Self::Cached(key_set.par_into_vec(|k| retained_earlier(k) && filter(k)))
                    }
                }
            },
            Self::Cached(v) => v.par_retain_keys_with_indices(index_filter, filter, retained_earlier, remove_count)
        }
    }

    fn into_vec<P>(self, retained_hint: P) -> Vec<K>
    where P: FnMut(&K) -> bool, K: Clone, Self: Sized
    {
        match self {
            Self::Dynamic(key_set, _) => key_set.into_vec(retained_hint),
            // The cache already contains exactly the retained keys.
            Self::Cached(v) => v
        }
    }

    fn par_into_vec<P>(self, retained_hint: P) -> Vec<K>
        where P: Fn(&K) -> bool + Sync + Send, Self: Sized, K: Clone + Send
    {
        match self {
            Self::Dynamic(key_set, _) => key_set.par_into_vec(retained_hint),
            // The cache already contains exactly the retained keys.
            Self::Cached(v) => v
        }
    }
}