// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License in the LICENSE-APACHE file or at:
//     https://www.apache.org/licenses/LICENSE-2.0

//! Widget identifiers

// x << a + b is x << (a + b)
#![allow(clippy::precedence)]

use crate::cast::{Cast, Conv};
use crate::window::WindowId;
use hash_hasher::{HashBuildHasher, HashedMap};
use std::cell::RefCell;
use std::cmp::Ordering;
use std::collections::hash_map::Entry;
use std::hash::{DefaultHasher, Hash, Hasher};
use std::iter::once;
use std::marker::PhantomData;
use std::mem::size_of;
use std::num::NonZeroU64;
use std::rc::Rc;
use std::{fmt, slice};

thread_local! {
    // Map from hash of path to allocated path
    static DB: RefCell<HashedMap<u64, *mut usize>> = const { RefCell::new(HashedMap::with_hasher(HashBuildHasher::new())) };
}

/// Invalid (default) identifier
const INVALID: u64 = !0;

/// Mark for use; one of the below should be set (or both if INVALID)
const USE_MASK: u64 = 0x3;
/// `x & USE_MASK == USE_BITS`: rest is a sequence of 4-bit blocks; len is number of blocks used
const USE_BITS: u64 = 0x01;
/// `x & USE_MASK == USE_PTR`: high 62 bits are a pointer (or hash: see to_nzu64)
const USE_PTR: u64 = 0x02;

const MASK_LEN: u64 = 0xF0;
const SHIFT_LEN: u8 = 4;
const BLOCKS: u8 = 14;
const MASK_BITS: u64 = 0xFFFF_FFFF_FFFF_FF00;

const MASK_PTR: u64 = 0xFFFF_FFFF_FFFF_FFFC;

const LEN_MASK: usize = 0xF_FFFF;
const LEN_INCR: usize = LEN_MASK + 1;
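
// A summary of the bit-packed (`USE_BITS`) layout implied by the constants
// above: bits 63..=8 hold up to 14 four-bit blocks of path data (first path
// component in the highest block), bits 7..=4 hold the number of blocks used
// and bits 1..=0 hold the `USE_BITS` flag (bits 3..=2 are unused).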

/// Integer or pointer to a reference-counted slice.
///
/// Use `Self::get_ptr` to determine the variant used. When reading a pointer,
/// mask with MASK_PTR.
///
/// `self.0 & USE_MASK` are the "flag bits" determining the variant used. This
/// overlaps with the pointer's lowest bit (which must be zero due to alignment).
///
/// `PhantomData<Rc<()>>` is used to impl !Send and !Sync. We need atomic
/// reference counting to support those.
struct IntOrPtr(NonZeroU64, PhantomData<Rc<()>>);

#[derive(Clone, Debug, PartialEq, Eq)]
enum Variant<'a> {
    Invalid,
    Int(u64),
    Slice(&'a [usize]),
}

fn hash(path: &[usize]) -> u64 {
    let mut hasher = DefaultHasher::new();
    path.hash(&mut hasher);
    hasher.finish()
}

impl IntOrPtr {
    const ROOT: Self = IntOrPtr(NonZeroU64::new(USE_BITS).unwrap(), PhantomData);
    const INVALID: Self = IntOrPtr(NonZeroU64::new(INVALID).unwrap(), PhantomData);

    #[inline]
    fn get_ptr(&self) -> Option<*mut usize> {
        if self.0.get() & USE_MASK == USE_PTR {
            let p = usize::conv(self.0.get() & MASK_PTR);
            Some(p as *mut usize)
        } else {
            None
        }
    }

    fn get_slice(&self) -> Option<&[usize]> {
        self.get_ptr().map(|p| unsafe {
            let len = *p.offset(1) & LEN_MASK;
            let p = p.offset(2);
            slice::from_raw_parts(p, len)
        })
    }

    /// Construct from an integer
    ///
    /// Note: requires `x & USE_MASK == USE_BITS`.
    fn new_int(x: u64) -> Self {
        assert!(x & USE_MASK == USE_BITS);
        let x = NonZeroU64::new(x).unwrap();
        let u = IntOrPtr(x, PhantomData);
        assert!(u.get_ptr().is_none());
        u
    }

    /// Construct as a slice from an iterator
    fn new_iter<I: Clone + Iterator<Item = usize>>(iter: I) -> Self {
        let ref_count = 1;
        let len = iter.clone().count();
        let mut v: Vec<usize> = once(ref_count).chain(once(len)).chain(iter).collect();

        let mut p: Option<*mut usize> = None;
        let pp = &mut p;
        DB.with_borrow_mut(move |db| {
            loop {
                let x = hash(&v);
                match db.entry(x) {
                    Entry::Occupied(entry) => {
                        let p = *entry.get();
                        let slice =
                            unsafe { slice::from_raw_parts(p.offset(2), *p.offset(1) & LEN_MASK) };
                        if slice == &v[2..] {
                            unsafe { increment_rc(p) };
                            *pp = Some(p);
                            break;
                        } else {
                            // Hash collision: tweak the representation and retry
                            v[1] = v[1].wrapping_add(LEN_INCR);
                        }
                    }
                    Entry::Vacant(entry) => {
                        let p = Box::leak(v.into_boxed_slice()) as *mut [usize] as *mut usize;
                        *pp = Some(p);
                        entry.insert(p);
                        break;
                    }
                }
            }
        });

        let p: u64 = (p.expect("failed to access Id DB") as usize).cast();
        debug_assert_eq!(p & 3, 0);
        let p = p | USE_PTR;
        let u = IntOrPtr(NonZeroU64::new(p).unwrap(), PhantomData);
        assert!(u.get_ptr().is_some());
        u
    }

    fn get(&self) -> Variant<'_> {
        if let Some(slice) = self.get_slice() {
            Variant::Slice(slice)
        } else if self.0.get() == INVALID {
            Variant::Invalid
        } else {
            Variant::Int(self.0.get())
        }
    }

    fn to_nzu64(&self) -> NonZeroU64 {
        if let Some(slice) = self.get_slice() {
            let x = hash(slice) & MASK_PTR;
            NonZeroU64::new(x | USE_PTR).unwrap()
        } else {
            self.0
        }
    }

    fn try_from_u64(n: u64) -> Option<IntOrPtr> {
        match n & USE_MASK {
            USE_BITS => {
                // TODO: sanity checks
                Some(IntOrPtr(NonZeroU64::new(n).unwrap(), PhantomData))
            }
            USE_PTR => {
                let mut result = None;
                let r = &mut result;
                DB.with_borrow(|db| {
                    if let Some(p) = db.get(&n) {
                        unsafe { increment_rc(*p) };
                        let p: u64 = (*p as usize).cast();
                        *r = Some(IntOrPtr(NonZeroU64::new(p | USE_PTR).unwrap(), PhantomData))
                    }
                });
                result
            }
            _ if n == !0 => Some(IntOrPtr::INVALID),
            _ => None,
        }
    }
}

/// Increment an `IntOrPtr` pointer ref-count
///
/// Safety: the input must be a valid IntOrPtr pointer value.
unsafe fn increment_rc(p: *mut usize) {
    unsafe {
        let ref_count = *p;

        // Copy behaviour of Rc::clone:
        if ref_count == 0 || ref_count == usize::MAX {
            std::process::abort();
        }

        *p = ref_count + 1;
    }
}

impl Clone for IntOrPtr {
    fn clone(&self) -> Self {
        if let Some(p) = self.get_ptr() {
            unsafe { increment_rc(p) };
        }
        IntOrPtr(self.0, PhantomData)
    }
}

impl Drop for IntOrPtr {
    fn drop(&mut self) {
        if let Some(p) = self.get_ptr() {
            unsafe {
                let ref_count = *p;
                if ref_count > 1 {
                    *p = ref_count - 1;
                } else {
                    // len+2 because path len does not include ref_count or len "fields"
                    let len = (*p.offset(1) & LEN_MASK) + 2;
                    let slice = slice::from_raw_parts_mut(p, len);

                    let x = hash(slice);
                    DB.with_borrow_mut(|db| db.remove(&x));

                    let _ = Box::<[usize]>::from_raw(slice);
                }
            }
        }
    }
}

enum PathIter<'a> {
    Bits(BitsIter),
    Slice(std::iter::Cloned<std::slice::Iter<'a, usize>>),
}

impl<'a> Iterator for PathIter<'a> {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<usize> {
        match self {
            PathIter::Bits(bits) => bits.next(),
            PathIter::Slice(slice) => slice.next(),
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        match self {
            PathIter::Bits(bits) => bits.size_hint(),
            PathIter::Slice(slice) => slice.size_hint(),
        }
    }
}

/// Iterator over [`Id`] path components
pub struct WidgetPathIter<'a>(PathIter<'a>);
impl<'a> Iterator for WidgetPathIter<'a> {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<usize> {
        self.0.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.0.size_hint()
    }
}

/// Widget identifier
///
/// All widgets are assigned an identifier which is unique within the window.
/// This type may be tested for equality and order and may be iterated over as
/// a "path" of "key" values.
///
/// Formatting an `Id` via [`Display`] prints the path, for example
/// `#1290a4`. Here, `#` represents the root; each following hexadecimal digit
/// represents a path component except that digits `8-f` are combined with the
/// following digit(s). Hence, the above path has components `1`, `2`, `90`,
/// `a4`. To interpret these values, first subtract 8 from each digit but the
/// last digit, then read as base-8: `[1, 2, 8, 20]`.
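///
/// For example (a sketch; `Id::ROOT` is crate-internal, so this block is not
/// compiled as a doc-test):
/// ```ignore
/// let id = Id::ROOT.make_child(1).make_child(2).make_child(8).make_child(20);
/// assert_eq!(id.to_string(), "#1290a4");
/// ```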
///
/// # Representation
///
/// This type is small (64-bit) and non-zero: `Option<Id>` has the same
/// size as `Id`. It is also very cheap to `Clone`: usually only one `if`
/// check, and in the worst case a pointer dereference and ref-count increment.
/// Paths up to 14 digits long (as printed) are represented internally;
/// beyond this limit a reference-counted heap allocation is used.
///
/// `Id` is neither `Send` nor `Sync`.
///
/// # Invalid state
///
/// When `Id` is [`Default`] constructed, it uses a special **invalid** state.
/// Any attempt to use or compare such an invalid state will cause a panic.
///
/// Widgets are initially constructed with an invalid `Id`, then assigned an
/// `Id` when configured (see [`Events::configure`]).
/// In most cases values are persistent but this is not guaranteed (e.g.
/// inserting or removing a child from a `List` widget will affect the
/// identifiers of all following children). View-widgets assign path components
/// based on the data key, thus *possibly* making identifiers persistent.
///
/// [`Display`]: std::fmt::Display
/// [`ConfigCx::configure`]: crate::event::ConfigCx::configure
/// [`Events::configure`]: crate::Events::configure
#[allow(clippy::assigning_clones)]
#[derive(Clone)]
pub struct Id(IntOrPtr);

// Encode lowest 48 bits of key into the low bits of a u64, returning also the encoded bit-length
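// For example, `encode(20)` returns `(0xa4, 8)`: block `a` (= 8 | 2, the high
// bit marking continuation into the next block) followed by block `4`,
// i.e. 2 * 8 + 4 = 20 read in base 8.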
fn encode(key: usize) -> (u64, u8) {
    debug_assert!(8 * size_of::<usize>() as u32 - key.leading_zeros() <= 64);
    let mut x = key as u64 & 0x0000_FFFF_FFFF_FFFF;
    let mut y = x & 7;
    x >>= 3;
    let mut shift = 4;
    while x != 0 {
        y |= (8 | (x & 7)) << shift;
        x >>= 3;
        shift += 4;
    }
    (y, shift)
}

#[inline]
fn block_len(x: u64) -> u8 {
    ((x & MASK_LEN) >> SHIFT_LEN) as u8
}

// Returns usize read from highest bits of x plus blocks used
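// For example, `next_from_bits(0xa400_0000_0000_0000)` returns `(20, 2)`,
// decoding the blocks `a`, `4` produced by `encode(20)`.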
fn next_from_bits(mut x: u64) -> (usize, u8) {
    const TAKE: u64 = 0x7000_0000_0000_0000;
    const HIGH: u64 = 0x8000_0000_0000_0000;
    let mut y = (x & TAKE) >> 60;
    let mut c = 1;
    while (x & HIGH) != 0 {
        x <<= 4;
        y = (y << 3) | ((x & TAKE) >> 60);
        c += 1;
    }
    (y.cast(), c)
}

#[derive(Clone, Debug)]
struct BitsIter(u8, u64);
impl BitsIter {
    fn new(bits: u64) -> Self {
        assert!((bits & USE_MASK) == USE_BITS);
        let len = block_len(bits);
        BitsIter(len, bits & MASK_BITS)
    }
}
impl Iterator for BitsIter {
    type Item = usize;
    fn next(&mut self) -> Option<usize> {
        if self.0 == 0 {
            return None;
        }
        let (next, blocks) = next_from_bits(self.1);
        self.0 -= blocks;
        self.1 <<= 4 * blocks;
        Some(next)
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        ((self.0 != 0) as usize, Some(self.0 as usize))
    }
}

impl Id {
    /// Identifier of the window
    pub(crate) const ROOT: Self = Id(IntOrPtr::ROOT);

    const INVALID: Self = Id(IntOrPtr::INVALID);

    /// Is the identifier valid?
    ///
    /// Default-constructed identifiers are invalid. Comparing invalid ids is
    /// considered a logic error and thus will panic in debug builds.
    /// This method may be used to check an identifier's validity.
    pub fn is_valid(&self) -> bool {
        self.0.get() != Variant::Invalid
    }

    /// Iterate over path components
    pub fn iter(&self) -> WidgetPathIter<'_> {
        match self.0.get() {
            Variant::Invalid => panic!("Id::iter: invalid"),
            Variant::Int(x) => WidgetPathIter(PathIter::Bits(BitsIter::new(x))),
            Variant::Slice(path) => WidgetPathIter(PathIter::Slice(path.iter().cloned())),
        }
    }

    /// Get the path len (number of indices, not storage length)
    pub fn path_len(&self) -> usize {
        match self.0.get() {
            Variant::Invalid => panic!("Id::path_len: invalid"),
            Variant::Int(x) => BitsIter::new(x).count(),
            Variant::Slice(path) => path.len(),
        }
    }

    /// Returns true if `self` equals `id` or if `id` is a descendant of `self`
    pub fn is_ancestor_of(&self, id: &Self) -> bool {
        match (self.0.get(), id.0.get()) {
            (Variant::Invalid, _) | (_, Variant::Invalid) => {
                panic!("Id::is_ancestor_of: invalid")
            }
            (Variant::Slice(_), Variant::Int(_)) => {
                // This combo will never be created where id is a child.
                false
            }
            (Variant::Int(self_x), Variant::Int(child_x)) => {
                let self_blocks = block_len(self_x);
                let child_blocks = block_len(child_x);
                if self_blocks > child_blocks {
                    return false;
                }

                // self_blocks == 0 for ROOT, otherwise > 0
                let shift = 4 * (BLOCKS - self_blocks) + 8;
                shift == 64 || self_x >> shift == child_x >> shift
            }
            (Variant::Int(self_x), Variant::Slice(child)) => {
                let iter = BitsIter::new(self_x);
                iter.zip(child.iter()).all(|(a, b)| a == *b)
            }
            (Variant::Slice(self_path), Variant::Slice(child)) => child.starts_with(self_path),
        }
    }

    /// Find the most-derived common ancestor
    pub fn common_ancestor(&self, id: &Self) -> Self {
        let mut r = Id::ROOT;
        for (a, b) in self.iter().zip(id.iter()) {
            if a != b {
                break;
            }
            r = r.make_child(a);
        }
        r
    }

    /// Iterate over the remaining components of `self`'s path after `id`
    ///
    /// If `id` is not `self` or an ancestor of `self`, the returned iterator
    /// is empty.
    pub fn iter_keys_after(&self, id: &Self) -> WidgetPathIter<'_> {
        let mut self_iter = self.iter();
        for v in id.iter() {
            if self_iter.next() != Some(v) {
                return WidgetPathIter(PathIter::Bits(BitsIter(0, 0)));
            }
        }
        self_iter
    }

    /// Get the first key in `self`'s path after `id`
    ///
    /// If the path of `self` starts with the path of `id`
    /// (`id.is_ancestor_of(self)`) then this returns the *next* key in
    /// `self`'s path (if any). Otherwise, this returns `None`.
    pub fn next_key_after(&self, id: &Self) -> Option<usize> {
        match (id.0.get(), self.0.get()) {
            (Variant::Invalid, _) | (_, Variant::Invalid) => {
                panic!("Id::next_key_after: invalid")
            }
            (Variant::Slice(_), Variant::Int(_)) => None,
            (Variant::Int(parent_x), Variant::Int(child_x)) => {
                let parent_blocks = block_len(parent_x);
                let child_blocks = block_len(child_x);
                if parent_blocks >= child_blocks {
                    return None;
                }

                // parent_blocks == 0 for ROOT, otherwise > 0
                let shift = 4 * (BLOCKS - parent_blocks) + 8;
                if shift != 64 && parent_x >> shift != child_x >> shift {
                    return None;
                }

                debug_assert!(child_blocks > 0);
                let next_bits = (child_x & MASK_BITS) << (4 * parent_blocks);
                Some(next_from_bits(next_bits).0)
            }
            (Variant::Int(parent_x), Variant::Slice(child_path)) => {
                let iter = BitsIter::new(parent_x);
                let mut child_iter = child_path.iter();
                if iter.zip(&mut child_iter).all(|(a, b)| a == *b) {
                    child_iter.next().cloned()
                } else {
                    None
                }
            }
            (Variant::Slice(parent_path), Variant::Slice(child_path)) => {
                if child_path.starts_with(parent_path) {
                    child_path[parent_path.len()..].iter().next().cloned()
                } else {
                    None
                }
            }
        }
    }

    /// Get the parent widget's identifier, if not root
    pub fn parent(&self) -> Option<Id> {
        match self.0.get() {
            Variant::Invalid => None,
            Variant::Int(x) => {
                let mut bit_len = 4 * block_len(x);
                while bit_len > 0 {
                    bit_len -= 4;
                    if bit_len > 0 && (x >> (64 - bit_len)) & 8 != 0 {
                        continue;
                    }

                    let len = ((bit_len / 4) as u64) << SHIFT_LEN;
                    let mask = MASK_BITS << (56 - bit_len);
                    let id = (x & mask) | len | USE_BITS;
                    return Some(Id(IntOrPtr::new_int(id)));
                }
                None
            }
            Variant::Slice(path) => {
                if path.is_empty() {
                    return None;
                }

                let len = path.len();
                let path = &path[0..len - 1];

                if len > BLOCKS as usize {
                    Some(Id(IntOrPtr::new_iter(path.iter().cloned())))
                } else {
                    Some(make_id(path))
                }
            }
        }
    }

    /// Make an identifier for the child with the given `key`
    ///
    /// Note: this is not a getter method. Calling multiple times with the same
    /// `key` may or may not return the same value!
    #[must_use]
    pub fn make_child(&self, key: usize) -> Self {
        match self.0.get() {
            Variant::Invalid => panic!("Id::make_child: invalid"),
            Variant::Int(self_x) => {
                // TODO(opt): this bit-packing approach is designed for space-optimisation, but it may
                // be better to use a simpler, less-compressed approach, possibly with u128 type.
                let block_len = block_len(self_x);
                let avail_blocks = BLOCKS - block_len;
                // Note: key zero still occupies one block so that the block count is bumped
                let req_bits = (8 * size_of::<usize>() as u8 - key.leading_zeros() as u8).max(1);
                if req_bits <= 3 * avail_blocks {
                    let (bits, bit_len) = encode(key);
                    let used_blocks = bit_len / 4;
                    debug_assert_eq!(used_blocks, req_bits.div_ceil(3));
                    let len = (block_len as u64 + used_blocks as u64) << SHIFT_LEN;
                    let rest = bits << 4 * avail_blocks - bit_len + 8;
                    let id = (self_x & MASK_BITS) | rest | len | USE_BITS;
                    Id(IntOrPtr::new_int(id))
                } else {
                    Id(IntOrPtr::new_iter(BitsIter::new(self_x).chain(once(key))))
                }
            }
            Variant::Slice(path) => Id(IntOrPtr::new_iter(path.iter().cloned().chain(once(key)))),
        }
    }

    /// Get a [`WindowId`], assuming that the first path component encodes it
    pub(crate) fn window_id(&self) -> WindowId {
        let index = self.next_key_after(&Id::ROOT).unwrap();
        WindowId::try_from(index.cast()).unwrap()
    }

    /// Convert to a [`NonZeroU64`] representation
    ///
    /// This conversion is infallible but not free.
    ///
    /// The result may be used in equality checks (with a *very* small risk of
    /// false positive due to hash collision) and may be passed to
    /// [`Self::try_from_u64`].
    pub fn to_nzu64(&self) -> NonZeroU64 {
        self.0.to_nzu64()
    }

    /// Try converting a `u64` to an `Id`
    ///
    /// This method is the inverse of [`Self::to_nzu64`]. Assuming that the
    /// input `n` is the result of calling `id.to_nzu64().get()` for some `id`,
    /// then this method will return:
    ///
    /// -   `Some(Id::INVALID)` where `!id.is_valid()`
    /// -   `Some(id)` where `id` uses the internal (non-allocated) representation
    /// -   `Some(id)` where `id` uses an allocated representation and the original `id` still
    ///     exists (on the same thread).
    /// -   `Some(id)` (probably) where `id` uses an allocated representation, has been freed then
    ///     reconstructed (on the same thread) before calling this method.
    ///     (There is a tiny chance of hash collision in this case.)
    ///
    /// This method will return `None` where:
    ///
    /// -   `n == 0`
    /// -   The originating `id` used an allocating representation and has
    ///     been deallocated without reconstruction
    /// -   The originating `id` used an allocating representation and was
    ///     constructed on another thread (excepting the case where an `Id` with
    ///     the same hash was constructed on this thread)
    ///
    /// Bad / random inputs may yield either `None` or `Some(bad_id)` and in the
    /// latter case operations on `bad_id` may panic, but are memory safe.
    pub fn try_from_u64(n: u64) -> Option<Id> {
        IntOrPtr::try_from_u64(n).map(Id)
    }
}

impl PartialEq for Id {
    fn eq(&self, rhs: &Self) -> bool {
        match (self.0.get(), rhs.0.get()) {
            (Variant::Invalid, _) | (_, Variant::Invalid) => panic!("Id::eq: invalid id"),
            (Variant::Int(x), Variant::Int(y)) => x == y,
            _ => self.iter().eq(rhs.iter()),
        }
    }
}
impl Eq for Id {}

impl PartialEq<str> for Id {
    fn eq(&self, rhs: &str) -> bool {
        // TODO(opt): we don't have to format to test this
        let formatted = format!("{self}");
        formatted == rhs
    }
}
impl PartialEq<&str> for Id {
    fn eq(&self, rhs: &&str) -> bool {
        self == *rhs
    }
}

impl PartialOrd for Id {
    fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
        Some(self.cmp(rhs))
    }
}

impl Ord for Id {
    fn cmp(&self, rhs: &Self) -> Ordering {
        match (self.0.get(), rhs.0.get()) {
            (Variant::Invalid, _) | (_, Variant::Invalid) => panic!("Id::cmp: invalid id"),
            (Variant::Int(x), Variant::Int(y)) => x.cmp(&y),
            _ => self.iter().cmp(rhs.iter()),
        }
    }
}

impl Hash for Id {
    fn hash<H: Hasher>(&self, state: &mut H) {
        match self.0.get() {
            Variant::Invalid => (),
            Variant::Int(x) => {
                x.hash(state);
            }
            Variant::Slice(path) => {
                path.hash(state);
            }
        }
    }
}

impl PartialEq<Option<Id>> for Id {
    #[inline]
    fn eq(&self, rhs: &Option<Id>) -> bool {
        rhs.as_ref().map(|id| id == self).unwrap_or(false)
    }
}

impl<'a> PartialEq<Option<&'a Id>> for Id {
    #[inline]
    fn eq(&self, rhs: &Option<&'a Id>) -> bool {
        rhs.map(|id| id == self).unwrap_or(false)
    }
}

impl<'a> PartialEq<&'a Id> for Id {
    #[inline]
    fn eq(&self, rhs: &&Id) -> bool {
        self == *rhs
    }
}

impl<'a> PartialEq<&'a Option<Id>> for Id {
    #[inline]
    fn eq(&self, rhs: &&Option<Id>) -> bool {
        self == *rhs
    }
}

impl Default for Id {
    fn default() -> Self {
        Id::INVALID
    }
}

impl fmt::Debug for Id {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        write!(f, "Id({self})")
    }
}

impl fmt::Display for Id {
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        match self.0.get() {
            Variant::Invalid => write!(f, "#INVALID"),
            Variant::Int(x) => {
                let len = block_len(x);
                if len == 0 {
                    write!(f, "#")
                } else {
                    let bits = x >> (4 * (BLOCKS - len) + 8);
                    write!(f, "#{1:0>0$x}", len as usize, bits)
                }
            }
            Variant::Slice(path) => {
                write!(f, "#")?;
                for key in path {
                    let (bits, bit_len) = encode(*key);
                    write!(f, "{1:0>0$x}", bit_len as usize / 4, bits)?;
                }
                Ok(())
            }
        }
    }
}

/// Convert to a `NodeId` using [`Id::to_nzu64`]
///
/// Use [`Id::try_from_u64`] for the inverse conversion.
#[cfg(feature = "accesskit")]
impl From<Id> for accesskit::NodeId {
    #[inline]
    fn from(id: Id) -> Self {
        accesskit::NodeId(id.to_nzu64().get())
    }
}

/// Convert to a `NodeId` using [`Id::to_nzu64`]
///
/// Use [`Id::try_from_u64`] for the inverse conversion.
#[cfg(feature = "accesskit")]
impl From<&Id> for accesskit::NodeId {
    #[inline]
    fn from(id: &Id) -> Self {
        accesskit::NodeId(id.to_nzu64().get())
    }
}

/// Types supporting conversion to [`Id`]
///
/// A method taking an `id: impl HasId` parameter supports an [`Id`],
/// a reference to an [`Id`] (which is thus cloned),
/// or a (mutable) reference to a widget.
///
/// Note: in some cases attempting to pass a widget reference does not pass
/// borrow checks. In this case pass `widget.id()` explicitly.
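///
/// An illustrative (hypothetical) method signature using this bound:
/// ```ignore
/// fn set_focus(&mut self, id: impl HasId) {
///     let id: Id = id.has_id();
///     // ...
/// }
/// ```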
pub trait HasId {
    fn has_id(self) -> Id;
}

impl HasId for Id {
    #[inline]
    fn has_id(self) -> Id {
        self
    }
}

impl HasId for &Id {
    #[inline]
    fn has_id(self) -> Id {
        self.clone()
    }
}

impl HasId for &mut Id {
    #[inline]
    fn has_id(self) -> Id {
        self.clone()
    }
}

fn make_id(seq: &[usize]) -> Id {
    let mut id = Id::ROOT;
    for x in seq {
        id = id.make_child(*x);
    }
    id
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn size_of_option_widget_id() {
        use std::mem::size_of;
        assert_eq!(size_of::<Id>(), 8);
        assert_eq!(size_of::<Id>(), size_of::<Option<Id>>());
    }

    #[test]
    fn test_partial_eq() {
        assert_eq!(Id::ROOT, Id::ROOT);
        let c1 = Id::ROOT.make_child(0).make_child(15);
        let c2 = Id::ROOT.make_child(1).make_child(15);
        let c3 = Id::ROOT.make_child(0).make_child(14);
        let c4 = Id::ROOT.make_child(0).make_child(15);
        assert!(c1 != c2);
        assert!(c1 != c3);
        assert!(c2 != c3);
        assert_eq!(c1, c4);
        assert!(c1 != Id::ROOT);

        let d1 = Id(IntOrPtr::new_iter([0, 15].iter().cloned()));
        let d2 = Id(IntOrPtr::new_iter([1, 15].iter().cloned()));
        assert_eq!(c1, d1);
        assert_eq!(c2, d2);
        assert!(d1 != d2);
        assert!(d1 != Id::ROOT);
    }

    #[test]
    #[should_panic]
    fn test_partial_eq_invalid_1() {
        let _ = Id::INVALID == Id::INVALID;
    }

    #[test]
    #[should_panic]
    fn test_partial_eq_invalid_2() {
        let _ = Id::ROOT == Id::INVALID;
    }

    #[test]
    #[should_panic]
    fn test_partial_eq_invalid_3() {
        let _ = Id::INVALID == Id::ROOT;
    }

    #[test]
    fn test_partial_eq_str() {
        assert_eq!(Id::ROOT, "#");
        assert!(Id::ROOT != "#0");
        assert_eq!(Id::ROOT.make_child(0).make_child(15), "#097");
        assert_eq!(Id::ROOT.make_child(1).make_child(15), "#197");
        let c3 = Id::ROOT.make_child(0).make_child(14);
        assert_eq!(c3, "#096");
        assert!(c3 != "#097");
    }

    #[test]
    fn test_ord() {
        let root = Id::ROOT;
        let c_0 = root.make_child(0);
        let c_0_0 = c_0.make_child(0);
        assert!(root < c_0);
        assert!(c_0 < c_0_0);

        let c_1 = root.make_child(1);
        assert!(c_0_0 < c_1);
        assert!(c_1 < root.make_child(8));

        let d_0 = Id(IntOrPtr::new_iter([0].iter().cloned()));
        let d_0_0 = Id(IntOrPtr::new_iter([0, 0].iter().cloned()));
        let d_1 = Id(IntOrPtr::new_iter([1].iter().cloned()));
        assert_eq!(d_0.cmp(&c_0), Ordering::Equal);
        assert_eq!(d_0_0.cmp(&c_0_0), Ordering::Equal);
        assert_eq!(d_1.cmp(&c_1), Ordering::Equal);
        assert!(d_0 < d_0_0);
        assert!(d_0_0 < d_1);
    }

    #[test]
    #[should_panic]
    fn test_ord_invalid() {
        let _ = Id::INVALID < Id::ROOT;
    }

    #[test]
    fn test_next_from_bits() {
        const REST: u64 = 0x0FFF_FFFF_FFFF_FFFF;
        assert_eq!(next_from_bits(0), (0, 1));
        assert_eq!(next_from_bits(REST), (0, 1));
        assert_eq!(next_from_bits((7 << 60) | REST), (7, 1));
        assert_eq!(next_from_bits((0xB << 60) | (3 << 56)), (27, 2));
        assert_eq!(
            next_from_bits(0xC9A4_F300_0000_0000),
            ((4 << 9) + (1 << 6) + (2 << 3) + 4, 4)
        );
    }
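
    // A round-trip sketch: values produced by `encode` should decode via
    // `next_from_bits` once shifted to the top of a u64, as in the bit-packed
    // representation.
    #[test]
    fn test_encode_decode_roundtrip() {
        for key in [0usize, 1, 7, 8, 20, 300, 313553] {
            let (bits, bit_len) = encode(key);
            let x = bits << (64 - u32::from(bit_len));
            assert_eq!(next_from_bits(x), (key, bit_len / 4));
        }
    }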

    #[test]
    fn test_bits_iter() {
        fn as_vec(x: u64) -> Vec<usize> {
            BitsIter::new(x).collect()
        }
        assert_eq!(as_vec(USE_BITS), Vec::<usize>::new());
        assert_eq!(as_vec(0x3100_0000_0000_0011), vec![3]);
        assert_eq!(as_vec(0x1A93_007F_0000_0071), vec![1, 139, 0, 0, 7]);
    }

    #[test]
    fn test_parent() {
        fn test(seq: &[usize]) {
            let len = seq.len();
            let id = make_id(&seq[..len - 1]);

            if len == 0 {
                assert_eq!(id.parent(), None);
            } else {
                let id2 = id.make_child(seq[len - 1]);
                assert_eq!(id2.parent(), Some(id));
            }
        }

        test(&[4]);
        test(&[4, 0]);
        test(&[0, 0, 0]);
        test(&[0, 1, 0]);
        test(&[9, 0, 1, 300]);
        test(&[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0]);
        test(&[313553, 13513, 13511631]);
    }

    #[test]
    fn test_make_child() {
        fn test(seq: &[usize], x: u64) {
            let id = make_id(seq);
            let v = id.to_nzu64().get();
            if v != x {
                panic!("test({seq:?}, {x:x}): found {v:x}");
            }

            // Every id is its own ancestor:
            assert!(id.is_ancestor_of(&id));
        }

        test(&[], USE_BITS);
        test(&[0, 0, 0], (3 << 4) | USE_BITS);
        test(&[0, 1, 0], (3 << 4) | (1 << 56) | USE_BITS);
        test(&[9, 0, 1, 300], 0x9101cd4000000071);
        test(&[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0], 0x12345679091920e1);
    }

    #[test]
    fn test_display() {
        fn from_seq(seq: &[usize]) -> String {
            format!("{}", make_id(seq))
        }

        assert_eq!(from_seq(&[]), "#");
        assert_eq!(from_seq(&[0]), "#0");
        assert_eq!(from_seq(&[1, 2, 3]), "#123");
        assert_eq!(from_seq(&[5, 9, 13]), "#59195");
        assert_eq!(from_seq(&[321]), "#d81");
        assert_eq!(
            from_seq(&[313553, 13513, 13511631]),
            "#99ccba1bab91ebcadf97"
        )
    }

    #[test]
    fn test_is_ancestor() {
        fn test(seq: &[usize], seq2: &[usize]) {
            let id = make_id(seq);
            let mut id2 = id.clone();
            for x in seq2 {
                id2 = id2.make_child(*x);
            }
            let next = seq2.iter().next().cloned();
            assert_eq!(id.is_ancestor_of(&id2), next.is_some() || id == id2);
            assert_eq!(id2.next_key_after(&id), next);
        }

        test(&[], &[]);
        test(&[], &[1]);
        test(&[], &[51930, 2, 18]);
        test(&[5, 6, 0, 1, 1], &[]);
        test(&[5, 6, 0, 1, 1], &[1, 1]);
        test(&[8, 26], &[0]);
        test(&[9, 9, 9, 9, 9, 9, 9], &[]);
        test(&[9, 9, 9, 9, 9, 9, 9], &[6]);
        test(&[9, 9, 9, 9, 9, 9, 9, 9], &[3]);
        test(&[0, 2, 2, 0, 17], &[0]);
    }

    #[test]
    fn test_not_ancestor() {
        fn test(seq: &[usize], seq2: &[usize]) {
            let id = make_id(seq);
            let id2 = make_id(seq2);
            assert_eq!(id.is_ancestor_of(&id2), false);
            assert_eq!(id2.next_key_after(&id), None);
        }

        test(&[0], &[]);
        test(&[0], &[1]);
        test(&[2, 10, 1], &[2, 10]);
        test(&[0, 5, 2], &[0, 1, 5]);
    }

    #[test]
    fn common_ancestor() {
        fn test(seq: &[usize], seq2: &[usize], seq3: &[usize]) {
            let id = make_id(seq);
            let id2 = make_id(seq2);
            assert_eq!(id.common_ancestor(&id2), make_id(seq3));
        }

        test(&[0], &[], &[]);
        test(&[0], &[1, 2], &[]);
        test(&[0], &[0, 2], &[0]);
        test(&[2, 10, 1], &[2, 10], &[2, 10]);
        test(&[0, 5, 2], &[0, 5, 1], &[0, 5]);
    }

    #[test]
    fn deduplicate_allocations() {
        let id1 = make_id(&[2, 6, 1, 1, 0, 13, 0, 100, 1000]);
        let id2 = make_id(&[2, 6, 1, 1, 0, 13, 0, 100, 1000]);
        assert_eq!(id1.0.0, id2.0.0);
    }
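
    // A sketch of the `to_nzu64` / `try_from_u64` round trip for the
    // bit-packed representation; see the method docs regarding allocated ids.
    #[test]
    fn test_to_nzu64_roundtrip_bits() {
        let id = make_id(&[1, 2, 8, 20]);
        let n = id.to_nzu64().get();
        assert_eq!(Id::try_from_u64(n), Some(id));
        assert_eq!(Id::try_from_u64(0), None);
    }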
}