#![deny(missing_docs)]
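//! A compact binary format for hierarchy labels, modeled on ORDPATH.
//! Paths are sequences of `i64` ordinals whose encoded bytes compare in
//! document (depth-first) order.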

use std::alloc::{self, Layout};
use std::borrow::Borrow;
use std::cmp::Ordering;
use std::fmt::{self, Debug, Display};
use std::hash::{Hash, Hasher};
use std::io::Read;
use std::iter::FusedIterator;
use std::mem::MaybeUninit;
use std::ops::Deref;
use std::ptr::NonNull;
use std::str::FromStr;
use std::{mem, slice};

#[cfg(feature = "serde")]
use std::marker::PhantomData;

#[cfg(feature = "serde")]
use serde::{
    de::{Deserialize, Deserializer, Visitor},
    ser::{Serialize, Serializer},
};

mod enc;
mod error;
mod reader;
mod writer;

pub use enc::*;
pub use error::*;
pub use reader::*;
pub use writer::*;
const USIZE_BYTES: usize = (usize::BITS / u8::BITS) as usize;

union Data<const N: usize> {
    inline: MaybeUninit<[u8; N]>,
    heap: NonNull<u8>,
}

impl<const N: usize> Data<N> {
    const fn new_inline() -> Self {
        Self {
            inline: MaybeUninit::uninit(),
        }
    }

    const fn new_heap(data: NonNull<u8>) -> Self {
        Self { heap: data }
    }
}

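// A small-buffer-optimized byte buffer addressed in bits. The low bit of
// `size` flags a heap spill (`1` = heap, `0` = inline) and the remaining
// bits store the length in bits. In the inline case only the low-order
// `INLINE_SIZE_LEN` bytes of `size` carry the tag; the rest of the struct,
// including the spare bytes of `size`, doubles as data storage, which is
// why the field order depends on the target endianness.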
#[repr(C)]
pub(crate) struct Buf<const N: usize> {
    #[cfg(target_endian = "little")]
    size: usize,
    data: Data<N>,
    #[cfg(target_endian = "big")]
    size: usize,
}

impl<const N: usize> Buf<N> {
    const INLINE_SIZE_LEN: usize = {
        const fn max(lhs: usize, rhs: usize) -> usize {
            if lhs > rhs {
                lhs
            } else {
                rhs
            }
        }

        const fn meta_size(data_len: usize) -> usize {
            (usize::BITS - data_len.leading_zeros())
                .saturating_add(1)
                .div_ceil(u8::BITS) as usize
        }

        let data = max(N, size_of::<NonNull<u8>>());
        let data = size_of::<Buf<N>>() - meta_size(data);

        meta_size(data)
    };

    const INLINE_SIZE_MASK: usize =
        (usize::MAX >> (usize::BITS - u8::BITS * Self::INLINE_SIZE_LEN as u32));

    const INLINE_DATA_LEN: usize = size_of::<Self>() - Self::INLINE_SIZE_LEN;
    const INLINE_DATA_POS: usize = if cfg!(target_endian = "little") {
        size_of::<usize>() - Self::INLINE_SIZE_LEN
    } else {
        0
    };

    fn new(bits: usize) -> Result<Self, Error> {
        if bits > usize::MAX >> 1 {
            return Err(Error::new(ErrorKind::CapacityOverflow));
        }

        let bytes = bits.div_ceil(8);
        let spilled = bytes >= Self::INLINE_DATA_LEN;

        Ok(Self {
            size: (bits << 1) | spilled as usize,
            data: if spilled {
                let layout = Layout::array::<u8>(bytes).unwrap();
                let ptr = NonNull::new(unsafe { alloc::alloc(layout) }).unwrap();

                Data::new_heap(ptr)
            } else {
                Data::new_inline()
            },
        })
    }

    #[inline]
    const fn spilled(&self) -> bool {
        self.size & 1 == 1
    }

    #[inline]
    const fn len_in_bits(&self) -> usize {
        let size = if self.spilled() {
            self.size
        } else {
            self.size & Self::INLINE_SIZE_MASK
        };
        size >> 1
    }

    #[inline]
    const fn len_in_bytes(&self) -> usize {
        self.len_in_bits().div_ceil(8)
    }

    #[inline]
    const fn as_ptr(&self) -> *const u8 {
        unsafe {
            if self.spilled() {
                self.data.heap.as_ptr()
            } else {
                (&raw const self.data.inline)
                    .cast::<u8>()
                    .byte_sub(Self::INLINE_DATA_POS)
            }
        }
    }

    #[inline]
    fn as_mut_ptr(&mut self) -> *mut u8 {
        unsafe {
            if self.spilled() {
                self.data.heap.as_ptr()
            } else {
                (&raw mut self.data.inline)
                    .cast::<u8>()
                    .byte_sub(Self::INLINE_DATA_POS)
            }
        }
    }

    #[inline]
    const fn as_slice(&self) -> &[u8] {
        unsafe { slice::from_raw_parts(self.as_ptr(), self.len_in_bytes()) }
    }

    #[inline]
    fn as_mut_slice(&mut self) -> &mut [u8] {
        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len_in_bytes()) }
    }
}

impl<const N: usize> Drop for Buf<N> {
    fn drop(&mut self) {
        if self.spilled() {
            unsafe {
                let data = self.as_mut_slice();
                alloc::dealloc(
                    data.as_mut_ptr(),
                    Layout::from_size_align_unchecked(data.len(), align_of::<u8>()),
                );
            }
        }
    }
}

impl<const N: usize> PartialEq for Buf<N> {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.as_slice().eq(other.as_slice())
    }
}

impl<const N: usize> Eq for Buf<N> {}

impl<const N: usize> PartialOrd for Buf<N> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl<const N: usize> Ord for Buf<N> {
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        self.as_slice().cmp(other.as_slice())
    }
}

impl<const N: usize> Clone for Buf<N> {
    fn clone(&self) -> Self {
        let mut other = Self::new(self.len_in_bits()).unwrap();
        other.as_mut_slice().clone_from_slice(self.as_slice());
        other
    }
}
pub struct OrdPathBuf<E: Encoding = DefaultEncoding, const N: usize = USIZE_BYTES> {
    raw: Buf<N>,
    enc: E,
}

impl<E: Encoding, const N: usize> OrdPathBuf<E, N> {
    #[inline]
    fn new(bits: usize, enc: E) -> Result<Self, Error> {
        Ok(Self {
            raw: Buf::new(bits)?,
            enc,
        })
    }

    fn from_borrowed(s: &OrdPath<E, N>) -> Self
    where
        E: Clone,
    {
        let mut path = Self::new(s.bytes().len_in_bits(), s.encoding().clone()).unwrap();
        unsafe {
            s.bytes()
                .read_exact(path.raw.as_mut_slice())
                .unwrap_unchecked();
        }
        path
    }

    #[inline]
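    /// Encodes the given ordinals into a new `OrdPathBuf`.
    ///
    /// # Panics
    ///
    /// Panics if an ordinal cannot be encoded or the capacity overflows;
    /// see [`OrdPathBuf::try_from_ordinals`] for the fallible variant.
    ///
    /// # Examples
    ///
    /// A minimal sketch (the crate name `ordpath` is assumed here):
    ///
    /// ```
    /// use ordpath::{DefaultEncoding, OrdPathBuf};
    ///
    /// let path = <OrdPathBuf>::from_ordinals(&[1, 2], DefaultEncoding);
    /// assert_eq!(path.ordinals().collect::<Vec<_>>(), vec![1, 2]);
    /// ```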
    pub fn from_ordinals(s: &[i64], enc: E) -> Self {
        Self::try_from_ordinals(s, enc).unwrap()
    }

    #[inline]
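    /// Parses a `.`-separated string of ordinals into a new `OrdPathBuf`.
    ///
    /// # Panics
    ///
    /// Panics if parsing or encoding fails; see
    /// [`OrdPathBuf::try_from_str`] for the fallible variant.
    ///
    /// # Examples
    ///
    /// A minimal sketch (the crate name `ordpath` is assumed here):
    ///
    /// ```
    /// use ordpath::{DefaultEncoding, OrdPathBuf};
    ///
    /// let path = <OrdPathBuf>::from_str("1.2", DefaultEncoding);
    /// assert_eq!(path.to_string(), "1.2");
    /// ```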
    pub fn from_str(s: &str, enc: E) -> Self {
        Self::try_from_str(s, enc).unwrap()
    }

    #[inline]
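    /// Builds a new `OrdPathBuf` from already-encoded bytes.
    ///
    /// # Panics
    ///
    /// Panics if the bytes are not a valid encoding; see
    /// [`OrdPathBuf::try_from_bytes`] for the fallible variant.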
    pub fn from_bytes(s: &[u8], enc: E) -> Self {
        Self::try_from_bytes(s, enc).unwrap()
    }

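    /// Encodes the given ordinals into a new `OrdPathBuf`, returning an
    /// error if an ordinal falls outside the range supported by the
    /// encoding or if the total size overflows.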
    pub fn try_from_ordinals(s: &[i64], enc: E) -> Result<Self, Error> {
        // First pass: total the number of bits the encoded form requires.
        let mut bits = 0isize;
        for ordinal in s {
            bits = bits.wrapping_add(
                enc.stage_by_value(*ordinal)
                    .ok_or_else(|| Error::new(ErrorKind::InvalidInput))?
                    .bits()
                    .into(),
            );
        }
        let mut path = Self::new(
            bits.try_into()
                .map_err(|_| Error::new(ErrorKind::CapacityOverflow))?,
            enc,
        )?;
        // Second pass: encode the ordinals into the allocated buffer.
        let mut writer = Writer::new(path.raw.as_mut_slice(), &path.enc);
        for ordinal in s {
            writer.write(*ordinal)?;
        }
        drop(writer);
        Ok(path)
    }

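    /// Parses a `.`-separated string of ordinals into a new `OrdPathBuf`,
    /// returning an error if a component is not a valid `i64` or cannot
    /// be encoded.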
    pub fn try_from_str(s: &str, enc: E) -> Result<Self, Error> {
        let mut v = Vec::new();
        for x in s.split_terminator('.') {
            v.push(x.parse::<i64>()?);
        }

        Self::try_from_ordinals(&v, enc)
    }

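    /// Builds a new `OrdPathBuf` from already-encoded bytes, returning an
    /// error if the bytes are not a valid encoding or the size overflows.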
    pub fn try_from_bytes(s: &[u8], enc: E) -> Result<Self, Error> {
        // Decode every stage up front, both to validate the input and to
        // learn its exact length in bits.
        let mut bits = 0isize;
        let mut reader = Reader::new(s, &enc);
        while let Some((_, stage)) = reader.read()? {
            bits = bits.wrapping_add(stage.bits().into());
        }
        let mut path = Self::new(
            bits.try_into()
                .map_err(|_| Error::new(ErrorKind::CapacityOverflow))?,
            enc,
        )?;
        path.raw.as_mut_slice().copy_from_slice(s);
        Ok(path)
    }

    #[inline]
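    /// Returns a reference to the encoding used by this path.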
    pub fn encoding(&self) -> &E {
        &self.enc
    }
}

impl<E: Encoding, const N: usize> AsRef<[u8]> for OrdPathBuf<E, N> {
    fn as_ref(&self) -> &[u8] {
        self.raw.as_slice()
    }
}

impl<E: Encoding, const N: usize> Borrow<OrdPath<E, N>> for OrdPathBuf<E, N> {
    fn borrow(&self) -> &OrdPath<E, N> {
        self
    }
}

impl<E: Encoding, const N: usize> Deref for OrdPathBuf<E, N> {
    type Target = OrdPath<E, N>;

    fn deref(&self) -> &Self::Target {
        OrdPath::new(self, self.raw.len_in_bits())
    }
}

impl<E: Encoding, const N: usize> PartialEq for OrdPathBuf<E, N>
where
    E: PartialEq,
{
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.encoding().eq(other.encoding()) && self.raw.eq(&other.raw)
    }
}

impl<E: Encoding, const N: usize> PartialOrd for OrdPathBuf<E, N>
where
    E: PartialEq,
{
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.encoding()
            .eq(other.encoding())
            .then(|| self.bytes().cmp(other.bytes()))
    }
}

impl<E: Encoding, const N: usize> Hash for OrdPathBuf<E, N> {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        state.write(self.as_ref());
    }
}

impl<E: Encoding, const N: usize> Clone for OrdPathBuf<E, N>
where
    E: Clone,
{
    #[inline]
    fn clone(&self) -> Self {
        Self {
            raw: self.raw.clone(),
            enc: self.enc.clone(),
        }
    }
}

impl<E: Encoding, const N: usize> Debug for OrdPathBuf<E, N> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        <Self as Display>::fmt(self, f)
    }
}

impl<E: Encoding, const N: usize> Display for OrdPathBuf<E, N> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut ordinals = self.ordinals();
        if let Some(value) = ordinals.next() {
            write!(f, "{}", value)?;
            for value in ordinals {
                write!(f, ".{}", value)?;
            }
        }
        Ok(())
    }
}

impl<E: Encoding + Default, const N: usize> FromStr for OrdPathBuf<E, N> {
    type Err = Error;

    #[inline]
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Self::try_from_str(s, Default::default())
    }
}

#[cfg(feature = "serde")]
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
impl<E: Encoding, const N: usize> Serialize for OrdPathBuf<E, N> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        self.as_ref().serialize(serializer)
    }
}

#[cfg(feature = "serde")]
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
impl<'de, E: Encoding + Default, const N: usize> Deserialize<'de> for OrdPathBuf<E, N> {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_bytes(OrdPathVisitor::<E, N> {
            marker: PhantomData,
        })
    }
}

#[cfg(feature = "serde")]
struct OrdPathVisitor<Enc, const N: usize> {
    marker: PhantomData<Enc>,
}

#[cfg(feature = "serde")]
impl<Enc: Encoding + Default, const N: usize> Visitor<'_> for OrdPathVisitor<Enc, N> {
    type Value = OrdPathBuf<Enc, N>;

    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("bytes")
    }

    fn visit_bytes<Err: serde::de::Error>(self, v: &[u8]) -> Result<Self::Value, Err> {
        Self::Value::try_from_bytes(v, Default::default()).map_err(Err::custom)
    }
}

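/// A borrowed slice of a path, the unsized counterpart of [`OrdPathBuf`]
/// (as `str` is to `String`).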
pub struct OrdPath<E: Encoding, const N: usize> {
    data: [OrdPathBuf<E, N>],
}

impl<E: Encoding, const N: usize> OrdPath<E, N> {
    #[inline]
    fn new(path: &OrdPathBuf<E, N>, bits: usize) -> &Self {
        unsafe { mem::transmute(slice::from_raw_parts(path as *const OrdPathBuf<E, N>, bits)) }
    }

    #[inline]
    fn path(&self) -> &OrdPathBuf<E, N> {
        unsafe { &*self.data.as_ptr() }
    }

    #[inline]
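    /// Returns a reference to the encoding used by this path.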
    pub fn encoding(&self) -> &E {
        self.path().encoding()
    }

    #[inline]
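    /// Returns a view over the encoded bytes of this path.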
    pub fn bytes(&self) -> &Bytes {
        unsafe { Bytes::from_raw_parts(self.path().as_ref().as_ptr(), self.data.len()) }
    }

    #[inline]
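    /// Returns an iterator over the ordinals of this path.
    ///
    /// # Examples
    ///
    /// A minimal sketch (the crate name `ordpath` is assumed here):
    ///
    /// ```
    /// use ordpath::{DefaultEncoding, OrdPathBuf};
    ///
    /// let path = <OrdPathBuf>::from_ordinals(&[1, 2, 3], DefaultEncoding);
    /// assert_eq!(path.ordinals().collect::<Vec<_>>(), vec![1, 2, 3]);
    /// ```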
    pub fn ordinals(&self) -> Ordinals<&Bytes, &E> {
        Ordinals {
            reader: Reader::new(self.bytes(), self.encoding()),
        }
    }

    #[inline]
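    /// Returns an iterator over the ancestors of this path, from the
    /// closest (the parent) up to and including the root.
    ///
    /// # Examples
    ///
    /// A minimal sketch (the crate name `ordpath` is assumed here):
    ///
    /// ```
    /// use ordpath::{DefaultEncoding, OrdPathBuf};
    ///
    /// let path = <OrdPathBuf>::from_ordinals(&[1, 2, 3], DefaultEncoding);
    /// let ancestors: Vec<_> = path.ancestors().map(|a| a.to_string()).collect();
    /// assert_eq!(ancestors, ["1.2", "1", ""]);
    /// ```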
    pub fn ancestors(&self) -> Ancestors<'_, E, N> {
        Ancestors { path: Some(self) }
    }

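    /// Returns the parent of this path, or `None` if the path is the root.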
    pub fn parent(&self) -> Option<&OrdPath<E, N>> {
        self.ancestors().next()
    }

    #[inline]
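    /// Returns `true` if `self` is an ancestor of `other`, i.e. a strict
    /// prefix of it under an equal encoding.
    ///
    /// # Examples
    ///
    /// A minimal sketch (the crate name `ordpath` is assumed here):
    ///
    /// ```
    /// use ordpath::{DefaultEncoding, OrdPathBuf};
    ///
    /// let a = <OrdPathBuf>::from_ordinals(&[1], DefaultEncoding);
    /// let b = <OrdPathBuf>::from_ordinals(&[1, 2], DefaultEncoding);
    /// assert!(a.is_ancestor_of(&b));
    /// assert!(!a.is_ancestor_of(&a));
    /// ```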
    pub fn is_ancestor_of(&self, other: &Self) -> bool
    where
        E: PartialEq,
    {
        self.encoding().eq(other.encoding()) && self.bytes().is_ancestor(other.bytes())
    }

    #[inline]
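    /// Returns `true` if `self` is a descendant of `other`; the inverse
    /// of [`OrdPath::is_ancestor_of`].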
    pub fn is_descendant_of(&self, other: &Self) -> bool
    where
        E: PartialEq,
    {
        other.is_ancestor_of(self)
    }
}

impl<E: Encoding + Clone, const N: usize> ToOwned for OrdPath<E, N> {
    type Owned = OrdPathBuf<E, N>;

    #[inline]
    fn to_owned(&self) -> OrdPathBuf<E, N> {
        OrdPathBuf::from_borrowed(self)
    }
}

impl<E: Encoding + PartialEq, const N: usize> PartialEq for OrdPath<E, N> {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.partial_cmp(other)
            .is_some_and(|r| r == Ordering::Equal)
    }
}

impl<E: Encoding + PartialEq, const N: usize> PartialOrd for OrdPath<E, N> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.encoding()
            .eq(other.encoding())
            .then(|| self.bytes().cmp(other.bytes()))
    }
}

impl<E: Encoding, const N: usize> Hash for OrdPath<E, N> {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        for b in self.bytes() {
            state.write_u8(b);
        }
    }
}

impl<E: Encoding, const N: usize> Debug for OrdPath<E, N> {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        <Self as Display>::fmt(self, f)
    }
}

impl<E: Encoding, const N: usize> Display for OrdPath<E, N> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut ordinals = self.ordinals();
        if let Some(value) = ordinals.next() {
            write!(f, "{}", value)?;
            for value in ordinals {
                write!(f, ".{}", value)?;
            }
        }
        Ok(())
    }
}

#[repr(transparent)]
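/// A borrowed, bit-length-aware view over the encoded bytes of a path.
/// Iteration yields whole bytes, with the unused trailing bits of the
/// final byte masked to zero.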
pub struct Bytes {
    data: [u8],
}

impl Bytes {
    #[inline]
    unsafe fn from_raw_parts<'a>(ptr: *const u8, bits: usize) -> &'a Self {
        unsafe { mem::transmute(slice::from_raw_parts(ptr, bits)) }
    }

    #[inline]
    const fn len_in_bits(&self) -> usize {
        self.data.len()
    }

    #[inline]
    const fn len_in_bytes(&self) -> usize {
        self.len_in_bits().div_ceil(8)
    }

    unsafe fn get_at_unchecked(&self, idx: usize) -> u8 {
        unsafe { self.data.as_ptr().add(idx).read() }
    }

    unsafe fn get_slice_unchecked(&self, len: usize) -> &[u8] {
        unsafe { slice::from_raw_parts(self.data.as_ptr(), len) }
    }

    fn is_ancestor(&self, other: &Self) -> bool {
        self.len_in_bits() < other.len_in_bits()
            && match self.len_in_bytes() {
                0 => true,
                bytes => unsafe {
                    let last = bytes - 1;
                    // Every full byte must match, plus only the bits the
                    // shorter view actually uses in its last byte.
                    self.get_slice_unchecked(last)
                        .eq(other.get_slice_unchecked(last))
                        && high_bits_eq(
                            self.len_in_bits(),
                            self.get_at_unchecked(last),
                            other.get_at_unchecked(last),
                        )
                },
            }
    }

    #[inline]
    fn ancestor<E: Encoding>(&self, enc: &E, nth: usize) -> Option<&Self> {
        const FORWARD_BUF_LEN: usize = size_of::<usize>();
        let mut bytes = self;
        // Dropping the last `nth + 1` stages yields the requested ancestor.
        // Each forward scan remembers the bit widths of the most recent
        // `FORWARD_BUF_LEN` stages in a ring buffer, so at most that many
        // stages can be trimmed per scan; larger requests rescan the
        // already shortened view.
        let mut remaining = nth.saturating_add(1);
        while remaining > 0 {
            if bytes.len_in_bits() == 0 {
                return None;
            }
            let mut idx = 0;
            let mut buf = [0u8; FORWARD_BUF_LEN];
            let mut bits = 0;
            let mut reader = Reader::new(bytes, enc);
            while let Some((_, stage)) = reader.read().unwrap() {
                bits += stage.bits() as usize;
                buf[idx % buf.len()] = stage.bits();
                idx = idx.wrapping_add(1);
            }
            // `idx` now holds the total number of stages in the view.
            let count = remaining.min(buf.len());
            if idx < count {
                return None;
            }
            for _ in 0..count {
                idx = idx.wrapping_sub(1);
                bits -= buf[idx % buf.len()] as usize;
            }
            remaining -= count;
            bytes = unsafe { Bytes::from_raw_parts(bytes.data.as_ptr(), bits) };
        }
        Some(bytes)
    }
}

impl Read for &Bytes {
    #[inline]
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        unsafe {
            let mut data = self.get_slice_unchecked(self.len_in_bytes());
            let read = data.read(buf)?;
            if read > 0 && data.is_empty() {
                *buf.get_unchecked_mut(read - 1) &= high_bits_mask(self.len_in_bits());
            }

            *self =
                Bytes::from_raw_parts(data.as_ptr(), self.len_in_bits().saturating_sub(read * 8));

            Ok(read)
        }
    }
}

trait UncheckedIterator: Iterator {
    unsafe fn next_unchecked(&mut self) -> Self::Item;
    unsafe fn next_back_unchecked(&mut self) -> Self::Item;
}

impl UncheckedIterator for &Bytes {
    #[inline(always)]
    unsafe fn next_unchecked(&mut self) -> Self::Item {
        unsafe {
            let item = self.get_at_unchecked(0);
            let bits = self.len_in_bits();
            let (bits, item) = if bits >= 8 {
                (bits - 8, item)
            } else {
                (0, item & high_bits_mask(bits))
            };

            *self = Bytes::from_raw_parts(self.data.as_ptr().add(1), bits);

            item
        }
    }

    #[inline(always)]
    unsafe fn next_back_unchecked(&mut self) -> Self::Item {
        unsafe {
            let item = self.get_at_unchecked(self.len_in_bytes() - 1);
            let bits = self.len_in_bits();
            let (bits, item) = if bits & 7 == 0 {
                (bits - 8, item)
            } else {
                (bits & (!7), item & high_bits_mask(self.len_in_bits()))
            };

            *self = Bytes::from_raw_parts(self.data.as_ptr(), bits);

            item
        }
    }
}

impl Iterator for &Bytes {
    type Item = u8;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        unsafe {
            if self.len_in_bits() == 0 {
                None
            } else {
                Some(self.next_unchecked())
            }
        }
    }

    #[inline]
    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        unsafe {
            let bytes = self.len_in_bytes();
            if bytes <= n {
                *self = Bytes::from_raw_parts(self.data.as_ptr().add(bytes), 0);

                None
            } else {
                *self =
                    Bytes::from_raw_parts(self.data.as_ptr().add(n), self.len_in_bits() - n * 8);

                Some(self.next_unchecked())
            }
        }
    }

    #[inline]
    fn last(mut self) -> Option<Self::Item>
    where
        Self: Sized,
    {
        self.next_back()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.len();
        (len, Some(len))
    }
}

impl DoubleEndedIterator for &Bytes {
    fn next_back(&mut self) -> Option<Self::Item> {
        unsafe {
            if self.len_in_bits() == 0 {
                None
            } else {
                Some(self.next_back_unchecked())
            }
        }
    }

    fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
        unsafe {
            let bytes = self.len_in_bytes();
            if bytes <= n {
                *self = Bytes::from_raw_parts(self.data.as_ptr(), 0);

                None
            } else {
                *self = Bytes::from_raw_parts(self.data.as_ptr(), self.len_in_bits() - n * 8);

                Some(self.next_back_unchecked())
            }
        }
    }
}

impl ExactSizeIterator for &Bytes {
    #[inline]
    fn len(&self) -> usize {
        self.len_in_bytes()
    }
}

impl FusedIterator for &Bytes {}

impl PartialEq for &Bytes {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.len_in_bits() == other.len_in_bits()
            && match self.len_in_bytes() {
                0 => true,
                bytes => unsafe {
                    let last = bytes - 1;
                    // Every full byte must match, plus the used bits of
                    // the final byte.
                    self.get_slice_unchecked(last) == other.get_slice_unchecked(last)
                        && high_bits_eq(
                            self.len_in_bits(),
                            self.get_at_unchecked(last),
                            other.get_at_unchecked(last),
                        )
                },
            }
    }
}

impl Eq for &Bytes {}

impl PartialOrd for &Bytes {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for &Bytes {
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        fn cmp_impl(shortest: &Bytes, longest: &Bytes) -> Ordering {
            match shortest.len_in_bytes() {
                0 => Ordering::Equal,
                bytes => unsafe {
                    let last = bytes - 1;
                    // Compare whole bytes first; for the trailing byte,
                    // mask each side down to the bits it actually uses.
                    shortest
                        .get_slice_unchecked(last)
                        .cmp(longest.get_slice_unchecked(last))
                        .then_with(|| {
                            let left = shortest.get_at_unchecked(last)
                                & high_bits_mask(shortest.len_in_bits());
                            let right = longest.get_at_unchecked(last)
                                & high_bits_mask(if bytes * 8 < longest.len_in_bits() {
                                    0
                                } else {
                                    longest.len_in_bits()
                                });

                            left.cmp(&right)
                        })
                },
            }
        }

        let ord = self.len_in_bits().cmp(&other.len_in_bits());
        if ord.is_le() {
            cmp_impl(self, other)
        } else {
            cmp_impl(other, self).reverse()
        }
        .then(ord)
    }
}

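/// An iterator over the ordinals of a path. Created by
/// [`OrdPath::ordinals`].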
pub struct Ordinals<R: Read, E: Encoding> {
    reader: Reader<R, E>,
}

impl<R: Read, E: Encoding> Iterator for Ordinals<R, E> {
    type Item = i64;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.reader.read().unwrap().map(|x| x.0)
    }
}

impl<R: Read, E: Encoding> FusedIterator for Ordinals<R, E> {}

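/// An iterator over the ancestors of a path, from the parent up to the
/// root. Created by [`OrdPath::ancestors`].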
pub struct Ancestors<'a, E: Encoding, const N: usize> {
    path: Option<&'a OrdPath<E, N>>,
}

impl<'a, E: Encoding, const N: usize> Iterator for Ancestors<'a, E, N> {
    type Item = &'a OrdPath<E, N>;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.nth(0)
    }

    #[inline]
    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        self.path = self.path.and_then(|p| {
            p.bytes()
                .ancestor(p.encoding(), n)
                .map(|b| OrdPath::new(p.path(), b.len_in_bits()))
        });
        self.path
    }
}

impl<E: Encoding, const N: usize> FusedIterator for Ancestors<'_, E, N> {}

#[inline]
fn high_bits_eq(bits: usize, lhs: u8, rhs: u8) -> bool {
    let mask = high_bits_mask(bits);
    lhs & mask == rhs & mask
}

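// Returns a byte mask keeping the high `bits % 8` bits (all eight when
// `bits` is a multiple of eight): `((usize::MAX << 3) - bits) & 7` equals
// `(8 - bits % 8) % 8` while avoiding underflow for small `bits`.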
#[inline]
fn high_bits_mask(bits: usize) -> u8 {
    u8::MAX << (((usize::MAX << 3) - bits) & 7)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn path_from_ordinals() {
        fn assert(s: &[i64]) {
            assert_eq!(
                <OrdPathBuf>::from_ordinals(s, DefaultEncoding)
                    .ordinals()
                    .collect::<Vec<_>>(),
                s
            );
        }

        assert(&[0; 0]);
        assert(&[0]);
        assert(&[0, 8]);
        assert(&[4440, 4440, 4440, 8]);
        assert(&[4440, 4440, 4440, 8, 0]);
        assert(&[4440, 4440, 4440, 4440]);
        assert(&[4440, 4440, 4440, 4440, 88]);
        assert(&[4295037272, 4295037272]);
        assert(&[4295037272, 4295037272, 4440, 88]);
        assert(&[4295037272, 4295037272, 4440, 344]);
        assert(&[4295037272, 4295037272, 4440, 4440]);

        assert(&[3]);
        assert(&[3, 8 + 5]);
        assert(&[4440 + 13, 4440 + 179, 4440 + 7541, 8 + 11]);
        assert(&[4440 + 13, 4440 + 179, 4440 + 7541, 8 + 11, 3]);
        assert(&[4440 + 13, 4440 + 179, 4440 + 7541, 4440 + 123]);
        assert(&[4440 + 13, 4440 + 179, 4440 + 7541, 4440 + 123, 88 + 11]);
        assert(&[4295037272 + 31, 4295037272 + 6793]);
        assert(&[4295037272 + 31, 4295037272 + 6793, 4440 + 7541, 88 + 11]);
        assert(&[4295037272 + 31, 4295037272 + 6793, 4440 + 7541, 344 + 71]);
        assert(&[4295037272 + 31, 4295037272 + 6793, 4440 + 7541, 4440 + 123]);
    }

    #[test]
    fn path_from_str() {
        fn assert(s: &str, o: &[i64]) {
            assert_eq!(
                <OrdPathBuf>::try_from_str(s, DefaultEncoding),
                Ok(<OrdPathBuf>::from_ordinals(o, DefaultEncoding))
            );
        }

        fn assert_err(s: &str, e: Error) {
            assert_eq!(<OrdPathBuf>::try_from_str(s, DefaultEncoding), Err(e));
        }

        assert("", &[]);
        assert("0", &[0]);
        assert("1", &[1]);
        assert("1.2", &[1, 2]);
        assert_err("1.a", Error::new(ErrorKind::InvalidInput));
        assert_err("1_2", Error::new(ErrorKind::InvalidInput));
        assert_err("a", Error::new(ErrorKind::InvalidInput));
    }

    #[test]
    fn path_to_string() {
        fn assert(o: Vec<i64>, s: &str) {
            assert_eq!(
                <OrdPathBuf>::from_ordinals(&o, DefaultEncoding).to_string(),
                s
            );
        }

        assert(vec![], "");
        assert(vec![0], "0");
        assert(vec![1], "1");
        assert(vec![1, 2], "1.2");
    }

    #[test]
    fn path_clone() {
        fn assert(o: &[i64]) {
            assert_eq!(
                <OrdPathBuf>::from_ordinals(o, DefaultEncoding).clone(),
                <OrdPathBuf>::from_ordinals(o, DefaultEncoding)
            );
        }

        assert(&[]);
        assert(&[0]);
        assert(&[1]);
        assert(&[1, 2]);
    }

    #[test]
    fn path_ordering() {
        fn assert(lhs: &[i64], rhs: &[i64], o: Ordering) {
            assert_eq!(
                <OrdPathBuf>::from_ordinals(lhs, DefaultEncoding)
                    .partial_cmp(&<OrdPathBuf>::from_ordinals(rhs, DefaultEncoding)),
                Some(o)
            );
        }

        assert(&[0; 0], &[0; 0], Ordering::Equal);
        assert(&[0; 0], &[0], Ordering::Less);
        assert(&[0], &[0; 0], Ordering::Greater);
        assert(&[0], &[0], Ordering::Equal);
        assert(&[0], &[1], Ordering::Less);
        assert(&[0], &[0, 1], Ordering::Less);
        assert(&[0], &[69976, 69976], Ordering::Less);
        assert(&[0], &[4295037272, 4295037272], Ordering::Less);
    }

    #[test]
    fn path_is_ancestor() {
        fn assert(e: bool, a: &[i64], d: &[i64]) {
            let x = <OrdPathBuf>::from_ordinals(a, DefaultEncoding);
            let y = <OrdPathBuf>::from_ordinals(d, DefaultEncoding);

            assert_eq!(e, x.is_ancestor_of(&y));
            assert_eq!(e, y.is_descendant_of(&x));
        }

        assert(true, &[], &[0]);
        assert(true, &[0], &[0, 1]);
        assert(true, &[0, 1], &[0, 1, 2, 3]);
        assert(
            true,
            &[4295037272, 4295037272],
            &[4295037272, 4295037272, 1],
        );

        assert(false, &[0], &[]);
        assert(false, &[0, 1], &[0]);
        assert(false, &[0, 1, 2, 3], &[0, 1]);
        assert(
            false,
            &[4295037272, 4295037272, 1],
            &[4295037272, 4295037272],
        );

        assert(false, &[], &[]);
        assert(false, &[0], &[0]);
        assert(false, &[0], &[1]);
    }

    #[test]
    fn path_iter_fused() {
        fn assert<R: Read, E: Encoding>(mut iter: Ordinals<R, E>) {
            assert_eq!(iter.next(), Some(1));
            assert_eq!(iter.next(), Some(2));
            assert_eq!(iter.next(), None);
            assert_eq!(iter.next(), None);
        }

        assert(<OrdPathBuf>::from_ordinals(&[1, 2], DefaultEncoding).ordinals());
    }

    #[test]
    fn path_parent() {
        let ords = (i8::MIN..i8::MAX).map(|x| x as i64).collect::<Vec<_>>();
        for n in 1..ords.len() {
            let ords = &ords[..n];
            let path = <OrdPathBuf>::from_ordinals(ords, DefaultEncoding);
            assert_eq!(
                ords[..(ords.len() - 1)],
                path.parent()
                    .map(|p| p.ordinals().collect::<Vec<_>>())
                    .unwrap_or_default()
            );
        }
    }
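
    #[test]
    fn path_ancestors() {
        // A quick check of the full ancestor chain and multi-step `nth` jumps.
        let path = <OrdPathBuf>::from_ordinals(&[1, 2, 3, 4, 5], DefaultEncoding);
        assert_eq!(
            path.ancestors().map(|a| a.to_string()).collect::<Vec<_>>(),
            ["1.2.3.4", "1.2.3", "1.2", "1", ""]
        );
        assert_eq!(
            path.ancestors().nth(2).map(|a| a.to_string()),
            Some("1.2".to_string())
        );
        assert_eq!(path.ancestors().nth(5), None);
    }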

    #[cfg(feature = "serde")]
    #[test]
    fn path_serialization() {
        fn assert(s: &[i64]) {
            let p = <OrdPathBuf>::from_ordinals(s, DefaultEncoding);
            let encoded = bincode::serialize(&p).unwrap();
            let decoded = bincode::deserialize(&encoded).unwrap();

            assert_eq!(p, decoded);
        }

        assert(&[0, 1, 2, 3]);
    }

    #[test]
    fn bytes_next() {
        let path = <OrdPathBuf>::from_ordinals(&(0..5).collect::<Vec<_>>(), DefaultEncoding);
        assert_eq!(
            path.bytes().collect::<Vec<_>>(),
            path.as_ref().iter().copied().collect::<Vec<_>>()
        );
    }

    #[test]
    fn bytes_next_back() {
        let path = <OrdPathBuf>::from_ordinals(&(0..5).collect::<Vec<_>>(), DefaultEncoding);
        assert_eq!(
            path.bytes().rev().collect::<Vec<_>>(),
            path.as_ref().iter().copied().rev().collect::<Vec<_>>()
        );
    }

    #[test]
    fn bytes_nth() {
        let nth = 5;
        let len = (0..nth as i64).reduce(|l, n| l + n).unwrap();
        let path = <OrdPathBuf>::from_ordinals(&(0..len).collect::<Vec<_>>(), DefaultEncoding);

        let mut actual = path.bytes();
        let mut expected = path.as_ref().iter().copied();

        for n in 0..=nth {
            assert_eq!(actual.nth(n), expected.nth(n));
        }
    }

    #[test]
    fn bytes_nth_back() {
        let nth = 5;
        let len = (0..=nth as i64).reduce(|l, n| l + n).unwrap();
        let path = <OrdPathBuf>::from_ordinals(&(0..len).collect::<Vec<_>>(), DefaultEncoding);

        let mut actual = path.bytes();
        let mut expected = path.as_ref().iter().copied();

        for n in 0..=nth {
            assert_eq!(actual.nth_back(n), expected.nth_back(n));
        }
    }
}