1use std::ptr::NonNull;
7
8use diskann_utils::{Reborrow, ReborrowMut};
9use thiserror::Error;
10
11use crate::{
12 alloc::{AllocatorCore, AllocatorError, GlobalAllocator, Poly},
13 bits::{
14 AsMutPtr, AsPtr, BitSlice, BitSliceBase, Dense, MutBitSlice, MutSlicePtr,
15 PermutationStrategy, Representation, SlicePtr,
16 },
17 ownership::{CopyMut, CopyRef, Mut, Owned, Ref},
18};
19
/// A bit-packed vector of `NBITS`-bit elements paired with a metadata value.
///
/// Element storage is a [`BitSliceBase`] over raw bytes addressed through
/// `Ptr`, while `T` carries the metadata together with its ownership model
/// (e.g. `Owned<M>`, `Ref<'a, M>`, `Mut<'a, M>` — see the aliases below).
#[derive(Debug, Clone, Copy)]
pub struct VectorBase<const NBITS: usize, Repr, Ptr, T, Perm = Dense>
where
    Ptr: AsPtr<Type = u8>,
    Repr: Representation<NBITS>,
    Perm: PermutationStrategy<NBITS>,
{
    /// Packed element storage.
    bits: BitSliceBase<NBITS, Repr, Ptr, Perm>,
    /// Metadata slot; read/write access depends on `T`'s ownership wrapper.
    meta: T,
}
187
impl<const NBITS: usize, Repr, Ptr, T, Perm> VectorBase<NBITS, Repr, Ptr, T, Perm>
where
    Ptr: AsPtr<Type = u8>,
    Repr: Representation<NBITS>,
    Perm: PermutationStrategy<NBITS>,
{
    /// Number of bytes required by the bit-slice portion alone to hold
    /// `count` elements.
    pub fn slice_bytes(count: usize) -> usize {
        BitSliceBase::<NBITS, Repr, Ptr, Perm>::bytes_for(count)
    }

    /// Number of bytes of the canonical (serialized) form for `count`
    /// elements: the bit-slice bytes plus `size_of::<T::Target>()` for the
    /// metadata.
    pub fn canonical_bytes(count: usize) -> usize
    where
        T: CopyRef,
        T::Target: bytemuck::Pod,
    {
        Self::slice_bytes(count) + std::mem::size_of::<T::Target>()
    }

    /// Assemble a vector from an existing bit-slice and anything convertible
    /// into the metadata wrapper `T`.
    pub fn new<M>(bits: BitSliceBase<NBITS, Repr, Ptr, Perm>, meta: M) -> Self
    where
        M: Into<T>,
    {
        Self {
            bits,
            meta: meta.into(),
        }
    }

    /// Number of elements in the vector.
    pub fn len(&self) -> usize {
        self.bits.len()
    }

    /// `true` when the vector holds no elements.
    pub fn is_empty(&self) -> bool {
        self.bits.is_empty()
    }

    /// Copy out the metadata value.
    pub fn meta(&self) -> T::Target
    where
        T: CopyRef,
    {
        self.meta.copy_ref()
    }

    /// Immutable view over the packed elements.
    pub fn vector(&self) -> BitSlice<'_, NBITS, Repr, Perm> {
        self.bits.reborrow()
    }

    /// Mutable view over the packed elements; requires writable storage.
    pub fn vector_mut(&mut self) -> MutBitSlice<'_, NBITS, Repr, Perm>
    where
        Ptr: AsMutPtr,
    {
        self.bits.reborrow_mut()
    }

    /// Overwrite the metadata value; requires writable storage and a
    /// writable metadata wrapper.
    pub fn set_meta(&mut self, value: T::Target)
    where
        Ptr: AsMutPtr,
        T: CopyMut,
    {
        self.meta.copy_mut(value)
    }
}
266
impl<const NBITS: usize, Repr, Perm, T>
    VectorBase<NBITS, Repr, Poly<[u8], GlobalAllocator>, Owned<T>, Perm>
where
    Repr: Representation<NBITS>,
    Perm: PermutationStrategy<NBITS>,
    T: Default,
{
    /// Allocate an owned vector of `len` elements with the global allocator,
    /// with default-initialized metadata.
    pub fn new_boxed(len: usize) -> Self {
        Self {
            bits: BitSliceBase::new_boxed(len),
            meta: Owned::default(),
        }
    }
}
282
impl<const NBITS: usize, Repr, Perm, T, A> VectorBase<NBITS, Repr, Poly<[u8], A>, Owned<T>, Perm>
where
    Repr: Representation<NBITS>,
    Perm: PermutationStrategy<NBITS>,
    T: Default,
    A: AllocatorCore,
{
    /// Allocate an owned vector of `len` elements in `allocator`, with
    /// default-initialized metadata.
    ///
    /// # Errors
    ///
    /// Propagates [`AllocatorError`] if the storage allocation fails.
    pub fn new_in(len: usize, allocator: A) -> Result<Self, AllocatorError> {
        Ok(Self {
            bits: BitSliceBase::new_in(len, allocator)?,
            meta: Owned::default(),
        })
    }
}
298
/// Borrowed, immutable vector view: shared byte slice + `Ref` metadata.
pub type VectorRef<'a, const NBITS: usize, Repr, T, Perm = Dense> =
    VectorBase<NBITS, Repr, SlicePtr<'a, u8>, Ref<'a, T>, Perm>;

/// Borrowed, mutable vector view: mutable byte slice + `Mut` metadata.
pub type VectorMut<'a, const NBITS: usize, Repr, T, Perm = Dense> =
    VectorBase<NBITS, Repr, MutSlicePtr<'a, u8>, Mut<'a, T>, Perm>;

/// Owning vector backed by the global allocator.
pub type Vector<const NBITS: usize, Repr, T, Perm = Dense> =
    VectorBase<NBITS, Repr, Poly<[u8], GlobalAllocator>, Owned<T>, Perm>;

/// Owning vector backed by a caller-supplied allocator `A`.
pub type PolyVector<const NBITS: usize, Repr, T, Perm, A> =
    VectorBase<NBITS, Repr, Poly<[u8], A>, Owned<T>, Perm>;
322
/// Reborrow any vector (owned or borrowed) as an immutable [`VectorRef`]
/// view, reborrowing both the bit-slice and the metadata wrapper.
impl<'this, const NBITS: usize, Repr, Ptr, T, Perm> Reborrow<'this>
    for VectorBase<NBITS, Repr, Ptr, T, Perm>
where
    Ptr: AsPtr<Type = u8>,
    Repr: Representation<NBITS>,
    Perm: PermutationStrategy<NBITS>,
    T: CopyRef + Reborrow<'this, Target = Ref<'this, <T as CopyRef>::Target>>,
{
    type Target = VectorRef<'this, NBITS, Repr, <T as CopyRef>::Target, Perm>;

    fn reborrow(&'this self) -> Self::Target {
        Self::Target {
            bits: self.bits.reborrow(),
            meta: self.meta.reborrow(),
        }
    }
}
341
/// Reborrow any writable vector as a mutable [`VectorMut`] view,
/// reborrowing both the bit-slice and the metadata wrapper.
impl<'this, const NBITS: usize, Repr, Ptr, T, Perm> ReborrowMut<'this>
    for VectorBase<NBITS, Repr, Ptr, T, Perm>
where
    Ptr: AsMutPtr<Type = u8>,
    Repr: Representation<NBITS>,
    Perm: PermutationStrategy<NBITS>,
    T: CopyMut + ReborrowMut<'this, Target = Mut<'this, <T as CopyRef>::Target>>,
{
    type Target = VectorMut<'this, NBITS, Repr, <T as CopyRef>::Target, Perm>;

    fn reborrow_mut(&'this mut self) -> Self::Target {
        Self::Target {
            bits: self.bits.reborrow_mut(),
            meta: self.meta.reborrow_mut(),
        }
    }
}
360
/// Failure to parse a byte buffer as a canonically laid-out vector.
#[derive(Debug, Error, PartialEq, Clone, Copy)]
pub enum NotCanonical {
    /// The buffer length did not match `canonical_bytes(dim)`:
    /// `(expected, actual)`.
    #[error("expected a slice length of {0} bytes but instead got {1} bytes")]
    WrongLength(usize, usize),
}
370
impl<'a, const NBITS: usize, Repr, T, Perm> VectorRef<'a, NBITS, Repr, T, Perm>
where
    Repr: Representation<NBITS>,
    Perm: PermutationStrategy<NBITS>,
    T: bytemuck::Pod,
{
    /// Parse a canonical buffer with the metadata stored at the *front*
    /// (`size_of::<T>()` metadata bytes, then the packed bit-slice).
    ///
    /// # Errors
    ///
    /// [`NotCanonical::WrongLength`] when `data.len()` is not exactly
    /// [`Self::canonical_bytes`]`(dim)`.
    pub fn from_canonical_front(data: &'a [u8], dim: usize) -> Result<Self, NotCanonical> {
        let expected = Self::canonical_bytes(dim);
        if data.len() != expected {
            Err(NotCanonical::WrongLength(expected, data.len()))
        } else {
            // SAFETY: the length was verified to equal `canonical_bytes(dim)`.
            Ok(unsafe { Self::from_canonical_unchecked(data, dim) })
        }
    }

    /// Parse a canonical buffer with the metadata stored at the *back*
    /// (packed bit-slice first, then `size_of::<T>()` metadata bytes).
    ///
    /// # Errors
    ///
    /// [`NotCanonical::WrongLength`] when `data.len()` is not exactly
    /// [`Self::canonical_bytes`]`(dim)`.
    pub fn from_canonical_back(data: &'a [u8], dim: usize) -> Result<Self, NotCanonical> {
        let expected = Self::canonical_bytes(dim);
        if data.len() != expected {
            Err(NotCanonical::WrongLength(expected, data.len()))
        } else {
            // SAFETY: the length was verified to equal `canonical_bytes(dim)`.
            Ok(unsafe { Self::from_canonical_back_unchecked(data, dim) })
        }
    }

    /// Front-metadata parse without length validation.
    ///
    /// # Safety
    ///
    /// `data.len()` must equal [`Self::canonical_bytes`]`(dim)`.
    pub unsafe fn from_canonical_unchecked(data: &'a [u8], dim: usize) -> Self {
        debug_assert_eq!(data.len(), Self::canonical_bytes(dim));

        // SAFETY: the caller guarantees `data` holds `size_of::<T>()` metadata
        // bytes followed by `slice_bytes(dim)` element bytes, so the tail
        // subslice is in bounds and holds `dim` elements.
        let bits =
            unsafe { BitSlice::new_unchecked(data.get_unchecked(std::mem::size_of::<T>()..), dim) };

        // SAFETY: slice pointers are never null; the first `size_of::<T>()`
        // bytes hold the `T: Pod` metadata. NOTE(review): `data` may not be
        // `T`-aligned (tests deliberately use arbitrary offsets), so `Ref`
        // presumably performs unaligned reads — confirm.
        let meta =
            unsafe { Ref::new(NonNull::new_unchecked(data.as_ptr().cast_mut()).cast::<T>()) };
        Self { bits, meta }
    }

    /// Back-metadata parse without length validation.
    ///
    /// # Safety
    ///
    /// `data.len()` must equal [`Self::canonical_bytes`]`(dim)`.
    pub unsafe fn from_canonical_back_unchecked(data: &'a [u8], dim: usize) -> Self {
        debug_assert_eq!(data.len(), Self::canonical_bytes(dim));
        // SAFETY: the caller guarantees the buffer is at least
        // `size_of::<T>()` bytes, so the split point is in bounds.
        let (data, meta) =
            unsafe { data.split_at_unchecked(data.len() - std::mem::size_of::<T>()) };

        // SAFETY: after the split, `data` is exactly `slice_bytes(dim)` long.
        let bits = unsafe { BitSlice::new_unchecked(data, dim) };

        // SAFETY: slice pointers are never null; `meta` is the trailing
        // `size_of::<T>()` bytes holding the `T: Pod` metadata.
        let meta =
            unsafe { Ref::new(NonNull::new_unchecked(meta.as_ptr().cast_mut()).cast::<T>()) };
        Self { bits, meta }
    }
}
464
impl<'a, const NBITS: usize, Repr, T, Perm> VectorMut<'a, NBITS, Repr, T, Perm>
where
    Repr: Representation<NBITS>,
    Perm: PermutationStrategy<NBITS>,
    T: bytemuck::Pod,
{
    /// Mutable parse of a canonical buffer with the metadata at the *front*.
    ///
    /// # Errors
    ///
    /// [`NotCanonical::WrongLength`] when `data.len()` is not exactly
    /// [`Self::canonical_bytes`]`(dim)`.
    pub fn from_canonical_front_mut(data: &'a mut [u8], dim: usize) -> Result<Self, NotCanonical> {
        let expected = Self::canonical_bytes(dim);
        if data.len() != expected {
            Err(NotCanonical::WrongLength(expected, data.len()))
        } else {
            // SAFETY: the length was verified to equal `canonical_bytes(dim)`.
            Ok(unsafe { Self::from_canonical_front_mut_unchecked(data, dim) })
        }
    }

    /// Front-metadata mutable parse without length validation.
    ///
    /// # Safety
    ///
    /// `data.len()` must equal [`Self::canonical_bytes`]`(dim)`.
    pub unsafe fn from_canonical_front_mut_unchecked(data: &'a mut [u8], dim: usize) -> Self {
        debug_assert_eq!(data.len(), Self::canonical_bytes(dim));

        // SAFETY: the caller guarantees the buffer is at least
        // `size_of::<T>()` bytes, so the split point is in bounds.
        let (front, back) = unsafe { data.split_at_mut_unchecked(std::mem::size_of::<T>()) };

        // SAFETY: `back` is the remaining `slice_bytes(dim)` element bytes.
        let bits = unsafe { MutBitSlice::new_unchecked(back, dim) };

        // SAFETY: slice pointers are never null; `front` holds the `T: Pod`
        // metadata bytes.
        let meta = unsafe { Mut::new(NonNull::new_unchecked(front.as_mut_ptr()).cast::<T>()) };
        Self { bits, meta }
    }

    /// Mutable parse of a canonical buffer with the metadata at the *back*.
    ///
    /// # Errors
    ///
    /// [`NotCanonical::WrongLength`] when `data.len()` is not exactly
    /// [`Self::canonical_bytes`]`(dim)`.
    pub fn from_canonical_back_mut(data: &'a mut [u8], dim: usize) -> Result<Self, NotCanonical> {
        let len = data.len();
        // Lazily computed so the happy path avoids the extra arithmetic.
        let expected = || Self::canonical_bytes(dim);
        // Checked split validates the buffer is at least `slice_bytes(dim)`.
        let (front, back) = match data.split_at_mut_checked(Self::slice_bytes(dim)) {
            Some(v) => v,
            None => {
                return Err(NotCanonical::WrongLength(expected(), len));
            }
        };

        // The remainder must be exactly the metadata footprint.
        if back.len() != std::mem::size_of::<T>() {
            return Err(NotCanonical::WrongLength(expected(), len));
        }

        // SAFETY: `front` is exactly `slice_bytes(dim)` bytes by construction.
        let bits = unsafe { MutBitSlice::new_unchecked(front, dim) };

        // SAFETY: slice pointers are never null; `back` holds the `T: Pod`
        // metadata bytes.
        let meta = unsafe { Mut::new(NonNull::new_unchecked(back.as_mut_ptr()).cast::<T>()) };
        Ok(Self { bits, meta })
    }
}
540
541#[cfg(test)]
546mod tests {
547 use diskann_utils::{Reborrow, ReborrowMut};
548 use rand::{
549 Rng, SeedableRng,
550 distr::{Distribution, StandardUniform, Uniform},
551 rngs::StdRng,
552 };
553
554 use super::*;
555 use crate::bits::{BoxedBitSlice, Representation, Unsigned};
556
    /// Simple 8-byte POD payload used to exercise metadata round-trips.
    #[derive(Default, Debug, Clone, Copy, PartialEq, bytemuck::Zeroable, bytemuck::Pod)]
    #[repr(C)]
    struct Metadata {
        a: u32,
        b: u32,
    }

    impl Metadata {
        fn new(a: u32, b: u32) -> Metadata {
            Self { a, b }
        }
    }
573
574 #[test]
575 fn test_vector() {
576 let len = 20;
577 let mut base = Vector::<7, Unsigned, Metadata>::new_boxed(len);
578 assert_eq!(base.len(), len);
579 assert_eq!(base.meta(), Metadata::default());
580 assert!(!base.is_empty());
581 {
583 let mut rb = base.reborrow_mut();
584 assert_eq!(rb.len(), len);
585 rb.set_meta(Metadata::new(1, 2));
586 let mut v = rb.vector_mut();
587
588 assert_eq!(v.len(), len);
589 for i in 0..v.len() {
590 v.set(i, i as i64).unwrap();
591 }
592 }
593
594 let expected_metadata = Metadata::new(1, 2);
596 assert_eq!(base.meta(), expected_metadata);
597 assert_eq!(base.len(), len);
598 let v = base.vector();
599 for i in 0..v.len() {
600 assert_eq!(v.get(i).unwrap(), i as i64);
601 }
602
603 {
605 let rb = base.reborrow();
606 assert_eq!(rb.len(), len);
607 assert_eq!(rb.meta(), expected_metadata);
608 let v = rb.vector();
609 for i in 0..v.len() {
610 assert_eq!(v.get(i).unwrap(), i as i64);
611 }
612 }
613 }
614
615 #[test]
616 fn test_compensated_mut() {
617 let len = 30;
618 let mut v = BoxedBitSlice::<7, Unsigned>::new_boxed(len);
619 let mut m = Metadata::default();
620
621 let mut vector = VectorMut::new(v.reborrow_mut(), &mut m);
623 assert_eq!(vector.len(), len);
624 vector.set_meta(Metadata::new(200, 5));
625 for i in 0..vector.len() {
626 vector.vector_mut().set(i, i as i64).unwrap();
627 }
628
629 assert_eq!(m.a, 200);
631 assert_eq!(m.b, 5);
632 for i in 0..len {
633 assert_eq!(v.get(i).unwrap(), i as i64);
634 }
635 }
636
    // Shorthand aliases pinning the test representation and metadata type.
    type TestVectorRef<'a, const NBITS: usize> = VectorRef<'a, NBITS, Unsigned, Metadata>;
    type TestVectorMut<'a, const NBITS: usize> = VectorMut<'a, NBITS, Unsigned, Metadata>;
643
    /// Round-trip `ntrials` random vectors of length `dim` through both the
    /// front- and back-metadata canonical layouts, then verify that the
    /// length-validating constructors reject too-short, empty, and too-long
    /// buffers.
    fn check_canonicalization<const NBITS: usize, R>(dim: usize, ntrials: usize, rng: &mut R)
    where
        Unsigned: Representation<NBITS>,
        R: Rng,
    {
        let bytes = TestVectorRef::<NBITS>::canonical_bytes(dim);
        assert_eq!(
            bytes,
            std::mem::size_of::<Metadata>() + BitSlice::<NBITS, Unsigned>::bytes_for(dim)
        );

        // Over-allocate so a random `offset` can start the view mid-buffer,
        // exercising arbitrary (mis)alignment of the metadata.
        let mut buffer_front = vec![u8::default(); bytes + std::mem::size_of::<Metadata>() + 1];
        let mut buffer_back = vec![u8::default(); bytes + std::mem::size_of::<Metadata>() + 1];

        let mut expected = vec![i64::default(); dim];

        let uniform = Uniform::try_from(Unsigned::domain_const::<NBITS>()).unwrap();

        for _ in 0..ntrials {
            let offset = Uniform::new(0, std::mem::size_of::<Metadata>())
                .unwrap()
                .sample(rng);
            let a: u32 = StandardUniform.sample(rng);
            let b: u32 = StandardUniform.sample(rng);

            expected.iter_mut().for_each(|i| *i = uniform.sample(rng));
            {
                // Write metadata + elements through a mutable canonical view.
                let set = |mut cv: TestVectorMut<NBITS>| {
                    cv.set_meta(Metadata::new(a, b));
                    let mut vector = cv.vector_mut();
                    for (i, e) in expected.iter().enumerate() {
                        vector.set(i, *e).unwrap();
                    }
                };

                let cv = TestVectorMut::<NBITS>::from_canonical_front_mut(
                    &mut buffer_front[offset..offset + bytes],
                    dim,
                )
                .unwrap();
                set(cv);

                let cv = TestVectorMut::<NBITS>::from_canonical_back_mut(
                    &mut buffer_back[offset..offset + bytes],
                    dim,
                )
                .unwrap();
                set(cv);
            }

            {
                // Read back through immutable canonical views and compare.
                let check = |cv: TestVectorRef<NBITS>| {
                    assert_eq!(cv.meta(), Metadata::new(a, b));
                    let vector = cv.vector();
                    for (i, e) in expected.iter().enumerate() {
                        assert_eq!(vector.get(i).unwrap(), *e);
                    }
                };

                let cv = TestVectorRef::<NBITS>::from_canonical_front(
                    &buffer_front[offset..offset + bytes],
                    dim,
                )
                .unwrap();
                check(cv);

                let cv = TestVectorRef::<NBITS>::from_canonical_back(
                    &buffer_back[offset..offset + bytes],
                    dim,
                )
                .unwrap();
                check(cv);
            }
        }

        {
            // Mutable constructors: short, empty, and long buffers all fail.
            let err = TestVectorMut::<NBITS>::from_canonical_front_mut(
                &mut buffer_front[..bytes - 1],
                dim,
            )
            .unwrap_err();

            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err =
                TestVectorMut::<NBITS>::from_canonical_back_mut(&mut buffer_back[..bytes - 1], dim)
                    .unwrap_err();

            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err = TestVectorMut::<NBITS>::from_canonical_front_mut(&mut [], dim).unwrap_err();

            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err = TestVectorMut::<NBITS>::from_canonical_back_mut(&mut [], dim).unwrap_err();

            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err = TestVectorMut::<NBITS>::from_canonical_front_mut(
                &mut buffer_front[..bytes + 1],
                dim,
            )
            .unwrap_err();

            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err =
                TestVectorMut::<NBITS>::from_canonical_back_mut(&mut buffer_back[..bytes + 1], dim)
                    .unwrap_err();

            assert!(matches!(err, NotCanonical::WrongLength(_, _)));
        }

        {
            // Immutable constructors: same rejection matrix.
            let err = TestVectorRef::<NBITS>::from_canonical_front(&buffer_front[..bytes - 1], dim)
                .unwrap_err();
            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err = TestVectorRef::<NBITS>::from_canonical_back(&buffer_back[..bytes - 1], dim)
                .unwrap_err();
            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err = TestVectorRef::<NBITS>::from_canonical_front(&[], dim).unwrap_err();
            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err = TestVectorRef::<NBITS>::from_canonical_back(&[], dim).unwrap_err();
            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err = TestVectorRef::<NBITS>::from_canonical_front(&buffer_front[..bytes + 1], dim)
                .unwrap_err();
            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err = TestVectorRef::<NBITS>::from_canonical_back(&buffer_back[..bytes + 1], dim)
                .unwrap_err();
            assert!(matches!(err, NotCanonical::WrongLength(_, _)));
        }
    }
793
    /// Same exercise as `check_canonicalization`, but with zero-sized `()`
    /// metadata, where `canonical_bytes(dim)` equals the bare slice bytes.
    /// Underflow-prone `bytes - 1` checks are guarded on `dim >= 1` because
    /// `bytes == 0` when `dim == 0` (and an empty slice is then canonical).
    fn check_canonicalization_zst<const NBITS: usize, R>(dim: usize, ntrials: usize, rng: &mut R)
    where
        Unsigned: Representation<NBITS>,
        R: Rng,
    {
        let bytes = VectorRef::<NBITS, Unsigned, ()>::canonical_bytes(dim);
        assert_eq!(bytes, BitSlice::<NBITS, Unsigned>::bytes_for(dim));

        // Over-allocate so a random `offset` can start the view mid-buffer.
        let max_offset = 10;
        let mut buffer_front = vec![u8::default(); bytes + max_offset];
        let mut buffer_back = vec![u8::default(); bytes + max_offset];

        let mut expected = vec![i64::default(); dim];

        let uniform = Uniform::try_from(Unsigned::domain_const::<NBITS>()).unwrap();

        for _ in 0..ntrials {
            let offset = Uniform::new(0, max_offset).unwrap().sample(rng);
            expected.iter_mut().for_each(|i| *i = uniform.sample(rng));
            {
                // Write elements (and the no-op ZST metadata) through a
                // mutable canonical view.
                let set = |mut cv: VectorMut<NBITS, Unsigned, ()>| {
                    cv.set_meta(());
                    let mut vector = cv.vector_mut();
                    for (i, e) in expected.iter().enumerate() {
                        vector.set(i, *e).unwrap();
                    }
                };

                let cv = VectorMut::<NBITS, Unsigned, ()>::from_canonical_front_mut(
                    &mut buffer_front[offset..offset + bytes],
                    dim,
                )
                .unwrap();
                set(cv);

                let cv = VectorMut::<NBITS, Unsigned, ()>::from_canonical_back_mut(
                    &mut buffer_back[offset..offset + bytes],
                    dim,
                )
                .unwrap();
                set(cv);
            }

            {
                // Read back through immutable canonical views and compare.
                let check = |cv: VectorRef<NBITS, Unsigned, ()>| {
                    let vector = cv.vector();
                    for (i, e) in expected.iter().enumerate() {
                        assert_eq!(vector.get(i).unwrap(), *e);
                    }
                };

                let cv = VectorRef::<NBITS, Unsigned, ()>::from_canonical_front(
                    &buffer_front[offset..offset + bytes],
                    dim,
                )
                .unwrap();
                check(cv);

                let cv = VectorRef::<NBITS, Unsigned, ()>::from_canonical_back(
                    &buffer_back[offset..offset + bytes],
                    dim,
                )
                .unwrap();
                check(cv);
            }
        }

        {
            // Mutable constructors: wrong-length buffers fail.
            if dim >= 1 {
                let err = VectorMut::<NBITS, Unsigned, ()>::from_canonical_front_mut(
                    &mut buffer_front[..bytes - 1],
                    dim,
                )
                .unwrap_err();
                assert!(matches!(err, NotCanonical::WrongLength(_, _)));

                let err = VectorMut::<NBITS, Unsigned, ()>::from_canonical_back_mut(
                    &mut buffer_back[..bytes - 1],
                    dim,
                )
                .unwrap_err();
                assert!(matches!(err, NotCanonical::WrongLength(_, _)));
            }

            if dim >= 1 {
                // Empty slices are only an error when `bytes > 0`.
                let err = VectorMut::<NBITS, Unsigned, ()>::from_canonical_front_mut(&mut [], dim)
                    .unwrap_err();
                assert!(matches!(err, NotCanonical::WrongLength(_, _)));

                let err = VectorMut::<NBITS, Unsigned, ()>::from_canonical_back_mut(&mut [], dim)
                    .unwrap_err();
                assert!(matches!(err, NotCanonical::WrongLength(_, _)));
            }

            {
                let err = VectorMut::<NBITS, Unsigned, ()>::from_canonical_front_mut(
                    &mut buffer_front[..bytes + 1],
                    dim,
                )
                .unwrap_err();

                assert!(matches!(err, NotCanonical::WrongLength(_, _)));

                let err = VectorMut::<NBITS, Unsigned, ()>::from_canonical_back_mut(
                    &mut buffer_back[..bytes + 1],
                    dim,
                )
                .unwrap_err();

                assert!(matches!(err, NotCanonical::WrongLength(_, _)));
            }
        }

        {
            // Immutable constructors: wrong-length buffers fail.
            if dim >= 1 {
                let err = VectorRef::<NBITS, Unsigned, ()>::from_canonical_front(
                    &buffer_front[..bytes - 1],
                    dim,
                )
                .unwrap_err();

                assert!(matches!(err, NotCanonical::WrongLength(_, _)));

                let err = VectorRef::<NBITS, Unsigned, ()>::from_canonical_back(
                    &buffer_back[..bytes - 1],
                    dim,
                )
                .unwrap_err();

                assert!(matches!(err, NotCanonical::WrongLength(_, _)));
            }

            // NOTE(review): the two checks below repeat the `VectorMut`
            // `bytes + 1` cases already covered above; possibly they were
            // meant to exercise `VectorRef` instead — confirm intent.
            let err = VectorMut::<NBITS, Unsigned, ()>::from_canonical_front_mut(
                &mut buffer_front[..bytes + 1],
                dim,
            )
            .unwrap_err();

            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err = VectorMut::<NBITS, Unsigned, ()>::from_canonical_back_mut(
                &mut buffer_back[..bytes + 1],
                dim,
            )
            .unwrap_err();

            assert!(matches!(err, NotCanonical::WrongLength(_, _)));
        }

        {
            if dim >= 1 {
                // Empty slices are only an error when `bytes > 0`.
                let err =
                    VectorRef::<NBITS, Unsigned, ()>::from_canonical_front(&[], dim).unwrap_err();

                assert!(matches!(err, NotCanonical::WrongLength(_, _)));

                let err =
                    VectorRef::<NBITS, Unsigned, ()>::from_canonical_back(&[], dim).unwrap_err();

                assert!(matches!(err, NotCanonical::WrongLength(_, _)));
            }

            {
                let err = VectorRef::<NBITS, Unsigned, ()>::from_canonical_front(
                    &buffer_front[..bytes + 1],
                    dim,
                )
                .unwrap_err();

                assert!(matches!(err, NotCanonical::WrongLength(_, _)));

                let err = VectorRef::<NBITS, Unsigned, ()>::from_canonical_back(
                    &buffer_back[..bytes + 1],
                    dim,
                )
                .unwrap_err();

                assert!(matches!(err, NotCanonical::WrongLength(_, _)));
            }
        }
    }
987
    // Shrink the exhaustive sweep under Miri, which executes far more slowly.
    cfg_if::cfg_if! {
        if #[cfg(miri)] {
            const MAX_DIM: usize = 37;
            const TRIALS_PER_DIM: usize = 1;
        } else {
            const MAX_DIM: usize = 256;
            const TRIALS_PER_DIM: usize = 20;
        }
    }
1000
    // Generates a #[test] that sweeps every dimension in 0..MAX_DIM for a
    // given bit width, with a fixed RNG seed for reproducibility.
    macro_rules! test_canonical {
        ($name:ident, $nbits:literal, $seed:literal) => {
            #[test]
            fn $name() {
                let mut rng = StdRng::seed_from_u64($seed);
                for dim in 0..MAX_DIM {
                    check_canonicalization::<$nbits, _>(dim, TRIALS_PER_DIM, &mut rng);
                    check_canonicalization_zst::<$nbits, _>(dim, TRIALS_PER_DIM, &mut rng);
                }
            }
        };
    }
1013
    // One sweep per supported bit width, each with its own fixed seed.
    test_canonical!(canonical_8bit, 8, 0xe64518a00ee99e2f);
    test_canonical!(canonical_7bit, 7, 0x3907123f8c38def2);
    test_canonical!(canonical_6bit, 6, 0xeccaeb83965ff6a1);
    test_canonical!(canonical_5bit, 5, 0x9691fe59e49bfb96);
    test_canonical!(canonical_4bit, 4, 0xc4d3e9bc699a7e6f);
    test_canonical!(canonical_3bit, 3, 0x8a01b2ccdca8fb2b);
    test_canonical!(canonical_2bit, 2, 0x3a07429e8184b67f);
    test_canonical!(canonical_1bit, 1, 0x93fddb26059c115c);
1022}