1use std::ptr::NonNull;
7
8use diskann_utils::{Reborrow, ReborrowMut};
9use thiserror::Error;
10
11use crate::{
12 alloc::{AllocatorCore, AllocatorError, GlobalAllocator, Poly},
13 bits::{
14 AsMutPtr, AsPtr, BitSlice, BitSliceBase, Dense, MutBitSlice, MutSlicePtr,
15 PermutationStrategy, Representation, SlicePtr,
16 },
17 ownership::{CopyMut, CopyRef, Mut, Owned, Ref},
18};
19
/// A bit-packed vector of `NBITS`-bit elements paired with an inline metadata
/// value.
///
/// * `Repr` — logical representation of each element (see [`Representation`]).
/// * `Ptr` — byte storage for the packed bits (owned, borrowed, or mutably
///   borrowed; must expose `u8` pointers).
/// * `T` — ownership wrapper for the metadata (e.g. `Owned<_>`, `Ref<'_, _>`,
///   `Mut<'_, _>`).
/// * `Perm` — bit-permutation strategy; defaults to [`Dense`].
#[derive(Debug, Clone, Copy)]
pub struct VectorBase<const NBITS: usize, Repr, Ptr, T, Perm = Dense>
where
    Ptr: AsPtr<Type = u8>,
    Repr: Representation<NBITS>,
    Perm: PermutationStrategy<NBITS>,
{
    // Packed element storage.
    bits: BitSliceBase<NBITS, Repr, Ptr, Perm>,
    // Metadata in whatever ownership mode `T` encodes.
    meta: T,
}
187
impl<const NBITS: usize, Repr, Ptr, T, Perm> VectorBase<NBITS, Repr, Ptr, T, Perm>
where
    Ptr: AsPtr<Type = u8>,
    Repr: Representation<NBITS>,
    Perm: PermutationStrategy<NBITS>,
{
    /// Number of bytes needed by the packed bit storage alone for `count`
    /// elements (no metadata).
    pub fn slice_bytes(count: usize) -> usize {
        BitSliceBase::<NBITS, Repr, Ptr, Perm>::bytes_for(count)
    }

    /// Number of bytes of the canonical serialized form for `count` elements:
    /// the packed bits plus the metadata payload appended/prepended inline.
    pub fn canonical_bytes(count: usize) -> usize
    where
        T: CopyRef,
        T::Target: bytemuck::Pod,
    {
        Self::slice_bytes(count) + std::mem::size_of::<T::Target>()
    }

    /// Builds a vector from an existing bit slice and any value convertible
    /// into the metadata wrapper `T`.
    pub fn new<M>(bits: BitSliceBase<NBITS, Repr, Ptr, Perm>, meta: M) -> Self
    where
        M: Into<T>,
    {
        Self {
            bits,
            meta: meta.into(),
        }
    }

    /// Number of elements in the vector.
    pub fn len(&self) -> usize {
        self.bits.len()
    }

    /// Returns `true` if the vector holds no elements.
    pub fn is_empty(&self) -> bool {
        self.bits.is_empty()
    }

    /// Copies the metadata value out of the wrapper.
    pub fn meta(&self) -> T::Target
    where
        T: CopyRef,
    {
        self.meta.copy_ref()
    }

    /// Immutable view of the packed element data.
    pub fn vector(&self) -> BitSlice<'_, NBITS, Repr, Perm> {
        self.bits.reborrow()
    }

    /// Mutable view of the packed element data.
    pub fn vector_mut(&mut self) -> MutBitSlice<'_, NBITS, Repr, Perm>
    where
        Ptr: AsMutPtr,
    {
        self.bits.reborrow_mut()
    }

    /// Overwrites the metadata value in place.
    pub fn set_meta(&mut self, value: T::Target)
    where
        Ptr: AsMutPtr,
        T: CopyMut,
    {
        self.meta.copy_mut(value)
    }
}
266
impl<const NBITS: usize, Repr, Perm, T>
    VectorBase<NBITS, Repr, Poly<[u8], GlobalAllocator>, Owned<T>, Perm>
where
    Repr: Representation<NBITS>,
    Perm: PermutationStrategy<NBITS>,
    T: Default,
{
    /// Allocates an owned, zero-initialized vector of `len` elements using the
    /// global allocator; metadata starts at `T::default()`.
    pub fn new_boxed(len: usize) -> Self {
        Self {
            bits: BitSliceBase::new_boxed(len),
            meta: Owned::default(),
        }
    }
}
282
impl<const NBITS: usize, Repr, Perm, T, A> VectorBase<NBITS, Repr, Poly<[u8], A>, Owned<T>, Perm>
where
    Repr: Representation<NBITS>,
    Perm: PermutationStrategy<NBITS>,
    T: Default,
    A: AllocatorCore,
{
    /// Allocates an owned vector of `len` elements in `allocator`; metadata
    /// starts at `T::default()`.
    ///
    /// # Errors
    /// Propagates any [`AllocatorError`] from the underlying allocation.
    pub fn new_in(len: usize, allocator: A) -> Result<Self, AllocatorError> {
        Ok(Self {
            bits: BitSliceBase::new_in(len, allocator)?,
            meta: Owned::default(),
        })
    }
}
298
/// Immutably borrowed vector: borrowed bits plus borrowed metadata.
pub type VectorRef<'a, const NBITS: usize, Repr, T, Perm = Dense> =
    VectorBase<NBITS, Repr, SlicePtr<'a, u8>, Ref<'a, T>, Perm>;

/// Mutably borrowed vector: mutably borrowed bits plus mutable metadata.
pub type VectorMut<'a, const NBITS: usize, Repr, T, Perm = Dense> =
    VectorBase<NBITS, Repr, MutSlicePtr<'a, u8>, Mut<'a, T>, Perm>;

/// Owned vector backed by the global allocator.
pub type Vector<const NBITS: usize, Repr, T, Perm = Dense> =
    VectorBase<NBITS, Repr, Poly<[u8], GlobalAllocator>, Owned<T>, Perm>;

/// Owned vector backed by a caller-supplied allocator `A`.
pub type PolyVector<const NBITS: usize, Repr, T, Perm, A> =
    VectorBase<NBITS, Repr, Poly<[u8], A>, Owned<T>, Perm>;
322
/// Reborrows any `VectorBase` as a shared [`VectorRef`] view, reborrowing both
/// the bit storage and the metadata wrapper.
impl<'this, const NBITS: usize, Repr, Ptr, T, Perm> Reborrow<'this>
    for VectorBase<NBITS, Repr, Ptr, T, Perm>
where
    Ptr: AsPtr<Type = u8>,
    Repr: Representation<NBITS>,
    Perm: PermutationStrategy<NBITS>,
    T: CopyRef + Reborrow<'this, Target = Ref<'this, <T as CopyRef>::Target>>,
{
    type Target = VectorRef<'this, NBITS, Repr, <T as CopyRef>::Target, Perm>;

    fn reborrow(&'this self) -> Self::Target {
        Self::Target {
            bits: self.bits.reborrow(),
            meta: self.meta.reborrow(),
        }
    }
}
341
/// Reborrows any mutable `VectorBase` as a [`VectorMut`] view, reborrowing both
/// the bit storage and the metadata wrapper.
impl<'this, const NBITS: usize, Repr, Ptr, T, Perm> ReborrowMut<'this>
    for VectorBase<NBITS, Repr, Ptr, T, Perm>
where
    Ptr: AsMutPtr<Type = u8>,
    Repr: Representation<NBITS>,
    Perm: PermutationStrategy<NBITS>,
    T: CopyMut + ReborrowMut<'this, Target = Mut<'this, <T as CopyRef>::Target>>,
{
    type Target = VectorMut<'this, NBITS, Repr, <T as CopyRef>::Target, Perm>;

    fn reborrow_mut(&'this mut self) -> Self::Target {
        Self::Target {
            bits: self.bits.reborrow_mut(),
            meta: self.meta.reborrow_mut(),
        }
    }
}
360
361#[derive(Debug, Error, PartialEq, Clone, Copy)]
366pub enum NotCanonical {
367 #[error("expected a slice length of {0} bytes but instead got {1} bytes")]
368 WrongLength(usize, usize),
369}
370
impl<'a, const NBITS: usize, Repr, T, Perm> VectorRef<'a, NBITS, Repr, T, Perm>
where
    Repr: Representation<NBITS>,
    Perm: PermutationStrategy<NBITS>,
    T: bytemuck::Pod,
{
    /// Interprets `data` as a canonical vector whose metadata occupies the
    /// *leading* `size_of::<T>()` bytes, followed by the packed bits.
    ///
    /// # Errors
    /// Returns [`NotCanonical::WrongLength`] unless `data.len()` is exactly
    /// `canonical_bytes(dim)`.
    pub fn from_canonical_front(data: &'a [u8], dim: usize) -> Result<Self, NotCanonical> {
        let expected = Self::canonical_bytes(dim);
        if data.len() != expected {
            Err(NotCanonical::WrongLength(expected, data.len()))
        } else {
            // SAFETY: the length was just checked to equal canonical_bytes(dim).
            Ok(unsafe { Self::from_canonical_unchecked(data, dim) })
        }
    }

    /// Interprets `data` as a canonical vector whose metadata occupies the
    /// *trailing* `size_of::<T>()` bytes, preceded by the packed bits.
    ///
    /// # Errors
    /// Returns [`NotCanonical::WrongLength`] unless `data.len()` is exactly
    /// `canonical_bytes(dim)`.
    pub fn from_canonical_back(data: &'a [u8], dim: usize) -> Result<Self, NotCanonical> {
        let expected = Self::canonical_bytes(dim);
        if data.len() != expected {
            Err(NotCanonical::WrongLength(expected, data.len()))
        } else {
            // SAFETY: the length was just checked to equal canonical_bytes(dim).
            Ok(unsafe { Self::from_canonical_back_unchecked(data, dim) })
        }
    }

    /// Front-metadata variant of the canonical view without length validation.
    ///
    /// # Safety
    /// `data.len()` must equal `Self::canonical_bytes(dim)`; this is only
    /// checked via `debug_assert` in debug builds.
    ///
    /// NOTE(review): the metadata pointer (`data.as_ptr()`) may not be aligned
    /// for `T`; this presumably relies on `Ref` performing unaligned reads of
    /// the `Pod` payload — confirm against `Ref`'s implementation.
    pub unsafe fn from_canonical_unchecked(data: &'a [u8], dim: usize) -> Self {
        debug_assert_eq!(data.len(), Self::canonical_bytes(dim));

        // SAFETY (caller contract): data is at least size_of::<T>() bytes, so
        // skipping the metadata prefix stays in bounds, and the remainder is
        // exactly slice_bytes(dim) bytes.
        let bits =
            unsafe { BitSlice::new_unchecked(data.get_unchecked(std::mem::size_of::<T>()..), dim) };

        // SAFETY: a slice pointer is never null; the cast_mut is only used to
        // satisfy NonNull's signature — the Ref wrapper is a shared view.
        let meta =
            unsafe { Ref::new(NonNull::new_unchecked(data.as_ptr().cast_mut()).cast::<T>()) };
        Self { bits, meta }
    }

    /// Back-metadata variant of the canonical view without length validation.
    ///
    /// # Safety
    /// `data.len()` must equal `Self::canonical_bytes(dim)`; this is only
    /// checked via `debug_assert` in debug builds.
    pub unsafe fn from_canonical_back_unchecked(data: &'a [u8], dim: usize) -> Self {
        debug_assert_eq!(data.len(), Self::canonical_bytes(dim));
        // SAFETY (caller contract): len >= size_of::<T>(), so the split point
        // is in bounds; the front part is exactly slice_bytes(dim) bytes.
        let (data, meta) =
            unsafe { data.split_at_unchecked(data.len() - std::mem::size_of::<T>()) };

        // SAFETY: `data` now holds exactly the packed-bit bytes for `dim`.
        let bits = unsafe { BitSlice::new_unchecked(data, dim) };

        // SAFETY: slice pointers are non-null; cast_mut only satisfies
        // NonNull's signature — no mutation occurs through this Ref.
        let meta =
            unsafe { Ref::new(NonNull::new_unchecked(meta.as_ptr().cast_mut()).cast::<T>()) };
        Self { bits, meta }
    }
}
464
465impl<'a, const NBITS: usize, Repr, T, Perm> VectorMut<'a, NBITS, Repr, T, Perm>
466where
467 Repr: Representation<NBITS>,
468 Perm: PermutationStrategy<NBITS>,
469 T: bytemuck::Pod,
470{
471 pub fn from_canonical_front_mut(data: &'a mut [u8], dim: usize) -> Result<Self, NotCanonical> {
479 let expected = Self::canonical_bytes(dim);
480 let bytes = data.len();
481 let (front, back) = match data.split_at_mut_checked(std::mem::size_of::<T>()) {
482 Some(v) => v,
483 None => {
484 return Err(NotCanonical::WrongLength(expected, bytes));
485 }
486 };
487
488 let bits =
489 MutBitSlice::new(back, dim).map_err(|_| NotCanonical::WrongLength(expected, bytes))?;
490
491 let meta = unsafe { Mut::new(NonNull::new_unchecked(front.as_mut_ptr()).cast::<T>()) };
497 Ok(Self { bits, meta })
498 }
499
500 pub fn from_canonical_back_mut(data: &'a mut [u8], dim: usize) -> Result<Self, NotCanonical> {
508 let len = data.len();
509 let expected = || Self::canonical_bytes(dim);
510 let (front, back) = match data.split_at_mut_checked(Self::slice_bytes(dim)) {
511 Some(v) => v,
512 None => {
513 return Err(NotCanonical::WrongLength(expected(), len));
514 }
515 };
516
517 if back.len() != std::mem::size_of::<T>() {
518 return Err(NotCanonical::WrongLength(expected(), len));
519 }
520
521 let bits = unsafe { MutBitSlice::new_unchecked(front, dim) };
524
525 let meta = unsafe { Mut::new(NonNull::new_unchecked(back.as_mut_ptr()).cast::<T>()) };
528 Ok(Self { bits, meta })
529 }
530}
531
532#[cfg(test)]
537mod tests {
538 use diskann_utils::{Reborrow, ReborrowMut};
539 use rand::{
540 distr::{Distribution, StandardUniform, Uniform},
541 rngs::StdRng,
542 Rng, SeedableRng,
543 };
544
545 use super::*;
546 use crate::bits::{BoxedBitSlice, Representation, Unsigned};
547
    // Small POD metadata payload used throughout the tests; `#[repr(C)]` plus
    // the bytemuck derives make it safe to view through raw byte buffers.
    #[derive(Default, Debug, Clone, Copy, PartialEq, bytemuck::Zeroable, bytemuck::Pod)]
    #[repr(C)]
    struct Metadata {
        a: u32,
        b: u32,
    }

    impl Metadata {
        // Convenience constructor for test literals.
        fn new(a: u32, b: u32) -> Metadata {
            Self { a, b }
        }
    }
564
    // End-to-end smoke test for the owned `Vector`: default state, mutation
    // through `reborrow_mut`, then read-back through owned and reborrowed views.
    #[test]
    fn test_vector() {
        let len = 20;
        let mut base = Vector::<7, Unsigned, Metadata>::new_boxed(len);
        assert_eq!(base.len(), len);
        assert_eq!(base.meta(), Metadata::default());
        assert!(!base.is_empty());
        {
            // Mutate both metadata and elements through a mutable reborrow.
            let mut rb = base.reborrow_mut();
            assert_eq!(rb.len(), len);
            rb.set_meta(Metadata::new(1, 2));
            let mut v = rb.vector_mut();

            assert_eq!(v.len(), len);
            for i in 0..v.len() {
                v.set(i, i as i64).unwrap();
            }
        }

        // Mutations must be visible through the owning handle.
        let expected_metadata = Metadata::new(1, 2);
        assert_eq!(base.meta(), expected_metadata);
        assert_eq!(base.len(), len);
        let v = base.vector();
        for i in 0..v.len() {
            assert_eq!(v.get(i).unwrap(), i as i64);
        }

        {
            // ...and through a shared reborrow.
            let rb = base.reborrow();
            assert_eq!(rb.len(), len);
            assert_eq!(rb.meta(), expected_metadata);
            let v = rb.vector();
            for i in 0..v.len() {
                assert_eq!(v.get(i).unwrap(), i as i64);
            }
        }
    }
605
    // Verifies that a `VectorMut` built from externally owned parts (a bit
    // slice and a `&mut Metadata`) writes through to both originals.
    #[test]
    fn test_compensated_mut() {
        let len = 30;
        let mut v = BoxedBitSlice::<7, Unsigned>::new_boxed(len);
        let mut m = Metadata::default();

        let mut vector = VectorMut::new(v.reborrow_mut(), &mut m);
        assert_eq!(vector.len(), len);
        vector.set_meta(Metadata::new(200, 5));
        for i in 0..vector.len() {
            vector.vector_mut().set(i, i as i64).unwrap();
        }

        // Writes through the view must land in the backing storage.
        assert_eq!(m.a, 200);
        assert_eq!(m.b, 5);
        for i in 0..len {
            assert_eq!(v.get(i).unwrap(), i as i64);
        }
    }
627
    // Shorthand aliases fixing the representation and metadata for the
    // canonicalization tests below.
    type TestVectorRef<'a, const NBITS: usize> = VectorRef<'a, NBITS, Unsigned, Metadata>;
    type TestVectorMut<'a, const NBITS: usize> = VectorMut<'a, NBITS, Unsigned, Metadata>;
634
    // Round-trips random data through the front- and back-metadata canonical
    // layouts (write via `VectorMut`, read via `VectorRef`) at random buffer
    // offsets, then checks that every wrong-length slice is rejected.
    fn check_canonicalization<const NBITS: usize, R>(dim: usize, ntrials: usize, rng: &mut R)
    where
        Unsigned: Representation<NBITS>,
        R: Rng,
    {
        let bytes = TestVectorRef::<NBITS>::canonical_bytes(dim);
        assert_eq!(
            bytes,
            std::mem::size_of::<Metadata>() + BitSlice::<NBITS, Unsigned>::bytes_for(dim)
        );

        // Over-sized buffers so the canonical window can start at a random
        // (possibly unaligned) offset.
        let mut buffer_front = vec![u8::default(); bytes + std::mem::size_of::<Metadata>() + 1];
        let mut buffer_back = vec![u8::default(); bytes + std::mem::size_of::<Metadata>() + 1];

        let mut expected = vec![i64::default(); dim];

        let uniform = Uniform::try_from(Unsigned::domain_const::<NBITS>()).unwrap();

        for _ in 0..ntrials {
            let offset = Uniform::new(0, std::mem::size_of::<Metadata>())
                .unwrap()
                .sample(rng);
            let a: u32 = StandardUniform.sample(rng);
            let b: u32 = StandardUniform.sample(rng);

            expected.iter_mut().for_each(|i| *i = uniform.sample(rng));
            {
                // Write phase: same payload into both layouts.
                let set = |mut cv: TestVectorMut<NBITS>| {
                    cv.set_meta(Metadata::new(a, b));
                    let mut vector = cv.vector_mut();
                    for (i, e) in expected.iter().enumerate() {
                        vector.set(i, *e).unwrap();
                    }
                };

                let cv = TestVectorMut::<NBITS>::from_canonical_front_mut(
                    &mut buffer_front[offset..offset + bytes],
                    dim,
                )
                .unwrap();
                set(cv);

                let cv = TestVectorMut::<NBITS>::from_canonical_back_mut(
                    &mut buffer_back[offset..offset + bytes],
                    dim,
                )
                .unwrap();
                set(cv);
            }

            {
                // Read phase: both layouts must return the written payload.
                let check = |cv: TestVectorRef<NBITS>| {
                    assert_eq!(cv.meta(), Metadata::new(a, b));
                    let vector = cv.vector();
                    for (i, e) in expected.iter().enumerate() {
                        assert_eq!(vector.get(i).unwrap(), *e);
                    }
                };

                let cv = TestVectorRef::<NBITS>::from_canonical_front(
                    &buffer_front[offset..offset + bytes],
                    dim,
                )
                .unwrap();
                check(cv);

                let cv = TestVectorRef::<NBITS>::from_canonical_back(
                    &buffer_back[offset..offset + bytes],
                    dim,
                )
                .unwrap();
                check(cv);
            }
        }

        {
            // Mutable constructors: one byte short, empty, and one byte long
            // must all be rejected.
            let err = TestVectorMut::<NBITS>::from_canonical_front_mut(
                &mut buffer_front[..bytes - 1],
                dim,
            )
            .unwrap_err();

            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err =
                TestVectorMut::<NBITS>::from_canonical_back_mut(&mut buffer_back[..bytes - 1], dim)
                    .unwrap_err();

            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err = TestVectorMut::<NBITS>::from_canonical_front_mut(&mut [], dim).unwrap_err();

            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err = TestVectorMut::<NBITS>::from_canonical_back_mut(&mut [], dim).unwrap_err();

            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err = TestVectorMut::<NBITS>::from_canonical_front_mut(
                &mut buffer_front[..bytes + 1],
                dim,
            )
            .unwrap_err();

            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err =
                TestVectorMut::<NBITS>::from_canonical_back_mut(&mut buffer_back[..bytes + 1], dim)
                    .unwrap_err();

            assert!(matches!(err, NotCanonical::WrongLength(_, _)));
        }

        {
            // Shared constructors: same wrong-length cases.
            let err = TestVectorRef::<NBITS>::from_canonical_front(&buffer_front[..bytes - 1], dim)
                .unwrap_err();
            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err = TestVectorRef::<NBITS>::from_canonical_back(&buffer_back[..bytes - 1], dim)
                .unwrap_err();
            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err = TestVectorRef::<NBITS>::from_canonical_front(&[], dim).unwrap_err();
            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err = TestVectorRef::<NBITS>::from_canonical_back(&[], dim).unwrap_err();
            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err = TestVectorRef::<NBITS>::from_canonical_front(&buffer_front[..bytes + 1], dim)
                .unwrap_err();
            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err = TestVectorRef::<NBITS>::from_canonical_back(&buffer_back[..bytes + 1], dim)
                .unwrap_err();
            assert!(matches!(err, NotCanonical::WrongLength(_, _)));
        }
    }
784
    // Same round-trip/error checks as `check_canonicalization`, but with a
    // zero-sized `()` metadata type, so the canonical size equals the raw
    // packed-bit size. The `dim >= 1` guards skip the shrunken-slice cases
    // when `bytes == 0` (the empty slice is then actually canonical).
    fn check_canonicalization_zst<const NBITS: usize, R>(dim: usize, ntrials: usize, rng: &mut R)
    where
        Unsigned: Representation<NBITS>,
        R: Rng,
    {
        let bytes = VectorRef::<NBITS, Unsigned, ()>::canonical_bytes(dim);
        assert_eq!(bytes, BitSlice::<NBITS, Unsigned>::bytes_for(dim));

        let max_offset = 10;
        let mut buffer_front = vec![u8::default(); bytes + max_offset];
        let mut buffer_back = vec![u8::default(); bytes + max_offset];

        let mut expected = vec![i64::default(); dim];

        let uniform = Uniform::try_from(Unsigned::domain_const::<NBITS>()).unwrap();

        for _ in 0..ntrials {
            let offset = Uniform::new(0, max_offset).unwrap().sample(rng);
            expected.iter_mut().for_each(|i| *i = uniform.sample(rng));
            {
                // Write phase for both layouts.
                let set = |mut cv: VectorMut<NBITS, Unsigned, ()>| {
                    cv.set_meta(());
                    let mut vector = cv.vector_mut();
                    for (i, e) in expected.iter().enumerate() {
                        vector.set(i, *e).unwrap();
                    }
                };

                let cv = VectorMut::<NBITS, Unsigned, ()>::from_canonical_front_mut(
                    &mut buffer_front[offset..offset + bytes],
                    dim,
                )
                .unwrap();
                set(cv);

                let cv = VectorMut::<NBITS, Unsigned, ()>::from_canonical_back_mut(
                    &mut buffer_back[offset..offset + bytes],
                    dim,
                )
                .unwrap();
                set(cv);
            }

            {
                // Read phase for both layouts.
                let check = |cv: VectorRef<NBITS, Unsigned, ()>| {
                    let vector = cv.vector();
                    for (i, e) in expected.iter().enumerate() {
                        assert_eq!(vector.get(i).unwrap(), *e);
                    }
                };

                let cv = VectorRef::<NBITS, Unsigned, ()>::from_canonical_front(
                    &buffer_front[offset..offset + bytes],
                    dim,
                )
                .unwrap();
                check(cv);

                let cv = VectorRef::<NBITS, Unsigned, ()>::from_canonical_back(
                    &buffer_back[offset..offset + bytes],
                    dim,
                )
                .unwrap();
                check(cv);
            }
        }

        {
            // Mutable-constructor error cases.
            if dim >= 1 {
                let err = VectorMut::<NBITS, Unsigned, ()>::from_canonical_front_mut(
                    &mut buffer_front[..bytes - 1],
                    dim,
                )
                .unwrap_err();
                assert!(matches!(err, NotCanonical::WrongLength(_, _)));

                let err = VectorMut::<NBITS, Unsigned, ()>::from_canonical_back_mut(
                    &mut buffer_back[..bytes - 1],
                    dim,
                )
                .unwrap_err();
                assert!(matches!(err, NotCanonical::WrongLength(_, _)));
            }

            if dim >= 1 {
                let err = VectorMut::<NBITS, Unsigned, ()>::from_canonical_front_mut(&mut [], dim)
                    .unwrap_err();
                assert!(matches!(err, NotCanonical::WrongLength(_, _)));

                let err = VectorMut::<NBITS, Unsigned, ()>::from_canonical_back_mut(&mut [], dim)
                    .unwrap_err();
                assert!(matches!(err, NotCanonical::WrongLength(_, _)));
            }

            {
                let err = VectorMut::<NBITS, Unsigned, ()>::from_canonical_front_mut(
                    &mut buffer_front[..bytes + 1],
                    dim,
                )
                .unwrap_err();

                assert!(matches!(err, NotCanonical::WrongLength(_, _)));

                let err = VectorMut::<NBITS, Unsigned, ()>::from_canonical_back_mut(
                    &mut buffer_back[..bytes + 1],
                    dim,
                )
                .unwrap_err();

                assert!(matches!(err, NotCanonical::WrongLength(_, _)));
            }
        }

        {
            // Shared-constructor error cases (slice one byte short).
            if dim >= 1 {
                let err = VectorRef::<NBITS, Unsigned, ()>::from_canonical_front(
                    &buffer_front[..bytes - 1],
                    dim,
                )
                .unwrap_err();

                assert!(matches!(err, NotCanonical::WrongLength(_, _)));

                let err = VectorRef::<NBITS, Unsigned, ()>::from_canonical_back(
                    &buffer_back[..bytes - 1],
                    dim,
                )
                .unwrap_err();

                assert!(matches!(err, NotCanonical::WrongLength(_, _)));
            }

            // NOTE(review): these two calls repeat the mutable bytes+1 checks
            // already performed above (and the VectorRef bytes+1 checks follow
            // below) — looks like a copy-paste duplicate; harmless but
            // presumably intended to be VectorRef calls. Confirm intent.
            let err = VectorMut::<NBITS, Unsigned, ()>::from_canonical_front_mut(
                &mut buffer_front[..bytes + 1],
                dim,
            )
            .unwrap_err();

            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err = VectorMut::<NBITS, Unsigned, ()>::from_canonical_back_mut(
                &mut buffer_back[..bytes + 1],
                dim,
            )
            .unwrap_err();

            assert!(matches!(err, NotCanonical::WrongLength(_, _)));
        }

        {
            // Shared-constructor error cases (empty and one byte long).
            if dim >= 1 {
                let err =
                    VectorRef::<NBITS, Unsigned, ()>::from_canonical_front(&[], dim).unwrap_err();

                assert!(matches!(err, NotCanonical::WrongLength(_, _)));

                let err =
                    VectorRef::<NBITS, Unsigned, ()>::from_canonical_back(&[], dim).unwrap_err();

                assert!(matches!(err, NotCanonical::WrongLength(_, _)));
            }

            {
                let err = VectorRef::<NBITS, Unsigned, ()>::from_canonical_front(
                    &buffer_front[..bytes + 1],
                    dim,
                )
                .unwrap_err();

                assert!(matches!(err, NotCanonical::WrongLength(_, _)));

                let err = VectorRef::<NBITS, Unsigned, ()>::from_canonical_back(
                    &buffer_back[..bytes + 1],
                    dim,
                )
                .unwrap_err();

                assert!(matches!(err, NotCanonical::WrongLength(_, _)));
            }
        }
    }
978
    // Shrink the search space under Miri, which executes orders of magnitude
    // slower than native test runs.
    cfg_if::cfg_if! {
        if #[cfg(miri)] {
            const MAX_DIM: usize = 37;
            const TRIALS_PER_DIM: usize = 1;
        } else {
            const MAX_DIM: usize = 256;
            const TRIALS_PER_DIM: usize = 20;
        }
    }
991
    // Generates one #[test] per bit width, sweeping every dimension up to
    // MAX_DIM with a fixed RNG seed for reproducibility.
    macro_rules! test_canonical {
        ($name:ident, $nbits:literal, $seed:literal) => {
            #[test]
            fn $name() {
                let mut rng = StdRng::seed_from_u64($seed);
                for dim in 0..MAX_DIM {
                    check_canonicalization::<$nbits, _>(dim, TRIALS_PER_DIM, &mut rng);
                    check_canonicalization_zst::<$nbits, _>(dim, TRIALS_PER_DIM, &mut rng);
                }
            }
        };
    }
1004
    // Instantiate the canonicalization tests for every supported bit width.
    test_canonical!(canonical_8bit, 8, 0xe64518a00ee99e2f);
    test_canonical!(canonical_7bit, 7, 0x3907123f8c38def2);
    test_canonical!(canonical_6bit, 6, 0xeccaeb83965ff6a1);
    test_canonical!(canonical_5bit, 5, 0x9691fe59e49bfb96);
    test_canonical!(canonical_4bit, 4, 0xc4d3e9bc699a7e6f);
    test_canonical!(canonical_3bit, 3, 0x8a01b2ccdca8fb2b);
    test_canonical!(canonical_2bit, 2, 0x3a07429e8184b67f);
    test_canonical!(canonical_1bit, 1, 0x93fddb26059c115c);
1013}