diskann_quantization/meta/slice.rs
1/*
2 * Copyright (c) Microsoft Corporation.
3 * Licensed under the MIT license.
4 */
5
6use std::{
7 ops::{Deref, DerefMut},
8 ptr::NonNull,
9};
10
11use diskann_utils::{Reborrow, ReborrowMut};
12use thiserror::Error;
13
14use crate::{
15 alloc::{AllocatorCore, AllocatorError, Poly},
16 num::PowerOfTwo,
17 ownership::{Mut, Owned, Ref},
18};
19
20/// A wrapper for a traditional Rust slice that provides the addition of arbitrary metadata.
21///
22/// # Examples
23///
24/// The `Slice` has several named variants that should be used instead of `Slice` directly:
25/// * [`PolySlice`]: An owning, independently allocated `Slice`.
26/// * [`SliceMut`]: A mutable, reference-like type.
27/// * [`SliceRef`]: A const, reference-like type.
28///
29/// ```
30/// use diskann_quantization::{
31/// alloc::GlobalAllocator,
32/// meta::slice,
33/// bits::Unsigned,
34/// };
35///
36/// use diskann_utils::{Reborrow, ReborrowMut};
37///
38/// #[derive(Debug, Default, Clone, Copy, PartialEq)]
39/// struct Metadata {
40/// value: f32,
41/// }
42///
43/// // Create a new heap-allocated Vector for 4-bit compressions capable of
44/// // holding 3 elements.
45/// //
/// // In this case, the associated metadata type is inferred as `Metadata` from the
/// // assertions below.
47/// let mut v = slice::PolySlice::new_in(3, GlobalAllocator).unwrap();
48///
49/// // We can inspect the underlying bitslice.
50/// let data = v.vector();
51/// assert_eq!(&data, &[0, 0, 0]);
52/// assert_eq!(*v.meta(), Metadata::default(), "expected default metadata value");
53///
54/// // If we want, we can mutably borrow the bitslice and mutate its components.
55/// let mut data = v.vector_mut();
56/// assert_eq!(data.len(), 3);
57/// data[0] = 1;
58/// data[1] = 2;
59/// data[2] = 3;
60///
61/// // Setting the underlying compensation will be visible in the original allocation.
62/// *v.meta_mut() = Metadata { value: 10.5 };
63///
64/// // Check that the changes are visible.
65/// assert_eq!(v.meta().value, 10.5);
66/// assert_eq!(&v.vector(), &[1, 2, 3]);
67/// ```
68///
69/// ## Constructing a `SliceMut` From Components
70///
71/// The following example shows how to assemble a `SliceMut` from raw parts.
72/// ```
73/// use diskann_quantization::meta::slice;
74///
75/// // For exposition purposes, we will use a slice of `u8` and `f32` as the metadata.
76/// let mut data = vec![0u8; 4];
77/// let mut metadata: f32 = 0.0;
78/// {
79/// let mut v = slice::SliceMut::new(data.as_mut_slice(), &mut metadata);
80///
81/// // Through `v`, we can set all the components in `slice` and the compensation.
82/// *v.meta_mut() = 123.4;
83/// let mut data = v.vector_mut();
84/// data[0] = 1;
85/// data[1] = 2;
86/// data[2] = 3;
87/// data[3] = 4;
88/// }
89///
90/// // Now we can check that the changes made internally are visible.
91/// assert_eq!(&data, &[1, 2, 3, 4]);
92/// assert_eq!(metadata, 123.4);
93/// ```
94///
95/// ## Canonical Layout
96///
97/// When the slice element type `T` and metadata type `M` are both
98/// [`bytemuck::Pod`](https://docs.rs/bytemuck/latest/bytemuck/trait.Pod.html), [`SliceRef`]
99/// and [`SliceMut`] support layout canonicalization, where a raw slice can be used as the
100/// backing store for such vectors, enabling inline storage.
101///
102/// The layout is specified by:
103///
104/// * A base alignment of the maximum alignments of `T` and `M`.
105/// * The first `M` bytes contain the metadata.
106/// * Padding if necessary to reach the alignment of `T`.
107/// * The values of type `T` stored contiguously.
108///
109/// The canonical layout needs the following properties:
110///
/// * `T: bytemuck::Pod` and `M: bytemuck::Pod`: for safely storing and retrieving.
112/// * The length for a vector with `N` dimensions must be equal to the value returned
113/// from [`SliceRef::canonical_bytes`].
114/// * The **alignment** of the base pointer must be equal to [`SliceRef::canonical_align()`].
115///
116/// The following functions can be used to construct slices from raw slices:
117///
118/// * [`SliceRef::from_canonical`]
119/// * [`SliceMut::from_canonical_mut`]
120///
121/// An example is shown below.
122/// ```rust
123/// use diskann_quantization::{
124/// alloc::{AlignedAllocator, Poly},
125/// meta::slice,
126/// num::PowerOfTwo,
127/// };
128///
129/// let dim = 3;
130///
131/// // Since we don't control the alignment of the returned pointer, we need to oversize it.
132/// let bytes = slice::SliceRef::<u16, f32>::canonical_bytes(dim);
133/// let align = slice::SliceRef::<u16, f32>::canonical_align();
134/// let mut data = Poly::broadcast(
135/// 0u8,
136/// bytes,
137/// AlignedAllocator::new(align)
138/// ).unwrap();
139///
140/// // Construct a mutable compensated vector over the slice.
141/// let mut v = slice::SliceMut::<u16, f32>::from_canonical_mut(&mut data, dim).unwrap();
142/// *v.meta_mut() = 1.0;
143/// v.vector_mut().copy_from_slice(&[1, 2, 3]);
144///
145/// // Reconstruct a constant CompensatedVector.
146/// let cv = slice::SliceRef::<u16, f32>::from_canonical(&data, dim).unwrap();
147/// assert_eq!(*cv.meta(), 1.0);
148/// assert_eq!(&cv.vector(), &[1, 2, 3]);
149/// ```
#[derive(Debug, Clone, Copy)]
pub struct Slice<T, M> {
    // The slice-like payload: `&[U]`, `&mut [U]`, or an owning `Poly<[U], A>`
    // (see the `SliceRef`/`SliceMut`/`PolySlice` aliases below).
    slice: T,
    // The metadata handle: `Ref<'_, V>`, `Mut<'_, V>`, or `Owned<V>`.
    meta: M,
}
155
156// Use the maximum alignment of `T` and `M` to ensure that no runtime padding is needed.
157//
158// For example, if `T` had a stricter alignment than `M` and we required an alignment of
159// `M`, then the number of padding bytes necessary would depend on the runtime alignment
160// of `M`, which is pretty useless for a storage format.
161const fn canonical_align<T, M>() -> PowerOfTwo {
162 let m_align = PowerOfTwo::alignment_of::<M>();
163 let t_align = PowerOfTwo::alignment_of::<T>();
164
165 // Poor man's `const`-compatible `max`.
166 if m_align.raw() > t_align.raw() {
167 m_align
168 } else {
169 t_align
170 }
171}
172
// Number of bytes occupied by the metadata prefix: the size of `M` rounded up to the
// alignment of `T`, so the element data that follows starts properly aligned.
//
// A zero-sized `M` contributes zero bytes. This is sound because the base alignment is
// already at least the alignment of `T`, so no leading padding is required.
const fn canonical_metadata_bytes<T, M>() -> usize {
    match std::mem::size_of::<M>() {
        0 => 0,
        size => size.next_multiple_of(std::mem::align_of::<T>()),
    }
}

// Total canonical size: the metadata prefix followed by `count` contiguous values of `T`.
const fn canonical_bytes<T, M>(count: usize) -> usize {
    let prefix = canonical_metadata_bytes::<T, M>();
    let payload = std::mem::size_of::<T>() * count;
    prefix + payload
}
192
193impl<T, M> Slice<T, M> {
194 /// Construct a new `Slice` over the components.
195 pub fn new<U>(slice: T, meta: U) -> Self
196 where
197 U: Into<M>,
198 {
199 Self {
200 slice,
201 meta: meta.into(),
202 }
203 }
204
205 /// Return the metadata value for this vector.
206 pub fn meta(&self) -> &M::Target
207 where
208 M: Deref,
209 {
210 &self.meta
211 }
212
213 /// Get a mutable reference to the metadata component.
214 pub fn meta_mut(&mut self) -> &mut M::Target
215 where
216 M: DerefMut,
217 {
218 &mut self.meta
219 }
220}
221
impl<T, M, U, V> Slice<T, M>
where
    T: Deref<Target = [U]>,
    M: Deref<Target = V>,
{
    /// Return the number of dimensions in the slice.
    pub fn len(&self) -> usize {
        self.slice.len()
    }

    /// Return whether or not the vector is empty.
    pub fn is_empty(&self) -> bool {
        self.slice.is_empty()
    }

    /// Borrow the data slice.
    pub fn vector(&self) -> &[U] {
        &self.slice
    }

    /// Mutably borrow the data slice.
    pub fn vector_mut(&mut self) -> &mut [U]
    where
        T: DerefMut,
    {
        &mut self.slice
    }

    /// Return the necessary alignment for the base pointer required for
    /// [`SliceRef::from_canonical`] and [`SliceMut::from_canonical_mut`].
    ///
    /// The return value is guaranteed to be a power of two.
    pub const fn canonical_align() -> PowerOfTwo {
        canonical_align::<U, V>()
    }

    /// Return the number of bytes required to store `count` elements plus metadata in a
    /// canonical layout.
    ///
    /// See: [`SliceRef::from_canonical`], [`SliceMut::from_canonical_mut`].
    pub const fn canonical_bytes(count: usize) -> usize {
        canonical_bytes::<U, V>(count)
    }
}
266
impl<T, A, M> Slice<Poly<[T], A>, Owned<M>>
where
    A: AllocatorCore,
    T: Default,
    M: Default,
{
    /// Create a new owned [`PolySlice`] with `len` default-initialized elements and the
    /// metadata default-initialized as well.
    ///
    /// Returns an error if the allocator fails to provide backing storage.
    pub fn new_in(len: usize, allocator: A) -> Result<Self, AllocatorError> {
        Ok(Self {
            slice: Poly::from_iter((0..len).map(|_| T::default()), allocator)?,
            meta: Owned::default(),
        })
    }
}
281
/// A shared (read-only) reference to a slice and its associated metadata.
pub type SliceRef<'a, T, M> = Slice<&'a [T], Ref<'a, M>>;

/// A mutable reference to a slice and its associated metadata.
pub type SliceMut<'a, T, M> = Slice<&'a mut [T], Mut<'a, M>>;

/// An owning slice and associated metadata, backed by allocator `A`.
pub type PolySlice<T, M, A> = Slice<Poly<[T], A>, Owned<M>>;
290
291//////////////
292// Reborrow //
293//////////////
294
impl<'a, T, A, M> Reborrow<'a> for Slice<Poly<[T], A>, Owned<M>>
where
    A: AllocatorCore,
    M: 'static,
{
    type Target = SliceRef<'a, T, M>;
    /// Reborrow an owning `PolySlice` as a read-only `SliceRef` without copying either
    /// the element data or the metadata.
    fn reborrow(&'a self) -> Self::Target {
        Slice {
            // Deref the owning `Poly` down to a plain `&[T]`.
            slice: &*self.slice,
            // `Owned` is a tuple struct; field 0 holds the metadata value.
            meta: Ref::from(&self.meta.0),
        }
    }
}
308
309/////////////////
310// ReborrowMut //
/////////////////
312
impl<'a, T, A, M> ReborrowMut<'a> for Slice<Poly<[T], A>, Owned<M>>
where
    A: AllocatorCore,
    M: 'static,
{
    type Target = SliceMut<'a, T, M>;
    /// Reborrow an owning `PolySlice` as a mutable `SliceMut`; changes made through the
    /// reborrow are visible in the original allocation.
    fn reborrow_mut(&'a mut self) -> Self::Target {
        Slice {
            // Deref the owning `Poly` down to a plain `&mut [T]`.
            slice: &mut *self.slice,
            // `Owned` is a tuple struct; field 0 holds the metadata value.
            meta: Mut::from(&mut self.meta.0),
        }
    }
}
326
327//////////////////////
328// Canonical Layout //
329//////////////////////
330
/// Error describing why a raw byte slice cannot be viewed through the canonical layout.
///
/// Returned by [`SliceRef::from_canonical`] and [`SliceMut::from_canonical_mut`].
#[derive(Debug, Error, PartialEq, Clone, Copy)]
pub enum NotCanonical {
    /// The byte slice had the wrong length: `(expected, actual)` in bytes.
    #[error("expected a slice length of {0} bytes but instead got {1} bytes")]
    WrongLength(usize, usize),
    /// The base pointer was not sufficiently aligned; the payload is the required alignment.
    #[error("expected a base pointer alignment of at least {0}")]
    NotAligned(usize),
}
338
339impl<'a, T, M> SliceRef<'a, T, M>
340where
341 T: bytemuck::Pod,
342 M: bytemuck::Pod,
343{
344 /// Construct an instance of `Self` viewing `data` as the canonical layout for a vector.
345 /// The canonical layout is as follows:
346 ///
347 /// * `std::mem::size_of::<T>().max(std::mem::size_of::<M>())` for the metadata.
348 /// * Necessary additional padding to achieve the alignment requirements for `T`.
349 /// * `std::mem::size_of::<T>() * dim` for the slice.
350 ///
351 /// Returns an error if:
352 ///
353 /// * `data` is not aligned to `Self::canonical_align()`.
354 /// * `data.len() != `Self::canonical_bytes(dim)`.
355 pub fn from_canonical(data: &'a [u8], dim: usize) -> Result<Self, NotCanonical> {
356 let expected_align = Self::canonical_align().raw();
357 let expected_len = Self::canonical_bytes(dim);
358
359 if !(data.as_ptr() as usize).is_multiple_of(expected_align) {
360 Err(NotCanonical::NotAligned(expected_align))
361 } else if data.len() != expected_len {
362 Err(NotCanonical::WrongLength(expected_len, data.len()))
363 } else {
364 // SAFETY: We have checked both the length and alignment of `data`.
365 Ok(unsafe { Self::from_canonical_unchecked(data, dim) })
366 }
367 }
368
369 /// Construct a `VectorRef` from the raw data.
370 ///
371 /// # Safety
372 ///
373 /// * `data.as_ptr()` must be aligned to `Self::canonical_align()`.
374 /// * `data.len()` must be equal to `Self::canonical_bytes(dim)`.
375 ///
376 /// This invariant is checked in debug builds and will panic if not satisfied.
377 pub unsafe fn from_canonical_unchecked(data: &'a [u8], dim: usize) -> Self {
378 debug_assert_eq!(data.len(), Self::canonical_bytes(dim));
379 let offset = canonical_metadata_bytes::<T, M>();
380
381 // SAFETY: The length pre-condition of this function implies that the offset region
382 // `[offset, offset + size_of::<T>() * dim]` is valid for reading.
383 //
384 // Additionally, the alignment requirment of the base pointer ensures that after
385 // applying `offset`, we still have proper alignment for `T`.
386 //
387 // The `bytemuck::Pod` bound ensures we don't have malformed types after the type cast.
388 let slice =
389 unsafe { std::slice::from_raw_parts(data.as_ptr().add(offset).cast::<T>(), dim) };
390
391 // SAFETY: The pointer is valid and non-null because `data` is a slice, its length
392 // must be at least `std::mem::size_of::<M>()` (from the length precondition for
393 // this function).
394 //
395 // The alignemnt pre-condition ensures that the pointer is suitable aligned.
396 //
397 // THe `bytemuck::Pod` bound ensures that the resulting type is valid.
398 let meta =
399 unsafe { Ref::new(NonNull::new_unchecked(data.as_ptr().cast_mut()).cast::<M>()) };
400 Self { slice, meta }
401 }
402}
403
impl<'a, T, M> SliceMut<'a, T, M>
where
    T: bytemuck::Pod,
    M: bytemuck::Pod,
{
    /// Construct an instance of `Self` viewing `data` as the canonical layout for a vector.
    /// The canonical layout is as follows:
    ///
    /// * `std::mem::size_of::<M>()` rounded up to `std::mem::align_of::<T>()` for the
    ///   metadata prefix (zero bytes if `M` is a zero-sized type).
    /// * `std::mem::size_of::<T>() * dim` for the slice.
    ///
    /// Returns an error if:
    ///
    /// * `data` is not aligned to `Self::canonical_align()`.
    /// * `data.len() != Self::canonical_bytes(dim)`.
    pub fn from_canonical_mut(data: &'a mut [u8], dim: usize) -> Result<Self, NotCanonical> {
        let expected_align = Self::canonical_align().raw();
        let expected_len = Self::canonical_bytes(dim);

        if !(data.as_ptr() as usize).is_multiple_of(expected_align) {
            return Err(NotCanonical::NotAligned(expected_align));
        } else if data.len() != expected_len {
            return Err(NotCanonical::WrongLength(expected_len, data.len()));
        }

        let offset = canonical_metadata_bytes::<T, M>();

        // SAFETY: `offset <= expected_len` and `data.len() == expected_len`, so `offset`
        // is a valid split point for `data`.
        let (meta, slice) = unsafe { data.split_at_mut_unchecked(offset) };

        // SAFETY: `data.as_ptr()` when offset by `offset` will have an alignment suitable
        // for type `T`.
        //
        // We have checked that `data.len() == expected_len`, which implies that the region
        // of memory between `offset` and `data.len()` covers exactly `size_of::<T>() * dim`
        // bytes.
        //
        // The `bytemuck::Pod` requirement on `T` ensures the resulting values are valid.
        let slice = unsafe { std::slice::from_raw_parts_mut(slice.as_mut_ptr().cast::<T>(), dim) };

        // SAFETY: `data.as_ptr()` has an alignment of at least that required by `M`.
        //
        // Since `data` is a slice, its base pointer is `NonNull`.
        //
        // The `bytemuck::Pod` requirement ensures we have a valid instance.
        let meta = unsafe { Mut::new(NonNull::new_unchecked(meta.as_mut_ptr()).cast::<M>()) };

        Ok(Self { slice, meta })
    }
}
456
457///////////
458// Tests //
459///////////
460
#[cfg(test)]
mod tests {
    use std::fmt::Debug;

    use rand::{
        SeedableRng,
        distr::{Distribution, Uniform},
        rngs::StdRng,
    };

    use super::*;
    use crate::{
        alloc::{AlignedAllocator, GlobalAllocator},
        num::PowerOfTwo,
    };

    ////////////////////////
    // Compensated Vector //
    ////////////////////////

    // Simple Pod metadata type used to verify that metadata round-trips through `Slice`.
    #[derive(Default, Debug, Clone, Copy, PartialEq, bytemuck::Zeroable, bytemuck::Pod)]
    #[repr(C)]
    struct Metadata {
        a: u32,
        b: u32,
    }

    impl Metadata {
        // Convenience constructor for test assertions.
        fn new(a: u32, b: u32) -> Metadata {
            Self { a, b }
        }
    }

    // Exercise the owning `PolySlice`: construction, default initialization, and
    // visibility of mutations made through the accessors.
    #[test]
    fn test_vector() {
        let len = 20;
        let mut base = PolySlice::<f32, Metadata, _>::new_in(len, GlobalAllocator).unwrap();

        // A freshly constructed slice is default-initialized.
        assert_eq!(base.len(), len);
        assert_eq!(*base.meta(), Metadata::default());
        assert!(!base.is_empty());

        // Ensure that if we reborrow mutably that changes are visible.
        {
            *base.meta_mut() = Metadata::new(1, 2);
            let v = base.vector_mut();

            assert_eq!(v.len(), len);
            v.iter_mut().enumerate().for_each(|(i, v)| *v = i as f32);
        }

        // Are the changes visible?
        {
            let expected_metadata = Metadata::new(1, 2);
            assert_eq!(*base.meta(), expected_metadata);
            assert_eq!(base.len(), len);
            let v = base.vector();
            v.iter().enumerate().for_each(|(i, v)| {
                assert_eq!(*v, i as f32);
            })
        }
    }

    //////////////////////
    // Canonicalization //
    //////////////////////

    // A test zero-sized type with non-strict alignment.
    #[derive(Debug, Clone, Copy, PartialEq, bytemuck::Zeroable, bytemuck::Pod)]
    #[repr(C)]
    struct Zst;

    // `TryFrom<usize>` lets `check_canonicalization` assign generated values uniformly
    // across element/metadata types; for a ZST the value is simply discarded.
    #[expect(clippy::infallible_try_from)]
    impl TryFrom<usize> for Zst {
        type Error = std::convert::Infallible;
        fn try_from(_: usize) -> Result<Self, Self::Error> {
            Ok(Self)
        }
    }

    // A test zero-sized type with a strict alignment.
    #[derive(Debug, Clone, Copy, PartialEq, bytemuck::Zeroable, bytemuck::Pod)]
    #[repr(C, align(16))]
    struct ZstAligned;

    #[expect(clippy::infallible_try_from)]
    impl TryFrom<usize> for ZstAligned {
        type Error = std::convert::Infallible;
        fn try_from(_: usize) -> Result<Self, Self::Error> {
            Ok(Self)
        }
    }

    // Round-trip `ntrials` random vectors of dimension `dim` through the canonical
    // layout and verify:
    //
    // * `canonical_bytes` matches the closed-form `slope * dim + offset`.
    // * `canonical_align` matches both `align` and the max-of-alignments rule.
    // * Values written through `SliceMut` are read back intact through `SliceRef`.
    // * Wrong lengths and misaligned base pointers are rejected with the right error.
    fn check_canonicalization<T, M>(
        dim: usize,
        align: usize,
        slope: usize,
        offset: usize,
        ntrials: usize,
        rng: &mut StdRng,
    ) where
        T: bytemuck::Pod + TryFrom<usize, Error: Debug> + Debug + PartialEq,
        M: bytemuck::Pod + TryFrom<usize, Error: Debug> + Debug + PartialEq,
    {
        let bytes = SliceRef::<T, M>::canonical_bytes(dim);

        assert_eq!(
            bytes,
            slope * dim + offset,
            "computed bytes did not match the expected formula"
        );

        let expected_align = std::mem::align_of::<T>().max(std::mem::align_of::<M>());
        assert_eq!(SliceRef::<T, M>::canonical_align().raw(), align);
        assert_eq!(SliceRef::<T, M>::canonical_align().raw(), expected_align);

        // Over-allocate by `expected_align` bytes so the misalignment loop at the bottom
        // can slide the window by 1..expected_align without running out of bounds.
        let mut buffer = Poly::broadcast(
            0u8,
            bytes + expected_align,
            AlignedAllocator::new(PowerOfTwo::new(expected_align).unwrap()),
        )
        .unwrap();

        // Expected metadata and vector encoding.
        let mut expected = vec![usize::default(); dim];
        let dist = Uniform::new(0, 255).unwrap();

        for _ in 0..ntrials {
            let m: usize = dist.sample(rng);
            expected.iter_mut().for_each(|i| *i = dist.sample(rng));
            {
                // Write the metadata and elements through a mutable canonical view.
                let mut v =
                    SliceMut::<T, M>::from_canonical_mut(&mut buffer[..bytes], dim).unwrap();
                *v.meta_mut() = m.try_into().unwrap();

                assert_eq!(v.vector().len(), dim);
                assert_eq!(v.vector_mut().len(), dim);
                std::iter::zip(v.vector_mut().iter_mut(), expected.iter_mut()).for_each(
                    |(v, e)| {
                        *v = (*e).try_into().unwrap();
                    },
                );
            }

            // Make sure the reconstruction is valid.
            {
                let v = SliceRef::<T, M>::from_canonical(&buffer[..bytes], dim).unwrap();
                assert_eq!(*v.meta(), m.try_into().unwrap());

                assert_eq!(v.vector().len(), dim);
                std::iter::zip(v.vector().iter(), expected.iter()).for_each(|(v, e)| {
                    assert_eq!(*v, (*e).try_into().unwrap());
                });
            }
        }

        // Length Errors
        {
            for len in 0..bytes {
                // Too short
                let err =
                    SliceMut::<T, M>::from_canonical_mut(&mut buffer[..len], dim).unwrap_err();
                assert!(matches!(err, NotCanonical::WrongLength(_, _)));

                // Too short
                let err = SliceRef::<T, M>::from_canonical(&buffer[..len], dim).unwrap_err();
                assert!(matches!(err, NotCanonical::WrongLength(_, _)));
            }

            // Too long
            let err =
                SliceMut::<T, M>::from_canonical_mut(&mut buffer[..bytes + 1], dim).unwrap_err();

            assert!(matches!(err, NotCanonical::WrongLength(_, _)));

            let err = SliceRef::<T, M>::from_canonical(&buffer[..bytes + 1], dim).unwrap_err();

            assert!(matches!(err, NotCanonical::WrongLength(_, _)));
        }

        // Alignment
        {
            // Shifting an aligned base pointer by 1..expected_align always breaks the
            // alignment requirement.
            for offset in 1..expected_align {
                let err =
                    SliceMut::<T, M>::from_canonical_mut(&mut buffer[offset..offset + bytes], dim)
                        .unwrap_err();
                assert!(matches!(err, NotCanonical::NotAligned(_)));

                let err = SliceRef::<T, M>::from_canonical(&buffer[offset..offset + bytes], dim)
                    .unwrap_err();
                assert!(matches!(err, NotCanonical::NotAligned(_)));
            }
        }
    }

    // Keep the search space small under Miri, which runs orders of magnitude slower.
    cfg_if::cfg_if! {
        if #[cfg(miri)] {
            const MAX_DIM: usize = 10;
            const TRIALS_PER_DIM: usize = 1;
        } else {
            const MAX_DIM: usize = 256;
            const TRIALS_PER_DIM: usize = 20;
        }
    }

    // Generate a `#[test]` sweeping `dim in 0..MAX_DIM` for a `(metadata, element)` type
    // pair with the expected alignment and the `slope * dim + offset` size formula.
    macro_rules! test_canonical {
        ($name:ident, $M:ty, $T:ty, $align:literal, $slope:literal, $offset:literal, $seed:literal) => {
            #[test]
            fn $name() {
                let mut rng = StdRng::seed_from_u64($seed);
                for dim in 0..MAX_DIM {
                    check_canonicalization::<$T, $M>(
                        dim,
                        $align,
                        $slope,
                        $offset,
                        TRIALS_PER_DIM,
                        &mut rng,
                    );
                }
            }
        };
    }

    test_canonical!(canonical_u8_u32, u8, u32, 4, 4, 4, 0x60884b7a4ca28f49);
    test_canonical!(canonical_u32_u8, u32, u8, 4, 1, 4, 0x874aa5d8f40ec5ef);
    test_canonical!(canonical_u32_u32, u32, u32, 4, 4, 4, 0x516c550e7be19acc);

    // Zero-sized metadata contributes no prefix bytes; zero-sized elements contribute
    // no per-dimension bytes.
    test_canonical!(canonical_zst_u32, Zst, u32, 4, 4, 0, 0x908682ebda7c0fb9);
    test_canonical!(canonical_u32_zst, u32, Zst, 4, 0, 4, 0xf223385881819c1c);

    // Over-aligned ZSTs still influence the canonical alignment (and padding when used
    // as the element type).
    test_canonical!(
        canonical_zstaligned_u32,
        ZstAligned,
        u32,
        16,
        4,
        0,
        0x1811ee0fd078a173
    );
    test_canonical!(
        canonical_u32_zstaligned,
        u32,
        ZstAligned,
        16,
        0,
        16,
        0x6c9a67b09c0b6c0f
    );
}