1use std::marker::PhantomData;
19
20use crate::alphabet::{Alphabet, AlphabetExt};
21use crate::Nanoid;
22
/// Errors that can occur while packing a [`Nanoid`] into bytes or
/// unpacking packed bytes back into a [`Nanoid`].
#[derive(Debug, Clone, PartialEq, Eq, Hash, thiserror::Error)]
pub enum PackError {
    /// A character in the source id is not part of the alphabet (its
    /// `CHAR_TO_INDEX` entry is the `u8::MAX` sentinel, or it is non-ASCII).
    #[error("Invalid character '{char}' at position {position}")]
    InvalidCharacter {
        // Zero-based position of the offending character within the id.
        position: usize,
        // The offending character itself.
        char: char,
    },

    /// A decoded symbol index is out of range for the alphabet. This can only
    /// happen when unpacking bytes that were not produced by `pack` for the
    /// same alphabet (e.g. bytes injected via `from_bytes_unchecked`).
    #[error("Invalid character index {index} at position {position}")]
    InvalidIndex {
        // Zero-based position of the decoded symbol in the output id.
        position: usize,
        // The out-of-range index that was decoded.
        index: usize,
    },
}
44
45pub trait AlphabetPackExt: Alphabet {
51 const PACK_BITS: usize;
53
54 const CHAR_TO_INDEX: [u8; 128];
57
58 #[inline]
61 fn char_to_index(ch: u8) -> Option<usize> {
62 if ch >= 128 {
63 return None;
64 }
65 let idx = Self::CHAR_TO_INDEX[ch as usize];
66 if idx == u8::MAX {
67 None
68 } else {
69 Some(idx as usize)
70 }
71 }
72}
73
impl<A: Alphabet + AlphabetExt> AlphabetPackExt for A {
    // ceil(log2(symbol count)): the smallest bit width able to represent every
    // symbol index in 0..len. `(len - 1).ilog2() + 1` is exact for len >= 2.
    // NOTE(review): for a single-symbol alphabet this is `0.ilog2()`, which
    // panics during const evaluation — presumably alphabets always have at
    // least 2 symbols; confirm.
    const PACK_BITS: usize = (<Self as Alphabet>::SYMBOL_LIST.len() - 1).ilog2() as usize + 1;

    // Built at compile time; a `while` loop is used because iterators are not
    // available in const contexts. Slots for bytes that are not alphabet
    // symbols keep the `u8::MAX` sentinel.
    const CHAR_TO_INDEX: [u8; 128] = {
        let mut map = [u8::MAX; 128];
        let mut i = 0;
        while i < <Self as Alphabet>::SYMBOL_LIST.len() {
            map[<Self as Alphabet>::SYMBOL_LIST[i] as usize] = i as u8;
            i += 1;
        }
        map
    };
}
92
/// Compact, bit-packed representation of a [`Nanoid`].
///
/// Each character is stored as its `A::PACK_BITS`-bit symbol index, packed
/// MSB-first into `B` bytes. `B` is expected to be
/// `(N * A::PACK_BITS).div_ceil(8)`, which the [`packed_nanoid_type!`] macro
/// computes for you.
#[cfg_attr(feature = "zeroize", derive(zeroize::Zeroize))]
pub struct PackedNanoid<const N: usize, const B: usize, A: AlphabetPackExt> {
    // The packed bit stream.
    inner: [u8; B],
    // `fn() -> A` ties the type to the alphabet without storing it; the fn
    // pointer keeps auto traits independent of `A` — TODO confirm intent.
    _marker: PhantomData<fn() -> A>,
}
126
impl<const N: usize, const B: usize, A: AlphabetPackExt> PackedNanoid<N, B, A> {
    /// Packs `nanoid` into its `B`-byte bit-packed representation.
    ///
    /// # Errors
    ///
    /// Returns [`PackError::InvalidCharacter`] if a character of the id has
    /// no entry in the alphabet's `CHAR_TO_INDEX` table (should not happen
    /// for a well-formed [`Nanoid`], but is checked rather than assumed).
    pub fn pack(nanoid: &Nanoid<N, A>) -> Result<Self, PackError> {
        let mut packed = [0u8; B];
        Self::pack_impl(&nanoid.inner, &mut packed)?;
        Ok(Self {
            inner: packed,
            _marker: PhantomData,
        })
    }

    /// Unpacks the stored bytes back into a [`Nanoid`].
    ///
    /// # Errors
    ///
    /// Returns [`PackError::InvalidIndex`] if a decoded symbol index is out
    /// of range for the alphabet (possible when the bytes did not come from
    /// [`Self::pack`], e.g. when built via [`Self::from_bytes_unchecked`]).
    pub fn unpack(&self) -> Result<Nanoid<N, A>, PackError> {
        let mut chars = [0u8; N];
        Self::unpack_impl(&self.inner, &mut chars)?;

        Ok(Nanoid {
            inner: chars,
            _marker: PhantomData,
        })
    }

    /// Returns the packed bytes.
    #[must_use]
    #[inline]
    pub const fn as_bytes(&self) -> &[u8; B] {
        &self.inner
    }

    /// Builds a `PackedNanoid` directly from raw bytes without validation.
    ///
    /// # Safety
    ///
    /// The caller must ensure `bytes` is a valid packing of an id over `A`
    /// (i.e. was produced by [`Self::pack`] / [`Self::as_bytes`]); otherwise
    /// a later [`Self::unpack`] may return [`PackError::InvalidIndex`].
    #[must_use]
    #[inline]
    pub const unsafe fn from_bytes_unchecked(bytes: [u8; B]) -> Self {
        Self {
            inner: bytes,
            _marker: PhantomData,
        }
    }

    /// Core packing loop: emits each character's `PACK_BITS`-bit symbol
    /// index into `dst`, most-significant bits first.
    fn pack_impl(src: &[u8; N], dst: &mut [u8; B]) -> Result<(), PackError> {
        let pack_bits = A::PACK_BITS;
        // `bit_buffer` accumulates bits; only its low `bits_in_buffer` bits
        // are meaningful at any point.
        let mut bit_buffer: u64 = 0;
        let mut bits_in_buffer: usize = 0;
        let mut dst_idx: usize = 0;

        for (i, &ch) in src.iter().enumerate() {
            let idx = A::char_to_index(ch).ok_or(PackError::InvalidCharacter {
                position: i,
                char: ch as char,
            })?;

            // Append the next symbol index below the bits already buffered.
            bit_buffer = (bit_buffer << pack_bits) | (idx as u64);
            bits_in_buffer += pack_bits;

            // Flush whole bytes, most significant first.
            while bits_in_buffer >= 8 && dst_idx < B {
                bits_in_buffer -= 8;
                dst[dst_idx] = ((bit_buffer >> bits_in_buffer) & 0xFF) as u8;
                dst_idx += 1;
            }
        }

        // Left-align any leftover bits into the final byte; its unused low
        // bits stay zero.
        if bits_in_buffer > 0 && dst_idx < B {
            dst[dst_idx] = ((bit_buffer << (8 - bits_in_buffer)) & 0xFF) as u8;
        }

        Ok(())
    }

    /// Core unpacking loop: reads `PACK_BITS`-bit indices MSB-first from
    /// `src` and maps each back to its alphabet symbol.
    ///
    /// NOTE(review): assumes `B * 8 >= N * PACK_BITS`; with an undersized
    /// `B` the `bits_in_buffer -= pack_bits` below would underflow (panic in
    /// debug builds). `packed_nanoid_type!` always produces a big-enough
    /// `B` — confirm call sites use it.
    fn unpack_impl(src: &[u8; B], dst: &mut [u8; N]) -> Result<(), PackError> {
        let pack_bits = A::PACK_BITS;
        let mask = (1u64 << pack_bits) - 1;
        let mut bit_buffer: u64 = 0;
        let mut bits_in_buffer: usize = 0;
        let mut src_idx: usize = 0;

        for (i, dst_byte) in dst.iter_mut().enumerate() {
            // Refill the buffer until at least one full symbol's bits are in.
            while bits_in_buffer < pack_bits && src_idx < B {
                bit_buffer = (bit_buffer << 8) | (src[src_idx] as u64);
                bits_in_buffer += 8;
                src_idx += 1;
            }

            // Take the top `pack_bits` buffered bits as the next index.
            bits_in_buffer -= pack_bits;
            let idx = ((bit_buffer >> bits_in_buffer) & mask) as usize;

            if idx >= A::VALID_SYMBOL_LIST.len() {
                return Err(PackError::InvalidIndex {
                    position: i,
                    index: idx,
                });
            }

            *dst_byte = A::VALID_SYMBOL_LIST[idx];
        }

        Ok(())
    }
}
272
#[cfg(feature = "rkyv")]
// Bound fixed from `A: Alphabet` to `A: AlphabetPackExt`: the struct
// declaration requires `A: AlphabetPackExt`, so mentioning
// `PackedNanoid<N, B, A>` with only `A: Alphabet` does not satisfy the
// struct's bound; this also matches every other impl in the file.
impl<const N: usize, const B: usize, A: AlphabetPackExt> rkyv::Archive for PackedNanoid<N, B, A> {
    /// The archived form is the raw packed byte array itself.
    type Archived = [u8; B];
    type Resolver = [(); B];

    fn resolve(&self, _: Self::Resolver, out: rkyv::Place<Self::Archived>) {
        out.write(self.inner);
    }
}
282
#[cfg(feature = "rkyv")]
// Bound fixed from `A: Alphabet` to `A: AlphabetPackExt` to satisfy the
// struct's declared bound and stay consistent with the other impls.
impl<const N: usize, const B: usize, A: AlphabetPackExt, S> rkyv::Serialize<S>
    for PackedNanoid<N, B, A>
where
    S: rkyv::rancor::Fallible + ?Sized,
{
    /// Serializes as a plain `[u8; B]`.
    fn serialize(&self, serializer: &mut S) -> Result<Self::Resolver, S::Error> {
        self.inner.serialize(serializer)
    }
}
292
#[cfg(feature = "rkyv")]
// Bound fixed from `A: Alphabet` to `A: AlphabetPackExt` to satisfy the
// struct's declared bound and stay consistent with the other impls.
impl<const N: usize, const B: usize, A: AlphabetPackExt, D: rkyv::rancor::Fallible + ?Sized>
    rkyv::Deserialize<PackedNanoid<N, B, A>, D> for [u8; B]
{
    /// Rebuilds a `PackedNanoid` from archived bytes.
    ///
    /// NOTE(review): no validation happens here — arbitrary bytes are
    /// accepted and only checked later by `unpack`.
    fn deserialize(&self, _: &mut D) -> Result<PackedNanoid<N, B, A>, D::Error> {
        Ok(PackedNanoid {
            inner: *self,
            _marker: PhantomData,
        })
    }
}
304
305impl<const N: usize, const B: usize, A: AlphabetPackExt> Default for PackedNanoid<N, B, A> {
306 fn default() -> Self {
307 Self {
308 inner: [0u8; B],
309 _marker: PhantomData,
310 }
311 }
312}
313
// Manual `Copy` so the impl does not require `A: Copy` (a derive would add
// that bound); only the `[u8; B]` payload is actually copied.
impl<const N: usize, const B: usize, A: AlphabetPackExt> Copy for PackedNanoid<N, B, A> {}
315
316impl<const N: usize, const B: usize, A: AlphabetPackExt> Clone for PackedNanoid<N, B, A> {
317 fn clone(&self) -> Self {
318 *self
319 }
320}
321
322impl<const N: usize, const B: usize, A: AlphabetPackExt> PartialEq for PackedNanoid<N, B, A> {
323 fn eq(&self, other: &Self) -> bool {
324 self.inner == other.inner
325 }
326}
327
// `eq` is a plain byte comparison, which is reflexive, so `Eq` holds.
impl<const N: usize, const B: usize, A: AlphabetPackExt> Eq for PackedNanoid<N, B, A> {}
329
330impl<const N: usize, const B: usize, A: AlphabetPackExt> std::hash::Hash for PackedNanoid<N, B, A> {
331 fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
332 self.inner.hash(state);
333 }
334}
335
impl<const N: usize, const B: usize, A: AlphabetPackExt> PartialOrd for PackedNanoid<N, B, A> {
    // Canonical form: defer to the total order defined by `Ord`.
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
341
342impl<const N: usize, const B: usize, A: AlphabetPackExt> Ord for PackedNanoid<N, B, A> {
343 #[inline]
344 fn cmp(&self, other: &Self) -> std::cmp::Ordering {
345 self.inner.cmp(&other.inner)
346 }
347}
348
impl<const N: usize, const B: usize, A: AlphabetPackExt> std::fmt::Debug for PackedNanoid<N, B, A> {
    // Renders as `PackedNanoid([..bytes..])`; const params and the alphabet
    // type are omitted from the output.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_tuple("PackedNanoid").field(&self.inner).finish()
    }
}
354
355impl<const N: usize, const B: usize, A: AlphabetPackExt> AsRef<[u8; B]> for PackedNanoid<N, B, A> {
356 fn as_ref(&self) -> &[u8; B] {
357 &self.inner
358 }
359}
360
/// Expands to the `PackedNanoid` type for an id of `$n` characters over
/// `$alphabet`, computing the byte length `B` as
/// `($n * PACK_BITS).div_ceil(8)` so callers don't have to spell it out.
#[macro_export]
macro_rules! packed_nanoid_type {
    ($n:expr, $alphabet:ty) => {
        $crate::PackedNanoid<
            $n,
            {
                ($n * <$alphabet as $crate::packed::AlphabetPackExt>::PACK_BITS).div_ceil(8)
            },
            $alphabet,
        >
    };
}
389
// Fixes: split two statements that were jammed onto one line in
// `test_char_to_index`, and removed a redundant `#[cfg(test)]` on the inner
// `rkyv_tests` module (it is already inside a `#[cfg(test)]` module).
#[cfg(test)]
mod tests {
    use pretty_assertions::{assert_eq, assert_ne};

    use super::*;
    use crate::alphabet::{
        Base16Alphabet, Base32Alphabet, Base36Alphabet, Base58Alphabet, Base62Alphabet,
        Base64UrlAlphabet,
    };

    /// PACK_BITS must be ceil(log2(alphabet size)) for every built-in alphabet.
    #[test]
    fn test_pack_bits_values() {
        assert_eq!(Base16Alphabet::PACK_BITS, 4);
        assert_eq!(Base32Alphabet::PACK_BITS, 5);
        assert_eq!(Base36Alphabet::PACK_BITS, 6);
        assert_eq!(Base58Alphabet::PACK_BITS, 6);
        assert_eq!(Base62Alphabet::PACK_BITS, 6);
        assert_eq!(Base64UrlAlphabet::PACK_BITS, 6);
    }

    /// pack → unpack round-trips for several id lengths (6 bits/char).
    #[test]
    fn test_roundtrip_base64url() {
        for _ in 0..100 {
            let id: Nanoid<21, Base64UrlAlphabet> = Nanoid::new();
            let packed: PackedNanoid<21, 16, Base64UrlAlphabet> = PackedNanoid::pack(&id).unwrap();
            let unpacked = packed.unpack().unwrap();
            assert_eq!(id, unpacked);
        }

        for _ in 0..100 {
            let id: Nanoid<10, Base64UrlAlphabet> = Nanoid::new();
            let packed: PackedNanoid<10, 8, Base64UrlAlphabet> = PackedNanoid::pack(&id).unwrap();
            let unpacked = packed.unpack().unwrap();
            assert_eq!(id, unpacked);
        }

        for _ in 0..100 {
            let id: Nanoid<8, Base64UrlAlphabet> = Nanoid::new();
            let packed: PackedNanoid<8, 6, Base64UrlAlphabet> = PackedNanoid::pack(&id).unwrap();
            let unpacked = packed.unpack().unwrap();
            assert_eq!(id, unpacked);
        }

        for _ in 0..100 {
            let id: Nanoid<4, Base64UrlAlphabet> = Nanoid::new();
            let packed: PackedNanoid<4, 3, Base64UrlAlphabet> = PackedNanoid::pack(&id).unwrap();
            let unpacked = packed.unpack().unwrap();
            assert_eq!(id, unpacked);
        }
    }

    /// pack → unpack round-trips for Base32 (5 bits/char).
    #[test]
    fn test_roundtrip_base32() {
        for _ in 0..100 {
            let id: Nanoid<21, Base32Alphabet> = Nanoid::new();
            let packed: PackedNanoid<21, 14, Base32Alphabet> = PackedNanoid::pack(&id).unwrap();
            let unpacked = packed.unpack().unwrap();
            assert_eq!(id, unpacked);
        }

        for _ in 0..100 {
            let id: Nanoid<10, Base32Alphabet> = Nanoid::new();
            let packed: PackedNanoid<10, 7, Base32Alphabet> = PackedNanoid::pack(&id).unwrap();
            let unpacked = packed.unpack().unwrap();
            assert_eq!(id, unpacked);
        }

        for _ in 0..100 {
            let id: Nanoid<8, Base32Alphabet> = Nanoid::new();
            let packed: PackedNanoid<8, 5, Base32Alphabet> = PackedNanoid::pack(&id).unwrap();
            let unpacked = packed.unpack().unwrap();
            assert_eq!(id, unpacked);
        }
    }

    /// pack → unpack round-trips for Base16 (4 bits/char).
    #[test]
    fn test_roundtrip_base16() {
        for _ in 0..100 {
            let id: Nanoid<21, Base16Alphabet> = Nanoid::new();
            let packed: PackedNanoid<21, 11, Base16Alphabet> = PackedNanoid::pack(&id).unwrap();
            let unpacked = packed.unpack().unwrap();
            assert_eq!(id, unpacked);
        }

        for _ in 0..100 {
            let id: Nanoid<10, Base16Alphabet> = Nanoid::new();
            let packed: PackedNanoid<10, 5, Base16Alphabet> = PackedNanoid::pack(&id).unwrap();
            let unpacked = packed.unpack().unwrap();
            assert_eq!(id, unpacked);
        }

        for _ in 0..100 {
            let id: Nanoid<8, Base16Alphabet> = Nanoid::new();
            let packed: PackedNanoid<8, 4, Base16Alphabet> = PackedNanoid::pack(&id).unwrap();
            let unpacked = packed.unpack().unwrap();
            assert_eq!(id, unpacked);
        }
    }

    /// The packed form is smaller than the 21-byte ASCII id.
    #[test]
    fn test_packed_size_reduction() {
        let id: Nanoid<21, Base64UrlAlphabet> = Nanoid::new();
        let packed: PackedNanoid<21, 16, Base64UrlAlphabet> = PackedNanoid::pack(&id).unwrap();
        assert_eq!(packed.as_bytes().len(), 16);

        let id: Nanoid<21, Base32Alphabet> = Nanoid::new();
        let packed: PackedNanoid<21, 14, Base32Alphabet> = PackedNanoid::pack(&id).unwrap();
        assert_eq!(packed.as_bytes().len(), 14);

        let id: Nanoid<21, Base16Alphabet> = Nanoid::new();
        let packed: PackedNanoid<21, 11, Base16Alphabet> = PackedNanoid::pack(&id).unwrap();
        assert_eq!(packed.as_bytes().len(), 11);
    }

    /// Packing the same id twice yields equal packed values.
    #[test]
    fn test_eq() {
        let id: Nanoid<21, Base64UrlAlphabet> = "ABCDEFGHIJKLMNOPQ123_".parse().unwrap();
        let packed1: PackedNanoid<21, 16, Base64UrlAlphabet> = PackedNanoid::pack(&id).unwrap();
        let packed2: PackedNanoid<21, 16, Base64UrlAlphabet> = PackedNanoid::pack(&id).unwrap();
        assert_eq!(packed1, packed2);
    }

    /// Distinct ids pack to distinct values.
    #[test]
    fn test_ne() {
        let id1: Nanoid<21, Base64UrlAlphabet> = Nanoid::new();
        let id2: Nanoid<21, Base64UrlAlphabet> = Nanoid::new();
        let packed1: PackedNanoid<21, 16, Base64UrlAlphabet> = PackedNanoid::pack(&id1).unwrap();
        let packed2: PackedNanoid<21, 16, Base64UrlAlphabet> = PackedNanoid::pack(&id2).unwrap();
        assert_ne!(packed1, packed2);
    }

    /// The macro computes `B` and produces a usable type alias.
    #[test]
    fn test_packed_nanoid_type_macro() {
        type Packed64 = packed_nanoid_type!(21, Base64UrlAlphabet);
        let id: Nanoid<21, Base64UrlAlphabet> = Nanoid::new();
        let packed: Packed64 = PackedNanoid::pack(&id).unwrap();
        let unpacked: Nanoid<21, Base64UrlAlphabet> = packed.unpack().unwrap();
        assert_eq!(id, unpacked);
    }

    /// Known-answer test: "0123" over Base16 ('0'..'3' map to indices 6..9)
    /// packs to the nibble pairs 0x67, 0x89.
    #[test]
    fn test_known_values_base16() {
        let id: Nanoid<4, Base16Alphabet> = "0123".parse().unwrap();
        let packed: PackedNanoid<4, 2, Base16Alphabet> = PackedNanoid::pack(&id).unwrap();
        assert_eq!(packed.as_bytes(), &[0x67, 0x89]);

        let unpacked = packed.unpack().unwrap();
        assert_eq!(unpacked.as_str(), "0123");
    }

    /// The reverse-lookup table covers the full alphabet and rejects
    /// out-of-alphabet and non-ASCII bytes.
    #[test]
    fn test_char_to_index() {
        assert_eq!(Base64UrlAlphabet::char_to_index(b'A'), Some(0));
        assert_eq!(Base64UrlAlphabet::char_to_index(b'Z'), Some(25));
        assert_eq!(Base64UrlAlphabet::char_to_index(b'a'), Some(26));
        assert_eq!(Base64UrlAlphabet::char_to_index(b'z'), Some(51));
        assert_eq!(Base64UrlAlphabet::char_to_index(b'0'), Some(52));
        assert_eq!(Base64UrlAlphabet::char_to_index(b'9'), Some(61));
        assert_eq!(Base64UrlAlphabet::char_to_index(b'_'), Some(62));
        assert_eq!(Base64UrlAlphabet::char_to_index(b'-'), Some(63));
        assert_eq!(Base64UrlAlphabet::char_to_index(b'@'), None);
        assert_eq!(Base64UrlAlphabet::char_to_index(b' '), None);
        assert_eq!(Base64UrlAlphabet::char_to_index(0x80), None);

        assert_eq!(Base16Alphabet::char_to_index(b'A'), Some(0));
        assert_eq!(Base16Alphabet::char_to_index(b'F'), Some(5));
        assert_eq!(Base16Alphabet::char_to_index(b'0'), Some(6));
        assert_eq!(Base16Alphabet::char_to_index(b'9'), Some(15));
        assert_eq!(Base16Alphabet::char_to_index(b'G'), None);
        assert_eq!(Base16Alphabet::char_to_index(b'a'), None);

        assert_eq!(Base32Alphabet::char_to_index(b'A'), Some(0));
        assert_eq!(Base32Alphabet::char_to_index(b'Z'), Some(25));
        assert_eq!(Base32Alphabet::char_to_index(b'2'), Some(26));
        assert_eq!(Base32Alphabet::char_to_index(b'7'), Some(31));
        assert_eq!(Base32Alphabet::char_to_index(b'1'), None);
        assert_eq!(Base32Alphabet::char_to_index(b'8'), None);
    }

    #[cfg(feature = "rkyv")]
    mod rkyv_tests {
        use rkyv::rancor::Error;

        use crate::alphabet::{Base16Alphabet, Base32Alphabet, Base64UrlAlphabet};
        use crate::packed::PackedNanoid;
        use crate::Nanoid;

        /// The archived representation is exactly the packed byte array.
        #[test]
        fn test_rkyv_archive_bytes_match_packed() {
            let id: Nanoid<21, Base64UrlAlphabet> = Nanoid::new();
            let packed: PackedNanoid<21, 16, Base64UrlAlphabet> = PackedNanoid::pack(&id).unwrap();

            let archived = rkyv::to_bytes::<Error>(&packed).unwrap();
            let archived_ref: &[u8; 16] = unsafe { &*archived.as_ptr().cast() };

            assert_eq!(archived_ref, packed.as_bytes());
        }

        #[test]
        fn test_rkyv_roundtrip_base64url() {
            let id: Nanoid<21, Base64UrlAlphabet> = Nanoid::new();
            let packed: PackedNanoid<21, 16, Base64UrlAlphabet> = PackedNanoid::pack(&id).unwrap();

            let bytes = rkyv::to_bytes::<Error>(&packed).unwrap();
            let deserialized: PackedNanoid<21, 16, Base64UrlAlphabet> =
                rkyv::from_bytes::<PackedNanoid<21, 16, Base64UrlAlphabet>, Error>(&bytes).unwrap();

            assert_eq!(packed, deserialized);
            assert_eq!(id, deserialized.unpack().unwrap());
        }

        #[test]
        fn test_rkyv_roundtrip_base32() {
            let id: Nanoid<21, Base32Alphabet> = Nanoid::new();
            let packed: PackedNanoid<21, 14, Base32Alphabet> = PackedNanoid::pack(&id).unwrap();

            let bytes = rkyv::to_bytes::<Error>(&packed).unwrap();
            let deserialized: PackedNanoid<21, 14, Base32Alphabet> =
                rkyv::from_bytes::<PackedNanoid<21, 14, Base32Alphabet>, Error>(&bytes).unwrap();

            assert_eq!(packed, deserialized);
            assert_eq!(id, deserialized.unpack().unwrap());
        }

        #[test]
        fn test_rkyv_roundtrip_base16() {
            let id: Nanoid<21, Base16Alphabet> = Nanoid::new();
            let packed: PackedNanoid<21, 11, Base16Alphabet> = PackedNanoid::pack(&id).unwrap();

            let bytes = rkyv::to_bytes::<Error>(&packed).unwrap();
            let deserialized: PackedNanoid<21, 11, Base16Alphabet> =
                rkyv::from_bytes::<PackedNanoid<21, 11, Base16Alphabet>, Error>(&bytes).unwrap();

            assert_eq!(packed, deserialized);
            assert_eq!(id, deserialized.unpack().unwrap());
        }

        #[test]
        fn test_rkyv_roundtrip_different_sizes() {
            for _ in 0..10 {
                let id: Nanoid<10, Base64UrlAlphabet> = Nanoid::new();
                let packed: PackedNanoid<10, 8, Base64UrlAlphabet> =
                    PackedNanoid::pack(&id).unwrap();

                let bytes = rkyv::to_bytes::<Error>(&packed).unwrap();
                let deserialized: PackedNanoid<10, 8, Base64UrlAlphabet> =
                    rkyv::from_bytes::<PackedNanoid<10, 8, Base64UrlAlphabet>, Error>(&bytes)
                        .unwrap();

                assert_eq!(packed, deserialized);
                assert_eq!(id, deserialized.unpack().unwrap());
            }
        }

        /// Archiving adds no overhead: the output is exactly `B` bytes.
        #[test]
        fn test_rkyv_archive_size_equals_packed_size() {
            let id: Nanoid<21, Base64UrlAlphabet> = Nanoid::new();
            let packed: PackedNanoid<21, 16, Base64UrlAlphabet> = PackedNanoid::pack(&id).unwrap();

            let bytes = rkyv::to_bytes::<Error>(&packed).unwrap();
            assert_eq!(bytes.len(), 16);
        }

        /// Known-answer test through the full rkyv round trip.
        #[test]
        fn test_rkyv_known_value() {
            let id: Nanoid<4, Base16Alphabet> = "0123".parse().unwrap();
            let packed: PackedNanoid<4, 2, Base16Alphabet> = PackedNanoid::pack(&id).unwrap();

            assert_eq!(packed.as_bytes(), &[0x67, 0x89]);

            let archived = rkyv::to_bytes::<Error>(&packed).unwrap();
            assert_eq!(archived.as_slice(), &[0x67, 0x89]);

            let deserialized: PackedNanoid<4, 2, Base16Alphabet> =
                rkyv::from_bytes::<PackedNanoid<4, 2, Base16Alphabet>, Error>(&archived).unwrap();
            assert_eq!(packed, deserialized);
            assert_eq!(id, deserialized.unpack().unwrap());
        }
    }
}