use crate::util::bit_util::ceil;
use std::fmt::Debug;

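/// A view of an arbitrarily aligned bit range as zero-padded 64-bit words
///
/// The range `[offset, offset + len)` of a byte buffer is decomposed into an
/// optional unaligned `prefix` word, a slice of 8-byte-aligned `chunks`, and
/// an optional unaligned `suffix` word. The `lead_padding` low bits of the
/// prefix and the `trailing_padding` high bits of the suffix fall outside the
/// requested range and are masked to zero, so consumers such as
/// [`UnalignedBitChunk::count_ones`] can treat every word uniformly.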
#[derive(Debug)]
pub struct UnalignedBitChunk<'a> {
    lead_padding: usize,
    trailing_padding: usize,

    prefix: Option<u64>,
    chunks: &'a [u64],
    suffix: Option<u64>,
}

impl<'a> UnalignedBitChunk<'a> {
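    /// Creates a new [`UnalignedBitChunk`] from a byte buffer, an `offset`
    /// and a `len` given in bits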
    pub fn new(buffer: &'a [u8], offset: usize, len: usize) -> Self {
        if len == 0 {
            return Self {
                lead_padding: 0,
                trailing_padding: 0,
                prefix: None,
                chunks: &[],
                suffix: None,
            };
        }

        let byte_offset = offset / 8;
        let offset_padding = offset % 8;

        let bytes_len = (len + offset_padding).div_ceil(8);
        let buffer = &buffer[byte_offset..byte_offset + bytes_len];

        let prefix_mask = compute_prefix_mask(offset_padding);

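        // A single u64 read covers the entire range: everything becomes the prefix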
        if buffer.len() <= 8 {
            let (suffix_mask, trailing_padding) = compute_suffix_mask(len, offset_padding);
            let prefix = read_u64(buffer) & suffix_mask & prefix_mask;

            return Self {
                lead_padding: offset_padding,
                trailing_padding,
                prefix: Some(prefix),
                chunks: &[],
                suffix: None,
            };
        }

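        // Two u64 reads cover the range: the first becomes the prefix, the second the suffix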
        if buffer.len() <= 16 {
            let (suffix_mask, trailing_padding) = compute_suffix_mask(len, offset_padding);
            let prefix = read_u64(&buffer[..8]) & prefix_mask;
            let suffix = read_u64(&buffer[8..]) & suffix_mask;

            return Self {
                lead_padding: offset_padding,
                trailing_padding,
                prefix: Some(prefix),
                chunks: &[],
                suffix: Some(suffix),
            };
        }

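        // Split the buffer into an unaligned head, a maximal 8-byte-aligned
        // middle that can be reinterpreted as u64 words, and an unaligned tail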
        let (prefix, mut chunks, suffix) = unsafe { buffer.align_to::<u64>() };
        assert!(
            prefix.len() < 8 && suffix.len() < 8,
            "align_to did not return largest possible aligned slice"
        );

        let (alignment_padding, prefix) = match (offset_padding, prefix.is_empty()) {
            // No bit offset and the buffer is already aligned: no prefix needed
            (0, true) => (0, None),
            // Aligned buffer with a bit offset: mask the first chunk into the prefix
            (_, true) => {
                let prefix = chunks[0] & prefix_mask;
                chunks = &chunks[1..];
                (0, Some(prefix))
            }
            // Unaligned head: read it into a u64 and shift it up past the
            // padding introduced by the missing alignment bytes
            (_, false) => {
                let alignment_padding = (8 - prefix.len()) * 8;

                let prefix = (read_u64(prefix) & prefix_mask) << alignment_padding;
                (alignment_padding, Some(prefix))
            }
        };

        let lead_padding = offset_padding + alignment_padding;
        let (suffix_mask, trailing_padding) = compute_suffix_mask(len, lead_padding);

        let suffix = match (trailing_padding, suffix.is_empty()) {
            // The range ends exactly on a word boundary: no suffix needed
            (0, _) => None,
            // Byte-aligned tail: mask the last chunk into the suffix
            (_, true) => {
                let suffix = chunks[chunks.len() - 1] & suffix_mask;
                chunks = &chunks[..chunks.len() - 1];
                Some(suffix)
            }
            // Unaligned tail: read it into a u64 and mask
            (_, false) => Some(read_u64(suffix) & suffix_mask),
        };

        Self {
            lead_padding,
            trailing_padding,
            prefix,
            chunks,
            suffix,
        }
    }

    /// Returns the number of zeroed low bits in the prefix
    pub fn lead_padding(&self) -> usize {
        self.lead_padding
    }

    /// Returns the number of zeroed high bits in the suffix
    pub fn trailing_padding(&self) -> usize {
        self.trailing_padding
    }

    /// Returns the unaligned leading word, if any
    pub fn prefix(&self) -> Option<u64> {
        self.prefix
    }

    /// Returns the unaligned trailing word, if any
    pub fn suffix(&self) -> Option<u64> {
        self.suffix
    }

    /// Returns the aligned words between the prefix and suffix
    pub fn chunks(&self) -> &'a [u64] {
        self.chunks
    }

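    /// Returns the bit range as zero-padded u64 words: the prefix, then the
    /// aligned chunks, then the suffix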
    pub fn iter(&self) -> UnalignedBitChunkIterator<'a> {
        self.prefix
            .into_iter()
            .chain(self.chunks.iter().cloned())
            .chain(self.suffix)
    }

    /// Counts the number of set bits in the bit range
    pub fn count_ones(&self) -> usize {
        self.iter().map(|x| x.count_ones() as usize).sum()
    }
}

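/// Iterator over the u64 words of an [`UnalignedBitChunk`]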
pub type UnalignedBitChunkIterator<'a> = std::iter::Chain<
    std::iter::Chain<std::option::IntoIter<u64>, std::iter::Cloned<std::slice::Iter<'a, u64>>>,
    std::option::IntoIter<u64>,
>;

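/// Reads up to 8 bytes as a little-endian u64, zero-padding the missing
/// high bytes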
#[inline]
fn read_u64(input: &[u8]) -> u64 {
    let len = input.len().min(8);
    let mut buf = [0_u8; 8];
    buf[..len].copy_from_slice(&input[..len]);
    u64::from_le_bytes(buf)
}

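/// Returns a mask with the `lead_padding` low bits cleared, e.g.
/// `compute_prefix_mask(3) == 0xFFFF_FFFF_FFFF_FFF8`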
#[inline]
fn compute_prefix_mask(lead_padding: usize) -> u64 {
    !((1 << lead_padding) - 1)
}

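/// Returns a mask for the in-range bits of the final word, together with the
/// number of trailing padding bits, e.g. `compute_suffix_mask(66, 1)` gives
/// `(0b111, 61)` because the final word holds only 3 in-range bits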
#[inline]
fn compute_suffix_mask(len: usize, lead_padding: usize) -> (u64, usize) {
    let trailing_bits = (len + lead_padding) % 64;

    if trailing_bits == 0 {
        return (u64::MAX, 0);
    }

    let trailing_padding = 64 - trailing_bits;
    let suffix_mask = (1 << trailing_bits) - 1;
    (suffix_mask, trailing_padding)
}

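/// Iterates over the bits of a byte buffer as complete u64 chunks plus a
/// final remainder of fewer than 64 bits. The first byte in the buffer
/// becomes the least significant byte of each output u64.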
#[derive(Debug)]
pub struct BitChunks<'a> {
    buffer: &'a [u8],
    /// offset inside a byte, guaranteed to be between 0 and 7 (inclusive)
    bit_offset: usize,
    /// number of complete u64 chunks
    chunk_len: usize,
    /// number of remaining bits, guaranteed to be between 0 and 63 (inclusive)
    remainder_len: usize,
}

impl<'a> BitChunks<'a> {
    pub fn new(buffer: &'a [u8], offset: usize, len: usize) -> Self {
        assert!(
            ceil(offset + len, 8) <= buffer.len(),
            "offset + len out of bounds"
        );

        let byte_offset = offset / 8;
        let bit_offset = offset % 8;

        // number of complete u64 chunks
        let chunk_len = len / 64;
        // number of remaining bits
        let remainder_len = len % 64;

        BitChunks::<'a> {
            buffer: &buffer[byte_offset..],
            bit_offset,
            chunk_len,
            remainder_len,
        }
    }
}

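/// Iterator over the complete u64 chunks of a [`BitChunks`]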
#[derive(Debug)]
pub struct BitChunkIterator<'a> {
    buffer: &'a [u8],
    bit_offset: usize,
    chunk_len: usize,
    index: usize,
}

impl<'a> BitChunks<'a> {
    /// Returns the number of remaining bits, guaranteed to be between 0 and 63 (inclusive)
    #[inline]
    pub const fn remainder_len(&self) -> usize {
        self.remainder_len
    }

    /// Returns the number of complete u64 chunks
    #[inline]
    pub const fn chunk_len(&self) -> usize {
        self.chunk_len
    }

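    /// Returns the remainder, packed into the low `remainder_len` bits of a
    /// u64 with the high bits zeroed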
    #[inline]
    pub fn remainder_bits(&self) -> u64 {
        let bit_len = self.remainder_len;
        if bit_len == 0 {
            0
        } else {
            let bit_offset = self.bit_offset;
            // number of bytes to read
            // may be one more than sizeof(u64) if the offset is in the middle of a byte
            let byte_len = ceil(bit_len + bit_offset, 8);
            // pointer to the remainder bytes after all complete chunks
            let base = unsafe {
                self.buffer
                    .as_ptr()
                    .add(self.chunk_len * std::mem::size_of::<u64>())
            };

            let mut bits = unsafe { std::ptr::read(base) } as u64 >> bit_offset;
            for i in 1..byte_len {
                let byte = unsafe { std::ptr::read(base.add(i)) };
                bits |= (byte as u64) << (i * 8 - bit_offset);
            }

            bits & ((1 << bit_len) - 1)
        }
    }

    /// Returns an iterator over the complete u64 chunks
    #[inline]
    pub const fn iter(&self) -> BitChunkIterator<'a> {
        BitChunkIterator::<'a> {
            buffer: self.buffer,
            bit_offset: self.bit_offset,
            chunk_len: self.chunk_len,
            index: 0,
        }
    }

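    /// Returns an iterator over all the bits, with the remainder zero-padded
    /// to a full u64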
    #[inline]
    pub fn iter_padded(&self) -> impl Iterator<Item = u64> + 'a {
        self.iter().chain(std::iter::once(self.remainder_bits()))
    }
}

impl<'a> IntoIterator for BitChunks<'a> {
    type Item = u64;
    type IntoIter = BitChunkIterator<'a>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

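// Each call to `next` reads the current 8 bytes unaligned and, when the bit
// offset is non-zero, tops up the vacated high bits from the first byte of
// the next chunk. The bounds check in `BitChunks::new` guarantees that this
// extra byte exists whenever the bit offset is non-zero.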
impl Iterator for BitChunkIterator<'_> {
    type Item = u64;

    #[inline]
    fn next(&mut self) -> Option<u64> {
        let index = self.index;
        if index >= self.chunk_len {
            return None;
        }

        // casting to *const u64 is fine: read_unaligned below imposes no
        // alignment requirement
        #[allow(clippy::cast_ptr_alignment)]
        let raw_data = self.buffer.as_ptr() as *const u64;

        let current = unsafe { std::ptr::read_unaligned(raw_data.add(index)).to_le() };

        let bit_offset = self.bit_offset;

        let combined = if bit_offset == 0 {
            current
        } else {
            // the constructor ensures bit_offset is in 0..8, so at most one
            // extra byte is needed to fill in the high bits
            let next =
                unsafe { std::ptr::read_unaligned(raw_data.add(index + 1) as *const u8) as u64 };

            (current >> bit_offset) | (next << (64 - bit_offset))
        };

        self.index = index + 1;

        Some(combined)
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (
            self.chunk_len - self.index,
            Some(self.chunk_len - self.index),
        )
    }
}

impl ExactSizeIterator for BitChunkIterator<'_> {
    #[inline]
    fn len(&self) -> usize {
        self.chunk_len - self.index
    }
}

#[cfg(test)]
mod tests {
    use rand::distr::uniform::UniformSampler;
    use rand::distr::uniform::UniformUsize;
    use rand::prelude::*;
    use rand::rng;

    use crate::buffer::Buffer;
    use crate::util::bit_chunk_iterator::UnalignedBitChunk;

    #[test]
    fn test_iter_aligned() {
        let input: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7];
        let buffer: Buffer = Buffer::from(input);

        let bitchunks = buffer.bit_chunks(0, 64);
        let result = bitchunks.into_iter().collect::<Vec<_>>();

        assert_eq!(vec![0x0706050403020100], result);
    }

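    // Illustrative example (added, not from the original suite): with no bit
    // offset, `iter_padded` yields each complete chunk followed by the
    // zero-padded remainder, so 70 set bits produce [u64::MAX, 0b111111].
    #[test]
    fn test_iter_padded_example() {
        let input: &[u8] = &[0xFF; 9];
        let buffer: Buffer = Buffer::from(input);

        let bitchunks = buffer.bit_chunks(0, 70);
        let result = bitchunks.iter_padded().collect::<Vec<_>>();

        assert_eq!(vec![u64::MAX, 0b111111], result);
    }
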
    #[test]
    fn test_iter_unaligned() {
        let input: &[u8] = &[
            0b00000000, 0b00000001, 0b00000010, 0b00000100, 0b00001000, 0b00010000, 0b00100000,
            0b01000000, 0b11111111,
        ];
        let buffer: Buffer = Buffer::from(input);

        let bitchunks = buffer.bit_chunks(4, 64);

        assert_eq!(0, bitchunks.remainder_len());
        assert_eq!(0, bitchunks.remainder_bits());

        let result = bitchunks.into_iter().collect::<Vec<_>>();

        assert_eq!(
            vec![0b1111010000000010000000010000000010000000010000000010000000010000],
            result
        );
    }

    #[test]
    fn test_iter_unaligned_remainder_1_byte() {
        let input: &[u8] = &[
            0b00000000, 0b00000001, 0b00000010, 0b00000100, 0b00001000, 0b00010000, 0b00100000,
            0b01000000, 0b11111111,
        ];
        let buffer: Buffer = Buffer::from(input);

        let bitchunks = buffer.bit_chunks(4, 66);

        assert_eq!(2, bitchunks.remainder_len());
        assert_eq!(0b00000011, bitchunks.remainder_bits());

        let result = bitchunks.into_iter().collect::<Vec<_>>();

        assert_eq!(
            vec![0b1111010000000010000000010000000010000000010000000010000000010000],
            result
        );
    }

    #[test]
    fn test_iter_unaligned_remainder_bits_across_bytes() {
        let input: &[u8] = &[0b00111111, 0b11111100];
        let buffer: Buffer = Buffer::from(input);

        // the remainder contains bits from both bytes: the highest 2 bits of
        // the first byte followed by the lowest 5 bits of the second byte
        let bitchunks = buffer.bit_chunks(6, 7);

        assert_eq!(7, bitchunks.remainder_len());
        assert_eq!(0b1110000, bitchunks.remainder_bits());
    }

    #[test]
    fn test_iter_unaligned_remainder_bits_large() {
        let input: &[u8] = &[
            0b11111111, 0b00000000, 0b11111111, 0b00000000, 0b11111111, 0b00000000, 0b11111111,
            0b00000000, 0b11111111,
        ];
        let buffer: Buffer = Buffer::from(input);

        let bitchunks = buffer.bit_chunks(2, 63);

        assert_eq!(63, bitchunks.remainder_len());
        assert_eq!(
            0b100_0000_0011_1111_1100_0000_0011_1111_1100_0000_0011_1111_1100_0000_0011_1111,
            bitchunks.remainder_bits()
        );
    }

    #[test]
    fn test_iter_remainder_out_of_bounds() {
        // a remainder at the very end of an allocation verifies that
        // `remainder_bits` does not read past the end of the buffer
        const ALLOC_SIZE: usize = 4 * 1024;
        let input = vec![0xFF_u8; ALLOC_SIZE];

        let buffer: Buffer = Buffer::from_vec(input);

        let bitchunks = buffer.bit_chunks(57, ALLOC_SIZE * 8 - 57);

        assert_eq!(u64::MAX, bitchunks.iter().last().unwrap());
        assert_eq!(0x7F, bitchunks.remainder_bits());
    }

    #[test]
    #[should_panic(expected = "offset + len out of bounds")]
    fn test_out_of_bound_should_panic_length_is_more_than_buffer_length() {
        const ALLOC_SIZE: usize = 4 * 1024;
        let input = vec![0xFF_u8; ALLOC_SIZE];

        let buffer: Buffer = Buffer::from_vec(input);

        buffer.bit_chunks(0, (ALLOC_SIZE + 1) * 8);
    }

    #[test]
    #[should_panic(expected = "offset + len out of bounds")]
    fn test_out_of_bound_should_panic_length_is_more_than_buffer_length_but_not_when_not_using_ceil()
    {
        const ALLOC_SIZE: usize = 4 * 1024;
        let input = vec![0xFF_u8; ALLOC_SIZE];

        let buffer: Buffer = Buffer::from_vec(input);

        buffer.bit_chunks(0, (ALLOC_SIZE * 8) + 1);
    }

    #[test]
    #[should_panic(expected = "offset + len out of bounds")]
    fn test_out_of_bound_should_panic_when_offset_is_not_zero_and_length_is_the_entire_buffer_length()
    {
        const ALLOC_SIZE: usize = 4 * 1024;
        let input = vec![0xFF_u8; ALLOC_SIZE];

        let buffer: Buffer = Buffer::from_vec(input);

        buffer.bit_chunks(8, ALLOC_SIZE * 8);
    }

    #[test]
    #[should_panic(expected = "offset + len out of bounds")]
    fn test_out_of_bound_should_panic_when_offset_is_not_zero_and_length_is_the_entire_buffer_length_with_ceil()
    {
        const ALLOC_SIZE: usize = 4 * 1024;
        let input = vec![0xFF_u8; ALLOC_SIZE];

        let buffer: Buffer = Buffer::from_vec(input);

        buffer.bit_chunks(1, ALLOC_SIZE * 8);
    }

    #[test]
    #[allow(clippy::assertions_on_constants)]
    fn test_unaligned_bit_chunk_iterator() {
        let buffer = Buffer::from(&[0xFF; 5]);
        let unaligned = UnalignedBitChunk::new(buffer.as_slice(), 0, 40);

        assert!(unaligned.chunks().is_empty()); // fewer than 128 bits
        assert_eq!(unaligned.lead_padding(), 0);
        assert_eq!(unaligned.trailing_padding(), 24);
        // 40 set bits, zero-padded to a full word
        assert_eq!(
            unaligned.prefix(),
            Some(0b0000000000000000000000001111111111111111111111111111111111111111)
        );
        assert_eq!(unaligned.suffix(), None);

        let buffer = buffer.slice(1);
        let unaligned = UnalignedBitChunk::new(buffer.as_slice(), 0, 32);

        assert!(unaligned.chunks().is_empty()); // fewer than 128 bits
        assert_eq!(unaligned.lead_padding(), 0);
        assert_eq!(unaligned.trailing_padding(), 32);
        assert_eq!(
            unaligned.prefix(),
            Some(0b0000000000000000000000000000000011111111111111111111111111111111)
        );
        assert_eq!(unaligned.suffix(), None);

        let unaligned = UnalignedBitChunk::new(buffer.as_slice(), 5, 27);

        assert!(unaligned.chunks().is_empty()); // fewer than 128 bits
        assert_eq!(unaligned.lead_padding(), 5); // 5 % 8 == 5
        assert_eq!(unaligned.trailing_padding(), 32);
        assert_eq!(
            unaligned.prefix(),
            Some(0b0000000000000000000000000000000011111111111111111111111111100000)
        );
        assert_eq!(unaligned.suffix(), None);

        let unaligned = UnalignedBitChunk::new(buffer.as_slice(), 12, 20);

        assert!(unaligned.chunks().is_empty()); // fewer than 128 bits
        assert_eq!(unaligned.lead_padding(), 4); // 12 % 8 == 4
        assert_eq!(unaligned.trailing_padding(), 40);
        assert_eq!(
            unaligned.prefix(),
            Some(0b0000000000000000000000000000000000000000111111111111111111110000)
        );
        assert_eq!(unaligned.suffix(), None);

        let buffer = Buffer::from(&[0xFF; 14]);

        // verify the alignment assumed by the assertions below
        let (prefix, aligned, suffix) = unsafe { buffer.as_slice().align_to::<u64>() };
        assert_eq!(prefix.len(), 0);
        assert_eq!(aligned.len(), 1);
        assert_eq!(suffix.len(), 6);

        let unaligned = UnalignedBitChunk::new(buffer.as_slice(), 0, 112);

        assert!(unaligned.chunks().is_empty()); // fewer than 128 bits
        assert_eq!(unaligned.lead_padding(), 0);
        assert_eq!(unaligned.trailing_padding(), 16);
        assert_eq!(unaligned.prefix(), Some(u64::MAX));
        assert_eq!(unaligned.suffix(), Some((1 << 48) - 1));

        let buffer = Buffer::from(&[0xFF; 16]);

        // verify the alignment assumed by the assertions below
        let (prefix, aligned, suffix) = unsafe { buffer.as_slice().align_to::<u64>() };
        assert_eq!(prefix.len(), 0);
        assert_eq!(aligned.len(), 2);
        assert_eq!(suffix.len(), 0);

        let unaligned = UnalignedBitChunk::new(buffer.as_slice(), 0, 128);

        assert_eq!(unaligned.prefix(), Some(u64::MAX));
        assert_eq!(unaligned.suffix(), Some(u64::MAX));
        assert!(unaligned.chunks().is_empty());

        let buffer = Buffer::from(&[0xFF; 64]);

        // verify the alignment assumed by the assertions below
        let (prefix, aligned, suffix) = unsafe { buffer.as_slice().align_to::<u64>() };
        assert_eq!(prefix.len(), 0);
        assert_eq!(aligned.len(), 8);
        assert_eq!(suffix.len(), 0);

        let unaligned = UnalignedBitChunk::new(buffer.as_slice(), 0, 512);

        assert_eq!(unaligned.suffix(), None);
        assert_eq!(unaligned.prefix(), None);
        assert_eq!(unaligned.chunks(), [u64::MAX; 8].as_slice());
        assert_eq!(unaligned.lead_padding(), 0);
        assert_eq!(unaligned.trailing_padding(), 0);

        // slicing off one byte misaligns the underlying data
        let buffer = buffer.slice(1);

        let (prefix, aligned, suffix) = unsafe { buffer.as_slice().align_to::<u64>() };
        assert_eq!(prefix.len(), 7);
        assert_eq!(aligned.len(), 7);
        assert_eq!(suffix.len(), 0);

        let unaligned = UnalignedBitChunk::new(buffer.as_slice(), 0, 504);

        // the prefix word covers the 7 unaligned bytes; its low 8 bits are lead padding
        assert_eq!(unaligned.prefix(), Some(u64::MAX - 0xFF));
        assert_eq!(unaligned.suffix(), None);
        assert_eq!(unaligned.chunks(), [u64::MAX; 7].as_slice());
        assert_eq!(unaligned.lead_padding(), 8);
        assert_eq!(unaligned.trailing_padding(), 0);

        let unaligned = UnalignedBitChunk::new(buffer.as_slice(), 17, 300);

        // 1 bit of in-byte offset + 24 bits of alignment padding
        assert_eq!(unaligned.lead_padding(), 25);
        assert_eq!(unaligned.trailing_padding(), 59);
        assert_eq!(unaligned.prefix(), Some(u64::MAX - (1 << 25) + 1));
        assert_eq!(unaligned.suffix(), Some(0b11111));
        assert_eq!(unaligned.chunks(), [u64::MAX; 4].as_slice());

        let unaligned = UnalignedBitChunk::new(buffer.as_slice(), 17, 0);

        assert_eq!(unaligned.prefix(), None);
        assert_eq!(unaligned.suffix(), None);
        assert!(unaligned.chunks().is_empty());
        assert_eq!(unaligned.lead_padding(), 0);
        assert_eq!(unaligned.trailing_padding(), 0);

        let unaligned = UnalignedBitChunk::new(buffer.as_slice(), 17, 1);

        assert_eq!(unaligned.prefix(), Some(2));
        assert_eq!(unaligned.suffix(), None);
        assert!(unaligned.chunks().is_empty());
        assert_eq!(unaligned.lead_padding(), 1);
        assert_eq!(unaligned.trailing_padding(), 62);
    }

    #[test]
    #[cfg_attr(miri, ignore)]
    fn fuzz_unaligned_bit_chunk_iterator() {
        let mut rng = rng();

        let uusize = UniformUsize::new(usize::MIN, usize::MAX).unwrap();
        for _ in 0..100 {
            let mask_len = rng.random_range(0..1024);
            let bools: Vec<_> = std::iter::from_fn(|| Some(rng.random()))
                .take(mask_len)
                .collect();

            let buffer = Buffer::from_iter(bools.iter().cloned());

            let max_offset = 64.min(mask_len);
            let offset = uusize.sample(&mut rng).checked_rem(max_offset).unwrap_or(0);

            let max_truncate = 128.min(mask_len - offset);
            let truncate = uusize
                .sample(&mut rng)
                .checked_rem(max_truncate)
                .unwrap_or(0);

            let unaligned =
                UnalignedBitChunk::new(buffer.as_slice(), offset, mask_len - offset - truncate);

            let bool_slice = &bools[offset..mask_len - truncate];

            let count = unaligned.count_ones();
            let expected_count = bool_slice.iter().filter(|x| **x).count();

            assert_eq!(count, expected_count);

            let collected: Vec<u64> = unaligned.iter().collect();

            let get_bit = |idx: usize| -> bool {
                let padded_index = idx + unaligned.lead_padding();
                let chunk_idx = padded_index / 64;
                let bit_idx = padded_index % 64;
                (collected[chunk_idx] & (1 << bit_idx)) != 0
            };

            for (idx, b) in bool_slice.iter().enumerate() {
                assert_eq!(*b, get_bit(idx))
            }
        }
    }
}