use std::ops::Not;

use arrow_buffer::bit_chunk_iterator::BitChunks;
use arrow_buffer::bit_chunk_iterator::UnalignedBitChunk;
use bitvec::view::BitView;

use crate::BitBuffer;
use crate::BufferMut;
use crate::ByteBufferMut;
use crate::bit::get_bit_unchecked;
use crate::bit::ops;
use crate::bit::set_bit_unchecked;
use crate::bit::unset_bit_unchecked;
use crate::buffer_mut;
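/// A mutable buffer of bits.
///
/// Bits are stored in a [`ByteBufferMut`], least-significant bit first within each byte.
/// `offset` is the position of the first logical bit within the byte buffer and `len` is the
/// number of valid bits. Use [`BitBufferMut::freeze`] to convert it into an immutable
/// [`BitBuffer`].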
#[derive(Debug, Clone, Eq)]
pub struct BitBufferMut {
    buffer: ByteBufferMut,
    offset: usize,
    len: usize,
}

impl PartialEq for BitBufferMut {
    fn eq(&self, other: &Self) -> bool {
        if self.len != other.len {
            return false;
        }

        self.chunks()
            .iter_padded()
            .zip(other.chunks().iter_padded())
            .all(|(a, b)| a == b)
    }
}

impl BitBufferMut {
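    /// Wraps an existing byte buffer, exposing `len` bits starting at bit `offset`.
    ///
    /// # Panics
    ///
    /// Panics if `len` exceeds the number of bits available in `buffer`.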
    pub fn from_buffer(buffer: ByteBufferMut, offset: usize, len: usize) -> Self {
        assert!(
            len <= buffer.len() * 8,
            "Buffer of {} bytes is too short for the requested length of {len} bits",
            buffer.len()
        );
        Self {
            buffer,
            offset,
            len,
        }
    }
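    /// Creates a mutable copy of the given [`BitBuffer`], preserving its offset and length.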
    pub fn copy_from(bit_buffer: &BitBuffer) -> Self {
        Self {
            buffer: ByteBufferMut::copy_from(bit_buffer.inner()),
            offset: bit_buffer.offset(),
            len: bit_buffer.len(),
        }
    }
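    /// Creates an empty buffer with capacity for at least `capacity` bits.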
    pub fn with_capacity(capacity: usize) -> Self {
        Self {
            buffer: BufferMut::with_capacity(capacity.div_ceil(8)),
            offset: 0,
            len: 0,
        }
    }
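    /// Creates a buffer of `len` bits, all set to `true`.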
    pub fn new_set(len: usize) -> Self {
        Self {
            buffer: buffer_mut![0xFF; len.div_ceil(8)],
            offset: 0,
            len,
        }
    }
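    /// Creates a buffer of `len` bits, all set to `false`.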
    pub fn new_unset(len: usize) -> Self {
        Self {
            buffer: BufferMut::zeroed(len.div_ceil(8)),
            offset: 0,
            len,
        }
    }

    #[inline(always)]
    pub fn empty() -> Self {
        Self::with_capacity(0)
    }

    pub fn full(value: bool, len: usize) -> Self {
        if value {
            Self::new_set(len)
        } else {
            Self::new_unset(len)
        }
    }
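    /// Builds a buffer of `len` bits by evaluating `f` for every index in `0..len`.
    ///
    /// Bits are packed into `u64` words 64 at a time before being written out, so `f` is called
    /// exactly once per index, in order.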
    pub fn collect_bool<F: FnMut(usize) -> bool>(len: usize, mut f: F) -> Self {
        let mut buffer = BufferMut::with_capacity(len.div_ceil(64) * 8);

        let chunks = len / 64;
        let remainder = len % 64;
        for chunk in 0..chunks {
            let mut packed = 0;
            for bit_idx in 0..64 {
                let i = bit_idx + chunk * 64;
                packed |= (f(i) as u64) << bit_idx;
            }

            unsafe { buffer.push_unchecked(packed) }
        }

        if remainder != 0 {
            let mut packed = 0;
            for bit_idx in 0..remainder {
                let i = bit_idx + chunks * 64;
                packed |= (f(i) as u64) << bit_idx;
            }

            unsafe { buffer.push_unchecked(packed) }
        }

        buffer.truncate(len.div_ceil(8));

        Self {
            buffer: buffer.into_byte_buffer(),
            offset: 0,
            len,
        }
    }

    pub fn inner(&self) -> &ByteBufferMut {
        &self.buffer
    }

    pub fn into_inner(self) -> ByteBufferMut {
        self.buffer
    }

    #[inline(always)]
    pub fn len(&self) -> usize {
        self.len
    }

    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }

    #[inline(always)]
    pub fn offset(&self) -> usize {
        self.offset
    }

    #[inline(always)]
    pub fn value(&self, index: usize) -> bool {
        assert!(index < self.len);
        unsafe { self.value_unchecked(index) }
    }
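    /// Returns the bit at `index` without bounds checking.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `index < self.len()`.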
    #[inline(always)]
    pub unsafe fn value_unchecked(&self, index: usize) -> bool {
        unsafe { get_bit_unchecked(self.buffer.as_ptr(), self.offset + index) }
    }
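    /// Returns a [`BitChunks`] iterator over the valid bits, packed into `u64` chunks.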
    pub fn chunks(&self) -> BitChunks<'_> {
        BitChunks::new(self.buffer.as_slice(), self.offset, self.len)
    }

    #[inline(always)]
    pub fn capacity(&self) -> usize {
        (self.buffer.capacity() * 8) - self.offset
    }
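    /// Reserves capacity for at least `additional` more bits to be appended.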
    pub fn reserve(&mut self, additional: usize) {
        let required_bits = self.offset + self.len + additional;
        let required_bytes = required_bits.div_ceil(8);
        let additional_bytes = required_bytes.saturating_sub(self.buffer.len());
        self.buffer.reserve(additional_bytes);
    }
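    /// Resets the buffer to zero length and zero offset, retaining the underlying allocation.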
    pub fn clear(&mut self) {
        self.len = 0;
        self.offset = 0;
    }

    pub fn set_to(&mut self, index: usize, value: bool) {
        if value {
            self.set(index);
        } else {
            self.unset(index);
        }
    }
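    /// Sets the bit at `index` to `value` without bounds checking.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `index < self.len()`.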
    pub unsafe fn set_to_unchecked(&mut self, index: usize, value: bool) {
        if value {
            unsafe { self.set_unchecked(index) }
        } else {
            unsafe { self.unset_unchecked(index) }
        }
    }

    pub fn set(&mut self, index: usize) {
        assert!(index < self.len, "index {index} exceeds len {}", self.len);

        unsafe { self.set_unchecked(index) };
    }

    #[inline]
    pub fn unset(&mut self, index: usize) {
        assert!(index < self.len, "index {index} exceeds len {}", self.len);

        unsafe { self.unset_unchecked(index) };
    }

    unsafe fn set_unchecked(&mut self, index: usize) {
        unsafe { set_bit_unchecked(self.buffer.as_mut_ptr(), self.offset + index) }
    }
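    /// Clears the bit at `index` without bounds checking.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `index < self.len()`.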
    #[inline]
    pub unsafe fn unset_unchecked(&mut self, index: usize) {
        unsafe { unset_bit_unchecked(self.buffer.as_mut_ptr(), self.offset + index) }
    }
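    /// Forces the length of the buffer to `new_len` bits.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `new_len <= self.capacity()` and that the bits in
    /// `0..new_len` are initialized.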
    #[inline(always)]
    pub unsafe fn set_len(&mut self, new_len: usize) {
        debug_assert!(
            new_len <= self.capacity(),
            "`set_len` requires that new_len <= capacity()"
        );

        let bytes_len = (self.offset + new_len).div_ceil(8);
        unsafe { self.buffer.set_len(bytes_len) };

        self.len = new_len;
    }
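    /// Shortens the buffer to `len` bits. Does nothing if `len` is greater than the current
    /// length.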
    pub fn truncate(&mut self, len: usize) {
        if len > self.len {
            return;
        }

        let new_len_bytes = (self.offset + len).div_ceil(8);
        self.buffer.truncate(new_len_bytes);
        self.len = len;
    }

    pub fn append(&mut self, value: bool) {
        if value {
            self.append_true()
        } else {
            self.append_false()
        }
    }

    pub fn append_true(&mut self) {
        let bit_pos = self.offset + self.len;
        let byte_pos = bit_pos / 8;
        let bit_in_byte = bit_pos % 8;

        if byte_pos >= self.buffer.len() {
            self.buffer.push(0u8);
        }

        self.buffer.as_mut_slice()[byte_pos] |= 1 << bit_in_byte;
        self.len += 1;
    }

    pub fn append_false(&mut self) {
        let bit_pos = self.offset + self.len;
        let byte_pos = bit_pos / 8;
        let bit_in_byte = bit_pos % 8;

        if byte_pos >= self.buffer.len() {
            self.buffer.push(0u8);
        }

        // Always clear the target bit: the byte may pre-exist in the underlying buffer and
        // already have this bit set, so it cannot be assumed to be zero.
        self.buffer.as_mut_slice()[byte_pos] &= !(1 << bit_in_byte);

        self.len += 1;
    }
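    /// Appends `n` copies of `value` to the end of the buffer.
    ///
    /// Whole bytes in the middle of the range are filled with `0xFF` or `0x00` directly; only
    /// the partial bytes at either end are masked bit by bit.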
    pub fn append_n(&mut self, value: bool, n: usize) {
        if n == 0 {
            return;
        }

        let start_bit_pos = self.offset + self.len;
        let end_bit_pos = start_bit_pos + n;
        let required_bytes = end_bit_pos.div_ceil(8);

        if required_bytes > self.buffer.len() {
            self.buffer.push_n(0x00, required_bytes - self.buffer.len());
        }

        let fill_byte = if value { 0xFF } else { 0x00 };

        let start_byte = start_bit_pos / 8;
        let start_bit = start_bit_pos % 8;
        let end_byte = end_bit_pos / 8;
        let end_bit = end_bit_pos % 8;

        let slice = self.buffer.as_mut_slice();

        if start_byte == end_byte {
            // The whole range lives within a single byte.
            let mask = ((1u8 << (end_bit - start_bit)) - 1) << start_bit;
            if value {
                slice[start_byte] |= mask;
            } else {
                slice[start_byte] &= !mask;
            }
        } else {
            // Partial leading byte.
            if start_bit != 0 {
                let mask = !((1u8 << start_bit) - 1);
                if value {
                    slice[start_byte] |= mask;
                } else {
                    slice[start_byte] &= !mask;
                }
            }

            // Full bytes in the middle.
            let fill_start = if start_bit != 0 {
                start_byte + 1
            } else {
                start_byte
            };
            let fill_end = end_byte;
            if fill_start < fill_end {
                slice[fill_start..fill_end].fill(fill_byte);
            }

            // Partial trailing byte.
            if end_bit != 0 {
                let mask = (1u8 << end_bit) - 1;
                if value {
                    slice[end_byte] |= mask;
                } else {
                    slice[end_byte] &= !mask;
                }
            }
        }

        self.len += n;
    }
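    /// Appends all bits of `buffer` to the end of this buffer, honoring both buffers' offsets.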
    pub fn append_buffer(&mut self, buffer: &BitBuffer) {
        let bit_len = buffer.len();
        if bit_len == 0 {
            return;
        }

        let start_bit_pos = self.offset + self.len;
        let end_bit_pos = start_bit_pos + bit_len;
        let required_bytes = end_bit_pos.div_ceil(8);

        if required_bytes > self.buffer.len() {
            self.buffer.push_n(0x00, required_bytes - self.buffer.len());
        }

        let self_slice = self
            .buffer
            .as_mut_slice()
            .view_bits_mut::<bitvec::prelude::Lsb0>();
        let other_slice = buffer
            .inner()
            .as_slice()
            .view_bits::<bitvec::prelude::Lsb0>();

        let source_range = buffer.offset()..buffer.offset() + bit_len;
        self_slice[start_bit_pos..end_bit_pos].copy_from_bitslice(&other_slice[source_range]);

        self.len += bit_len;
    }
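    /// Splits the buffer at bit `at`, returning everything from `at` onwards and leaving `self`
    /// with the bits before `at`.
    ///
    /// When the split point falls on a byte boundary (or beyond the current length) the
    /// underlying byte buffer is split without copying; otherwise the tail bits are repacked
    /// into a fresh buffer.
    ///
    /// # Panics
    ///
    /// Panics if `at` exceeds the capacity of the buffer.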
    #[must_use = "consider BitBufferMut::truncate if you don't need the other half"]
    pub fn split_off(&mut self, at: usize) -> Self {
        assert!(
            at <= self.capacity(),
            "index {at} exceeds capacity {}",
            self.capacity()
        );

        let tail_len = self.len.saturating_sub(at);
        let byte_pos = (self.offset + at).div_ceil(8);

        if at > self.len() || ((self.offset + at) % 8 == 0) {
            // The split point is byte-aligned (or past the end), so the underlying bytes can be
            // split without copying any bits.
            let tail_buffer = self.buffer.split_off(byte_pos);
            self.len = self.len.min(at);

            return Self {
                buffer: tail_buffer,
                offset: 0,
                len: tail_len,
            };
        }

        // Unaligned split: repack the tail bits into a new buffer, one u64 chunk at a time.
        let u64_cap = tail_len.div_ceil(8);
        let mut tail_buffer_u64 = BufferMut::<u64>::with_capacity(u64_cap);
        tail_buffer_u64.extend(
            BitChunks::new(self.buffer.as_slice(), self.offset + at, tail_len).iter_padded(),
        );

        self.truncate(at);
        BitBufferMut::from_buffer(tail_buffer_u64.into_byte_buffer(), 0, tail_len)
    }
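    /// Appends `other` to the end of this buffer, reversing a previous
    /// [`BitBufferMut::split_off`].
    ///
    /// When `self` ends on a byte boundary and `other` has no bit offset, the underlying byte
    /// buffers are merged via `unsplit`; otherwise `other` is appended bit-wise with
    /// [`BitBufferMut::append_buffer`].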
    pub fn unsplit(&mut self, other: Self) {
        if (self.offset + self.len) % 8 == 0 && other.offset == 0 {
            self.buffer.unsplit(other.buffer);
            self.len += other.len;
            return;
        }

        self.append_buffer(&other.freeze())
    }
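    /// Converts this mutable bit buffer into an immutable [`BitBuffer`], preserving the offset
    /// and length.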
    pub fn freeze(self) -> BitBuffer {
        BitBuffer::new_with_offset(self.buffer.freeze(), self.len, self.offset)
    }

    pub fn as_slice(&self) -> &[u8] {
        self.buffer.as_slice()
    }

    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        self.buffer.as_mut_slice()
    }

    pub fn as_mut_ptr(&mut self) -> *mut u8 {
        self.buffer.as_mut_ptr()
    }

    pub fn unaligned_chunks(&self) -> UnalignedBitChunk<'_> {
        UnalignedBitChunk::new(self.buffer.as_slice(), self.offset, self.len)
    }
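    /// Returns the number of bits set to `true`.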
    pub fn true_count(&self) -> usize {
        self.unaligned_chunks().count_ones()
    }

    pub fn false_count(&self) -> usize {
        self.len - self.true_count()
    }
}

impl Default for BitBufferMut {
    fn default() -> Self {
        Self::with_capacity(0)
    }
}
impl Not for BitBufferMut {
    type Output = BitBufferMut;

    fn not(mut self) -> Self::Output {
        ops::bitwise_unary_op_mut(&mut self, |b| !b);
        self
    }
}

impl From<&[bool]> for BitBufferMut {
    fn from(value: &[bool]) -> Self {
        BitBuffer::collect_bool(value.len(), |i| value[i]).into_mut()
    }
}

impl From<Vec<bool>> for BitBufferMut {
    fn from(value: Vec<bool>) -> Self {
        value.as_slice().into()
    }
}
impl FromIterator<bool> for BitBufferMut {
    fn from_iter<T: IntoIterator<Item = bool>>(iter: T) -> Self {
        let mut iter = iter.into_iter();

        let (lower_bound, _) = iter.size_hint();

        // Pre-allocate an unset buffer for the lower bound of the size hint and fill it by
        // setting only the true bits.
        let mut buf = BitBufferMut::new_unset(lower_bound);
        assert_eq!(buf.offset, 0);

        let ptr = buf.buffer.as_mut_ptr();
        for i in 0..lower_bound {
            let Some(v) = iter.next() else {
                // The iterator produced fewer items than its lower bound promised; shrink to fit.
                unsafe { buf.set_len(i) };
                return buf;
            };

            if v {
                unsafe { set_bit_unchecked(ptr, i) }
            }
        }

        // Any items beyond the lower bound are appended one at a time.
        for v in iter {
            buf.append(v);
        }

        buf
    }
}
#[cfg(test)]
mod tests {
    use crate::BufferMut;
    use crate::bit::buf_mut::BitBufferMut;
    use crate::bitbuffer;
    use crate::bitbuffer_mut;
    use crate::buffer_mut;

    #[test]
    fn test_bits_mut() {
        let mut bools = bitbuffer_mut![false; 10];
        bools.set_to(0, true);
        bools.set_to(9, true);

        let bools = bools.freeze();
        assert!(bools.value(0));
        for i in 1..=8 {
            assert!(!bools.value(i));
        }
        assert!(bools.value(9));
    }
    #[test]
    fn test_append_n() {
        let mut bools = BitBufferMut::with_capacity(10);
        assert_eq!(bools.len(), 0);
        assert!(bools.is_empty());

        bools.append(true);
        bools.append_n(false, 8);
        bools.append_n(true, 1);

        let bools = bools.freeze();

        assert_eq!(bools.true_count(), 2);
        assert!(bools.value(0));
        assert!(bools.value(9));
    }

    #[test]
    fn test_reserve_ensures_len_plus_additional() {
        let mut bits = BitBufferMut::with_capacity(10);
        assert_eq!(bits.len(), 0);

        bits.reserve(100);

        assert!(bits.capacity() >= 100);

        bits.append_n(true, 50);
        assert_eq!(bits.len(), 50);

        bits.reserve(100);

        assert!(bits.capacity() >= 150);
    }
    #[test]
    fn test_with_offset_zero() {
        let buf = BufferMut::zeroed(2);
        let mut bit_buf = BitBufferMut::from_buffer(buf, 0, 16);

        bit_buf.set(0);
        bit_buf.set(7);
        bit_buf.set(8);
        bit_buf.set(15);

        assert!(bit_buf.value(0));
        assert!(bit_buf.value(7));
        assert!(bit_buf.value(8));
        assert!(bit_buf.value(15));
        assert!(!bit_buf.value(1));
        assert!(!bit_buf.value(9));

        assert_eq!(bit_buf.as_slice()[0], 0b10000001);
        assert_eq!(bit_buf.as_slice()[1], 0b10000001);
    }

    #[test]
    fn test_with_offset_within_byte() {
        let buf = buffer_mut![0b11111111, 0b00000000, 0b00000000];
        let mut bit_buf = BitBufferMut::from_buffer(buf, 3, 10);

        // Logical bit 0 is physical bit 3 of the first byte, which is set.
        assert!(bit_buf.value(0));
        // Logical bit 4 is physical bit 7 of the first byte, still set.
        assert!(bit_buf.value(4));
        // Logical bit 5 is physical bit 0 of the second byte, which is unset.
        assert!(!bit_buf.value(5));

        bit_buf.set(7);
        assert!(bit_buf.value(7));

        bit_buf.unset(0);
        assert!(!bit_buf.value(0));
    }
    #[test]
    fn test_with_offset_byte_boundary() {
        let buf = buffer_mut![0xFF, 0x00, 0xFF];
        let mut bit_buf = BitBufferMut::from_buffer(buf, 8, 16);

        for i in 0..8 {
            assert!(!bit_buf.value(i));
        }
        for i in 8..16 {
            assert!(bit_buf.value(i));
        }

        bit_buf.set(0);
        bit_buf.set(3);
        assert!(bit_buf.value(0));
        assert!(bit_buf.value(3));
    }

    #[test]
    fn test_with_large_offset() {
        let buf = buffer_mut![0xFF, 0xFF, 0xFF, 0xFF];
        let mut bit_buf = BitBufferMut::from_buffer(buf, 13, 10);

        for i in 0..10 {
            assert!(bit_buf.value(i));
        }

        bit_buf.unset(0);
        bit_buf.unset(5);
        bit_buf.unset(9);

        assert!(!bit_buf.value(0));
        assert!(bit_buf.value(1));
        assert!(!bit_buf.value(5));
        assert!(!bit_buf.value(9));
    }
    #[test]
    fn test_append_with_offset() {
        // The first byte has its top three bits pre-set; the buffer starts empty at bit offset 3.
        let buf = buffer_mut![0b11100000];
        let mut bit_buf = BitBufferMut::from_buffer(buf, 3, 0);

        bit_buf.append(false);
        bit_buf.append(true);
        bit_buf.append(true);

        assert_eq!(bit_buf.len(), 3);
        assert!(!bit_buf.value(0));
        assert!(bit_buf.value(1));
        assert!(bit_buf.value(2));
    }

    #[test]
    fn test_append_n_with_offset_crossing_boundary() {
        let buf = BufferMut::zeroed(4);
        let mut bit_buf = BitBufferMut::from_buffer(buf, 5, 0);

        // Appending 10 bits starting at bit 5 crosses into the second byte.
        bit_buf.append_n(true, 10);

        assert_eq!(bit_buf.len(), 10);
        for i in 0..10 {
            assert!(bit_buf.value(i));
        }

        // Physical bits 5..8 of the first byte and 0..7 of the second byte are set.
        assert_eq!(bit_buf.as_slice()[0], 0b11100000);
        assert_eq!(bit_buf.as_slice()[1], 0b01111111);
    }

    #[test]
    fn test_truncate_with_offset() {
        let buf = buffer_mut![0xFF, 0xFF];
        let mut bit_buf = BitBufferMut::from_buffer(buf, 4, 12);

        assert_eq!(bit_buf.len(), 12);

        bit_buf.truncate(8);
        assert_eq!(bit_buf.len(), 8);

        bit_buf.truncate(3);
        assert_eq!(bit_buf.len(), 3);

        // Truncating to a larger length is a no-op.
        bit_buf.truncate(10);
        assert_eq!(bit_buf.len(), 3);
    }
    #[test]
    fn test_capacity_with_offset() {
        // 10 bytes = 80 bits of storage, with the first 5 bits skipped by the offset.
        let buf = buffer_mut![0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
        let bit_buf = BitBufferMut::from_buffer(buf, 5, 0);

        assert!(bit_buf.capacity() >= 75);
        // Capacity is the byte capacity in bits minus the offset, whatever the allocation size.
        assert_eq!(bit_buf.capacity() % 8, (80 - 5) % 8);
    }

    #[test]
    fn test_reserve_with_offset() {
        let buf = buffer_mut![0, 0];
        let mut bit_buf = BitBufferMut::from_buffer(buf, 3, 0);

        let initial_capacity = bit_buf.capacity();
        assert!(initial_capacity >= 13);

        bit_buf.reserve(20);

        assert!(bit_buf.capacity() >= 20);
    }

    #[test]
    fn test_freeze_with_offset() {
        let buf = buffer_mut![0b11110000, 0b00001111];
        let mut bit_buf = BitBufferMut::from_buffer(buf, 4, 8);

        bit_buf.set(0);
        bit_buf.set(7);

        let frozen = bit_buf.freeze();
        assert_eq!(frozen.offset(), 4);
        assert_eq!(frozen.len(), 8);

        assert!(frozen.value(0));
        assert!(frozen.value(7));
    }
    #[cfg_attr(miri, ignore)]
    #[test]
    fn test_append_buffer_with_offsets() {
        let source = bitbuffer![false, false, true, true, false, true];

        let buf = BufferMut::zeroed(4);
        let mut dest = BitBufferMut::from_buffer(buf, 3, 0);

        dest.append(true);
        dest.append(false);

        dest.append_buffer(&source);

        assert_eq!(dest.len(), 8);
        // The two individually appended bits...
        assert!(dest.value(0));
        assert!(!dest.value(1));
        // ...followed by the six bits copied from `source`.
        assert!(!dest.value(2));
        assert!(!dest.value(3));
        assert!(dest.value(4));
        assert!(dest.value(5));
        assert!(!dest.value(6));
        assert!(dest.value(7));
    }

    #[test]
    fn test_set_unset_unchecked_with_offset() {
        let buf = BufferMut::zeroed(3);
        let mut bit_buf = BitBufferMut::from_buffer(buf, 7, 10);

        unsafe {
            bit_buf.set_unchecked(0);
            bit_buf.set_unchecked(5);
            bit_buf.set_unchecked(9);
        }

        assert!(bit_buf.value(0));
        assert!(bit_buf.value(5));
        assert!(bit_buf.value(9));

        unsafe {
            bit_buf.unset_unchecked(5);
        }

        assert!(!bit_buf.value(5));
    }
    #[test]
    fn test_value_unchecked_with_offset() {
        let buf = buffer_mut![0b11110000, 0b00001111];
        let bit_buf = BitBufferMut::from_buffer(buf, 4, 8);

        unsafe {
            // Logical bits 0..4 map to the high nibble of the first byte.
            assert!(bit_buf.value_unchecked(0));
            assert!(bit_buf.value_unchecked(3));

            // Logical bits 4..8 map to the low nibble of the second byte.
            assert!(bit_buf.value_unchecked(4));
            assert!(bit_buf.value_unchecked(7));
        }
    }

    #[test]
    fn test_append_alternating_with_offset() {
        let buf = BufferMut::zeroed(4);
        let mut bit_buf = BitBufferMut::from_buffer(buf, 2, 0);

        for i in 0..20 {
            bit_buf.append(i % 2 == 0);
        }

        assert_eq!(bit_buf.len(), 20);
        for i in 0..20 {
            assert_eq!(bit_buf.value(i), i % 2 == 0);
        }
    }
    #[test]
    fn test_new_set_new_unset() {
        let set_buf = bitbuffer_mut![true; 10];
        let unset_buf = bitbuffer_mut![false; 10];

        for i in 0..10 {
            assert!(set_buf.value(i));
            assert!(!unset_buf.value(i));
        }

        assert_eq!(set_buf.len(), 10);
        assert_eq!(unset_buf.len(), 10);
    }

    #[test]
    fn test_append_n_false_with_offset() {
        let buf = BufferMut::zeroed(4);
        let mut bit_buf = BitBufferMut::from_buffer(buf, 5, 0);

        bit_buf.append_n(false, 15);

        assert_eq!(bit_buf.len(), 15);
        for i in 0..15 {
            assert!(!bit_buf.value(i));
        }
    }

    #[test]
    fn test_append_n_true_with_offset() {
        let buf = BufferMut::zeroed(4);
        let mut bit_buf = BitBufferMut::from_buffer(buf, 5, 0);

        bit_buf.append_n(true, 15);

        assert_eq!(bit_buf.len(), 15);
        for i in 0..15 {
            assert!(bit_buf.value(i));
        }
    }
    #[test]
    fn test_mixed_operations_with_offset() {
        let buf = BufferMut::zeroed(5);
        let mut bit_buf = BitBufferMut::from_buffer(buf, 3, 0);

        bit_buf.append_n(true, 5);
        bit_buf.append_n(false, 3);
        bit_buf.append(true);

        assert_eq!(bit_buf.len(), 9);

        // Flip one bit in each appended run.
        bit_buf.set(6);
        bit_buf.unset(2);

        assert!(bit_buf.value(0));
        assert!(bit_buf.value(1));
        // Unset above.
        assert!(!bit_buf.value(2));
        assert!(bit_buf.value(3));
        assert!(bit_buf.value(4));
        assert!(!bit_buf.value(5));
        // Set above.
        assert!(bit_buf.value(6));
        assert!(!bit_buf.value(7));
        assert!(bit_buf.value(8));

        bit_buf.truncate(6);
        assert_eq!(bit_buf.len(), 6);

        let frozen = bit_buf.freeze();
        assert_eq!(frozen.offset(), 3);
        assert_eq!(frozen.len(), 6);
    }
    #[test]
    fn test_from_iterator_with_incorrect_size_hint() {
        // An iterator whose size hint understates how many items it will actually yield.
        struct LyingIterator {
            values: Vec<bool>,
            index: usize,
        }

        impl Iterator for LyingIterator {
            type Item = bool;

            fn next(&mut self) -> Option<Self::Item> {
                (self.index < self.values.len()).then(|| {
                    let val = self.values[self.index];
                    self.index += 1;
                    val
                })
            }

            fn size_hint(&self) -> (usize, Option<usize>) {
                let remaining = self.values.len() - self.index;
                // Claim at most 5 items even though 10 will be produced.
                let lower = remaining.min(5);
                let upper = Some(5);
                (lower, upper)
            }
        }

        let lying_iter = LyingIterator {
            values: vec![
                true, false, true, false, true, false, true, false, true, false,
            ],
            index: 0,
        };

        let bit_buf: BitBufferMut = lying_iter.collect();

        // All 10 items must still be collected despite the bad size hint.
        assert_eq!(bit_buf.len(), 10);
        for i in 0..10 {
            assert_eq!(bit_buf.value(i), i % 2 == 0);
        }
    }
    #[test]
    fn test_split_off() {
        for i in 0..10 {
            let buf = bitbuffer![0 1 0 1 0 1 0 1 0 1];

            let mut buf_mut = buf.clone().into_mut();
            assert_eq!(buf_mut.len(), 10);

            let tail = buf_mut.split_off(i);
            assert_eq!(buf_mut.len(), i);
            assert_eq!(buf_mut.freeze(), buf.slice(0..i));

            assert_eq!(tail.len(), 10 - i);
            assert_eq!(tail.freeze(), buf.slice(i..10));
        }
    }

    #[test]
    fn test_split_off_with_offset() {
        for i in 0..10 {
            let buf = bitbuffer![0 1 0 1 0 1 0 1 0 1 0 1].slice(2..);

            let mut buf_mut = buf.clone().into_mut();
            assert_eq!(buf_mut.len(), 10);

            let tail = buf_mut.split_off(i);
            assert_eq!(buf_mut.len(), i);
            assert_eq!(buf_mut.freeze(), buf.slice(0..i));

            assert_eq!(tail.len(), 10 - i);
            assert_eq!(tail.freeze(), buf.slice(i..10));
        }
    }
}