use alloc::{vec, vec::Vec};
use core::{
    alloc::Layout,
    cmp,
    iter::FusedIterator,
    mem::{self, MaybeUninit},
    ops::{Bound, RangeBounds},
    slice,
};

use crate::{IoSlice, IoSliceMut, SeekFrom};

mod block;

use self::block::Block;

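/// Internal cursor over a sequence of `Block`s: the index of the current
/// block, the offset inside that block, the absolute byte position, and the
/// end position (the total length of the stored data).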
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
struct Seeker {
    current_block: usize,
    current_pos_in_block: usize,
    current_pos: usize,
    end_pos: usize,
}

impl Seeker {
    fn start(len: usize) -> Seeker {
        Seeker {
            current_block: 0,
            current_pos_in_block: 0,
            current_pos: 0,
            end_pos: len,
        }
    }

    #[inline]
    fn is_at_end(&self) -> bool {
        self.current_pos >= self.end_pos
    }

    fn read(&mut self, mut buf: &mut [impl IoSliceMut], blocks: &[Block]) -> Result<usize, usize> {
        let mut read_len = 0;
        loop {
            let block = blocks.get(self.current_block).ok_or(read_len)?;
            let len = block.read(self.current_pos_in_block, &mut buf);
            if len > 0 {
                read_len += len;
                self.advance(len, blocks).expect("Inconsistent seeker");
            } else {
                break Ok(read_len);
            }
        }
    }

    fn write(
        &mut self,
        buf: &mut &mut [impl IoSlice],
        blocks: &mut [Block],
    ) -> Result<usize, usize> {
        let mut written_len = 0;
        loop {
            let block = blocks.get_mut(self.current_block).ok_or(written_len)?;
            let len = block.write(self.current_pos_in_block, buf);
            if len > 0 {
                written_len += len;
                self.advance(len, blocks).expect("Inconsistent seeker");
            } else {
                break Ok(written_len);
            }
        }
    }

    fn append_end(&mut self, extra: usize) {
        if self.current_pos >= self.end_pos {
            self.current_pos += extra;
            self.end_pos = self.current_pos;
        } else {
            self.end_pos += extra;
        }
    }

    fn append_new(&mut self, block: &Block) {
        if self.current_pos == self.end_pos {
            self.current_pos += block.len();
            self.current_block += 1;
            self.current_pos_in_block = 0;
        }
        self.end_pos += block.len();
    }

    fn extend_end(&mut self, extra: usize, last: &Block) {
        if (self.end_pos..(self.end_pos + extra)).contains(&self.current_pos) {
            self.current_block -= 1;
            self.current_pos_in_block += last.len() - extra;
        } else if self.current_pos >= self.end_pos + extra {
            self.current_pos_in_block -= extra;
        }
        self.end_pos += extra;
    }

    fn extend_new(&mut self, len: usize) {
        if self.current_pos_in_block >= len {
            self.current_block += 1;
            self.current_pos_in_block -= len;
        }
        self.end_pos += len;
    }

    fn truncate(&mut self, new_pos: usize, blocks: &[Block]) -> Option<(usize, usize)> {
        if self.end_pos <= new_pos {
            return None;
        }
        self.end_pos = new_pos;

        match self.current_pos.cmp(&new_pos) {
            cmp::Ordering::Less => {
                if blocks.len() <= self.current_block {
                    None
                } else {
                    let mut pos = self.current_pos - self.current_pos_in_block;
                    for (bi_delta, block) in blocks[self.current_block..].iter().enumerate() {
                        if new_pos < block.len() + pos {
                            return Some((self.current_block + bi_delta, new_pos - pos));
                        }
                        pos += block.len();
                    }
                    None
                }
            }
            cmp::Ordering::Equal => Some(if self.current_pos_in_block > 0 {
                self.current_block += 1;
                let pos_in_block = mem::replace(&mut self.current_pos_in_block, 0);
                (self.current_block - 1, pos_in_block)
            } else {
                (self.current_block, 0)
            }),
            cmp::Ordering::Greater => {
                let mut pos = self.current_pos_in_block;
                for (bi_delta, block) in blocks[..self.current_block].iter().rev().enumerate() {
                    pos += block.len();
                    if self.current_pos <= new_pos + pos {
                        self.current_block -= bi_delta;
                        self.current_pos_in_block = self.current_pos - new_pos;
                        return Some((self.current_block - 1, pos + new_pos - self.current_pos));
                    }
                }
                unreachable!()
            }
        }
    }

    fn advance(&mut self, delta: usize, blocks: &[Block]) -> Option<usize> {
        let new_pos = self.current_pos + delta;
        if new_pos >= self.end_pos {
            self.current_block = blocks.len();
            self.current_pos_in_block = new_pos - self.end_pos;
            self.current_pos = new_pos;
            self.end_pos = new_pos;
            return Some(new_pos);
        }
        let mut pos = self.current_pos_in_block + delta;
        for (bi_delta, block) in blocks[self.current_block..].iter().enumerate() {
            if pos < block.len() {
                self.current_block += bi_delta;
                self.current_pos_in_block = pos;
                self.current_pos = new_pos;
                return Some(new_pos);
            }
            pos -= block.len();
        }
        None
    }

    fn seek_from_start(&mut self, start: usize, blocks: &[Block]) -> Option<usize> {
        let new_pos = start;
        if new_pos >= self.end_pos {
            self.current_block = blocks.len();
            self.current_pos_in_block = new_pos - self.end_pos;
            self.current_pos = new_pos;
            self.end_pos = new_pos;
            return Some(new_pos);
        }
        let mut pos = start;
        for (bi, block) in blocks.iter().enumerate() {
            if pos < block.len() {
                self.current_block = bi;
                self.current_pos_in_block = pos;
                self.current_pos = new_pos;
                return Some(new_pos);
            }
            pos -= block.len();
        }
        None
    }

    fn seek_bound(&mut self, bound: usize, blocks: &[Block]) {
        let mid = self.end_pos / 2;
        if bound < mid {
            let _ = self.seek_from_start(bound, blocks);
        } else {
            let _ = self.seek_from_end(-(self.end_pos.saturating_sub(bound) as isize), blocks);
        }
    }

    fn seek_from_end(&mut self, end: isize, blocks: &[Block]) -> Option<usize> {
        if end >= 0 {
            self.current_block = blocks.len();
            self.current_pos_in_block = end as usize;
            self.current_pos = self.end_pos + end as usize;
            return Some(self.current_pos);
        }

        let pos_delta = (-end) as usize;
        if self.end_pos < pos_delta {
            return None;
        }
        let new_pos = self.end_pos - pos_delta;
        if new_pos == 0 {
            self.current_block = 0;
            self.current_pos_in_block = 0;
            self.current_pos = 0;
            return Some(0);
        }

        let mut pos = 0;
        for (bi, block) in blocks.iter().enumerate().rev() {
            pos += block.len();
            if pos >= pos_delta {
                self.current_block = bi;
                self.current_pos_in_block = pos - pos_delta;
                self.current_pos = new_pos;
                return Some(new_pos);
            }
        }
        None
    }

    fn seek_from_current(&mut self, current: isize, blocks: &[Block]) -> Option<usize> {
        match current.cmp(&0) {
            cmp::Ordering::Equal => Some(self.current_pos),
            cmp::Ordering::Greater => self.advance(current as usize, blocks),
            cmp::Ordering::Less => {
                let pos_delta = (-current) as usize;
                if self.current_pos < pos_delta {
                    return None;
                }
                let new_pos = self.current_pos - pos_delta;
                if new_pos == 0 {
                    self.current_block = 0;
                    self.current_pos_in_block = 0;
                    self.current_pos = 0;
                    return Some(0);
                }

                if self.current_pos_in_block >= pos_delta {
                    self.current_pos_in_block -= pos_delta;
                    self.current_pos = new_pos;
                    return Some(new_pos);
                }

                let mut pos = self.current_pos_in_block;
                for (bi_delta, block) in blocks[..self.current_block].iter().rev().enumerate() {
                    pos += block.len();
                    if pos >= pos_delta {
                        self.current_block -= bi_delta + 1;
                        self.current_pos_in_block = pos - pos_delta;
                        self.current_pos = new_pos;
                        return Some(new_pos);
                    }
                }

                None
            }
        }
    }
}

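/// A byte buffer stored as a sequence of separately allocated `Block`s.
///
/// A `BlockedVec` keeps an internal cursor, so it supports sequential
/// `read`/`write`/`seek` access as well as positional `read_at`/`write_at`
/// access. New blocks are allocated according to the page `Layout` supplied at
/// construction.
///
/// A minimal usage sketch (not compiled as a doctest, since the crate path and
/// the `std` feature gate depend on how the crate is built):
///
/// ```ignore
/// let mut vec = BlockedVec::new();
/// vec.append(&[1, 2, 3]);
/// let mut buf = [0u8; 3];
/// vec.read_at(0, &mut buf);
/// assert_eq!(buf, [1, 2, 3]);
/// ```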
#[derive(Debug, Clone)]
pub struct BlockedVec {
    blocks: Vec<Block>,
    layout: Layout,
    len: usize,
    seeker: Option<Seeker>,
}

impl BlockedVec {
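    /// Creates an empty `BlockedVec` whose block layout uses the system page
    /// size for both size and alignment.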
    #[cfg(feature = "std")]
    pub fn new() -> Self {
        let ps = page_size::get();
        let layout = Layout::from_size_align(ps, ps).expect("Invalid layout");
        Self::new_paged(layout)
    }

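    /// Creates an empty `BlockedVec` that allocates its blocks according to
    /// `page_layout`.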
    pub fn new_paged(page_layout: Layout) -> Self {
        BlockedVec {
            blocks: Vec::new(),
            layout: page_layout,
            len: 0,
            seeker: None,
        }
    }

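    /// Creates a `BlockedVec` with an initial length of `len` bytes, using the
    /// system page size for its block layout.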
    #[cfg(feature = "std")]
    pub fn with_len(len: usize) -> Self {
        let ps = page_size::get();
        let layout = Layout::from_size_align(ps, ps).expect("Invalid layout");
        Self::with_len_paged(len, layout)
    }

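    /// Creates a `BlockedVec` with an initial length of `len` bytes, allocating
    /// its blocks according to `page_layout`.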
    pub fn with_len_paged(len: usize, page_layout: Layout) -> Self {
        match Block::with_len(page_layout, len) {
            Some(block) => BlockedVec {
                blocks: vec![block],
                layout: page_layout,
                len,
                seeker: Some(Seeker::start(len)),
            },
            None => Self::new_paged(page_layout),
        }
    }

    pub fn len(&self) -> usize {
        self.len
    }

    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }

    fn append_inner(&mut self, buf: &mut &mut [impl IoSlice]) -> usize {
        match &mut self.seeker {
            Some(seeker) => {
                let last = self.blocks.last_mut().expect("Inconsistent seeker");
                let skip = if seeker.is_at_end() {
                    let skip = mem::replace(&mut seeker.current_pos_in_block, 0);
                    seeker.current_pos -= skip;
                    skip
                } else {
                    0
                };
                match last.extend(skip, buf) {
                    Ok(len) => {
                        seeker.append_end(len);
                        self.len += len;
                        len - skip
                    }
                    Err(len) => {
                        seeker.append_end(len);
                        self.len += len;

                        let skip2 = if len > skip { 0 } else { skip - len };

                        if let Some((new, len2)) = Block::from_buf(self.layout, skip2, buf) {
                            seeker.append_new(&new);
                            self.len += new.len();
                            self.blocks.push(new);

                            len + len2 - skip + skip2
                        } else {
                            len - skip
                        }
                    }
                }
            }
            None => match Block::from_buf(self.layout, 0, buf) {
                Some((block, len)) => {
                    let mut seeker = Seeker::start(block.len());
                    seeker.current_block = 1;
                    seeker.current_pos = seeker.end_pos;
                    self.seeker = Some(seeker);
                    self.len = block.len();
                    self.blocks = vec![block];
                    len
                }
                None => 0,
            },
        }
    }

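    /// Grows the vector by `additional` bytes, extending the last block in
    /// place when possible and allocating a new block otherwise.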
    pub fn extend(&mut self, additional: usize) {
        match &mut self.seeker {
            Some(seeker) => {
                let last = self.blocks.last_mut().expect("Inconsistent seeker");
                match last.extend(additional, &mut (&mut [] as &mut [&[u8]])) {
                    Ok(len) => {
                        seeker.extend_end(len, last);
                        self.len += len
                    }
                    Err(len) => {
                        seeker.extend_end(len, last);
                        self.len += len;

                        let len2 = additional - len;
                        let (new, _) =
                            Block::from_buf(self.layout, len2, &mut (&mut [] as &mut [&[u8]]))
                                .expect("Inconsistent blocks");
                        seeker.extend_new(len2);
                        self.len += len2;
                        self.blocks.push(new);
                    }
                }
            }
            None => *self = Self::with_len_paged(additional, self.layout),
        }
    }

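    /// Appends the bytes from the slices in `buf` to the end of the vector,
    /// moving the cursor to the new end. Returns the number of bytes appended.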
    pub fn append_vectored(&mut self, mut buf: &mut [impl IoSlice]) -> usize {
        self.seek(SeekFrom::End(0)).expect("Inconsistent seeker");
        self.append_inner(&mut buf)
    }

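    /// Appends `buf` to the end of the vector, moving the cursor to the new
    /// end. Returns the number of bytes appended.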
    pub fn append(&mut self, buf: &[u8]) -> usize {
        self.append_vectored(&mut [buf])
    }

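    /// Reads from the current cursor position into the buffers in `buf`,
    /// advancing the cursor. Returns the number of bytes read.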
    pub fn read_vectored(&mut self, buf: &mut [impl IoSliceMut]) -> usize {
        let seeker = match &mut self.seeker {
            Some(seeker) => seeker,
            None => return 0,
        };
        match seeker.read(buf, &self.blocks) {
            Ok(len) => len,
            Err(len) => len,
        }
    }

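    /// Reads into the buffers in `buf` starting at byte offset `pos`, without
    /// moving the vector's cursor. Returns the number of bytes read.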
    pub fn read_at_vectored(&self, pos: usize, buf: &mut [impl IoSliceMut]) -> usize {
        let mut seeker = match self.seeker {
            Some(mut seeker) => match seeker.seek_from_start(pos, &self.blocks) {
                Some(_) => seeker,
                None => return 0,
            },
            None => return 0,
        };
        match seeker.read(buf, &self.blocks) {
            Ok(len) => len,
            Err(len) => len,
        }
    }

    #[inline]
    pub fn read(&mut self, buf: &mut [u8]) -> usize {
        self.read_vectored(&mut [buf])
    }

    #[inline]
    pub fn read_at(&self, pos: usize, buf: &mut [u8]) -> usize {
        self.read_at_vectored(pos, &mut [buf])
    }

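    /// Writes the buffers in `buf` at the current cursor position, overwriting
    /// existing bytes and appending once the end of the vector is reached.
    /// Returns the number of bytes written.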
    pub fn write_vectored(&mut self, mut buf: &mut [impl IoSlice]) -> usize {
        let seeker = match &mut self.seeker {
            Some(seeker) => seeker,
            None => return 0,
        };
        match seeker.write(&mut buf, &mut self.blocks) {
            Ok(len) => len,
            Err(len) => self.append_inner(&mut buf) + len,
        }
    }

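    /// Writes the buffers in `buf` starting at byte offset `pos`, overwriting
    /// existing bytes and appending past the end if needed. Returns the number
    /// of bytes written.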
    pub fn write_at_vectored(&mut self, pos: usize, mut buf: &mut [impl IoSlice]) -> usize {
        let mut seeker = match self.seeker {
            Some(mut seeker) => match seeker.seek_from_start(pos, &self.blocks) {
                Some(_) => seeker,
                None => return 0,
            },
            None => return 0,
        };
        match seeker.write(&mut buf, &mut self.blocks) {
            Ok(len) => len,
            Err(len) => self.append_inner(&mut buf) + len,
        }
    }

    #[inline]
    pub fn write(&mut self, buf: &[u8]) -> usize {
        self.write_vectored(&mut [buf])
    }

    #[inline]
    pub fn write_at(&mut self, pos: usize, buf: &[u8]) -> usize {
        self.write_at_vectored(pos, &mut [buf])
    }

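    /// Moves the cursor to the position described by `pos`. Returns the new
    /// absolute position, or `None` if the position cannot be resolved.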
    pub fn seek(&mut self, pos: SeekFrom) -> Option<usize> {
        match &mut self.seeker {
            Some(seeker) => match pos {
                SeekFrom::Start(start) => seeker.seek_from_start(start as usize, &self.blocks),
                SeekFrom::End(end) => seeker.seek_from_end(end as isize, &self.blocks),
                SeekFrom::Current(current) => {
                    seeker.seek_from_current(current as isize, &self.blocks)
                }
            },
            _ if pos == SeekFrom::End(0) => Some(0),
            _ => None,
        }
    }

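    /// Truncates the vector to at most `len` bytes, dropping blocks that fall
    /// entirely past the new length. Returns `true` if block data was dropped.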
    pub fn truncate(&mut self, len: usize) -> bool {
        match &mut self.seeker {
            Some(seeker) => match seeker.truncate(len, &self.blocks) {
                Some((bi, pos_in_block)) => {
                    let bi = match self.blocks[bi].truncate(pos_in_block) {
                        Some(true) => bi,
                        _ => bi + 1,
                    };
                    if bi < self.blocks.len() {
                        self.blocks.truncate(bi);
                    }
                    self.len = len;
                    true
                }
                None => {
                    // `seeker.truncate` found nothing to drop from the blocks;
                    // only shrink the recorded length, never grow it.
                    self.len = self.len.min(len);
                    seeker.end_pos > len
                }
            },
            None => false,
        }
    }

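    /// Resizes the vector to `new_len` bytes, growing via `extend` or shrinking
    /// via `truncate` as needed.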
    pub fn resize(&mut self, new_len: usize) {
        if self.len < new_len {
            let extra = new_len - self.len;
            self.extend(extra)
        } else {
            self.truncate(new_len);
        }
    }

    #[inline]
    pub fn iter(&self) -> BlockIter<'_> {
        BlockIter {
            blocks: &self.blocks,
        }
    }

    #[inline]
    pub fn iter_mut(&mut self) -> BlockIterMut<'_> {
        BlockIterMut {
            blocks: self.blocks.iter_mut(),
        }
    }

    #[inline]
    pub fn bytes(&self) -> impl Iterator<Item = u8> + Clone + core::fmt::Debug + '_ {
        self.iter().flatten().copied()
    }

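    /// Returns an iterator over the bytes selected by `range`, yielded as one
    /// slice per underlying block.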
    pub fn range<R>(&self, range: R) -> RangeIter<'_>
    where
        R: RangeBounds<usize>,
    {
        let mut start = match self.seeker {
            Some(_) => Seeker::start(self.len),
            None => return RangeIter::end(),
        };
        match range.start_bound() {
            Bound::Included(&bound) => start.seek_bound(bound, &self.blocks),
            Bound::Excluded(&bound) => start.seek_bound(bound + 1, &self.blocks),
            Bound::Unbounded => {}
        }

        let mut end = start;
        match range.end_bound() {
            Bound::Included(&bound) => end.seek_bound(bound, &self.blocks),
            Bound::Excluded(&bound) => end.seek_bound(bound - 1, &self.blocks),
            Bound::Unbounded => {
                let _ = end.seek_from_end(-1, &self.blocks);
            }
        }

        RangeIter {
            start_block: start.current_block,
            start_offset: start.current_pos_in_block,
            end_block: end.current_block,
            end_offset: end.current_pos_in_block,
            blocks: &self.blocks[start.current_block..],
        }
    }

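    /// Mutable counterpart of `range`: returns an iterator over the bytes
    /// selected by `range`, yielded as one mutable slice per underlying block.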
    pub fn range_mut<R>(&mut self, range: R) -> RangeIterMut<'_>
    where
        R: RangeBounds<usize>,
    {
        let mut start = match self.seeker {
            Some(_) => Seeker::start(self.len),
            None => return RangeIterMut::end(),
        };
        match range.start_bound() {
            Bound::Included(&bound) => start.seek_bound(bound, &self.blocks),
            Bound::Excluded(&bound) => start.seek_bound(bound + 1, &self.blocks),
            Bound::Unbounded => {}
        }

        let mut end = start;
        match range.end_bound() {
            Bound::Included(&bound) => end.seek_bound(bound, &self.blocks),
            Bound::Excluded(&bound) => end.seek_bound(bound - 1, &self.blocks),
            Bound::Unbounded => {
                let _ = end.seek_from_end(-1, &self.blocks);
            }
        }
        RangeIterMut {
            start_block: start.current_block,
            start_offset: start.current_pos_in_block,
            end_block: end.current_block,
            end_offset: end.current_pos_in_block,
            blocks: self.blocks[start.current_block..].iter_mut(),
        }
    }
}

#[cfg(feature = "std")]
impl Default for BlockedVec {
    fn default() -> Self {
        Self::new()
    }
}

#[cfg(feature = "std")]
impl std::io::Seek for BlockedVec {
    #[inline]
    fn seek(&mut self, pos: std::io::SeekFrom) -> std::io::Result<u64> {
        self.seek(pos.into())
            .map(|pos| pos as u64)
            .ok_or_else(|| std::io::ErrorKind::InvalidInput.into())
    }
}

#[cfg(feature = "std")]
impl std::io::Read for BlockedVec {
    #[inline]
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        Ok(self.read(buf))
    }

    #[inline]
    fn read_vectored(&mut self, bufs: &mut [std::io::IoSliceMut<'_>]) -> std::io::Result<usize> {
        Ok(self.read_vectored(bufs))
    }
}

#[cfg(feature = "std")]
impl std::io::Write for BlockedVec {
    #[inline]
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        Ok(self.write(buf))
    }

    #[inline]
    fn write_vectored(&mut self, bufs: &[std::io::IoSlice<'_>]) -> std::io::Result<usize> {
        Ok(self.write_vectored(&mut Vec::from(bufs)))
    }

    #[inline]
    fn flush(&mut self) -> std::io::Result<()> {
        Ok(())
    }
}

/// Immutable iterator over the blocks of a `BlockedVec`, yielding each block
/// as a byte slice.
#[derive(Debug, Clone, Copy)]
#[repr(transparent)]
pub struct BlockIter<'a> {
    blocks: &'a [Block],
}

impl<'a> Iterator for BlockIter<'a> {
    type Item = &'a [u8];

    fn next(&mut self) -> Option<Self::Item> {
        let (ret, next) = self.blocks.split_first()?;
        self.blocks = next;
        Some(unsafe { MaybeUninit::slice_assume_init_ref(ret.as_slice()) })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.blocks.len(), Some(self.blocks.len()))
    }
}

impl ExactSizeIterator for BlockIter<'_> {}

impl FusedIterator for BlockIter<'_> {}

/// Mutable iterator over the blocks of a `BlockedVec`, yielding each block as
/// a mutable byte slice.
#[derive(Debug)]
#[repr(transparent)]
pub struct BlockIterMut<'a> {
    blocks: slice::IterMut<'a, Block>,
}

impl<'a> Iterator for BlockIterMut<'a> {
    type Item = &'a mut [u8];

    fn next(&mut self) -> Option<Self::Item> {
        let ret = self.blocks.next()?;
        Some(unsafe { MaybeUninit::slice_assume_init_mut(ret.as_mut_slice()) })
    }

    // Forward the exact size of the inner iterator so that the
    // `ExactSizeIterator` impl below reports a correct length.
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.blocks.size_hint()
    }
}

impl ExactSizeIterator for BlockIterMut<'_> {}

impl FusedIterator for BlockIterMut<'_> {}

/// Iterator returned by `BlockedVec::range`.
#[derive(Debug, Clone, Copy)]
pub struct RangeIter<'a> {
    start_block: usize,
    start_offset: usize,
    end_block: usize,
    end_offset: usize,
    blocks: &'a [Block],
}

impl<'a> RangeIter<'a> {
    #[inline]
    fn end() -> Self {
        RangeIter {
            start_block: 0,
            start_offset: 0,
            end_block: 0,
            end_offset: 0,
            blocks: &[],
        }
    }
}

impl<'a> Iterator for RangeIter<'a> {
    type Item = &'a [u8];

    fn next(&mut self) -> Option<Self::Item> {
        if self.start_block > self.end_block
            || (self.start_block == self.end_block && self.start_offset >= self.end_offset)
        {
            return None;
        }

        let (block, next) = self.blocks.split_first()?;
        let ret = if self.start_block < self.end_block {
            &block.as_slice()[self.start_offset..]
        } else {
            &block.as_slice()[self.start_offset..=self.end_offset]
        };
        self.blocks = next;
        self.start_block += 1;
        self.start_offset = 0;

        Some(unsafe { MaybeUninit::slice_assume_init_ref(ret) })
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        (
            (self.end_block - self.start_block).saturating_sub(1),
            Some(self.end_block - self.start_block),
        )
    }
}

impl FusedIterator for RangeIter<'_> {}

/// Iterator returned by `BlockedVec::range_mut`.
#[derive(Debug)]
pub struct RangeIterMut<'a> {
    start_block: usize,
    start_offset: usize,
    end_block: usize,
    end_offset: usize,
    blocks: slice::IterMut<'a, Block>,
}

impl<'a> RangeIterMut<'a> {
    #[inline]
    fn end() -> Self {
        RangeIterMut {
            start_block: 0,
            start_offset: 0,
            end_block: 0,
            end_offset: 0,
            blocks: [].iter_mut(),
        }
    }
}

impl<'a> Iterator for RangeIterMut<'a> {
    type Item = &'a mut [u8];

    fn next(&mut self) -> Option<Self::Item> {
        if self.start_block > self.end_block
            || (self.start_block == self.end_block && self.start_offset >= self.end_offset)
        {
            return None;
        }

        let block = self.blocks.next()?;
        let ret = if self.start_block < self.end_block {
            &mut block.as_mut_slice()[self.start_offset..]
        } else {
            &mut block.as_mut_slice()[self.start_offset..=self.end_offset]
        };
        self.start_block += 1;
        self.start_offset = 0;

        Some(unsafe { MaybeUninit::slice_assume_init_mut(ret) })
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        (
            (self.end_block - self.start_block).saturating_sub(1),
            Some(self.end_block - self.start_block),
        )
    }
}

impl FusedIterator for RangeIterMut<'_> {}

#[cfg(test)]
mod tests {
    use std::io::{Read, Seek, Write};

    use super::*;

    fn test_inner() -> Option<()> {
        let layout = Layout::new::<[u8; 4]>();
        let mut vec = BlockedVec::new_paged(layout);
        vec.append(&[1, 2, 3, 4, 5]);
        vec.seek(SeekFrom::Start(3))?;
        vec.write_all(&[6, 7, 8, 9, 10]).unwrap();
        vec.seek(SeekFrom::End(-3))?;
        vec.write_all(&[11, 12, 13, 14, 15]).unwrap();
        vec.seek(SeekFrom::Current(-7))?;
        vec.seek(SeekFrom::Current(1))?;
        vec.write_all(&[16, 17, 18, 19, 20]).unwrap();
        vec.seek(SeekFrom::End(3))?;
        vec.write_all(&[21, 22, 23, 24, 25]).unwrap();
        vec.resize(6);
        vec.seek(SeekFrom::Current(-3))?;
        vec.resize(12);
        vec.append(&[26, 27, 28, 29, 30]);
        vec.rewind().unwrap();
        let mut buf = [0; 17];
        vec.read_exact(&mut buf).unwrap();
        assert_eq!(
            buf,
            [1, 2, 3, 6, 16, 17, 0, 0, 0, 0, 0, 0, 26, 27, 28, 29, 30]
        );
        Some(())
    }

    #[test]
    fn test_rw() {
        test_inner().expect("Failed to seek")
    }

    #[test]
    fn test_clone() {
        let data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];

        let layout = Layout::new::<[u8; 4]>();
        let mut vec = BlockedVec::new_paged(layout);
        vec.append(&data);

        let cloned = vec.clone();
        let mut buf = [0; 10];
        cloned.read_at(0, &mut buf);
        assert_eq!(buf, data);
    }
}