1use std::{borrow::Borrow, cmp, collections::VecDeque, fmt, io, mem, ops, ptr};
2
3use crate::{BufMut, BytePageSize, ByteString, Bytes, BytesMut};
4use crate::{buf::UninitSlice, storage::StorageVec};
5
/// A segmented, append-oriented byte buffer: sealed pages are queued in
/// `pages` while `current` accumulates incoming writes until it fills up.
pub struct BytePages {
    // Capacity used when allocating a fresh `current` page.
    size: BytePageSize,
    // Sealed pages, oldest first; invariant: never contains empty pages.
    pages: VecDeque<BytePage>,
    // Partially filled page that receives new writes.
    current: StorageVec,
}
11
impl BytePages {
    /// Creates an empty page list that allocates pages of `size`.
    ///
    /// Debug builds assert that `size` is not `BytePageSize::Unset`.
    pub fn new(size: BytePageSize) -> Self {
        debug_assert!(size != BytePageSize::Unset, "Page cannot be Unset");

        BytePages {
            size,
            pages: VecDeque::with_capacity(8),
            current: StorageVec::sized(size),
        }
    }

    /// Returns the page size used for newly allocated pages.
    pub fn page_size(&self) -> BytePageSize {
        self.size
    }

    /// Changes the page size for future allocations; already-allocated
    /// pages keep the size they were created with.
    pub fn set_page_size(&mut self, size: BytePageSize) {
        self.size = size;
    }

    /// Inserts `buf` before all existing data. Empty buffers are rejected
    /// (returns `false`) so the queue never holds empty pages.
    pub fn prepend<T>(&mut self, buf: T) -> bool
    where
        BytePage: From<T>,
    {
        let p = BytePage::from(buf);
        if p.is_empty() {
            false
        } else {
            self.pages.push_front(p);
            true
        }
    }

    /// Appends `buf` after all existing data.
    ///
    /// Buffers that fit are copied into the current page; otherwise, when
    /// the current page is empty, the buffer's writable storage is adopted
    /// directly (zero-copy) if possible, else the current page is sealed
    /// and `buf` queued behind it.
    pub fn append<T>(&mut self, buf: T)
    where
        BytePage: From<T>,
    {
        let p = BytePage::from(buf);
        let remaining = self.current.remaining();

        if p.len() <= remaining {
            // Fits in the current page: plain copy.
            self.put_slice(p.as_ref());
        } else if self.current.len() == 0 {
            match p.into_storage() {
                Ok(st) => {
                    // Adopt the page's storage as the new current page.
                    self.current = st;
                }
                Err(page) => {
                    self.pages.push_back(page);
                }
            }
        } else {
            // Seal the partially filled current page, then queue `p`.
            self.pages.push_back(BytePage {
                inner: StorageType::Storage(mem::replace(
                    &mut self.current,
                    StorageVec::sized(self.size),
                )),
            });

            self.pages.push_back(p);
        }
    }

    /// Copies `extend` into the buffer (alias for `put_slice`).
    #[inline]
    pub fn extend_from_slice(&mut self, extend: &[u8]) {
        self.put_slice(extend);
    }

    /// Total number of buffered bytes across all pages. O(number of pages).
    #[inline]
    pub fn len(&self) -> usize {
        self.pages
            .iter()
            .fold(self.current.len(), |c, page| c + page.len())
    }

    /// `true` when no page holds any data.
    #[inline]
    pub fn is_empty(&self) -> bool {
        for p in &self.pages {
            if !p.is_empty() {
                return false;
            }
        }
        self.current.len() == 0
    }

    /// Number of pages, counting the current page only if it holds data.
    #[inline]
    pub fn num_pages(&self) -> usize {
        if self.current.len() == 0 {
            self.pages.len()
        } else {
            self.pages.len() + 1
        }
    }

    /// Removes and returns the oldest page, sealing the current page when
    /// it is the only data left. Returns `None` when empty.
    pub fn take(&mut self) -> Option<BytePage> {
        if let Some(page) = self.pages.pop_front() {
            Some(page)
        } else if self.current.len() == 0 {
            None
        } else {
            Some(BytePage::from(mem::replace(
                &mut self.current,
                StorageVec::sized(self.size),
            )))
        }
    }

    /// Appends a copy of this buffer's contents to `pages`. Sealed pages
    /// are cloned (sharing where the backing store allows — see
    /// `BytePage::clone`); the current page is deep-copied into `Bytes`.
    #[inline]
    pub fn copy_to(&self, pages: &mut BytePages) {
        for p in &self.pages {
            pages.append(p.clone());
        }

        if self.current.len() != 0 {
            pages.append(BytePage::from(Bytes::copy_from_slice(
                self.current.as_ref(),
            )));
        }
    }

    /// Moves all data into `pages`, leaving `self` empty.
    #[inline]
    pub fn move_to(&mut self, pages: &mut BytePages) {
        while let Some(page) = self.take() {
            pages.append(page);
        }
    }

    /// Splits off the first `at` bytes into a new `BytePages` with the same
    /// page size. If fewer than `at` bytes are buffered, everything moves.
    #[must_use]
    pub fn split_to(&mut self, at: usize) -> BytePages {
        let mut pages = BytePages::new(self.size);
        self.split_into(at, &mut pages);
        pages
    }

    /// Moves the first `at` bytes into `to`, splitting a page in the middle
    /// when the boundary falls inside it.
    pub fn split_into(&mut self, mut at: usize, to: &mut BytePages) {
        while let Some(mut page) = self.pages.pop_front() {
            let len = cmp::min(page.len(), at);
            to.append(page.split_to(len));

            if !page.is_empty() {
                // Boundary fell inside this page; keep the tail here.
                self.pages.push_front(page);
                return;
            }
            at -= len;
        }

        // Sealed pages exhausted; split the current page if more is needed.
        if at > 0
            && let Some(mut page) = self.take()
        {
            let len = cmp::min(page.len(), at);
            to.append(page.split_to(len));
            self.append(page);
        }
    }

    /// Drains every page into one contiguous immutable buffer.
    #[inline]
    #[must_use]
    pub fn freeze(&mut self) -> Bytes {
        let mut buf = BytesMut::with_capacity(self.len());
        while let Some(p) = self.take() {
            buf.extend_from_slice(&p);
        }
        buf.freeze()
    }

    /// If `self` is completely empty, steals `pages`' partially filled
    /// current page instead of letting it go to waste.
    #[inline]
    pub fn try_get_current_from(&mut self, pages: &mut BytePages) {
        if self.pages.is_empty() && self.current.len() == 0 && pages.current.len() != 0 {
            self.current = mem::replace(&mut pages.current, StorageVec::sized(self.size));
        }
    }

    /// Runs `f` with a temporary `BytesMut` view over the current page's
    /// storage, then `mem::forget`s the view so the storage is not dropped
    /// twice.
    ///
    /// NOTE(review): this duplicates `self.current`'s inner handle while
    /// ownership stays with `self.current` — the `mem::forget` below is
    /// what prevents a double free; verify against `StorageVec` internals.
    pub fn with_bytes_mut<F, R>(&mut self, f: F) -> R
    where
        F: FnOnce(&mut BytesMut) -> R,
    {
        let mut buf = BytesMut {
            storage: StorageVec(self.current.0),
        };
        buf.storage.unsize();

        let res = f(&mut buf);
        mem::forget(buf);
        res
    }
}
238
239impl fmt::Debug for BytePages {
240 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
241 let mut f = fmt.debug_tuple("BytePages");
242 for p in &self.pages {
243 f.field(p);
244 }
245 if self.current.len() != 0 {
246 f.field(&crate::debug::BsDebug(self.current.as_ref()));
247 }
248 f.finish()
249 }
250}
251
252impl Default for BytePages {
253 fn default() -> Self {
254 BytePages::new(BytePageSize::Size16)
255 }
256}
257
258impl BufMut for BytePages {
259 #[inline]
260 fn remaining_mut(&self) -> usize {
261 self.current.remaining()
262 }
263
264 #[inline]
265 unsafe fn advance_mut(&mut self, cnt: usize) {
266 self.current.set_len(self.current.len() + cnt);
268 }
269
270 #[inline]
271 fn chunk_mut(&mut self) -> &mut UninitSlice {
272 unsafe {
273 let ptr = &mut self.current.as_ptr();
275 UninitSlice::from_raw_parts_mut(
276 ptr.add(self.current.len()),
277 self.remaining_mut(),
278 )
279 }
280 }
281
282 fn put_slice(&mut self, mut src: &[u8]) {
283 while !src.is_empty() {
284 let amount = cmp::min(src.len(), self.current.remaining());
285 unsafe {
286 ptr::copy_nonoverlapping(
287 src.as_ptr(),
288 self.chunk_mut().as_mut_ptr(),
289 amount,
290 );
291 self.advance_mut(amount);
292 }
293 src = &src[amount..];
294
295 if self.current.is_full() {
297 self.pages.push_back(BytePage::from(mem::replace(
298 &mut self.current,
299 StorageVec::sized(self.size),
300 )));
301 }
302 }
303 }
304
305 #[inline]
306 fn put_u8(&mut self, n: u8) {
307 self.current.put_u8(n);
308 if self.current.is_full() {
309 self.pages.push_back(BytePage::from(mem::replace(
310 &mut self.current,
311 StorageVec::sized(self.size),
312 )));
313 }
314 }
315
316 #[inline]
317 fn put_i8(&mut self, n: i8) {
318 self.put_u8(n as u8);
319 }
320}
321
322impl Clone for BytePages {
323 fn clone(&self) -> Self {
324 let mut pages = BytePages::new(self.size);
325 self.copy_to(&mut pages);
326 pages
327 }
328}
329
330impl io::Write for BytePages {
331 fn write(&mut self, src: &[u8]) -> Result<usize, io::Error> {
332 self.put_slice(src);
333 Ok(src.len())
334 }
335
336 fn flush(&mut self) -> Result<(), io::Error> {
337 Ok(())
338 }
339}
340
341impl From<BytePages> for Bytes {
342 fn from(pages: BytePages) -> Bytes {
343 BytesMut::from(pages).freeze()
344 }
345}
346
347impl From<BytePages> for BytesMut {
348 fn from(mut pages: BytePages) -> BytesMut {
349 let mut buf = BytesMut::with_capacity(pages.len());
350 while let Some(p) = pages.take() {
351 buf.extend_from_slice(&p);
352 }
353 buf
354 }
355}
356
/// A single page of bytes backed by one of several storage strategies
/// (see `StorageType`).
pub struct BytePage {
    inner: StorageType,
}
360
/// Backing store for a `BytePage`.
enum StorageType {
    // Shared, immutable bytes (cheap clone/split).
    Bytes(Bytes),
    // Raw page storage, as used by `BytePages::current`.
    Storage(StorageVec),
    // Plain owned vector; converted to `Bytes` when split or advanced.
    Vec(Vec<u8>),
}
366
impl BytePage {
    /// Number of bytes in the page.
    #[inline]
    pub fn len(&self) -> usize {
        match &self.inner {
            StorageType::Bytes(b) => b.len(),
            StorageType::Storage(b) => b.len(),
            StorageType::Vec(b) => b.len(),
        }
    }

    /// `true` when the page holds no bytes.
    #[inline]
    pub fn is_empty(&self) -> bool {
        match &self.inner {
            StorageType::Bytes(b) => b.is_empty(),
            StorageType::Storage(b) => b.len() == 0,
            StorageType::Vec(b) => b.is_empty(),
        }
    }

    /// Raw pointer to the first byte of the page's content.
    pub fn as_ptr(&self) -> *const u8 {
        // NOTE(review): the unsafe block covers the storage-backed pointer
        // accesses; verify the `StorageVec`/`Bytes` pointer contracts.
        unsafe {
            match &self.inner {
                StorageType::Bytes(b) => b.storage.as_ptr(),
                StorageType::Storage(b) => b.as_ptr(),
                StorageType::Vec(b) => b.as_ptr(),
            }
        }
    }

    /// Splits off the first `at` bytes (clamped to `len()`) into a new
    /// page, leaving the tail in `self`.
    ///
    /// `Storage` pages are frozen into `Bytes` first so the split shares
    /// the underlying buffer; `Vec` pages are copied into `Bytes` first.
    #[must_use]
    pub fn split_to(&mut self, at: usize) -> BytePage {
        match &mut self.inner {
            StorageType::Bytes(b) => {
                let buf = b.split_to(cmp::min(at, b.len()));
                BytePage {
                    inner: StorageType::Bytes(buf),
                }
            }
            StorageType::Storage(_) => {
                // Freeze the storage into `Bytes`, then retry the split.
                let inner = mem::replace(&mut self.inner, StorageType::Bytes(Bytes::new()));
                if let StorageType::Storage(st) = inner {
                    self.inner = StorageType::Bytes(Bytes {
                        storage: st.freeze(),
                    });
                    self.split_to(at)
                } else {
                    unreachable!()
                }
            }
            StorageType::Vec(_) => {
                // Copy the vector into `Bytes`, then retry the split.
                let inner = mem::replace(&mut self.inner, StorageType::Bytes(Bytes::new()));
                if let StorageType::Vec(b) = inner {
                    self.inner = StorageType::Bytes(Bytes::copy_from_slice(&b));
                    self.split_to(at)
                } else {
                    unreachable!()
                }
            }
        }
    }

    /// Discards the first `cnt` bytes of the page.
    ///
    /// `Vec`-backed pages convert to `Bytes` here and panic if `cnt`
    /// exceeds the length; the other variants delegate to their own
    /// advance/set-start semantics.
    #[inline]
    pub fn advance_to(&mut self, cnt: usize) {
        match &mut self.inner {
            StorageType::Bytes(b) => b.advance_to(cnt),
            // NOTE(review): `cnt as u32` silently truncates above u32::MAX;
            // presumably page sizes make that unreachable — confirm.
            StorageType::Storage(b) => unsafe { b.set_start(cnt as u32) },
            StorageType::Vec(b) => {
                self.inner = StorageType::Bytes(Bytes::copy_from_slice(&b[cnt..]));
            }
        }
    }

    /// Converts the page into immutable `Bytes`.
    #[inline]
    #[must_use]
    pub fn freeze(self) -> Bytes {
        match self.inner {
            StorageType::Bytes(b) => b,
            StorageType::Storage(st) => Bytes {
                storage: st.freeze(),
            },
            StorageType::Vec(v) => Bytes::from(v),
        }
    }

    /// Attempts to recover writable storage from the page: succeeds only
    /// for a `Storage` page that is uniquely owned and not full;
    /// otherwise the page is returned unchanged.
    fn into_storage(self) -> Result<StorageVec, Self> {
        if let StorageType::Storage(mut st) = self.inner {
            if !st.is_full() && st.is_unique() {
                Ok(st)
            } else {
                Err(Self {
                    inner: StorageType::Storage(st),
                })
            }
        } else {
            Err(self)
        }
    }
}
485
impl Clone for BytePage {
    /// Clones the page. `Bytes` clones share the buffer; `Storage` clones
    /// share via the unsafe storage clone (the tests show `is_unique()`
    /// turning false after cloning); a `Vec` page is deep-copied into a
    /// `Bytes` page.
    fn clone(&self) -> Self {
        let inner = match &self.inner {
            StorageType::Bytes(b) => StorageType::Bytes(b.clone()),
            StorageType::Storage(st) => {
                // SAFETY(review): relies on `StorageVec::clone` bumping a
                // refcount on the shared storage — verify against the
                // `StorageVec` internals.
                StorageType::Storage(unsafe { st.clone() })
            }
            StorageType::Vec(b) => StorageType::Bytes(Bytes::copy_from_slice(b)),
        };

        Self { inner }
    }
}
501
502impl AsRef<[u8]> for BytePage {
503 #[inline]
504 fn as_ref(&self) -> &[u8] {
505 match &self.inner {
506 StorageType::Bytes(b) => b.as_ref(),
507 StorageType::Storage(b) => b.as_ref(),
508 StorageType::Vec(b) => b.as_ref(),
509 }
510 }
511}
512
513impl Borrow<[u8]> for BytePage {
514 #[inline]
515 fn borrow(&self) -> &[u8] {
516 self.as_ref()
517 }
518}
519
520impl From<Bytes> for BytePage {
521 fn from(buf: Bytes) -> Self {
522 BytePage {
523 inner: StorageType::Bytes(buf),
524 }
525 }
526}
527
528impl<'a> From<&'a Bytes> for BytePage {
529 fn from(buf: &'a Bytes) -> Self {
530 BytePage {
531 inner: StorageType::Bytes(buf.clone()),
532 }
533 }
534}
535
536impl From<BytesMut> for BytePage {
537 fn from(buf: BytesMut) -> Self {
538 BytePage {
539 inner: StorageType::Storage(buf.storage),
540 }
541 }
542}
543
544impl From<ByteString> for BytePage {
545 fn from(s: ByteString) -> Self {
546 s.into_bytes().into()
547 }
548}
549
550impl<'a> From<&'a ByteString> for BytePage {
551 fn from(s: &'a ByteString) -> Self {
552 s.clone().into_bytes().into()
553 }
554}
555
556impl From<StorageVec> for BytePage {
557 fn from(buf: StorageVec) -> Self {
558 BytePage {
559 inner: StorageType::Storage(buf),
560 }
561 }
562}
563
564impl From<Vec<u8>> for BytePage {
565 fn from(buf: Vec<u8>) -> Self {
566 BytePage {
567 inner: StorageType::Vec(buf),
568 }
569 }
570}
571
572impl From<&'static str> for BytePage {
573 fn from(buf: &'static str) -> Self {
574 BytePage::from(Bytes::from_static(buf.as_bytes()))
575 }
576}
577
578impl From<&'static [u8]> for BytePage {
579 fn from(buf: &'static [u8]) -> Self {
580 BytePage::from(Bytes::from_static(buf))
581 }
582}
583
584impl<const N: usize> From<&'static [u8; N]> for BytePage {
585 fn from(src: &'static [u8; N]) -> Self {
586 BytePage::from(Bytes::from_static(src))
587 }
588}
589
590impl From<BytePage> for Bytes {
591 fn from(page: BytePage) -> Self {
592 match page.inner {
593 StorageType::Bytes(b) => b,
594 StorageType::Storage(storage) => BytesMut { storage }.freeze(),
595 StorageType::Vec(v) => Bytes::copy_from_slice(&v),
596 }
597 }
598}
599
600impl From<BytePage> for BytesMut {
601 fn from(page: BytePage) -> Self {
602 match page.inner {
603 StorageType::Bytes(b) => b.into(),
604 StorageType::Storage(storage) => BytesMut { storage },
605 StorageType::Vec(v) => BytesMut::copy_from_slice(&v),
606 }
607 }
608}
609
610impl PartialEq for BytePage {
611 fn eq(&self, other: &BytePage) -> bool {
612 self.as_ref() == other.as_ref()
613 }
614}
615
616impl<'a> PartialEq<&'a [u8]> for BytePage {
617 fn eq(&self, other: &&'a [u8]) -> bool {
618 self.as_ref() == *other
619 }
620}
621
622impl<'a, const N: usize> PartialEq<&'a [u8; N]> for BytePage {
623 fn eq(&self, other: &&'a [u8; N]) -> bool {
624 self.as_ref() == other.as_ref()
625 }
626}
627
628impl io::Read for BytePage {
629 fn read(&mut self, dst: &mut [u8]) -> io::Result<usize> {
630 let len = cmp::min(self.len(), dst.len());
631 if len > 0 {
632 dst[..len].copy_from_slice(&self[..len]);
633 self.advance_to(len);
634 }
635 Ok(len)
636 }
637}
638
639impl ops::Deref for BytePage {
640 type Target = [u8];
641
642 #[inline]
643 fn deref(&self) -> &[u8] {
644 self.as_ref()
645 }
646}
647
648impl fmt::Debug for BytePage {
649 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
650 fmt::Debug::fmt(&crate::debug::BsDebug(self.as_ref()), fmt)
651 }
652}
653
#[cfg(test)]
mod tests {
    use super::*;

    /// End-to-end exercise of `BytePages` growth, page rollover, `take`,
    /// the `From` conversions into `BytePage`, and `Debug` output.
    #[test]
    fn pages() {
        let mut pages = BytePages::new(BytePageSize::Size8);
        assert!(pages.is_empty());
        assert_eq!(pages.len(), 0);
        assert_eq!(pages.num_pages(), 0);
        pages.extend_from_slice(b"b");
        assert_eq!(pages.len(), 1);
        assert_eq!(pages.num_pages(), 1);
        // 1 + 9216 bytes: fills the 8 KB page and spills into a second.
        pages.extend_from_slice("a".repeat(9 * 1024).as_bytes());
        assert_eq!(pages.len(), 9217);
        assert_eq!(pages.num_pages(), 2);
        assert!(!pages.is_empty());

        let mut pgs = BytePages::new(BytePageSize::Size8);
        pgs.put_i8(b'a' as i8);
        let p = pgs.take().unwrap();
        assert_eq!(p.len(), 1);
        assert_eq!(p.as_ref(), b"a");

        // Filling a page exactly seals it and leaves `current` empty.
        pgs.extend_from_slice("a".repeat(8 * 1024 - 1).as_bytes());
        assert_eq!(pgs.num_pages(), 1);
        pgs.put_u8(b'a');
        assert_eq!(pgs.num_pages(), 1);
        assert_eq!(pgs.current.len(), 0);

        pgs.put_u8(b'a');
        assert_eq!(pgs.num_pages(), 2);

        // A page-sized `Bytes` append seals current and is queued whole.
        pgs.append(Bytes::copy_from_slice("a".repeat(8 * 1024).as_bytes()));
        assert_eq!(pgs.num_pages(), 3);
        assert_eq!(pgs.current.len(), 0);

        // Drain the first buffer: one full page, then the remainder.
        let p = pages.take().unwrap();
        assert_eq!(p.len(), 8192);
        let p = pages.take().unwrap();
        assert_eq!(p.len(), 1025);
        assert!(!p.is_empty());
        assert_eq!(p.as_ref().as_ptr(), p.as_ptr());
        assert_eq!(p.as_ref(), "a".repeat(1025).as_bytes());
        assert!(pages.take().is_none());

        // `BytePage` construction from each supported source type.
        let p = BytePage::from(Bytes::copy_from_slice(b"123"));
        assert_eq!(p.len(), 3);
        assert!(!p.is_empty());
        assert_eq!(p.as_ref(), b"123");
        assert_eq!(p.as_ref().as_ptr(), p.as_ptr());

        let p = BytePage::from(&b"123"[..]);
        assert_eq!(p.len(), 3);
        assert!(!p.is_empty());
        assert_eq!(p.as_ref(), b"123");
        assert_eq!(p.as_ref().as_ptr(), p.as_ptr());

        let p = BytePage::from(b"123");
        assert_eq!(p.len(), 3);
        assert!(!p.is_empty());
        assert_eq!(p.as_ref(), b"123");
        assert_eq!(p.as_ref().as_ptr(), p.as_ptr());

        let p = BytePage::from("123");
        assert_eq!(p.len(), 3);
        assert!(!p.is_empty());
        assert_eq!(p.as_ref(), b"123");
        assert_eq!(p.as_ref().as_ptr(), p.as_ptr());
        assert_eq!(p.freeze(), b"123");

        let p = BytePage::from(vec![b'1', b'2', b'3']);
        assert_eq!(p.len(), 3);
        assert!(!p.is_empty());
        assert_eq!(p.as_ref(), b"123");
        assert_eq!(p.as_ref().as_ptr(), p.as_ptr());
        assert_eq!(p.freeze(), b"123");

        // Advancing a `Vec`-backed page drops the consumed prefix.
        let mut p = BytePage::from(vec![b'1', b'2', b'3']);
        p.advance_to(1);
        assert_eq!(p.len(), 2);
        assert!(!p.is_empty());
        assert_eq!(p.as_ref(), b"23");

        // Debug formatting: current page shown last.
        let mut pages = BytePages::new(BytePageSize::Size8);
        pages.extend_from_slice(b"b");
        assert_eq!(format!("{pages:?}"), "BytePages(b\"b\")");
        let p = pages.take().unwrap();
        assert_eq!(p.as_ref(), b"b");

        let mut pages = BytePages::new(BytePageSize::Size8);
        pages.extend_from_slice(b"a");
        pages.append(Bytes::copy_from_slice(b"123"));
        pages.pages.push_back(p);
        assert_eq!(format!("{pages:?}"), "BytePages(b\"b\", b\"a123\")");
    }

    /// `copy_to` and `clone` leave the source intact and independent of
    /// later writes to the original.
    #[test]
    fn pages_copy_to() {
        let mut pages = BytePages::default();
        let mut pages2 = BytePages::default();
        pages.put_slice(b"456");
        pages.prepend(BytePage::from(Bytes::copy_from_slice(b"123")));
        pages.copy_to(&mut pages2);
        let p = pages.freeze();
        assert_eq!(p, b"123456");
        let p2 = pages2.freeze();
        assert_eq!(p2, b"123456");

        // Mutating the source after copying must not affect the copy.
        let mut pages = BytePages::default();
        let mut pages2 = BytePages::default();
        pages.put_slice(b"456");
        pages.prepend(BytePage::from(Bytes::copy_from_slice(b"123")));
        pages.copy_to(&mut pages2);
        pages.put_u8(b'7');
        let p = pages.freeze();
        assert_eq!(p, b"1234567");
        let p2 = pages2.freeze();
        assert_eq!(p2, b"123456");

        // `clone` behaves like `copy_to` into a fresh buffer.
        let mut pages = BytePages::default();
        pages.put_slice(b"456");
        pages.prepend(BytePage::from(Bytes::copy_from_slice(b"123")));
        let mut pages2 = pages.clone();
        pages.put_u8(b'7');
        let p = pages.freeze();
        assert_eq!(p, b"1234567");
        let p2 = pages2.freeze();
        assert_eq!(p2, b"123456");
    }

    /// `split_to` / `split_into` at page and sub-page boundaries, plus
    /// `with_bytes_mut` write-through into the current page.
    #[test]
    fn pages_methods() {
        // Split inside the first (prepended) page.
        let mut pages = BytePages::default();
        pages.put_slice(b"456");
        pages.prepend(BytePage::from(Bytes::copy_from_slice(b"123")));
        let mut pages2 = pages.split_to(1);
        let p = pages.freeze();
        assert_eq!(p, b"23456");
        let p2 = pages2.freeze();
        assert_eq!(p2, b"1");

        // Split across the page boundary into the current page.
        let mut pages = BytePages::default();
        pages.put_slice(b"456");
        pages.prepend(BytePage::from(Bytes::copy_from_slice(b"123")));
        let mut pages2 = pages.split_to(4);
        let p = pages.freeze();
        assert_eq!(p, b"56");
        let p2 = pages2.freeze();
        assert_eq!(p2, b"1234");

        // Same split via `split_into` with a caller-provided target.
        let mut pages = BytePages::default();
        pages.put_slice(b"456");
        pages.prepend(BytePage::from(Bytes::copy_from_slice(b"123")));
        let mut pages2 = BytePages::default();
        pages.split_into(1, &mut pages2);
        let p = pages.freeze();
        assert_eq!(p, b"23456");
        let p2 = pages2.freeze();
        assert_eq!(p2, b"1");

        // Writes made through the `BytesMut` view land in `current`.
        let mut pages = BytePages::default();
        pages.with_bytes_mut(|buf| buf.extend_from_slice(b"123"));
        let p = pages.freeze();
        assert_eq!(p, b"123");
    }

    /// Cloning: `Bytes` pages share, `Storage` pages lose uniqueness, and
    /// `Vec` pages become `Bytes` copies.
    #[test]
    fn page_clone() {
        let p = BytePage::from(Bytes::copy_from_slice(b"123"));
        let p2 = p.clone();
        assert_eq!(p, p2);

        let mut p = BytePage::from(BytesMut::copy_from_slice(b"123"));
        if let StorageType::Storage(ref mut st) = p.inner {
            assert!(st.is_unique());
        } else {
            panic!()
        }
        let p2 = p.clone();
        assert_eq!(p, p2);
        // After cloning, the storage is shared, so no longer unique.
        if let StorageType::Storage(mut st) = p.inner {
            assert!(!st.is_unique());
        } else {
            panic!()
        }

        // A `Vec` page clones into the `Bytes` representation.
        let p = BytePage::from(vec![b'1', b'2', b'3']);
        let p2 = p.clone();
        assert_eq!(p, p2);
        if let StorageType::Bytes(_) = p2.inner {
        } else {
            panic!()
        }
    }

    /// `split_to` returns the head and keeps the tail for every variant.
    #[test]
    fn page_split_to() {
        let mut p = BytePage::from(Bytes::copy_from_slice(b"123"));
        let p2 = p.split_to(1);
        assert_eq!(p, b"23");
        assert_eq!(p2, b"1");

        let mut p = BytePage::from(BytesMut::copy_from_slice(b"123"));
        let p2 = p.split_to(1);
        assert_eq!(p, b"23");
        assert_eq!(p2, b"1");

        let mut p = BytePage::from(vec![b'1', b'2', b'3']);
        let p2 = p.split_to(1);
        assert_eq!(p, b"23");
        assert_eq!(p2, b"1");
    }

    /// `io::Read` drains the page and reports the bytes copied.
    #[test]
    fn page_read() {
        use std::io::Read;

        let mut page = BytePage::from(Bytes::copy_from_slice(b"123"));

        let mut buf = [0; 10];
        assert_eq!(page.read(&mut buf).unwrap(), 3);
        assert_eq!(page.len(), 0);
        // b"123" is ASCII 49, 50, 51; the rest of `buf` is untouched.
        assert_eq!(buf, [49, 50, 51, 0, 0, 0, 0, 0, 0, 0]);
    }
}