1#![allow(clippy::missing_panics_doc, clippy::box_collection)]
2use std::{borrow::Borrow, cell::Cell, cmp, collections::VecDeque, fmt, io, mem, ops, ptr};
3
4use crate::{BufMut, BytePageSize, ByteString, Bytes, BytesMut};
5use crate::{buf::UninitSlice, storage::StorageVec};
6
/// An ordered sequence of byte pages: zero or more sealed pages in a deque
/// plus one partially filled `current` buffer that new writes go into.
pub struct BytePages {
    // `Option` only so `Drop` can move the box out and return it to `CACHE`;
    // it is `Some` at all other times.
    st: Option<Box<Inner>>,
    // Partially filled write buffer; sealed into a `BytePage` when full.
    current: StorageVec,
}
11
/// Heap state of a `BytePages`, recycled through the thread-local `CACHE`.
struct Inner {
    // Page size used when allocating fresh `current` buffers.
    size: BytePageSize,
    // Sealed pages, front-to-back in byte order.
    pages: VecDeque<BytePage>,
}
16
thread_local! {
    // Per-thread pool of recycled `Inner` boxes. A `Cell<Option<..>>` lets the
    // pool be moved out, mutated, and put back without `RefCell` borrow checks.
    static CACHE: Cell<Option<Box<Vec<Box<Inner>>>>> = Cell::new(Some(Box::default()));
}
// Maximum number of recycled `Inner` boxes kept per thread.
const CACHE_SIZE: usize = 128;
21
impl BytePages {
    /// Creates an empty page list whose pages are filled up to `size`.
    ///
    /// The boxed `Inner` state is recycled from the thread-local `CACHE` when
    /// available, avoiding an allocation per construction.
    pub fn new(size: BytePageSize) -> Self {
        debug_assert!(size != BytePageSize::Unset, "Page cannot be Unset");

        let st = CACHE.with(move |c| {
            // Move the pool out of the Cell, use it, and restore it before
            // leaving the closure so later calls see `Some` again.
            let mut cache = c.take().unwrap();

            let item = if let Some(mut item) = cache.pop() {
                // Reuse a pooled Inner; its page deque was cleared in Drop.
                item.size = size;
                item
            } else {
                Box::new(Inner {
                    size,
                    pages: VecDeque::with_capacity(8),
                })
            };
            c.set(Some(cache));
            item
        });

        BytePages {
            st: Some(st),
            current: StorageVec::sized(size),
        }
    }

    /// Sealed pages, not including the partially filled `current` buffer.
    fn pages(&self) -> &VecDeque<BytePage> {
        // `st` is only `None` transiently inside `drop`, so unwrap holds.
        &self.st.as_ref().unwrap().pages
    }

    /// Mutable access to the sealed pages.
    fn pages_mut(&mut self) -> &mut VecDeque<BytePage> {
        &mut self.st.as_mut().unwrap().pages
    }

    /// Page size used for newly allocated `current` buffers.
    pub fn page_size(&self) -> BytePageSize {
        self.st.as_ref().unwrap().size
    }

    /// Changes the page size for future allocations; already sealed pages
    /// and the existing `current` buffer keep their size.
    pub fn set_page_size(&mut self, size: BytePageSize) {
        self.st.as_mut().unwrap().size = size;
    }

    /// Pushes `buf` in front of all existing data.
    ///
    /// Returns `false` (dropping the buffer) when it is empty.
    pub fn prepend<T>(&mut self, buf: T) -> bool
    where
        BytePage: From<T>,
    {
        let p = BytePage::from(buf);
        if p.is_empty() {
            false
        } else {
            self.pages_mut().push_front(p);
            true
        }
    }

    /// Appends `buf` after all existing data.
    ///
    /// Buffers that fit are copied into `current`'s spare capacity; larger
    /// ones either replace an empty `current` (when their storage can be
    /// adopted) or are queued as separate sealed pages.
    pub fn append<T>(&mut self, buf: T)
    where
        BytePage: From<T>,
    {
        let p = BytePage::from(buf);
        let remaining = self.current.remaining();

        if p.len() <= remaining {
            // Fits in the current page: plain byte copy.
            self.put_slice(p.as_ref());
        } else if self.current.len() == 0 {
            match p.into_storage() {
                Ok(st) => {
                    // Adopt the buffer's storage as the new current page.
                    self.current = st;
                }
                Err(page) => {
                    self.pages_mut().push_back(page);
                }
            }
        } else {
            // Seal the partially filled current page first, then queue `p`
            // after it so byte order is preserved.
            let storage = StorageVec::sized(self.page_size());
            let page: BytePage = From::from(mem::replace(&mut self.current, storage));
            let pages = self.pages_mut();
            pages.push_back(page);
            pages.push_back(p);
        }
    }

    /// Copies `extend` into the pages (alias for `put_slice`).
    #[inline]
    pub fn extend_from_slice(&mut self, extend: &[u8]) {
        self.put_slice(extend);
    }

    /// Total number of buffered bytes across sealed pages plus `current`.
    #[inline]
    pub fn len(&self) -> usize {
        self.pages()
            .iter()
            .fold(self.current.len(), |c, page| c + page.len())
    }

    /// `true` when neither the sealed pages nor `current` hold any bytes.
    #[inline]
    pub fn is_empty(&self) -> bool {
        for p in self.pages() {
            if !p.is_empty() {
                return false;
            }
        }
        self.current.len() == 0
    }

    /// Number of pages, counting a non-empty `current` buffer as one.
    #[inline]
    pub fn num_pages(&self) -> usize {
        if self.current.len() == 0 {
            self.pages().len()
        } else {
            self.pages().len() + 1
        }
    }

    /// Pops the front page; when only `current` holds data it is sealed and
    /// returned. Returns `None` when there is nothing buffered.
    pub fn take(&mut self) -> Option<BytePage> {
        if let Some(page) = self.pages_mut().pop_front() {
            Some(page)
        } else if self.current.len() == 0 {
            None
        } else {
            let storage = StorageVec::sized(self.page_size());
            Some(BytePage::from(mem::replace(&mut self.current, storage)))
        }
    }

    /// Appends a copy of all data to `pages`, leaving `self` intact.
    /// Sealed pages are cloned (cheap for shared storage); the `current`
    /// buffer is deep-copied.
    #[inline]
    pub fn copy_to(&self, pages: &mut BytePages) {
        for p in self.pages() {
            pages.append(p.clone());
        }

        if self.current.len() != 0 {
            pages.append(BytePage::from(Bytes::copy_from_slice(
                self.current.as_ref(),
            )));
        }
    }

    /// Moves all data into `pages`, draining `self`.
    #[inline]
    pub fn move_to(&mut self, pages: &mut BytePages) {
        while let Some(page) = self.take() {
            pages.append(page);
        }
    }

    /// Splits off and returns the first `at` bytes (fewer when less data is
    /// buffered), removing them from `self`.
    #[must_use]
    pub fn split_to(&mut self, at: usize) -> BytePages {
        let mut pages = BytePages::new(self.page_size());
        self.split_into(at, &mut pages);
        pages
    }

    /// Moves the first `at` bytes into `to` (fewer when less data is
    /// buffered); the remainder stays in `self`, order preserved.
    pub fn split_into(&mut self, mut at: usize, to: &mut BytePages) {
        {
            let pages = self.pages_mut();

            while let Some(mut page) = pages.pop_front() {
                let len = cmp::min(page.len(), at);
                to.append(page.split_to(len));

                if !page.is_empty() {
                    // This page straddles the split point; keep the tail.
                    pages.push_front(page);
                    return;
                }
                at -= len;
            }
        }
        // Sealed pages exhausted; split the (sealed) current buffer too.
        if at > 0
            && let Some(mut page) = self.take()
        {
            let len = cmp::min(page.len(), at);
            to.append(page.split_to(len));
            self.append(page);
        }
    }

    /// Drops all buffered data.
    #[inline]
    pub fn clear(&mut self) {
        while self.take().is_some() {}
    }

    /// Consumes all buffered data into a single contiguous `Bytes`.
    /// A single page is frozen without copying; multiple pages are copied
    /// into one preallocated buffer.
    #[inline]
    #[must_use]
    pub fn freeze(&mut self) -> Bytes {
        let pages = self.num_pages();
        if pages == 0 || self.is_empty() {
            // NOTE(review): when all pages are empty but present, they are
            // left in the deque rather than drained — harmless but asymmetric.
            Bytes::new()
        } else if pages == 1 {
            self.take().unwrap().freeze()
        } else {
            let mut buf = BytesMut::with_capacity(self.len());
            while let Some(p) = self.take() {
                buf.extend_from_slice(&p);
            }
            buf.freeze()
        }
    }

    /// Steals `pages.current` as our own `current` buffer, but only when we
    /// hold no data at all and `pages` has a non-empty current buffer.
    #[inline]
    pub fn try_get_current_from(&mut self, pages: &mut BytePages) {
        if self.pages().is_empty() && self.current.len() == 0 && pages.current.len() != 0 {
            self.current =
                mem::replace(&mut pages.current, StorageVec::sized(self.page_size()));
        }
    }

    /// Runs `f` with a `BytesMut` view aliasing the `current` buffer, then
    /// reclaims whatever storage the closure left behind.
    pub fn with_bytes_mut<F, R>(&mut self, f: F) -> R
    where
        F: FnOnce(&mut BytesMut) -> R,
    {
        let cap = self.current.capacity();
        // Duplicate the raw storage handle bit-for-bit: `buf` and
        // `self.current` now alias the same allocation, so `buf` must never
        // be dropped normally (see `mem::forget` below).
        //
        // NOTE(review): if `f` unwinds, `buf` IS dropped while `self.current`
        // still holds the same raw handle — potential double free; consider a
        // drop guard. Confirm against `StorageVec`'s drop semantics.
        let mut buf = BytesMut {
            storage: StorageVec(self.current.0),
        };

        let res = f(&mut buf);

        // Adopt whatever storage the closure ended up with (it may have
        // reallocated), then leak `buf` so the allocation is freed only once.
        self.current.0 = buf.storage.0;
        if buf.capacity() != cap {
            // NOTE(review): presumably drops the page-size tag on a
            // reallocated handle — confirm against `StorageVec::unsize`.
            buf.storage.unsize();
        }
        mem::forget(buf);

        // If the closure filled at least a whole page, seal it.
        if self.current.len() >= self.page_size().capacity() {
            let storage = StorageVec::sized(self.page_size());
            let page = BytePage::from(mem::replace(&mut self.current, storage));
            self.pages_mut().push_back(page);
        }

        res
    }
}
298
299impl Drop for BytePages {
300 fn drop(&mut self) {
301 CACHE.with(move |c| {
302 let mut cache = c.take().unwrap();
303 if cache.len() < CACHE_SIZE {
304 let mut st = self.st.take().unwrap();
305 st.pages.clear();
306 cache.push(st);
307 }
308 c.set(Some(cache));
309 });
310 }
311}
312
313impl fmt::Debug for BytePages {
314 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
315 let mut f = fmt.debug_tuple("BytePages");
316 for p in self.pages() {
317 f.field(p);
318 }
319 if self.current.len() != 0 {
320 f.field(&crate::debug::BsDebug(self.current.as_ref()));
321 }
322 f.finish()
323 }
324}
325
326impl Default for BytePages {
327 fn default() -> Self {
328 BytePages::new(BytePageSize::Size16)
329 }
330}
331
impl BufMut for BytePages {
    /// Spare capacity of the current page only; a fresh page is started once
    /// it fills, so writers are never truly out of room.
    #[inline]
    fn remaining_mut(&self) -> usize {
        self.current.remaining()
    }

    /// Marks `cnt` more bytes of the current page as initialized.
    ///
    /// # Safety
    /// The caller must have written `cnt` bytes into `chunk_mut()` and `cnt`
    /// must not exceed `remaining_mut()`; no bounds check is performed here.
    #[inline]
    unsafe fn advance_mut(&mut self, cnt: usize) {
        self.current.set_len(self.current.len() + cnt);
    }

    /// Uninitialized tail of the current page.
    #[inline]
    fn chunk_mut(&mut self) -> &mut UninitSlice {
        unsafe {
            // NOTE(review): the `&mut` on this temporary is redundant —
            // `ptr.add` resolves through the deref either way.
            let ptr = &mut self.current.as_ptr();
            // SAFETY: the slice starts just past the initialized prefix and
            // spans exactly the page's reported spare capacity.
            UninitSlice::from_raw_parts_mut(
                ptr.add(self.current.len()),
                self.remaining_mut(),
            )
        }
    }

    /// Copies `src` into the pages, sealing the current page and starting a
    /// fresh one each time it fills.
    fn put_slice(&mut self, mut src: &[u8]) {
        while !src.is_empty() {
            let amount = cmp::min(src.len(), self.current.remaining());
            unsafe {
                // SAFETY: `amount` is bounded by the spare capacity that
                // `chunk_mut` exposes, satisfying `advance_mut`'s contract.
                ptr::copy_nonoverlapping(
                    src.as_ptr(),
                    self.chunk_mut().as_mut_ptr(),
                    amount,
                );
                self.advance_mut(amount);
            }
            src = &src[amount..];

            if self.current.is_full() {
                // Page full: seal it and continue into a fresh one.
                let storage = StorageVec::sized(self.page_size());
                let page = BytePage::from(mem::replace(&mut self.current, storage));
                self.pages_mut().push_back(page);
            }
        }
    }

    /// Writes one byte, sealing the current page if it becomes full.
    #[inline]
    fn put_u8(&mut self, n: u8) {
        self.current.put_u8(n);
        if self.current.is_full() {
            let storage = StorageVec::sized(self.page_size());
            let page: BytePage = From::from(mem::replace(&mut self.current, storage));
            self.pages_mut().push_back(page);
        }
    }

    /// Writes one signed byte via `put_u8` (same bit pattern).
    #[inline]
    fn put_i8(&mut self, n: i8) {
        self.put_u8(n as u8);
    }
}
393
394impl Clone for BytePages {
395 fn clone(&self) -> Self {
396 let size = self.page_size();
397 let mut pages = BytePages::new(size);
398 self.copy_to(&mut pages);
399 pages
400 }
401}
402
403impl io::Write for BytePages {
404 fn write(&mut self, src: &[u8]) -> Result<usize, io::Error> {
405 self.put_slice(src);
406 Ok(src.len())
407 }
408
409 fn flush(&mut self) -> Result<(), io::Error> {
410 Ok(())
411 }
412}
413
414impl From<BytePages> for Bytes {
415 fn from(pages: BytePages) -> Bytes {
416 BytesMut::from(pages).freeze()
417 }
418}
419
420impl From<BytePages> for BytesMut {
421 fn from(mut pages: BytePages) -> BytesMut {
422 let mut buf = BytesMut::with_capacity(pages.len());
423 while let Some(p) = pages.take() {
424 buf.extend_from_slice(&p);
425 }
426 buf
427 }
428}
429
/// A single page of bytes backed by one of several storage representations.
pub struct BytePage {
    inner: StorageType,
}
433
/// Backing storage of a `BytePage`.
enum StorageType {
    // Immutable, possibly shared buffer.
    Bytes(Bytes),
    // Crate-internal storage (the representation `BytesMut` uses).
    Storage(StorageVec),
    // Plain owned vector; converted to `Bytes` on split/advance/clone.
    Vec(Vec<u8>),
}
439
impl BytePage {
    /// Number of bytes in this page.
    #[inline]
    pub fn len(&self) -> usize {
        match &self.inner {
            StorageType::Bytes(b) => b.len(),
            StorageType::Storage(b) => b.len(),
            StorageType::Vec(b) => b.len(),
        }
    }

    /// `true` when the page holds no bytes.
    #[inline]
    pub fn is_empty(&self) -> bool {
        match &self.inner {
            StorageType::Bytes(b) => b.is_empty(),
            StorageType::Storage(b) => b.len() == 0,
            StorageType::Vec(b) => b.is_empty(),
        }
    }

    /// Raw pointer to the first byte of the page's data.
    pub fn as_ptr(&self) -> *const u8 {
        unsafe {
            // SAFETY: presumably the storage-backed `as_ptr` calls only
            // require initialized storage, which any constructed page has —
            // confirm against the `StorageVec::as_ptr` contract.
            match &self.inner {
                StorageType::Bytes(b) => b.storage.as_ptr(),
                StorageType::Storage(b) => b.as_ptr(),
                StorageType::Vec(b) => b.as_ptr(),
            }
        }
    }

    /// Splits off and returns the first `at` bytes (capped at `len()`),
    /// leaving the remainder in `self`.
    ///
    /// Non-`Bytes` variants are first converted to `Bytes` (the storage is
    /// frozen; a `Vec` is copied) so both halves can share one buffer.
    #[must_use]
    pub fn split_to(&mut self, at: usize) -> BytePage {
        match &mut self.inner {
            StorageType::Bytes(b) => {
                let buf = b.split_to(cmp::min(at, b.len()));
                BytePage {
                    inner: StorageType::Bytes(buf),
                }
            }
            StorageType::Storage(_) => {
                // Swap the storage out, freeze it into Bytes, then retry.
                let inner = mem::replace(&mut self.inner, StorageType::Bytes(Bytes::new()));
                if let StorageType::Storage(st) = inner {
                    self.inner = StorageType::Bytes(Bytes {
                        storage: st.freeze(),
                    });
                    self.split_to(at)
                } else {
                    unreachable!()
                }
            }
            StorageType::Vec(_) => {
                // Copy the Vec into Bytes, then retry.
                let inner = mem::replace(&mut self.inner, StorageType::Bytes(Bytes::new()));
                if let StorageType::Vec(b) = inner {
                    self.inner = StorageType::Bytes(Bytes::copy_from_slice(&b));
                    self.split_to(at)
                } else {
                    unreachable!()
                }
            }
        }
    }

    /// Discards the first `cnt` bytes.
    ///
    /// For the `Vec` variant this panics (via slice indexing) when `cnt`
    /// exceeds the length.
    #[inline]
    pub fn advance_to(&mut self, cnt: usize) {
        match &mut self.inner {
            StorageType::Bytes(b) => b.advance_to(cnt),
            // NOTE(review): `cnt as u32` silently truncates for cnt >
            // u32::MAX; presumably page sizes make that unreachable — confirm.
            // SAFETY: defers to `StorageVec::set_start`'s contract.
            StorageType::Storage(b) => unsafe { b.set_start(cnt as u32) },
            StorageType::Vec(b) => {
                // A Vec cannot advance in place; re-materialize as Bytes.
                self.inner = StorageType::Bytes(Bytes::copy_from_slice(&b[cnt..]));
            }
        }
    }

    /// Converts the page into immutable `Bytes`, avoiding a copy where the
    /// underlying representation allows it.
    #[inline]
    #[must_use]
    pub fn freeze(self) -> Bytes {
        match self.inner {
            StorageType::Bytes(b) => b,
            StorageType::Storage(st) => Bytes {
                storage: st.freeze(),
            },
            StorageType::Vec(v) => Bytes::from(v),
        }
    }

    /// Tries to recover the underlying `StorageVec` so it can be written to
    /// again; only uniquely owned, not-yet-full storage qualifies.
    fn into_storage(self) -> Result<StorageVec, Self> {
        if let StorageType::Storage(mut st) = self.inner {
            if !st.is_full() && st.is_unique() {
                Ok(st)
            } else {
                Err(Self {
                    inner: StorageType::Storage(st),
                })
            }
        } else {
            Err(self)
        }
    }
}
558
559impl Clone for BytePage {
560 fn clone(&self) -> Self {
561 let inner = match &self.inner {
562 StorageType::Bytes(b) => StorageType::Bytes(b.clone()),
563 StorageType::Storage(st) => {
564 StorageType::Storage(unsafe { st.clone() })
567 }
568 StorageType::Vec(b) => StorageType::Bytes(Bytes::copy_from_slice(b)),
569 };
570
571 Self { inner }
572 }
573}
574
575impl AsRef<[u8]> for BytePage {
576 #[inline]
577 fn as_ref(&self) -> &[u8] {
578 match &self.inner {
579 StorageType::Bytes(b) => b.as_ref(),
580 StorageType::Storage(b) => b.as_ref(),
581 StorageType::Vec(b) => b.as_ref(),
582 }
583 }
584}
585
586impl Borrow<[u8]> for BytePage {
587 #[inline]
588 fn borrow(&self) -> &[u8] {
589 self.as_ref()
590 }
591}
592
593impl From<Bytes> for BytePage {
594 fn from(buf: Bytes) -> Self {
595 BytePage {
596 inner: StorageType::Bytes(buf),
597 }
598 }
599}
600
601impl<'a> From<&'a Bytes> for BytePage {
602 fn from(buf: &'a Bytes) -> Self {
603 BytePage {
604 inner: StorageType::Bytes(buf.clone()),
605 }
606 }
607}
608
609impl From<BytesMut> for BytePage {
610 fn from(buf: BytesMut) -> Self {
611 BytePage {
612 inner: StorageType::Storage(buf.storage),
613 }
614 }
615}
616
617impl From<ByteString> for BytePage {
618 fn from(s: ByteString) -> Self {
619 s.into_bytes().into()
620 }
621}
622
623impl<'a> From<&'a ByteString> for BytePage {
624 fn from(s: &'a ByteString) -> Self {
625 s.clone().into_bytes().into()
626 }
627}
628
629impl From<StorageVec> for BytePage {
630 fn from(buf: StorageVec) -> Self {
631 BytePage {
632 inner: StorageType::Storage(buf),
633 }
634 }
635}
636
637impl From<Vec<u8>> for BytePage {
638 fn from(buf: Vec<u8>) -> Self {
639 BytePage {
640 inner: StorageType::Vec(buf),
641 }
642 }
643}
644
645impl From<&'static str> for BytePage {
646 fn from(buf: &'static str) -> Self {
647 BytePage::from(Bytes::from_static(buf.as_bytes()))
648 }
649}
650
651impl From<&'static [u8]> for BytePage {
652 fn from(buf: &'static [u8]) -> Self {
653 BytePage::from(Bytes::from_static(buf))
654 }
655}
656
657impl<const N: usize> From<&'static [u8; N]> for BytePage {
658 fn from(src: &'static [u8; N]) -> Self {
659 BytePage::from(Bytes::from_static(src))
660 }
661}
662
663impl From<BytePage> for Bytes {
664 fn from(page: BytePage) -> Self {
665 match page.inner {
666 StorageType::Bytes(b) => b,
667 StorageType::Storage(storage) => BytesMut { storage }.freeze(),
668 StorageType::Vec(v) => Bytes::copy_from_slice(&v),
669 }
670 }
671}
672
673impl From<BytePage> for BytesMut {
674 fn from(page: BytePage) -> Self {
675 match page.inner {
676 StorageType::Bytes(b) => b.into(),
677 StorageType::Storage(storage) => BytesMut { storage },
678 StorageType::Vec(v) => BytesMut::copy_from_slice(&v),
679 }
680 }
681}
682
683impl PartialEq for BytePage {
684 fn eq(&self, other: &BytePage) -> bool {
685 self.as_ref() == other.as_ref()
686 }
687}
688
689impl<'a> PartialEq<&'a [u8]> for BytePage {
690 fn eq(&self, other: &&'a [u8]) -> bool {
691 self.as_ref() == *other
692 }
693}
694
695impl<'a, const N: usize> PartialEq<&'a [u8; N]> for BytePage {
696 fn eq(&self, other: &&'a [u8; N]) -> bool {
697 self.as_ref() == other.as_ref()
698 }
699}
700
701impl io::Read for BytePage {
702 fn read(&mut self, dst: &mut [u8]) -> io::Result<usize> {
703 let len = cmp::min(self.len(), dst.len());
704 if len > 0 {
705 dst[..len].copy_from_slice(&self[..len]);
706 self.advance_to(len);
707 }
708 Ok(len)
709 }
710}
711
712impl ops::Deref for BytePage {
713 type Target = [u8];
714
715 #[inline]
716 fn deref(&self) -> &[u8] {
717 self.as_ref()
718 }
719}
720
721impl fmt::Debug for BytePage {
722 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
723 fmt::Debug::fmt(&crate::debug::BsDebug(self.as_ref()), fmt)
724 }
725}
726
#[cfg(test)]
mod tests {
    use rand::Rng;

    use super::*;

    // End-to-end exercise of BytePages growth, page rollover on exact page
    // boundaries, and the BytePage constructors from each source type.
    #[test]
    fn pages() {
        let mut pages = BytePages::new(BytePageSize::Size8);
        assert!(pages.is_empty());
        assert_eq!(pages.len(), 0);
        assert_eq!(pages.num_pages(), 0);
        pages.extend_from_slice(b"b");
        assert_eq!(pages.len(), 1);
        assert_eq!(pages.num_pages(), 1);
        pages.extend_from_slice("a".repeat(9 * 1024).as_bytes());
        assert_eq!(pages.len(), 9217);
        assert_eq!(pages.num_pages(), 2);
        assert!(!pages.is_empty());

        let mut pgs = BytePages::new(BytePageSize::Size8);
        pgs.put_i8(b'a' as i8);
        let p = pgs.take().unwrap();
        assert_eq!(p.len(), 1);
        assert_eq!(p.as_ref(), b"a");

        // Filling exactly one page seals it and leaves `current` empty.
        pgs.extend_from_slice("a".repeat(8 * 1024 - 1).as_bytes());
        assert_eq!(pgs.num_pages(), 1);
        pgs.put_u8(b'a');
        assert_eq!(pgs.num_pages(), 1);
        assert_eq!(pgs.current.len(), 0);

        pgs.put_u8(b'a');
        assert_eq!(pgs.num_pages(), 2);

        // A page-sized append becomes its own page without touching `current`.
        pgs.append(Bytes::copy_from_slice("a".repeat(8 * 1024).as_bytes()));
        assert_eq!(pgs.num_pages(), 3);
        assert_eq!(pgs.current.len(), 0);

        let p = pages.take().unwrap();
        assert_eq!(p.len(), 8192);
        let p = pages.take().unwrap();
        assert_eq!(p.len(), 1025);
        assert!(!p.is_empty());
        assert_eq!(p.as_ref().as_ptr(), p.as_ptr());
        assert_eq!(p.as_ref(), "a".repeat(1025).as_bytes());
        assert!(pages.take().is_none());

        // BytePage construction from every supported source type.
        let p = BytePage::from(Bytes::copy_from_slice(b"123"));
        assert_eq!(p.len(), 3);
        assert!(!p.is_empty());
        assert_eq!(p.as_ref(), b"123");
        assert_eq!(p.as_ref().as_ptr(), p.as_ptr());

        let p = BytePage::from(&b"123"[..]);
        assert_eq!(p.len(), 3);
        assert!(!p.is_empty());
        assert_eq!(p.as_ref(), b"123");
        assert_eq!(p.as_ref().as_ptr(), p.as_ptr());

        let p = BytePage::from(b"123");
        assert_eq!(p.len(), 3);
        assert!(!p.is_empty());
        assert_eq!(p.as_ref(), b"123");
        assert_eq!(p.as_ref().as_ptr(), p.as_ptr());

        let p = BytePage::from("123");
        assert_eq!(p.len(), 3);
        assert!(!p.is_empty());
        assert_eq!(p.as_ref(), b"123");
        assert_eq!(p.as_ref().as_ptr(), p.as_ptr());
        assert_eq!(p.freeze(), b"123");

        let p = BytePage::from(vec![b'1', b'2', b'3']);
        assert_eq!(p.len(), 3);
        assert!(!p.is_empty());
        assert_eq!(p.as_ref(), b"123");
        assert_eq!(p.as_ref().as_ptr(), p.as_ptr());
        assert_eq!(p.freeze(), b"123");

        let mut p = BytePage::from(vec![b'1', b'2', b'3']);
        p.advance_to(1);
        assert_eq!(p.len(), 2);
        assert!(!p.is_empty());
        assert_eq!(p.as_ref(), b"23");

        // Debug output includes sealed pages and the current buffer.
        let mut pages = BytePages::new(BytePageSize::Size8);
        pages.extend_from_slice(b"b");
        assert_eq!(format!("{pages:?}"), "BytePages(b\"b\")");
        let p = pages.take().unwrap();
        assert_eq!(p.as_ref(), b"b");

        let mut pages = BytePages::new(BytePageSize::Size8);
        pages.extend_from_slice(b"a");
        pages.append(Bytes::copy_from_slice(b"123"));
        pages.pages_mut().push_back(p);
        assert_eq!(format!("{pages:?}"), "BytePages(b\"b\", b\"a123\")");

        assert_eq!(pages.len(), 5);
        pages.clear();
        assert_eq!(pages.len(), 0);
    }

    // copy_to and clone must leave the source intact and fully independent.
    #[test]
    fn pages_copy_to() {
        let mut pages = BytePages::default();
        let mut pages2 = BytePages::default();
        pages.put_slice(b"456");
        pages.prepend(BytePage::from(Bytes::copy_from_slice(b"123")));
        pages.copy_to(&mut pages2);
        let p = pages.freeze();
        assert_eq!(p, b"123456");
        let p2 = pages2.freeze();
        assert_eq!(p2, b"123456");

        let mut pages = BytePages::default();
        let mut pages2 = BytePages::default();
        pages.put_slice(b"456");
        pages.prepend(BytePage::from(Bytes::copy_from_slice(b"123")));
        pages.copy_to(&mut pages2);
        // Writing to the source after the copy must not affect the copy.
        pages.put_u8(b'7');
        let p = pages.freeze();
        assert_eq!(p, b"1234567");
        let p2 = pages2.freeze();
        assert_eq!(p2, b"123456");

        let mut pages = BytePages::default();
        pages.put_slice(b"456");
        pages.prepend(BytePage::from(Bytes::copy_from_slice(b"123")));
        let mut pages2 = pages.clone();
        pages.put_u8(b'7');
        let p = pages.freeze();
        assert_eq!(p, b"1234567");
        let p2 = pages2.freeze();
        assert_eq!(p2, b"123456");
    }

    // split_to / split_into byte accounting and the with_bytes_mut round trip.
    #[test]
    fn pages_methods() {
        let mut pages = BytePages::default();
        pages.put_slice(b"456");
        pages.prepend(BytePage::from(Bytes::copy_from_slice(b"123")));
        let mut pages2 = pages.split_to(1);
        let p = pages.freeze();
        assert_eq!(p, b"23456");
        let p2 = pages2.freeze();
        assert_eq!(p2, b"1");

        let mut pages = BytePages::default();
        pages.put_slice(b"456");
        pages.prepend(BytePage::from(Bytes::copy_from_slice(b"123")));
        // Split point crosses a page boundary.
        let mut pages2 = pages.split_to(4);
        let p = pages.freeze();
        assert_eq!(p, b"56");
        let p2 = pages2.freeze();
        assert_eq!(p2, b"1234");

        let mut pages = BytePages::default();
        pages.put_slice(b"456");
        pages.prepend(BytePage::from(Bytes::copy_from_slice(b"123")));
        let mut pages2 = BytePages::default();
        pages.split_into(1, &mut pages2);
        let p = pages.freeze();
        assert_eq!(p, b"23456");
        let p2 = pages2.freeze();
        assert_eq!(p2, b"1");

        let mut pages = BytePages::default();
        pages.with_bytes_mut(|buf| buf.extend_from_slice(b"123"));
        assert_eq!(pages.len(), 3);
        let p = pages.freeze();
        assert_eq!(p, b"123");

        // Large random payload forces reallocation inside with_bytes_mut.
        let data = rand::rng()
            .sample_iter(&rand::distr::Alphanumeric)
            .take(65_536)
            .map(char::from)
            .collect::<String>();

        let mut pages = BytePages::default();
        pages.with_bytes_mut(|buf| buf.extend_from_slice(data.as_bytes()));
        assert_eq!(pages.len(), 65_536);
        let p = pages.freeze();
        assert_eq!(p, data.as_bytes());
    }

    // Cloning a Storage-backed page shares storage (uniqueness is lost);
    // a Vec-backed page is cloned into the Bytes representation.
    #[test]
    fn page_clone() {
        let p = BytePage::from(Bytes::copy_from_slice(b"123"));
        let p2 = p.clone();
        assert_eq!(p, p2);

        let mut p = BytePage::from(BytesMut::copy_from_slice(b"123"));
        if let StorageType::Storage(ref mut st) = p.inner {
            assert!(st.is_unique());
        } else {
            panic!()
        }
        let p2 = p.clone();
        assert_eq!(p, p2);
        if let StorageType::Storage(mut st) = p.inner {
            assert!(!st.is_unique());
        } else {
            panic!()
        }

        let p = BytePage::from(vec![b'1', b'2', b'3']);
        let p2 = p.clone();
        assert_eq!(p, p2);
        if let StorageType::Bytes(_) = p2.inner {
        } else {
            panic!()
        }
    }

    // split_to works identically across all three storage variants.
    #[test]
    fn page_split_to() {
        let mut p = BytePage::from(Bytes::copy_from_slice(b"123"));
        let p2 = p.split_to(1);
        assert_eq!(p, b"23");
        assert_eq!(p2, b"1");

        let mut p = BytePage::from(BytesMut::copy_from_slice(b"123"));
        let p2 = p.split_to(1);
        assert_eq!(p, b"23");
        assert_eq!(p2, b"1");

        let mut p = BytePage::from(vec![b'1', b'2', b'3']);
        let p2 = p.split_to(1);
        assert_eq!(p, b"23");
        assert_eq!(p2, b"1");
    }

    // io::Read drains the page and reports the copied byte count.
    #[test]
    fn page_read() {
        use std::io::Read;

        let mut page = BytePage::from(Bytes::copy_from_slice(b"123"));

        let mut buf = [0; 10];
        assert_eq!(page.read(&mut buf).unwrap(), 3);
        assert_eq!(page.len(), 0);
        assert_eq!(buf, [49, 50, 51, 0, 0, 0, 0, 0, 0, 0]);
    }
}