1use std::{borrow, cmp, fmt, hash, io, mem, ops};
2
3use crate::{Buf, BytesMut, buf::IntoIter, debug, storage::INLINE_CAP, storage::Storage};
4
/// An immutable, cheaply cloneable buffer of contiguous bytes.
///
/// All behavior delegates to the crate-internal [`Storage`], which can hold
/// small payloads inline (up to `INLINE_CAP` bytes — see `is_inline`) or
/// reference a `'static` slice without copying (`from_static`).
/// NOTE(review): heap-backed storage is presumably shared between clones
/// (`clone` + unsafe `set_start`/`set_end` windowing suggest refcounting) —
/// confirm against `Storage`'s documentation.
pub struct Bytes {
    // Single field: the representation-erased byte container.
    pub(crate) storage: Storage,
}
100
101impl Bytes {
108 #[inline]
121 pub const fn new() -> Bytes {
122 Bytes {
123 storage: Storage::empty(),
124 }
125 }
126
127 #[inline]
141 #[must_use]
142 pub const fn from_static(bytes: &'static [u8]) -> Bytes {
143 Bytes {
144 storage: Storage::from_static(bytes),
145 }
146 }
147
148 #[inline]
159 pub fn len(&self) -> usize {
160 self.storage.len()
161 }
162
163 #[inline]
174 pub fn is_empty(&self) -> bool {
175 self.storage.is_empty()
176 }
177
178 pub fn is_inline(&self) -> bool {
189 self.storage.is_inline()
190 }
191
192 #[must_use]
196 pub fn copy_from_slice(data: &[u8]) -> Self {
197 Bytes {
198 storage: Storage::from_slice(data),
199 }
200 }
201
202 #[must_use]
227 pub fn slice(&self, range: impl ops::RangeBounds<usize>) -> Bytes {
228 self.slice_checked(range)
229 .expect("Requires that `begin <= end` and `end <= self.len()`")
230 }
231
232 #[must_use]
236 pub fn slice_checked(&self, range: impl ops::RangeBounds<usize>) -> Option<Bytes> {
237 use std::ops::Bound;
238
239 let len = self.len();
240
241 let begin = match range.start_bound() {
242 Bound::Included(&n) => n,
243 Bound::Excluded(&n) => n + 1,
244 Bound::Unbounded => 0,
245 };
246
247 let end = match range.end_bound() {
248 Bound::Included(&n) => n + 1,
249 Bound::Excluded(&n) => n,
250 Bound::Unbounded => len,
251 };
252
253 if begin <= end && end <= len {
254 if end - begin <= INLINE_CAP {
255 Some(Bytes {
256 storage: Storage::from_slice(&self[begin..end]),
257 })
258 } else {
259 let mut ret = self.clone();
260 unsafe {
261 ret.storage.set_end(end);
262 ret.storage.set_start(begin);
263 }
264 Some(ret)
265 }
266 } else {
267 None
268 }
269 }
270
271 #[must_use]
297 pub fn slice_ref(&self, subset: &[u8]) -> Bytes {
298 self.slice_ref_checked(subset)
299 .expect("Given `sub` slice is not contained within the `Bytes` buffer")
300 }
301
302 #[must_use]
304 pub fn slice_ref_checked(&self, subset: &[u8]) -> Option<Bytes> {
305 let bytes_p = self.as_ptr() as usize;
306 let bytes_len = self.len();
307
308 let sub_p = subset.as_ptr() as usize;
309 let sub_len = subset.len();
310
311 if sub_p >= bytes_p && sub_p + sub_len <= bytes_p + bytes_len {
312 let sub_offset = sub_p - bytes_p;
313 Some(self.slice(sub_offset..(sub_offset + sub_len)))
314 } else {
315 None
316 }
317 }
318
319 #[must_use]
343 pub fn split_off(&mut self, at: usize) -> Bytes {
344 self.split_off_checked(at)
345 .expect("at value must be <= self.len()`")
346 }
347
348 #[must_use]
352 pub fn split_off_checked(&mut self, at: usize) -> Option<Bytes> {
353 if at <= self.len() {
354 if at == self.len() {
355 Some(Bytes::new())
356 } else if at == 0 {
357 Some(mem::take(self))
358 } else {
359 Some(Bytes {
360 storage: self.storage.split_off(at, true),
361 })
362 }
363 } else {
364 None
365 }
366 }
367
368 #[must_use]
392 pub fn split_to(&mut self, at: usize) -> Bytes {
393 self.split_to_checked(at)
394 .expect("at value must be <= self.len()`")
395 }
396
397 #[must_use]
401 pub fn split_to_checked(&mut self, at: usize) -> Option<Bytes> {
402 if at <= self.len() {
403 if at == self.len() {
404 Some(mem::take(self))
405 } else if at == 0 {
406 Some(Bytes::new())
407 } else {
408 Some(Bytes {
409 storage: self.storage.split_to(at),
410 })
411 }
412 } else {
413 None
414 }
415 }
416
417 #[inline]
437 pub fn advance_to(&mut self, cnt: usize) {
438 unsafe {
439 self.storage.set_start(cnt);
440 }
441 }
442
443 #[inline]
464 pub fn truncate(&mut self, len: usize) {
465 self.storage.truncate(len);
466 }
467
468 #[inline]
482 pub fn trimdown(&mut self) {
483 self.storage.trimdown();
484 }
485
486 #[inline]
498 pub fn clear(&mut self) {
499 self.storage = Storage::empty();
500 }
501
502 pub fn iter(&'_ self) -> std::slice::Iter<'_, u8> {
518 self.chunk().iter()
519 }
520
521 #[inline]
522 #[doc(hidden)]
523 pub fn info(&self) -> crate::info::Info {
524 self.storage.info()
525 }
526}
527
528impl Buf for Bytes {
529 #[inline]
530 fn remaining(&self) -> usize {
531 self.len()
532 }
533
534 #[inline]
535 fn chunk(&self) -> &[u8] {
536 self.storage.as_ref()
537 }
538
539 #[inline]
540 fn advance(&mut self, cnt: usize) {
541 self.advance_to(cnt);
542 }
543
544 #[inline]
545 fn get_u8(&mut self) -> u8 {
546 self.storage.get_u8()
547 }
548}
549
550impl bytes::buf::Buf for Bytes {
551 #[inline]
552 fn remaining(&self) -> usize {
553 self.len()
554 }
555
556 #[inline]
557 fn chunk(&self) -> &[u8] {
558 self.storage.as_ref()
559 }
560
561 #[inline]
562 fn advance(&mut self, cnt: usize) {
563 self.advance_to(cnt);
564 }
565
566 #[inline]
567 fn get_u8(&mut self) -> u8 {
568 self.storage.get_u8()
569 }
570}
571
572impl Clone for Bytes {
573 fn clone(&self) -> Bytes {
574 Bytes {
575 storage: self.storage.clone(),
576 }
577 }
578}
579
580impl AsRef<[u8]> for Bytes {
581 #[inline]
582 fn as_ref(&self) -> &[u8] {
583 self.storage.as_ref()
584 }
585}
586
587impl ops::Deref for Bytes {
588 type Target = [u8];
589
590 #[inline]
591 fn deref(&self) -> &[u8] {
592 self.storage.as_ref()
593 }
594}
595
596impl From<&Bytes> for Bytes {
597 fn from(src: &Bytes) -> Bytes {
598 src.clone()
599 }
600}
601
602impl From<Vec<u8>> for Bytes {
603 fn from(src: Vec<u8>) -> Bytes {
605 Bytes {
606 storage: Storage::from_slice(&src),
607 }
608 }
609}
610
611impl From<String> for Bytes {
612 fn from(src: String) -> Bytes {
613 Bytes {
614 storage: Storage::from_slice(src.as_bytes()),
615 }
616 }
617}
618
619impl From<&'static [u8]> for Bytes {
620 fn from(src: &'static [u8]) -> Bytes {
621 Bytes::from_static(src)
622 }
623}
624
625impl From<&'static str> for Bytes {
626 fn from(src: &'static str) -> Bytes {
627 Bytes::from_static(src.as_bytes())
628 }
629}
630
631impl<'a, const N: usize> From<&'a [u8; N]> for Bytes {
632 fn from(src: &'a [u8; N]) -> Bytes {
633 Bytes::copy_from_slice(src)
634 }
635}
636
637impl FromIterator<u8> for Bytes {
638 fn from_iter<T: IntoIterator<Item = u8>>(into_iter: T) -> Self {
639 BytesMut::from_iter(into_iter).freeze()
640 }
641}
642
643impl<'a> FromIterator<&'a u8> for Bytes {
644 fn from_iter<T: IntoIterator<Item = &'a u8>>(into_iter: T) -> Self {
645 BytesMut::from_iter(into_iter).freeze()
646 }
647}
648
649impl Eq for Bytes {}
650
651impl PartialEq for Bytes {
652 fn eq(&self, other: &Bytes) -> bool {
653 self.storage.as_ref() == other.storage.as_ref()
654 }
655}
656
657impl PartialOrd for Bytes {
658 fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
659 Some(self.cmp(other))
660 }
661}
662
663impl Ord for Bytes {
664 fn cmp(&self, other: &Bytes) -> cmp::Ordering {
665 self.storage.as_ref().cmp(other.storage.as_ref())
666 }
667}
668
669impl Default for Bytes {
670 #[inline]
671 fn default() -> Bytes {
672 Bytes::new()
673 }
674}
675
676impl io::Read for Bytes {
677 fn read(&mut self, dst: &mut [u8]) -> io::Result<usize> {
678 let len = cmp::min(self.len(), dst.len());
679 if len > 0 {
680 dst[..len].copy_from_slice(&self[..len]);
681 self.advance_to(len);
682 }
683 Ok(len)
684 }
685}
686
687impl fmt::Debug for Bytes {
688 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
689 fmt::Debug::fmt(&debug::BsDebug(self.storage.as_ref()), fmt)
690 }
691}
692
693impl hash::Hash for Bytes {
694 fn hash<H>(&self, state: &mut H)
695 where
696 H: hash::Hasher,
697 {
698 let s: &[u8] = self.as_ref();
699 s.hash(state);
700 }
701}
702
703impl borrow::Borrow<[u8]> for Bytes {
704 fn borrow(&self) -> &[u8] {
705 self.as_ref()
706 }
707}
708
709impl IntoIterator for Bytes {
710 type Item = u8;
711 type IntoIter = IntoIter<Bytes>;
712
713 fn into_iter(self) -> Self::IntoIter {
714 IntoIter::new(self)
715 }
716}
717
718impl<'a> IntoIterator for &'a Bytes {
719 type Item = &'a u8;
720 type IntoIter = std::slice::Iter<'a, u8>;
721
722 fn into_iter(self) -> Self::IntoIter {
723 self.as_ref().iter()
724 }
725}
726
727impl PartialEq<[u8]> for Bytes {
734 fn eq(&self, other: &[u8]) -> bool {
735 self.storage.as_ref() == other
736 }
737}
738
739impl<const N: usize> PartialEq<[u8; N]> for Bytes {
740 fn eq(&self, other: &[u8; N]) -> bool {
741 self.storage.as_ref() == other.as_ref()
742 }
743}
744
745impl PartialOrd<[u8]> for Bytes {
746 fn partial_cmp(&self, other: &[u8]) -> Option<cmp::Ordering> {
747 self.storage.as_ref().partial_cmp(other)
748 }
749}
750
751impl<const N: usize> PartialOrd<[u8; N]> for Bytes {
752 fn partial_cmp(&self, other: &[u8; N]) -> Option<cmp::Ordering> {
753 self.storage.as_ref().partial_cmp(other.as_ref())
754 }
755}
756
757impl PartialEq<Bytes> for [u8] {
758 fn eq(&self, other: &Bytes) -> bool {
759 *other == *self
760 }
761}
762
763impl<const N: usize> PartialEq<Bytes> for [u8; N] {
764 fn eq(&self, other: &Bytes) -> bool {
765 *other == *self
766 }
767}
768
769impl<const N: usize> PartialEq<Bytes> for &[u8; N] {
770 fn eq(&self, other: &Bytes) -> bool {
771 *other == *self
772 }
773}
774
775impl PartialOrd<Bytes> for [u8] {
776 fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
777 other.partial_cmp(self)
778 }
779}
780
781impl<const N: usize> PartialOrd<Bytes> for [u8; N] {
782 fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
783 other.partial_cmp(self)
784 }
785}
786
787impl PartialEq<str> for Bytes {
788 fn eq(&self, other: &str) -> bool {
789 self.storage.as_ref() == other.as_bytes()
790 }
791}
792
793impl PartialOrd<str> for Bytes {
794 fn partial_cmp(&self, other: &str) -> Option<cmp::Ordering> {
795 self.storage.as_ref().partial_cmp(other.as_bytes())
796 }
797}
798
799impl PartialEq<Bytes> for str {
800 fn eq(&self, other: &Bytes) -> bool {
801 *other == *self
802 }
803}
804
805impl PartialOrd<Bytes> for str {
806 fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
807 other.partial_cmp(self)
808 }
809}
810
811impl PartialEq<Vec<u8>> for Bytes {
812 fn eq(&self, other: &Vec<u8>) -> bool {
813 *self == other[..]
814 }
815}
816
817impl PartialOrd<Vec<u8>> for Bytes {
818 fn partial_cmp(&self, other: &Vec<u8>) -> Option<cmp::Ordering> {
819 self.storage.as_ref().partial_cmp(&other[..])
820 }
821}
822
823impl PartialEq<Bytes> for Vec<u8> {
824 fn eq(&self, other: &Bytes) -> bool {
825 *other == *self
826 }
827}
828
829impl PartialOrd<Bytes> for Vec<u8> {
830 fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
831 other.partial_cmp(self)
832 }
833}
834
835impl PartialEq<String> for Bytes {
836 fn eq(&self, other: &String) -> bool {
837 *self == other[..]
838 }
839}
840
841impl PartialOrd<String> for Bytes {
842 fn partial_cmp(&self, other: &String) -> Option<cmp::Ordering> {
843 self.storage.as_ref().partial_cmp(other.as_bytes())
844 }
845}
846
847impl PartialEq<Bytes> for String {
848 fn eq(&self, other: &Bytes) -> bool {
849 *other == *self
850 }
851}
852
853impl PartialOrd<Bytes> for String {
854 fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
855 other.partial_cmp(self)
856 }
857}
858
859impl PartialEq<Bytes> for &[u8] {
860 fn eq(&self, other: &Bytes) -> bool {
861 *other == *self
862 }
863}
864
865impl PartialOrd<Bytes> for &[u8] {
866 fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
867 other.partial_cmp(self)
868 }
869}
870
871impl PartialEq<Bytes> for &str {
872 fn eq(&self, other: &Bytes) -> bool {
873 *other == *self
874 }
875}
876
877impl PartialOrd<Bytes> for &str {
878 fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
879 other.partial_cmp(self)
880 }
881}
882
883impl<'a, T: ?Sized> PartialEq<&'a T> for Bytes
884where
885 Bytes: PartialEq<T>,
886{
887 fn eq(&self, other: &&'a T) -> bool {
888 *self == **other
889 }
890}
891
892impl<'a, T: ?Sized> PartialOrd<&'a T> for Bytes
893where
894 Bytes: PartialOrd<T>,
895{
896 fn partial_cmp(&self, other: &&'a T) -> Option<cmp::Ordering> {
897 self.partial_cmp(&**other)
898 }
899}
900
901#[cfg(test)]
902#[allow(unused_must_use)]
903mod tests {
904 use std::collections::HashMap;
905
906 use super::*;
907 use crate::BufMut;
908
909 const LONG: &[u8] = b"mary had a1 little la2mb, little lamb, little lamb, little lamb, little lamb, little lamb \
910 mary had a little lamb, little lamb, little lamb, little lamb, little lamb, little lamb \
911 mary had a little lamb, little lamb, little lamb, little lamb, little lamb, little lamb \0";
912
913 #[test]
914 #[allow(
915 clippy::op_ref,
916 clippy::len_zero,
917 clippy::nonminimal_bool,
918 clippy::unnecessary_fallible_conversions
919 )]
920 fn bytes() {
921 let mut b = Bytes::from(LONG.to_vec());
922 b.advance_to(10);
923 assert_eq!(&b, &LONG[10..]);
924 b.advance_to(10);
925 assert_eq!(&b[..], &LONG[20..]);
926 assert_eq!(&b, &LONG[20..]);
927 b.clear();
928 assert!(b.is_inline());
929 assert!(b.is_empty());
930 assert!(b.len() == 0);
931
932 let mut b = Bytes::from(LONG);
933 b.advance_to(10);
934 assert_eq!(&b, &LONG[10..]);
935 b.advance_to(10);
936 assert_eq!(&b[..], &LONG[20..]);
937 assert_eq!(&b, &LONG[20..]);
938 b.clear();
939 assert!(b.is_empty());
940 assert!(b.len() == 0);
941
942 let mut b = Bytes::from(LONG);
943 b.split_off(10);
944 assert_eq!(&b, &LONG[..10]);
945 b.advance_to(5);
946 assert_eq!(&b, &LONG[5..10]);
947
948 let mut b = Bytes::copy_from_slice(&LONG[..15]);
949 assert!(b.is_inline());
950 b.split_off(10);
951 assert_eq!(&b, &LONG[..10]);
952 b.advance_to(1);
953 assert_eq!(&b, &LONG[1..10]);
954
955 let mut b = Bytes::from(b"123");
956 assert!(&b"12"[..] > &b);
957 assert!("123" == &b);
958 assert!("12" > &b);
959 assert!("12" > b);
960 assert_eq!(b.get_u8(), b'1');
961 assert_eq!("23", &b);
962
963 let mut b = Bytes::from(&Bytes::from(LONG));
964 assert_eq!(b, LONG);
965 assert_eq!(b.get_u8(), LONG[0]);
966 assert_eq!(b.get_u8(), LONG[1]);
967 assert_eq!(b.len(), LONG.len() - 2);
968
969 let b = Bytes::from(BytesMut::from(LONG));
970 assert_eq!(b, LONG);
971
972 let mut b: Bytes = BytesMut::try_from(b).unwrap().freeze();
973 assert_eq!(b, LONG);
974 assert!(!(b > b));
975 assert_eq!(<Bytes as Buf>::remaining(&b), LONG.len());
976 assert_eq!(<Bytes as Buf>::chunk(&b), LONG);
977 <Bytes as Buf>::advance(&mut b, 10);
978 assert_eq!(Buf::chunk(&b), &LONG[10..]);
979 <Bytes as Buf>::advance(&mut b, 10);
980 assert_eq!(Buf::chunk(&b), &LONG[20..]);
981
982 let mut h: HashMap<Bytes, usize> = HashMap::default();
983 h.insert(b.clone(), 1);
984 assert_eq!(h.get(&b), Some(&1));
985
986 let mut b = BytesMut::try_from(LONG).unwrap();
987 assert_eq!(b, LONG);
988 assert_eq!(<BytesMut as Buf>::remaining(&b), LONG.len());
989 assert_eq!(<BytesMut as BufMut>::remaining_mut(&b), 0);
990 assert_eq!(<BytesMut as Buf>::chunk(&b), LONG);
991 <BytesMut as Buf>::advance(&mut b, 10);
992 assert_eq!(<BytesMut as Buf>::chunk(&b), &LONG[10..]);
993
994 let mut b = BytesMut::with_capacity(12);
995 <BytesMut as BufMut>::put_i8(&mut b, 1);
996 assert_eq!(b, b"\x01".as_ref());
997 <BytesMut as BufMut>::put_u8(&mut b, 2);
998 assert_eq!(b, b"\x01\x02".as_ref());
999 <BytesMut as BufMut>::put_slice(&mut b, b"12345");
1000 assert_eq!(b, b"\x01\x0212345".as_ref());
1001 <BytesMut as BufMut>::chunk_mut(&mut b).write_byte(0, b'1');
1002 unsafe { <BytesMut as BufMut>::advance_mut(&mut b, 1) };
1003 assert_eq!(b, b"\x01\x02123451".as_ref());
1004
1005 let mut iter = Bytes::from(LONG.to_vec()).into_iter();
1006 assert_eq!(iter.next(), Some(LONG[0]));
1007 assert_eq!(iter.next(), Some(LONG[1]));
1008 assert_eq!(iter.next(), Some(LONG[2]));
1009 assert_eq!(iter.next(), Some(LONG[3]));
1010 assert_eq!(iter.get_ref(), &LONG[4..]);
1011 assert_eq!(iter.get_mut(), &LONG[4..]);
1012 let b = iter.into_inner();
1013 assert_eq!(b, &LONG[4..]);
1014
1015 let mut b = Bytes::copy_from_slice(b"123");
1016 assert!(b.is_inline());
1017 assert_eq!(b.storage.capacity(), 23);
1018 b.truncate(2);
1019 assert_eq!(b, *b"12");
1020 assert_eq!(bytes::buf::Buf::get_u8(&mut b), 49);
1021 assert_eq!(b.len(), 1);
1022 }
1023
1024 #[test]
1025 fn bytes_read() {
1026 use std::io::Read;
1027
1028 let mut b = Bytes::copy_from_slice(b"123");
1029
1030 let mut buf = [0; 10];
1031 assert_eq!(b.read(&mut buf).unwrap(), 3);
1032 assert_eq!(b.len(), 0);
1033 assert_eq!(buf, [49, 50, 51, 0, 0, 0, 0, 0, 0, 0]);
1034 }
1035}