1use std::{borrow, cmp, fmt, hash, mem, ops};
2
3use crate::{Buf, BytesMut, buf::IntoIter, debug, storage::INLINE_CAP, storage::Storage};
4
/// An immutable, contiguous view of bytes.
///
/// Thin wrapper over the crate's [`Storage`], which (per the API used
/// below) supports an empty/inline representation for small buffers (up to
/// `INLINE_CAP` bytes), a `'static`-slice representation, and a cloneable
/// backing store whose view can be narrowed with `set_start`/`set_end` —
/// see `Storage` for the authoritative details.
pub struct Bytes {
    // Crate-visible so sibling modules (e.g. `BytesMut::freeze`) can
    // construct a `Bytes` around an existing `Storage` directly.
    pub(crate) storage: Storage,
}
100
101impl Bytes {
108 #[inline]
121 pub const fn new() -> Bytes {
122 Bytes {
123 storage: Storage::empty(),
124 }
125 }
126
127 #[inline]
141 #[must_use]
142 pub const fn from_static(bytes: &'static [u8]) -> Bytes {
143 Bytes {
144 storage: Storage::from_static(bytes),
145 }
146 }
147
148 #[inline]
159 pub fn len(&self) -> usize {
160 self.storage.len()
161 }
162
163 #[inline]
174 pub fn is_empty(&self) -> bool {
175 self.storage.is_empty()
176 }
177
178 pub fn is_inline(&self) -> bool {
189 self.storage.is_inline()
190 }
191
192 #[must_use]
196 pub fn copy_from_slice(data: &[u8]) -> Self {
197 Bytes {
198 storage: Storage::from_slice(data),
199 }
200 }
201
202 #[must_use]
227 pub fn slice(&self, range: impl ops::RangeBounds<usize>) -> Bytes {
228 self.slice_checked(range)
229 .expect("Requires that `begin <= end` and `end <= self.len()`")
230 }
231
232 #[must_use]
236 pub fn slice_checked(&self, range: impl ops::RangeBounds<usize>) -> Option<Bytes> {
237 use std::ops::Bound;
238
239 let len = self.len();
240
241 let begin = match range.start_bound() {
242 Bound::Included(&n) => n,
243 Bound::Excluded(&n) => n + 1,
244 Bound::Unbounded => 0,
245 };
246
247 let end = match range.end_bound() {
248 Bound::Included(&n) => n + 1,
249 Bound::Excluded(&n) => n,
250 Bound::Unbounded => len,
251 };
252
253 if begin <= end && end <= len {
254 if end - begin <= INLINE_CAP {
255 Some(Bytes {
256 storage: Storage::from_slice(&self[begin..end]),
257 })
258 } else {
259 let mut ret = self.clone();
260 unsafe {
261 ret.storage.set_end(end);
262 ret.storage.set_start(begin);
263 }
264 Some(ret)
265 }
266 } else {
267 None
268 }
269 }
270
271 #[must_use]
297 pub fn slice_ref(&self, subset: &[u8]) -> Bytes {
298 self.slice_ref_checked(subset)
299 .expect("Given `sub` slice is not contained within the `Bytes` buffer")
300 }
301
302 #[must_use]
304 pub fn slice_ref_checked(&self, subset: &[u8]) -> Option<Bytes> {
305 let bytes_p = self.as_ptr() as usize;
306 let bytes_len = self.len();
307
308 let sub_p = subset.as_ptr() as usize;
309 let sub_len = subset.len();
310
311 if sub_p >= bytes_p && sub_p + sub_len <= bytes_p + bytes_len {
312 let sub_offset = sub_p - bytes_p;
313 Some(self.slice(sub_offset..(sub_offset + sub_len)))
314 } else {
315 None
316 }
317 }
318
319 #[must_use]
343 pub fn split_off(&mut self, at: usize) -> Bytes {
344 self.split_off_checked(at)
345 .expect("at value must be <= self.len()`")
346 }
347
348 #[must_use]
352 pub fn split_off_checked(&mut self, at: usize) -> Option<Bytes> {
353 if at <= self.len() {
354 if at == self.len() {
355 Some(Bytes::new())
356 } else if at == 0 {
357 Some(mem::take(self))
358 } else {
359 Some(Bytes {
360 storage: self.storage.split_off(at, true),
361 })
362 }
363 } else {
364 None
365 }
366 }
367
368 #[must_use]
392 pub fn split_to(&mut self, at: usize) -> Bytes {
393 self.split_to_checked(at)
394 .expect("at value must be <= self.len()`")
395 }
396
397 #[must_use]
401 pub fn split_to_checked(&mut self, at: usize) -> Option<Bytes> {
402 if at <= self.len() {
403 if at == self.len() {
404 Some(mem::take(self))
405 } else if at == 0 {
406 Some(Bytes::new())
407 } else {
408 Some(Bytes {
409 storage: self.storage.split_to(at),
410 })
411 }
412 } else {
413 None
414 }
415 }
416
417 #[inline]
437 pub fn advance_to(&mut self, cnt: usize) {
438 unsafe {
439 self.storage.set_start(cnt);
440 }
441 }
442
443 #[inline]
464 pub fn truncate(&mut self, len: usize) {
465 self.storage.truncate(len, true);
466 }
467
468 #[inline]
482 pub fn trimdown(&mut self) {
483 self.storage.trimdown();
484 }
485
486 #[inline]
498 pub fn clear(&mut self) {
499 self.storage = Storage::empty();
500 }
501
502 pub fn iter(&'_ self) -> std::slice::Iter<'_, u8> {
518 self.chunk().iter()
519 }
520
521 #[inline]
522 #[doc(hidden)]
523 pub fn info(&self) -> crate::info::Info {
524 self.storage.info()
525 }
526}
527
528impl Buf for Bytes {
529 #[inline]
530 fn remaining(&self) -> usize {
531 self.len()
532 }
533
534 #[inline]
535 fn chunk(&self) -> &[u8] {
536 self.storage.as_ref()
537 }
538
539 #[inline]
540 fn advance(&mut self, cnt: usize) {
541 self.advance_to(cnt);
542 }
543}
544
545impl bytes::buf::Buf for Bytes {
546 #[inline]
547 fn remaining(&self) -> usize {
548 self.len()
549 }
550
551 #[inline]
552 fn chunk(&self) -> &[u8] {
553 self.storage.as_ref()
554 }
555
556 #[inline]
557 fn advance(&mut self, cnt: usize) {
558 self.advance_to(cnt);
559 }
560}
561
562impl Clone for Bytes {
563 fn clone(&self) -> Bytes {
564 Bytes {
565 storage: self.storage.clone(),
566 }
567 }
568}
569
570impl AsRef<[u8]> for Bytes {
571 #[inline]
572 fn as_ref(&self) -> &[u8] {
573 self.storage.as_ref()
574 }
575}
576
577impl ops::Deref for Bytes {
578 type Target = [u8];
579
580 #[inline]
581 fn deref(&self) -> &[u8] {
582 self.storage.as_ref()
583 }
584}
585
586impl From<&Bytes> for Bytes {
587 fn from(src: &Bytes) -> Bytes {
588 src.clone()
589 }
590}
591
592impl From<Vec<u8>> for Bytes {
593 fn from(src: Vec<u8>) -> Bytes {
595 Bytes {
596 storage: Storage::from_slice(&src),
597 }
598 }
599}
600
601impl From<String> for Bytes {
602 fn from(src: String) -> Bytes {
603 Bytes {
604 storage: Storage::from_slice(src.as_bytes()),
605 }
606 }
607}
608
609impl From<&'static [u8]> for Bytes {
610 fn from(src: &'static [u8]) -> Bytes {
611 Bytes::from_static(src)
612 }
613}
614
615impl From<&'static str> for Bytes {
616 fn from(src: &'static str) -> Bytes {
617 Bytes::from_static(src.as_bytes())
618 }
619}
620
621impl<'a, const N: usize> From<&'a [u8; N]> for Bytes {
622 fn from(src: &'a [u8; N]) -> Bytes {
623 Bytes::copy_from_slice(src)
624 }
625}
626
627impl FromIterator<u8> for Bytes {
628 fn from_iter<T: IntoIterator<Item = u8>>(into_iter: T) -> Self {
629 BytesMut::from_iter(into_iter).freeze()
630 }
631}
632
633impl<'a> FromIterator<&'a u8> for Bytes {
634 fn from_iter<T: IntoIterator<Item = &'a u8>>(into_iter: T) -> Self {
635 BytesMut::from_iter(into_iter).freeze()
636 }
637}
638
// Equality delegates to `[u8]` (see `PartialEq` below), which is a total
// equivalence relation, so the `Eq` marker holds.
impl Eq for Bytes {}
640
641impl PartialEq for Bytes {
642 fn eq(&self, other: &Bytes) -> bool {
643 self.storage.as_ref() == other.storage.as_ref()
644 }
645}
646
647impl PartialOrd for Bytes {
648 fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
649 Some(self.cmp(other))
650 }
651}
652
653impl Ord for Bytes {
654 fn cmp(&self, other: &Bytes) -> cmp::Ordering {
655 self.storage.as_ref().cmp(other.storage.as_ref())
656 }
657}
658
659impl Default for Bytes {
660 #[inline]
661 fn default() -> Bytes {
662 Bytes::new()
663 }
664}
665
666impl fmt::Debug for Bytes {
667 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
668 fmt::Debug::fmt(&debug::BsDebug(self.storage.as_ref()), fmt)
669 }
670}
671
672impl hash::Hash for Bytes {
673 fn hash<H>(&self, state: &mut H)
674 where
675 H: hash::Hasher,
676 {
677 let s: &[u8] = self.as_ref();
678 s.hash(state);
679 }
680}
681
682impl borrow::Borrow<[u8]> for Bytes {
683 fn borrow(&self) -> &[u8] {
684 self.as_ref()
685 }
686}
687
impl IntoIterator for Bytes {
    type Item = u8;
    type IntoIter = IntoIter<Bytes>;

    /// Consumes the buffer, yielding its bytes by value via the crate's
    /// [`IntoIter`] adapter.
    fn into_iter(self) -> Self::IntoIter {
        IntoIter::new(self)
    }
}
696
697impl<'a> IntoIterator for &'a Bytes {
698 type Item = &'a u8;
699 type IntoIter = std::slice::Iter<'a, u8>;
700
701 fn into_iter(self) -> Self::IntoIter {
702 self.as_ref().iter()
703 }
704}
705
706impl PartialEq<[u8]> for Bytes {
713 fn eq(&self, other: &[u8]) -> bool {
714 self.storage.as_ref() == other
715 }
716}
717
718impl<const N: usize> PartialEq<[u8; N]> for Bytes {
719 fn eq(&self, other: &[u8; N]) -> bool {
720 self.storage.as_ref() == other.as_ref()
721 }
722}
723
724impl PartialOrd<[u8]> for Bytes {
725 fn partial_cmp(&self, other: &[u8]) -> Option<cmp::Ordering> {
726 self.storage.as_ref().partial_cmp(other)
727 }
728}
729
730impl<const N: usize> PartialOrd<[u8; N]> for Bytes {
731 fn partial_cmp(&self, other: &[u8; N]) -> Option<cmp::Ordering> {
732 self.storage.as_ref().partial_cmp(other.as_ref())
733 }
734}
735
736impl PartialEq<Bytes> for [u8] {
737 fn eq(&self, other: &Bytes) -> bool {
738 *other == *self
739 }
740}
741
742impl<const N: usize> PartialEq<Bytes> for [u8; N] {
743 fn eq(&self, other: &Bytes) -> bool {
744 *other == *self
745 }
746}
747
748impl<const N: usize> PartialEq<Bytes> for &[u8; N] {
749 fn eq(&self, other: &Bytes) -> bool {
750 *other == *self
751 }
752}
753
754impl PartialOrd<Bytes> for [u8] {
755 fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
756 other.partial_cmp(self)
757 }
758}
759
760impl<const N: usize> PartialOrd<Bytes> for [u8; N] {
761 fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
762 other.partial_cmp(self)
763 }
764}
765
766impl PartialEq<str> for Bytes {
767 fn eq(&self, other: &str) -> bool {
768 self.storage.as_ref() == other.as_bytes()
769 }
770}
771
772impl PartialOrd<str> for Bytes {
773 fn partial_cmp(&self, other: &str) -> Option<cmp::Ordering> {
774 self.storage.as_ref().partial_cmp(other.as_bytes())
775 }
776}
777
778impl PartialEq<Bytes> for str {
779 fn eq(&self, other: &Bytes) -> bool {
780 *other == *self
781 }
782}
783
784impl PartialOrd<Bytes> for str {
785 fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
786 other.partial_cmp(self)
787 }
788}
789
790impl PartialEq<Vec<u8>> for Bytes {
791 fn eq(&self, other: &Vec<u8>) -> bool {
792 *self == other[..]
793 }
794}
795
796impl PartialOrd<Vec<u8>> for Bytes {
797 fn partial_cmp(&self, other: &Vec<u8>) -> Option<cmp::Ordering> {
798 self.storage.as_ref().partial_cmp(&other[..])
799 }
800}
801
802impl PartialEq<Bytes> for Vec<u8> {
803 fn eq(&self, other: &Bytes) -> bool {
804 *other == *self
805 }
806}
807
808impl PartialOrd<Bytes> for Vec<u8> {
809 fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
810 other.partial_cmp(self)
811 }
812}
813
814impl PartialEq<String> for Bytes {
815 fn eq(&self, other: &String) -> bool {
816 *self == other[..]
817 }
818}
819
820impl PartialOrd<String> for Bytes {
821 fn partial_cmp(&self, other: &String) -> Option<cmp::Ordering> {
822 self.storage.as_ref().partial_cmp(other.as_bytes())
823 }
824}
825
826impl PartialEq<Bytes> for String {
827 fn eq(&self, other: &Bytes) -> bool {
828 *other == *self
829 }
830}
831
832impl PartialOrd<Bytes> for String {
833 fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
834 other.partial_cmp(self)
835 }
836}
837
838impl PartialEq<Bytes> for &[u8] {
839 fn eq(&self, other: &Bytes) -> bool {
840 *other == *self
841 }
842}
843
844impl PartialOrd<Bytes> for &[u8] {
845 fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
846 other.partial_cmp(self)
847 }
848}
849
850impl PartialEq<Bytes> for &str {
851 fn eq(&self, other: &Bytes) -> bool {
852 *other == *self
853 }
854}
855
856impl PartialOrd<Bytes> for &str {
857 fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
858 other.partial_cmp(self)
859 }
860}
861
862impl<'a, T: ?Sized> PartialEq<&'a T> for Bytes
863where
864 Bytes: PartialEq<T>,
865{
866 fn eq(&self, other: &&'a T) -> bool {
867 *self == **other
868 }
869}
870
871impl<'a, T: ?Sized> PartialOrd<&'a T> for Bytes
872where
873 Bytes: PartialOrd<T>,
874{
875 fn partial_cmp(&self, other: &&'a T) -> Option<cmp::Ordering> {
876 self.partial_cmp(&**other)
877 }
878}
879
#[cfg(test)]
#[allow(unused_must_use)]
mod tests {
    use std::collections::HashMap;

    use super::*;
    use crate::BufMut;

    const LONG: &[u8] = b"mary had a1 little la2mb, little lamb, little lamb, little lamb, little lamb, little lamb \
            mary had a little lamb, little lamb, little lamb, little lamb, little lamb, little lamb \
            mary had a little lamb, little lamb, little lamb, little lamb, little lamb, little lamb \0";

    #[test]
    #[allow(
        clippy::op_ref,
        clippy::len_zero,
        clippy::nonminimal_bool,
        clippy::unnecessary_fallible_conversions
    )]
    fn bytes() {
        let mut b = Bytes::from(LONG.to_vec());
        b.advance_to(10);
        assert_eq!(&b, &LONG[10..]);
        b.advance_to(10);
        assert_eq!(&b[..], &LONG[20..]);
        assert_eq!(&b, &LONG[20..]);
        b.clear();
        assert!(b.is_inline());
        assert!(b.is_empty());
        assert!(b.len() == 0);

        let mut b = Bytes::from(LONG);
        b.advance_to(10);
        assert_eq!(&b, &LONG[10..]);
        b.advance_to(10);
        assert_eq!(&b[..], &LONG[20..]);
        assert_eq!(&b, &LONG[20..]);
        b.clear();
        assert!(b.is_empty());
        assert!(b.len() == 0);

        let mut b = Bytes::from(LONG);
        b.split_off(10);
        assert_eq!(&b, &LONG[..10]);
        b.advance_to(5);
        assert_eq!(&b, &LONG[5..10]);

        let mut b = Bytes::copy_from_slice(&LONG[..15]);
        assert!(b.is_inline());
        b.split_off(10);
        assert_eq!(&b, &LONG[..10]);
        b.advance_to(1);
        assert_eq!(&b, &LONG[1..10]);

        let b = Bytes::from(b"123");
        // "12" is a strict prefix of "123", so it sorts *before* it.
        // These two asserts previously used `>`, codifying the reversed
        // `PartialOrd<Bytes>` impls that have now been fixed.
        assert!(&b"12"[..] < &b);
        assert!("123" == &b);
        assert!("12" < &b);

        let b = Bytes::from(&Bytes::from(LONG));
        assert_eq!(b, LONG);

        let b = Bytes::from(BytesMut::from(LONG));
        assert_eq!(b, LONG);

        let mut b: Bytes = BytesMut::try_from(b).unwrap().freeze();
        assert_eq!(b, LONG);
        assert!(!(b > b));
        assert_eq!(<Bytes as Buf>::remaining(&b), LONG.len());
        assert_eq!(<Bytes as Buf>::chunk(&b), LONG);
        <Bytes as Buf>::advance(&mut b, 10);
        assert_eq!(Buf::chunk(&b), &LONG[10..]);
        <Bytes as Buf>::advance(&mut b, 10);
        assert_eq!(Buf::chunk(&b), &LONG[20..]);

        let mut h: HashMap<Bytes, usize> = HashMap::default();
        h.insert(b.clone(), 1);
        assert_eq!(h.get(&b), Some(&1));

        let mut b = BytesMut::try_from(LONG).unwrap();
        assert_eq!(b, LONG);
        assert_eq!(<BytesMut as Buf>::remaining(&b), LONG.len());
        assert_eq!(<BytesMut as BufMut>::remaining_mut(&b), 0);
        assert_eq!(<BytesMut as Buf>::chunk(&b), LONG);
        <BytesMut as Buf>::advance(&mut b, 10);
        assert_eq!(<BytesMut as Buf>::chunk(&b), &LONG[10..]);

        let mut b = BytesMut::with_capacity(12);
        <BytesMut as BufMut>::put_i8(&mut b, 1);
        assert_eq!(b, b"\x01".as_ref());
        <BytesMut as BufMut>::put_u8(&mut b, 2);
        assert_eq!(b, b"\x01\x02".as_ref());
        <BytesMut as BufMut>::put_slice(&mut b, b"12345");
        assert_eq!(b, b"\x01\x0212345".as_ref());
        <BytesMut as BufMut>::chunk_mut(&mut b).write_byte(0, b'1');
        unsafe { <BytesMut as BufMut>::advance_mut(&mut b, 1) };
        assert_eq!(b, b"\x01\x02123451".as_ref());

        let mut iter = Bytes::from(LONG.to_vec()).into_iter();
        assert_eq!(iter.next(), Some(LONG[0]));
        assert_eq!(iter.next(), Some(LONG[1]));
        assert_eq!(iter.next(), Some(LONG[2]));
        assert_eq!(iter.next(), Some(LONG[3]));
        assert_eq!(iter.get_ref(), &LONG[4..]);
        assert_eq!(iter.get_mut(), &LONG[4..]);
        let b = iter.into_inner();
        assert_eq!(b, &LONG[4..]);
    }
}
988}