1use core::hash::{Hash, Hasher};
4use core::borrow::Borrow;
5use core::cmp::Ordering;
6use core::ptr::NonNull;
7use core::cell::Cell;
8use core::ops::Deref;
9use core::fmt;
10
// Size and alignment of one header word. The shared buffer starts with two
// such words (refcount, then length), so the payload begins at 2 * ALIGN.
// The counter type is asserted to have exactly this size/alignment in
// `From<&[u8]>`.
const ALIGN: usize = align_of::<usize>();
12
/// Minimal reference-count interface that lets `make_comrade!` share one
/// macro body between the single-threaded (`Cell<usize>`) and atomic
/// (`AtomicUsize`) variants.
trait Counter {
    /// Adds one reference.
    fn increment(&self);
    /// Removes one reference and returns the count that remains.
    fn decrement(&self) -> usize;
}
17
18impl Counter for Cell<usize> {
19 fn increment(&self) {
20 self.set(self.get() + 1);
21 }
22 fn decrement(&self) -> usize {
23 let res = self.get() - 1;
24 self.set(res);
25 res
26 }
27}
28
29#[cfg(target_has_atomic = "ptr")]
30impl Counter for core::sync::atomic::AtomicUsize {
31 fn increment(&self) {
32 self.fetch_add(1, core::sync::atomic::Ordering::Relaxed);
33 }
34 fn decrement(&self) -> usize {
35 self.fetch_sub(1, core::sync::atomic::Ordering::AcqRel) - 1
36 }
37}
38
/// Generates a reference-counted, immutable byte-slice type whose handle is
/// a single (thin) pointer — half the size of `Rc<[u8]>` / `Arc<[u8]>`,
/// which are fat pointers.
///
/// Everything lives in one heap allocation:
///
/// ```text
/// offset 0              ALIGN           2 * ALIGN
/// [ refcount: $counter | len: usize    | payload bytes ... ]
/// ```
///
/// `$counter` selects the counting strategy (`Cell<usize>` single-threaded,
/// `AtomicUsize` thread-safe) and must be exactly one machine word in size
/// and alignment — checked by the `debug_assert!` in `From<&[u8]>`.
macro_rules! make_comrade {
    ($(#[$m:meta])? $vis:vis struct $name:ident : $counter:ty) => {
        $(#[$m])?
        $vis struct $name(NonNull<u8>);

        impl From<&[u8]> for $name {
            /// Allocates a fresh buffer, writes the header (count = 1,
            /// length), then copies `value` in after it.
            fn from(value: &[u8]) -> Self {
                // Header invariant: counter and length each occupy one
                // ALIGN-sized, ALIGN-aligned word.
                debug_assert!(align_of::<$counter>() == ALIGN && size_of::<$counter>() == ALIGN);

                // A slice is at most isize::MAX bytes, so the addition
                // cannot wrap usize; Layout still validates the total
                // against isize::MAX. (This was `unwrap_unchecked`, which
                // is UB for a slice whose length pushes the total past
                // isize::MAX — panic explicitly instead.)
                let layout = alloc::alloc::Layout::from_size_align(2 * ALIGN + value.len(), ALIGN)
                    .expect("allocation size overflows isize::MAX");

                unsafe {
                    let ptr = alloc::alloc::alloc(layout);
                    if ptr.is_null() {
                        // A failed allocation must never reach
                        // NonNull::new_unchecked (UB); report OOM through
                        // the global handler instead.
                        alloc::alloc::handle_alloc_error(layout);
                    }
                    // SAFETY: `ptr` is non-null, ALIGN-aligned, and large
                    // enough for two header words plus `value.len()` bytes.
                    // `write` (rather than `*ptr = ..`) is the correct idiom
                    // for initializing uninitialized memory.
                    (ptr as *mut $counter).write(<$counter>::new(1));
                    (ptr.add(ALIGN) as *mut usize).write(value.len());
                    ptr.add(2 * ALIGN).copy_from_nonoverlapping(value.as_ptr(), value.len());
                    Self(NonNull::new_unchecked(ptr))
                }
            }
        }

        impl Clone for $name {
            /// Cheap clone: bumps the reference count and shares the buffer.
            fn clone(&self) -> Self {
                // SAFETY: `self.0` points at a live allocation whose first
                // word is the counter.
                // NOTE(review): unlike std's Rc/Arc there is no refcount
                // overflow guard; overflowing would require forgetting
                // usize::MAX handles first.
                unsafe { (*(self.0.as_ptr() as *const $counter)).increment(); }
                Self(self.0)
            }
        }

        impl Drop for $name {
            /// Drops one reference; frees the buffer when the last one goes.
            fn drop(&mut self) {
                unsafe {
                    // SAFETY: the counter sits at offset 0 of a live
                    // allocation. When it reaches zero this was the last
                    // handle, so reading the length and freeing is sound.
                    if (*(self.0.as_ptr() as *const $counter)).decrement() == 0 {
                        let len = *(self.0.as_ptr().add(ALIGN) as *const usize);
                        // SAFETY: this exact layout was validated when the
                        // buffer was allocated in `From<&[u8]>`.
                        let layout = alloc::alloc::Layout::from_size_align(2 * ALIGN + len, ALIGN)
                            .unwrap_unchecked();
                        alloc::alloc::dealloc(self.0.as_ptr(), layout);
                    }
                }
            }
        }

        impl Deref for $name {
            type Target = [u8];
            fn deref(&self) -> &Self::Target {
                // SAFETY: the length word at offset ALIGN describes exactly
                // the payload starting at offset 2 * ALIGN, and the buffer
                // outlives `&self`.
                unsafe {
                    core::slice::from_raw_parts(
                        self.0.as_ptr().add(2 * ALIGN),
                        *(self.0.as_ptr().add(ALIGN) as *const usize),
                    )
                }
            }
        }

        impl Default for $name {
            /// An empty byte string (still heap-allocates the two-word header).
            fn default() -> Self {
                Self::from([].as_slice())
            }
        }

        impl AsRef<[u8]> for $name {
            fn as_ref(&self) -> &[u8] {
                self
            }
        }

        impl Borrow<[u8]> for $name {
            fn borrow(&self) -> &[u8] {
                self
            }
        }

        // Hash/Eq/Ord all delegate to the payload slice, keeping them
        // consistent with the Borrow<[u8]> impl (required for correct
        // map/set lookups by `&[u8]` key).
        impl Hash for $name {
            fn hash<H: Hasher>(&self, state: &mut H) {
                (**self).hash(state);
            }
        }

        impl fmt::Debug for $name {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                write!(f, "{:?}", &**self)
            }
        }

        impl<T: AsRef<[u8]>> PartialEq<T> for $name {
            fn eq(&self, other: &T) -> bool {
                (**self).eq(other.as_ref())
            }
        }

        impl Eq for $name {}

        impl<T: AsRef<[u8]>> PartialOrd<T> for $name {
            fn partial_cmp(&self, other: &T) -> Option<Ordering> {
                (**self).partial_cmp(other.as_ref())
            }
        }

        impl Ord for $name {
            fn cmp(&self, other: &Self) -> Ordering {
                (**self).cmp(&**other)
            }
        }

        impl crate::Comrade for $name {
            fn from_slice(s: &[u8]) -> Self {
                s.into()
            }
            fn as_slice(&self) -> &[u8] {
                self
            }
        }
    };
}
make_comrade!(#[doc = "Basically `Rc<[u8]>` but only takes up half the stack space."] pub struct RcBytes : Cell<usize>);
// The atomic variant only exists where the target supports pointer-sized
// atomics, mirroring how the `alloc` crate gates `Arc`.
#[cfg(target_has_atomic = "ptr")]
make_comrade!(#[doc = "Basically `Arc<[u8]>` but only takes up half the stack space."] pub struct ArcBytes : core::sync::atomic::AtomicUsize);

// SAFETY: ArcBytes points at a buffer that is never mutated after
// construction, and its reference count is an AtomicUsize, so moving or
// sharing handles across threads is sound. (RcBytes deliberately gets no
// such impls: its NonNull field keeps it !Send + !Sync.)
#[cfg(target_has_atomic = "ptr")]
unsafe impl Send for ArcBytes {}
#[cfg(target_has_atomic = "ptr")]
unsafe impl Sync for ArcBytes {}