// xstring/xstring.rs

use core::{
    alloc::Layout,
    borrow::Borrow,
    cell::UnsafeCell,
    ffi::CStr,
    fmt::{Debug, Display},
    hash::Hash,
    marker::PhantomData,
    mem::MaybeUninit,
    ops::{Deref, Index},
    slice,
    sync::atomic::{AtomicUsize, Ordering},
};

use crate::{
    allocator::{Allocator, Global},
    str::Str,
};

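/// An immutable string with three internal representations:
///
/// * inline: payloads of up to `INLINE_SIZE` bytes are stored directly in the
///   value itself, null-padded when shorter than the buffer;
/// * static: a `&'static` source is borrowed as a pointer and length, with no
///   copy;
/// * allocated: longer payloads live in a reference-counted, allocator-backed
///   heap buffer, so clones and sub-slices share one allocation.
///
/// `S` is the string-like view (`str`, `CStr`, `[u8]`, ...) and `A` the
/// allocator used for the heap representation.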
#[repr(transparent)]
pub struct XString<S: Str + ?Sized = str, A: Allocator = Global> {
    repr: Repr<A>,
    _marker: PhantomData<S>,
}

unsafe impl<S: Str + ?Sized + Send, A: Allocator> Send for XString<S, A> {}
unsafe impl<S: Str + ?Sized + Sync, A: Allocator> Sync for XString<S, A> {}
impl<S: Str + ?Sized + Unpin, A: Allocator> Unpin for XString<S, A> {}

impl<S: Str + ?Sized> XString<S> {
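    /// Creates an `XString` by copying `source`, storing it inline when the
    /// bytes fit and in a reference-counted heap buffer obtained from the
    /// [`Global`] allocator otherwise.
    ///
    /// Illustrative sketch (assumes this crate implements `Str` for `str`):
    ///
    /// ```ignore
    /// let short = XString::new("hi"); // stored inline
    /// let long = XString::new("a considerably longer string"); // heap-allocated
    /// assert_eq!(long.as_str(), "a considerably longer string");
    /// ```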
    #[inline(always)]
    pub fn new(source: &S) -> Self {
        Self::new_in(source, Global)
    }

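    /// Borrows a `'static` source without copying or allocating; the result
    /// only records the source's pointer and length.
    ///
    /// Illustrative sketch (assumes this crate implements `Str` for `str`):
    ///
    /// ```ignore
    /// static GREETING: &str = "hello";
    /// let s = XString::from_static(GREETING); // no allocation
    /// ```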
    #[inline]
    pub fn from_static(source: &'static S) -> Self {
        let source = source.as_bytes();
        let repr = unsafe {
            Repr {
                r#static: Static::new(source),
            }
        };
        Self {
            repr,
            _marker: PhantomData,
        }
    }
}

impl<S: Str + ?Sized, A: Allocator> XString<S, A> {
    #[inline]
    pub fn new_in(source: &S, allocator: A) -> Self {
        let source = source.as_bytes();
        let repr = unsafe {
            if source.len() <= INLINE_SIZE {
                Repr {
                    inline: Inlined::new(source),
                }
            } else {
                Repr {
                    alloc: Allocated::new(source, allocator),
                }
            }
        };
        Self {
            repr,
            _marker: PhantomData,
        }
    }

    /// # Safety
    ///
    /// The caller must ensure that `dst` is derived from `self`, i.e. that it
    /// points into the same buffer that `self` dereferences to.
    #[inline]
    pub unsafe fn slice_as(&self, dst: &S) -> Self {
        let src = self.deref();

        let repr = if self.repr.is_inlined() {
            Repr {
                inline: unsafe { Inlined::new(dst.as_bytes()) },
            }
        } else if self.repr.is_static() {
            Repr {
                r#static: unsafe { Static::new(dst.as_bytes()) },
            }
        } else {
            let src = src.as_bytes().as_ptr_range();
            let dst = dst.as_bytes().as_ptr_range();
            let start_offset = unsafe { dst.start.offset_from(src.start) } as i32;
            let end_offset = unsafe { dst.end.offset_from(src.end) } as i32;
            debug_assert!(start_offset >= 0);
            debug_assert!(end_offset <= 0);

            let mut target = self.clone();

            unsafe {
                target.repr.alloc.start = self.repr.alloc.start + start_offset;
                target.repr.alloc.end = self.repr.alloc.end + end_offset;
            }

            return target;
        };

        Self {
            repr,
            _marker: PhantomData,
        }
    }

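    /// Returns an `XString` covering the sub-range `idx` of `self`. Inline
    /// slices are copied into a fresh inline buffer; static and allocated
    /// slices reuse the parent's data, the latter by sharing the
    /// reference-counted buffer and only adjusting its start/end offsets.
    ///
    /// Illustrative sketch (assumes this crate implements `Str` for `str`):
    ///
    /// ```ignore
    /// let s = XString::new("hello world, this does not fit inline");
    /// let word = s.slice(6..11);
    /// assert_eq!(word.as_str(), "world");
    /// ```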
    #[inline(always)]
    pub fn slice<T>(&self, idx: T) -> Self
    where
        S: Index<T, Output = S>,
    {
        unsafe { self.slice_as(&self[idx]) }
    }
}

impl<A: Allocator> XString<str, A> {
    #[inline(always)]
    pub fn as_str(&self) -> &str {
        self
    }
}

impl<A: Allocator> XString<CStr, A> {
    #[inline(always)]
    pub fn as_c_str(&self) -> &CStr {
        self
    }
}

impl<A: Allocator> XString<[u8], A> {
    #[inline(always)]
    pub fn as_bytes(&self) -> &[u8] {
        self
    }
}

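// Forwards `PartialEq`/`PartialOrd` to the underlying `S`, both for other
// `XString`s and for plain references and common smart pointers; the calls
// rely on deref coercion from `&XString<S, A>` (and `&Box<S>`, etc.) to `&S`.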
macro_rules! impl_for_refs {
    ($s:ident, $($t:ty),* $(,)?) => {$(
        impl<$s: Str + ?Sized + PartialEq, A: Allocator> PartialEq<$t> for XString<$s, A> {
            #[inline(always)]
            fn eq(&self, other: &$t) -> bool {
                $s::eq(self, other)
            }
        }

        impl<$s: Str + ?Sized + PartialOrd, A: Allocator> PartialOrd<$t> for XString<$s, A> {
            #[inline(always)]
            fn partial_cmp(&self, other: &$t) -> Option<core::cmp::Ordering> {
                $s::partial_cmp(self, other)
            }
        }
    )*};
}

impl_for_refs!(
    S,
    Self,
    S,
    &S,
    ::alloc::boxed::Box<S>,
    ::alloc::rc::Rc<S>,
    ::alloc::sync::Arc<S>,
);

impl<S: Str + ?Sized + Eq, A: Allocator> Eq for XString<S, A> {}

impl<S: Str + ?Sized + Ord, A: Allocator> Ord for XString<S, A> {
    #[inline(always)]
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        S::cmp(self, other)
    }
}

impl<S: Str + ?Sized + Hash, A: Allocator> Hash for XString<S, A> {
    #[inline(always)]
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        S::hash(self, state)
    }
}

impl<S: Str + ?Sized + Debug, A: Allocator> Debug for XString<S, A> {
    #[inline(always)]
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        Debug::fmt(&**self, f)
    }
}

impl<S: Str + ?Sized + Display, A: Allocator> Display for XString<S, A> {
    #[inline(always)]
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        Display::fmt(&**self, f)
    }
}

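// Recovers the byte payload of whichever representation is active: inline
// strings find their length at the first NUL (or span the whole buffer when
// it is completely filled), static strings negate the stored length, and
// allocated strings index into the shared buffer behind the header.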
impl<S: Str + ?Sized, A: Allocator> Deref for XString<S, A> {
    type Target = S;

    #[inline]
    fn deref(&self) -> &Self::Target {
        unsafe {
            let bytes = if self.repr.is_inlined() {
                if let Ok(s) = CStr::from_bytes_until_nul(&self.repr.inline.data) {
                    if S::CONTAINS_NUL {
                        s.to_bytes_with_nul()
                    } else {
                        s.to_bytes()
                    }
                } else {
                    &self.repr.inline.data
                }
            } else if self.repr.is_static() {
                slice::from_raw_parts(self.repr.r#static.data, (-self.repr.r#static.len) as _)
            } else {
                slice::from_raw_parts(
                    (self.repr.alloc.ptr.add(1) as *const u8).add(self.repr.alloc.start as _),
                    (self.repr.alloc.end - self.repr.alloc.start) as _,
                )
            };
            S::from_bytes_unchecked(bytes)
        }
    }
}

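// Dropping an inline or static string is a no-op. For the allocated
// representation the reference count is decremented and, once it reaches
// zero, the allocator stored in the header is moved out and used to free the
// combined header-plus-payload buffer.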
impl<S: Str + ?Sized, A: Allocator> Drop for XString<S, A> {
    fn drop(&mut self) {
        if self.repr.is_inlined() || self.repr.is_static() {
            return;
        }

        unsafe {
            // Release on the decrement pairs with the acquire fence below so
            // that other threads' last uses of the buffer happen-before its
            // deallocation (the same scheme `Arc` uses).
            if (*self.repr.alloc.ptr).rc.fetch_sub(1, Ordering::Release) > 1 {
                return;
            }
            core::sync::atomic::fence(Ordering::Acquire);

            let (layout, _) = Layout::new::<AllocatedRepr<A>>()
                .extend(Layout::array::<u8>((*self.repr.alloc.ptr).size).unwrap())
                .unwrap();
            let mut allocator = core::mem::replace(
                &mut *(*self.repr.alloc.ptr).allocator.get(),
                MaybeUninit::uninit(),
            )
            .assume_init();

            allocator.deallocate(self.repr.alloc.ptr as _, layout);
        }
    }
}

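// Cloning is cheap: the allocated representation only bumps the reference
// count, while inline and static representations are copied bitwise.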
impl<S: Str + ?Sized, A: Allocator> Clone for XString<S, A> {
    fn clone(&self) -> Self {
        if !(self.repr.is_inlined() || self.repr.is_static()) {
            unsafe { (*self.repr.alloc.ptr).rc.fetch_add(1, Ordering::Relaxed) };
        }

        Self {
            repr: self.repr,
            _marker: PhantomData,
        }
    }
}

impl<S: Str + ?Sized, A: Allocator> AsRef<S> for XString<S, A> {
    #[inline(always)]
    fn as_ref(&self) -> &S {
        self
    }
}

impl<S: Str + ?Sized, A: Allocator> Borrow<S> for XString<S, A> {
    #[inline(always)]
    fn borrow(&self) -> &S {
        self
    }
}

impl<A: Allocator> AsRef<[u8]> for XString<str, A> {
    #[inline(always)]
    fn as_ref(&self) -> &[u8] {
        self.as_bytes()
    }
}

impl<A: Allocator> AsRef<[u8]> for XString<CStr, A> {
    #[inline(always)]
    fn as_ref(&self) -> &[u8] {
        self.to_bytes()
    }
}

#[cfg(feature = "std")]
impl<A: Allocator> AsRef<[u8]> for XString<std::ffi::OsStr, A> {
    #[inline(always)]
    fn as_ref(&self) -> &[u8] {
        self.as_encoded_bytes()
    }
}

#[cfg(feature = "std")]
impl<A: Allocator> AsRef<[u8]> for XString<std::path::Path, A> {
    #[inline(always)]
    fn as_ref(&self) -> &[u8] {
        self.as_os_str().as_encoded_bytes()
    }
}

#[cfg(feature = "std")]
impl<A: Allocator> AsRef<std::ffi::OsStr> for XString<str, A> {
    #[inline(always)]
    fn as_ref(&self) -> &std::ffi::OsStr {
        (**self).as_ref()
    }
}

#[cfg(feature = "std")]
impl<A: Allocator> AsRef<std::path::Path> for XString<str, A> {
    #[inline(always)]
    fn as_ref(&self) -> &std::path::Path {
        (**self).as_ref()
    }
}

#[cfg(feature = "std")]
impl<A: Allocator> AsRef<std::ffi::OsStr> for XString<std::path::Path, A> {
    #[inline(always)]
    fn as_ref(&self) -> &std::ffi::OsStr {
        (**self).as_ref()
    }
}

#[cfg(feature = "std")]
impl<A: Allocator> AsRef<std::path::Path> for XString<std::ffi::OsStr, A> {
    #[inline(always)]
    fn as_ref(&self) -> &std::path::Path {
        (**self).as_ref()
    }
}

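// `POINTER_SIZE` mirrors the platform pointer width; `INLINE_SIZE` is chosen
// so that `Inlined` (its data plus the one-byte indicator) is exactly as
// large as `Static` and `Allocated` (a pointer plus two `i32`s).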
#[cfg(target_pointer_width = "64")]
const POINTER_SIZE: usize = 8;

#[cfg(target_pointer_width = "32")]
const POINTER_SIZE: usize = 4;

#[cfg(target_pointer_width = "16")]
compile_error!("16 bit platforms are not supported");

const INLINE_SIZE: usize = POINTER_SIZE + 7;

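// The three representations are equally sized and overlap in a `repr(C)`
// union. The active variant is recovered without a separate tag: the inline
// indicator byte is negative only for inline strings, the static length is
// stored negated so it is negative only for static strings, and the allocated
// variant keeps non-negative `start`/`end` offsets in the overlapping bytes,
// so checking `is_inlined` and then `is_static` identifies the variant.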
#[repr(C)]
union Repr<A: Allocator> {
    inline: Inlined,
    r#static: Static,
    alloc: Allocated<A>,
}

impl<A: Allocator> Clone for Repr<A> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<A: Allocator> Copy for Repr<A> {}

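// Heap-backed representation: `ptr` points at an `AllocatedRepr` header that
// is immediately followed by the byte payload, while `start`/`end` are byte
// offsets into that payload, which is what makes zero-copy sub-slicing
// possible. Field order differs by endianness so that the discriminant bytes
// line up with those of `Inlined` and `Static`.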
#[cfg(target_endian = "little")]
#[repr(C)]
struct Allocated<A: Allocator> {
    ptr: *mut AllocatedRepr<A>,
    start: i32,
    end: i32,
}

#[cfg(target_endian = "big")]
#[repr(C)]
struct Allocated<A: Allocator> {
    end: i32,
    start: i32,
    ptr: *mut AllocatedRepr<A>,
}

impl<A: Allocator> Copy for Allocated<A> {}
impl<A: Allocator> Clone for Allocated<A> {
    #[inline(always)]
    fn clone(&self) -> Self {
        *self
    }
}

#[cfg(target_endian = "little")]
#[derive(Clone, Copy)]
#[repr(C)]
struct Inlined {
    data: [u8; INLINE_SIZE], // null-terminated if not fully filled
    indicator: i8,           // inlined while negative
}

#[cfg(target_endian = "big")]
#[derive(Clone, Copy)]
#[repr(C)]
struct Inlined {
    indicator: i8,           // inlined while negative
    data: [u8; INLINE_SIZE], // null-terminated if not fully filled
}

#[cfg(target_endian = "little")]
#[derive(Clone, Copy)]
#[repr(C)]
struct Static {
    data: *const u8,
    len: i32, // static while negative
    _padding: i32,
}

#[cfg(target_endian = "big")]
#[derive(Clone, Copy)]
#[repr(C)]
struct Static {
    _padding: i32,
    len: i32, // static while negative
    data: *const u8,
}

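// Header placed at the front of every heap allocation and followed directly
// by `size` payload bytes. `rc` counts the `XString`s sharing the buffer; the
// allocator is kept inside the header so the last owner can move it out and
// deallocate the whole buffer with it.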
struct AllocatedRepr<A: Allocator> {
    size: usize,
    rc: AtomicUsize,
    allocator: UnsafeCell<MaybeUninit<A>>,
}

impl<A: Allocator> Allocated<A> {
    unsafe fn new(bytes: &[u8], mut allocator: A) -> Self {
        let (layout, offset) = Layout::new::<AllocatedRepr<A>>()
            .extend(Layout::array::<u8>(bytes.len()).unwrap())
            .unwrap();
        let ptr = unsafe {
            let ptr = allocator.allocate(layout) as *mut AllocatedRepr<A>;
            ptr.write(AllocatedRepr {
                size: bytes.len(),
                rc: AtomicUsize::new(1),
                allocator: UnsafeCell::new(MaybeUninit::new(allocator)),
            });
            bytes
                .as_ptr()
                .copy_to_nonoverlapping((ptr as *mut u8).add(offset), bytes.len());
            ptr as _
        };
        Self {
            ptr,
            start: 0,
            end: bytes.len() as _,
        }
    }
}

impl Static {
    unsafe fn new(bytes: &[u8]) -> Self {
        let len = -(bytes.len() as i32);
        let data = bytes.as_ptr();
        Self {
            data,
            len,
            _padding: 0,
        }
    }
}

impl Inlined {
    unsafe fn new(bytes: &[u8]) -> Self {
        debug_assert!(bytes.len() <= INLINE_SIZE);
        let mut this = Self {
            data: [0; INLINE_SIZE],
            indicator: -1,
        };
        unsafe {
            this.data
                .as_mut_ptr()
                .copy_from_nonoverlapping(bytes.as_ptr(), bytes.len())
        };
        this
    }
}

impl<A: Allocator> Repr<A> {
    fn is_inlined(&self) -> bool {
        unsafe { self.inline.indicator < 0 }
    }

    // Only meaningful when `is_inlined()` is false: for inline strings these
    // bytes are payload data, so callers always test `is_inlined` first.
    fn is_static(&self) -> bool {
        unsafe { self.r#static.len < 0 }
    }
}