//! Atomic-reference-counted string type, with support for zero-cost literals
//! and more.
//!
//! This module is modified from the [`arcstr`] crate.
//!
//! [`arcstr`]: https://crates.io/crates/arcstr
7
8#![allow(clippy::inline_always, reason = "XXX")]
9#![allow(clippy::must_use_candidate, reason = "XXX")]
10
11use alloc::fmt;
12use core::alloc::Layout;
13use core::borrow::Borrow;
14use core::convert::Infallible;
15use core::hash::{Hash, Hasher};
16use core::mem::{MaybeUninit, align_of, offset_of};
17use core::ptr::NonNull;
18use core::str::Utf8Error;
19#[cfg(not(all(loom, test)))]
20use core::sync::atomic;
21#[cfg(not(all(loom, test)))]
22use core::sync::atomic::{AtomicUsize, Ordering};
23use core::{mem, ops, ptr, slice, str};
24
25#[cfg(all(loom, test))]
26use loom::sync::atomic;
27#[cfg(all(loom, test))]
28use loom::sync::atomic::{AtomicUsize, Ordering};
29
#[macro_export]
/// Constructs a static [`ArcStr`] from a string literal.
///
/// ## Examples
///
/// ```rust
/// use fastring::{ArcStr, arc};
///
/// // Creates an empty, static `ArcStr`
/// let empty = arc!("");
/// assert_eq!(&empty, "");
///
/// // Creates a non-empty, static `ArcStr`
/// let foobar = arc!("foobar");
/// assert_eq!(&foobar, "foobar");
///
/// // Cloning the static `ArcStr`.
/// let foobarr = foobar.clone();
/// assert!(ArcStr::ptr_eq(&foobar, &foobarr));
/// assert_eq!(&foobarr, "foobar");
///
/// // Dropping the `ArcStr` created by `arc!()` is a no-op.
/// drop(foobar);
/// assert_eq!(&foobarr, "foobar");
///
/// // Thanks to crate `constcat`, we support concatenation of string literals in `arc!()`.
/// # let foobar = arc!("testing",);
/// # assert_eq!(foobar, "testing");
/// let foobar = arc!("testing", " ", "foobar");
/// assert_eq!(foobar, "testing foobar");
/// # let foobar = arc!("testing", " ", "foobar",);
/// assert_eq!(foobar, "testing foobar");
/// ```
macro_rules! arc {
    ($text:expr $(,)?) => {{
        const __TEXT: &str = $text;

        // Compile-time guard: verifies that `ArcInner<usize, N>` is
        // layout-compatible with the type-erased `ArcInner` the pointer is
        // cast to below. Fails the build instead of producing UB.
        const _: () = {
            $crate::sync::arc_inner_layout_check::<{ __TEXT.len() }>();
        };

        {
            const __ARC_INNER: &$crate::sync::ArcInner<usize, { __TEXT.len() }> =
                &$crate::sync::ArcInner::constified(*__TEXT.as_bytes().as_array().expect(
                    "infallible: the length of the array is exactly the length of the string",
                ));

            #[allow(
                unsafe_code,
                reason = "The pointer is derived from a reference, hence non-null, and the \
                          cast's layout compatibility is proven at compile time by \
                          `arc_inner_layout_check` above."
            )]
            const __ARC_STR: $crate::sync::ArcStr = unsafe {
                $crate::sync::ArcStr::from_raw(::core::ptr::NonNull::new_unchecked(
                    (&raw const *__ARC_INNER)
                        .cast::<$crate::sync::ArcInner>()
                        .cast_mut(),
                ))
            };

            __ARC_STR
        }
    }};
    ($($text:expr),+ $(,)?) => {
        $crate::arc!($crate::util::constcat::concat!($($text),+))
    };
}
93
#[macro_export]
/// Conceptually equivalent to `ArcStr::new(format!("...", args...))`.
///
/// If all your arguments are string literals / const strings, consider using
/// the [`arc!()`](crate::arc) macro instead.
///
/// ## Examples
///
/// ```rust
/// let foobar = fastring::arcfmt!("testing {}", "foobar");
///
/// assert_eq!(foobar, "testing foobar");
/// ```
macro_rules! arcfmt {
    // Forwards all tokens to `format_args!`, renders the result via
    // `util::format`, and heap-allocates it into a non-static `ArcStr`.
    ($($tt:tt)*) => {
        $crate::sync::ArcStr::new($crate::util::format(::core::format_args!($($tt)*)))
    };
}
112
wrapper_lite::wrapper!(
    #[repr(transparent)]
    /// An atomic-reference-counted string type, with support for zero-cost
    /// literals and more.
    ///
    /// Internally a single non-null pointer to an `ArcInner` header; the
    /// `NonNull` niche makes `Option<ArcStr>` pointer-sized.
    pub struct ArcStr(NonNull<ArcInner>);
);
119
#[allow(
    unsafe_code,
    reason = "Thread safety is guaranteed by atomic reference counting."
)]
// SAFETY: the string payload behind the pointer is immutable and the
// reference count is manipulated only with atomic operations, so moving an
// `ArcStr` to another thread is sound.
unsafe impl Send for ArcStr {}
125
#[allow(
    unsafe_code,
    reason = "Thread safety is guaranteed by atomic reference counting."
)]
// SAFETY: shared references to an `ArcStr` only permit reads of the immutable
// payload plus atomic refcount updates, so `&ArcStr` may cross threads.
unsafe impl Sync for ArcStr {}
131
132impl ArcStr {
133    #[inline]
134    /// Constructs an empty [`ArcStr`].
135    ///
136    /// ## Examples
137    ///
138    /// ```rust
139    /// use fastring::ArcStr;
140    ///
141    /// let foobar = ArcStr::empty();
142    ///
143    /// assert!(&foobar == "");
144    /// assert!(&foobar != "barfoo");
145    /// ```
146    pub const fn empty() -> Self {
147        crate::arc!("")
148    }
149
150    /// Constructs an [`ArcStr`] and initializes it with the provided string.
151    ///
152    /// If you want to construct an [`ArcStr`] with a string literal, consider
153    /// using the [`arc!()`](crate::arc) macro instead, which can avoid heap
154    /// allocation and reference counting overhead.
155    ///
156    /// ## Examples
157    ///
158    /// ```rust
159    /// use fastring::ArcStr;
160    ///
161    /// let foobar = ArcStr::new("foobar");
162    ///
163    /// assert_eq!(foobar, "foobar");
164    /// ```
165    pub fn new<S>(data: S) -> Self
166    where
167        S: AsRef<str>,
168    {
169        let data = data.as_ref();
170
171        if data.is_empty() {
172            return Self::empty();
173        }
174
175        #[allow(
176            unsafe_code,
177            reason = "`data` is valid UTF-8; the given buf is fully initialized."
178        )]
179        unsafe {
180            Self::unchecked_new_with(data.len(), false, |buf| {
181                debug_assert!(buf.len() == data.len());
182                ptr::copy_nonoverlapping(data.as_ptr().cast(), buf.as_mut_ptr(), data.len());
183            })
184        }
185    }
186
187    /// Constructs an [`ArcStr`] and initializes it with the provided callback.
188    ///
189    /// ## Examples
190    ///
191    /// ```rust
192    /// use fastring::ArcStr;
193    ///
194    /// let foobar = ArcStr::new_with(5, |slice| {
195    ///     slice
196    ///         .iter_mut()
197    ///         .zip(b'0'..b'5')
198    ///         .for_each(|(db, sb)| *db = sb);
199    /// })
200    /// .expect("unexpected: we know this initializer produces valid UTF-8");
201    ///
202    /// assert_eq!(foobar, "01234");
203    /// ```
204    ///
205    /// # Errors
206    ///
207    /// The provided `initializer` produced invalid UTF-8 data.
208    pub fn new_with<F>(n: usize, initializer: F) -> Result<Self, Utf8Error>
209    where
210        F: FnOnce(&mut [u8]),
211    {
212        let mut this = Ok(());
213
214        let initializer = |buf: &mut [MaybeUninit<u8>]| {
215            debug_assert!(buf.len() == n);
216
217            #[allow(unsafe_code, reason = "The `buf` is zero-initialized")]
218            let slice: &mut [u8] =
219                unsafe { slice::from_raw_parts_mut(buf.as_mut_ptr().cast(), buf.len()) };
220
221            initializer(slice);
222
223            if let Err(e) = str::from_utf8(slice) {
224                this = Err(e);
225            }
226        };
227
228        #[allow(unsafe_code, reason = "XXX")]
229        let ret = unsafe { ArcInner::try_new_with(n, true, initializer) }
230            .map_or_else(handle_alloc_error, Self::from_inner);
231
232        this.map(|()| ret)
233    }
234
235    #[allow(
236        unsafe_code,
237        reason = "The caller must uphold the safety contract of this function."
238    )]
239    /// The unchecked version of [`ArcStr::new_with`].
240    ///
241    /// # Safety
242    ///
243    /// The provided `initializer` callback must initialize the provided
244    /// buffer with `n` bytes of valid UTF-8 encoded string bytes.
245    ///
246    /// ## Examples
247    ///
248    /// ```rust
249    /// use std::mem::MaybeUninit;
250    ///
251    /// use fastring::ArcStr;
252    ///
253    /// let foobar = unsafe {
254    ///     ArcStr::unchecked_new_with(10, false, |s| {
255    ///         s.fill(MaybeUninit::new(b'a'));
256    ///     })
257    /// };
258    ///
259    /// assert_eq!(foobar, "aaaaaaaaaa");
260    /// ```
261    pub unsafe fn unchecked_new_with<F>(n: usize, zeroed: bool, initializer: F) -> Self
262    where
263        F: FnOnce(&mut [MaybeUninit<u8>]),
264    {
265        #[allow(
266            unsafe_code,
267            reason = "The caller must uphold the safety contract of this function."
268        )]
269        unsafe { ArcInner::try_new_with(n, zeroed, initializer) }
270            .map_or_else(handle_alloc_error, Self::from_inner)
271    }
272
273    #[deprecated(
274        since = "0.0.0",
275        note = "Saying `len` of a string is confusing in non-ASCII contexts, since it can refer \
276                to either the Unicode chars length or the underlying bytes length. Use \
277                `len_bytes` or `len_chars` instead to disambiguate so."
278    )]
279    #[doc(hidden)]
280    #[inline(always)]
281    pub const fn len(&self) -> usize {
282        self.len_bytes()
283    }
284
285    #[inline]
286    /// Returns the **chars length** of this [`ArcStr`] .
287    ///
288    /// ## Examples
289    ///
290    /// ```rust
291    /// use fastring::ArcStr;
292    ///
293    /// assert_eq!(ArcStr::new("foo").len_chars(), 3);
294    /// assert_eq!(ArcStr::new("你好").len_chars(), 2);
295    /// ```
296    pub fn len_chars(&self) -> usize {
297        self.as_str().chars().count()
298    }
299
300    #[inline]
301    /// Returns the length of this [`ArcStr`] in **bytes**.
302    ///
303    /// ## Examples
304    ///
305    /// ```rust
306    /// use fastring::ArcStr;
307    ///
308    /// assert_eq!(ArcStr::new("foo").len_bytes(), 3);
309    /// assert_eq!(ArcStr::new("你好").len_bytes(), 6);
310    /// ```
311    pub const fn len_bytes(&self) -> usize {
312        self.lf().value()
313    }
314
315    #[inline]
316    /// Returns true if this [`ArcStr`] is empty.
317    ///
318    /// ## Examples
319    ///
320    /// ```rust
321    /// use fastring::ArcStr;
322    ///
323    /// // Empty is empty:
324    /// assert!(ArcStr::empty().is_empty());
325    /// // Empty strings are empty:
326    /// assert!(ArcStr::new("").is_empty());
327    /// // Non-empty strings aren't empty:
328    /// assert!(!ArcStr::new("foo").is_empty());
329    /// ```
330    pub const fn is_empty(&self) -> bool {
331        self.len_bytes() == 0
332    }
333
334    #[inline]
335    /// Extracts a string slice containing the entire [`ArcStr`].
336    ///
337    /// ## Examples
338    ///
339    /// ```
340    /// use fastring::ArcStr;
341    ///
342    /// assert_eq!(ArcStr::new("abc").as_str(), "abc");
343    /// ```
344    pub const fn as_str(&self) -> &str {
345        #[allow(unsafe_code, reason = "The data is guaranteed to be valid UTF-8.")]
346        unsafe {
347            str::from_utf8_unchecked(self.as_bytes())
348        }
349    }
350
351    /// Extracts a bytes slice containing the entire [`ArcStr`].
352    ///
353    /// ## Examples
354    ///
355    /// ```
356    /// use fastring::ArcStr;
357    ///
358    /// assert_eq!(ArcStr::new("foobar").as_bytes(), b"foobar");
359    /// ```
360    #[inline]
361    pub const fn as_bytes(&self) -> &[u8] {
362        #[allow(unsafe_code, reason = "XXX")]
363        unsafe {
364            slice::from_raw_parts(self.data(), self.len_bytes())
365        }
366    }
367
368    /// Consumes the [`ArcStr`], returning the wrapped pointer.
369    ///
370    /// To avoid a memory leak the pointer must be converted back to an
371    /// [`ArcStr`] using [`ArcStr::from_raw`].
372    ///
373    /// Note that in addition to the `NonNull` constraint expressed in the type
374    /// signature, we also guarantee the pointer has an alignment of at least 8
375    /// bytes, even on platforms where a lower alignment would be acceptable.
376    ///
377    /// ## Examples
378    ///
379    /// ```rust
380    /// use fastring::ArcStr;
381    ///
382    /// let foobar = ArcStr::new("foobar");
383    /// // ...
384    /// let foobar_ptr = ArcStr::into_raw(foobar);
385    /// // Some time later...
386    /// let barfoo = unsafe { ArcStr::from_raw(foobar_ptr) };
387    /// // Oh, we get the same string back!
388    /// assert_eq!(barfoo, "foobar");
389    /// ```
390    #[inline]
391    pub const fn into_raw(this: Self) -> NonNull<ArcInner> {
392        let p = this.inner;
393
394        #[allow(clippy::mem_forget, reason = "XXX")]
395        mem::forget(this);
396
397        p
398    }
399
400    #[allow(
401        unsafe_code,
402        reason = "The caller must uphold the safety contract of this function."
403    )]
404    #[inline]
405    /// Constructs an [`ArcStr`] from a raw pointer.
406    ///
407    /// # Safety
408    ///
409    /// The raw pointer must have been previously returned by a call to
410    /// [`ArcStr::into_raw`]; and the [`ArcStr`] instance can be only dropped
411    /// once.
412    ///
413    /// ## Examples
414    ///
415    /// See [`ArcStr::into_raw`].
416    pub const unsafe fn from_raw(ptr: NonNull<ArcInner>) -> Self {
417        Self::from_inner(ptr)
418    }
419
420    #[inline]
421    /// Returns whether this [`ArcStr`] is a static one.
422    ///
423    /// ## Examples
424    ///
425    /// ```rust
426    /// # use fastring::{ArcStr, arc};
427    /// #
428    /// let foobar = arc!("foobar");
429    /// assert!(ArcStr::is_static(&foobar));
430    /// let barfoo = ArcStr::new("barfoo");
431    /// assert!(!ArcStr::is_static(&barfoo));
432    /// ```
433    pub const fn is_static(this: &Self) -> bool {
434        this.lf().flag()
435    }
436
437    #[inline]
438    /// Returns the reference count of this [`ArcStr`].
439    ///
440    /// When the [`ArcStr`] is originally static, we return None.
441    ///
442    /// ## Examples
443    ///
444    /// ```rust
445    /// use fastring::{ArcStr, arc};
446    ///
447    /// # {
448    /// let foobar = arc!("foobar");
449    /// assert_eq!(ArcStr::reference_count(&foobar), None);
450    /// # }
451    /// # {
452    /// let barfoo = ArcStr::new("barfoo");
453    /// assert_eq!(ArcStr::reference_count(&barfoo), Some(1));
454    /// let barfoor = barfoo.clone();
455    /// assert_eq!(ArcStr::reference_count(&barfoo), Some(2));
456    /// assert_eq!(ArcStr::reference_count(&barfoor), Some(2));
457    /// # }
458    /// ```
459    pub fn reference_count(this: &Self) -> Option<usize> {
460        this.rf().map(|rf| rf.load(Ordering::Acquire).value())
461    }
462
463    /// Returns whether the two [`ArcStr`]s point to the same allocation.
464    ///
465    /// ## Examples
466    ///
467    /// ```rust
468    /// use fastring::{ArcStr, arc};
469    ///
470    /// # {
471    /// // * Creating static `ArcStr`.
472    /// let foobar = arc!("foobar");
473    /// // * Cloning `foobar` gives us another pointer to the same allocation
474    /// let foobarr = foobar.clone();
475    /// assert!(ArcStr::ptr_eq(&foobar, &foobarr));
476    /// // * Creating another static `ArcStr` with the same contents.
477    /// let barfoo = arc!("foobar");
478    /// // * The LLVM backend will merge the two same string literals into the same allocation in some
479    /// // * circumstance (e.g., LTO is enabled). However, it's not guaranteed.
480    /// // assert!(ArcStr::ptr_eq(&foobar, &barfoo));
481    /// # }
482    ///
483    /// # {
484    /// // * Creating non-static `ArcStr`.
485    /// let foobar = ArcStr::new("foobar");
486    /// // * Cloning `foobar` gives us another pointer to the same allocation:
487    /// let foobarr = foobar.clone();
488    /// assert!(ArcStr::ptr_eq(&foobar, &foobarr));
489    /// // * Creating another `ArcStr` with the same contents.
490    /// let barfoo = ArcStr::new("foobar");
491    /// // * The two `ArcStr`s are different allocations, so they should not be pointer-equal.
492    /// assert!(!ArcStr::ptr_eq(&foobar, &barfoo));
493    /// # }
494    /// ```
495    #[inline]
496    pub fn ptr_eq(lhs: &Self, rhs: &Self) -> bool {
497        lhs.inner == rhs.inner
498    }
499
500    // === Internal APIs ===
501
502    #[inline]
503    /// Reads a copy of the field `lf`.
504    const fn lf(&self) -> FlaggedUint<usize> {
505        ArcInner::lf(self.inner)
506    }
507
508    #[inline]
509    /// Gets a reference to the field `rf`.
510    ///
511    /// When the [`ArcStr`] is originally a static one, returns `None`.
512    const fn rf(&self) -> Option<&FlaggedUint<AtomicUsize>> {
513        if self.lf().flag() {
514            None
515        } else {
516            #[allow(
517                unsafe_code,
518                reason = "`ArcStr` is not static; the lifetime of the returned reference is bound \
519                          to `self`."
520            )]
521            Some(unsafe { ArcInner::rf(self.inner) })
522        }
523    }
524
525    #[inline]
526    /// Gets a pointer to the start of the string data.
527    const fn data(&self) -> *const u8 {
528        ArcInner::data(self.inner).cast()
529    }
530}
531
532impl Default for ArcStr {
533    #[inline]
534    fn default() -> Self {
535        Self::empty()
536    }
537}
538
539impl fmt::Debug for ArcStr {
540    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
541        self.as_str().fmt(f)
542    }
543}
544
545impl fmt::Display for ArcStr {
546    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
547        self.as_str().fmt(f)
548    }
549}
550
551impl Clone for ArcStr {
552    #[inline]
553    /// Makes a clone of the [`ArcStr`].
554    ///
555    /// This creates another pointer to the same allocation, increasing the
556    /// strong reference count if the [`ArcStr`] is not a static one.
557    ///
558    /// ## Examples
559    ///
560    /// ```
561    /// # use fastring::ArcStr;
562    /// #
563    /// let five = ArcStr::new("five");
564    /// let evif = ArcStr::clone(&five);
565    /// assert_eq!(five, evif);
566    /// ```
567    fn clone(&self) -> Self {
568        const MAX_REFCOUNT: usize = FlaggedUint::MAX;
569
570        if let Some(rf) = self.rf() {
571            // Using a relaxed ordering is alright here, as knowledge of the
572            // original reference prevents other threads from erroneously deleting
573            // the object.
574            //
575            // As explained in the [Boost documentation][1], Increasing the
576            // reference counter can always be done with memory_order_relaxed: New
577            // references to an object can only be formed from an existing
578            // reference, and passing an existing reference from one thread to
579            // another must already provide any required synchronization.
580            //
581            // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
582            let n = rf.increment(Ordering::Relaxed);
583
584            #[allow(
585                clippy::manual_assert,
586                reason = "We may change to `abort` in the future; for now we want to panic in \
587                          debug mode to alert the programmer of the issue."
588            )]
589            if !n.flag() && n.value() > MAX_REFCOUNT {
590                // When overflow happens, leaving the reference count in an overflowed state is
591                // actually not too bad: the `is_static` flag is set and we will leak the
592                // allocation instead of trying to destroy it, which is arguably better than
593                // accidentally destroying it too early. However, we still want to panic in
594                // debug mode to alert the programmer of the issue.
595                #[allow(clippy::panic, reason = "XXX")]
596                #[cfg(debug_assertions)]
597                {
598                    panic!("reference count overflow");
599                }
600            }
601        }
602
603        Self::from_inner(self.inner)
604    }
605}
606
607impl Drop for ArcStr {
608    #[inline]
609    fn drop(&mut self) {
610        if let Some(rf) = self.rf() {
611            let rf = rf.decrement(Ordering::Release);
612
613            if rf.value() > 1 {
614                return;
615            }
616
617            // This fence is needed to prevent reordering of use of the data and
618            // deletion of the data. Because it is marked `Release`, the decreasing
619            // of the reference count synchronizes with this `Acquire` fence. This
620            // means that use of the data happens before decreasing the reference
621            // count, which happens before this fence, which happens before the
622            // deletion of the data.
623            //
624            // As explained in the [Boost documentation][1],
625            //
626            // > It is important to enforce any possible access to the object in one
627            // > thread (through an existing reference) to *happen before* deleting
628            // > the object in a different thread. This is achieved by a "release"
629            // > operation after dropping a reference (any access to the object
630            // > through this reference must obviously happened before), and an
631            // > "acquire" operation before deleting the object.
632            //
633            // In particular, while the contents of an Arc are usually immutable, it's
634            // possible to have interior writes to something like a Mutex<T>. Since a
635            // Mutex is not acquired when it is deleted, we can't rely on its
636            // synchronization logic to make writes in thread A visible to a destructor
637            // running in thread B.
638            //
639            // Also note that the Acquire fence here could probably be replaced with an
640            // Acquire load, which could improve performance in highly-contended
641            // situations. See [2].
642            //
643            // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
644            // [2]: (https://github.com/rust-lang/rust/pull/41714)
645            atomic::fence(Ordering::Acquire);
646
647            if !rf.flag() {
648                // Not a static `ArcStr`, destroy it.
649                #[allow(unsafe_code, reason = "XXX")]
650                unsafe {
651                    ArcInner::destroy(self.inner);
652                }
653            }
654        }
655    }
656}
657
658impl AsRef<str> for ArcStr {
659    #[inline]
660    fn as_ref(&self) -> &str {
661        self.as_str()
662    }
663}
664
665impl ops::Deref for ArcStr {
666    type Target = str;
667
668    #[inline]
669    fn deref(&self) -> &Self::Target {
670        self.as_str()
671    }
672}
673
674impl Borrow<str> for ArcStr {
675    #[inline]
676    fn borrow(&self) -> &str {
677        self.as_str()
678    }
679}
680
681impl str::FromStr for ArcStr {
682    type Err = Infallible;
683
684    #[inline]
685    fn from_str(s: &str) -> Result<Self, Self::Err> {
686        Ok(Self::new(s))
687    }
688}
689
690macro_rules! impl_from {
691    ($ty:ty) => {
692        impl From<$ty> for ArcStr {
693            #[inline]
694            #[allow(clippy::string_slice, reason = "XXX")]
695            fn from(data: $ty) -> Self {
696                Self::new(&data[..])
697            }
698        }
699
700        impl From<&$ty> for ArcStr {
701            #[inline]
702            #[allow(clippy::string_slice, reason = "XXX")]
703            fn from(data: &$ty) -> Self {
704                Self::new(&data[..])
705            }
706        }
707    };
708}
709
710impl_from!(&str);
711impl_from!(alloc::borrow::Cow<'_, str>);
712impl_from!(alloc::boxed::Box<str>);
713impl_from!(alloc::boxed::Box<alloc::string::String>);
714impl_from!(alloc::string::String);
715impl_from!(alloc::rc::Rc<str>);
716impl_from!(alloc::rc::Rc<alloc::string::String>);
717impl_from!(alloc::sync::Arc<str>);
718impl_from!(alloc::sync::Arc<alloc::string::String>);
719
720impl PartialEq for ArcStr {
721    #[inline]
722    fn eq(&self, other: &Self) -> bool {
723        ArcStr::ptr_eq(self, other) || self.as_str() == other.as_str()
724    }
725}
726
727impl Eq for ArcStr {}
728
729impl Hash for ArcStr {
730    #[inline]
731    fn hash<H: Hasher>(&self, state: &mut H) {
732        self.as_str().hash(state);
733    }
734}
735
736impl PartialOrd for ArcStr {
737    #[inline]
738    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
739        Some(self.cmp(other))
740    }
741}
742
743impl Ord for ArcStr {
744    #[inline]
745    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
746        Ord::cmp(self.as_str(), other.as_str())
747    }
748}
749
750macro_rules! impl_partial_eq_partial_ord_ord {
751    ($ty:ty) => {
752        impl PartialEq<$ty> for &ArcStr {
753            #[inline]
754            #[allow(clippy::string_slice, reason = "XXX")]
755            fn eq(&self, other: &$ty) -> bool {
756                self.as_str() == &other[..]
757            }
758        }
759
760        impl PartialEq<$ty> for ArcStr {
761            #[inline]
762            #[allow(clippy::string_slice, reason = "XXX")]
763            fn eq(&self, other: &$ty) -> bool {
764                self.as_str() == &other[..]
765            }
766        }
767
768        impl PartialEq<&$ty> for ArcStr {
769            #[inline]
770            #[allow(clippy::string_slice, reason = "XXX")]
771            fn eq(&self, other: &&$ty) -> bool {
772                self.as_str() == &other[..]
773            }
774        }
775
776        impl PartialEq<&&$ty> for ArcStr {
777            #[inline]
778            #[allow(clippy::string_slice, reason = "XXX")]
779            fn eq(&self, other: &&&$ty) -> bool {
780                self.as_str() == &other[..]
781            }
782        }
783
784        impl PartialOrd<$ty> for ArcStr {
785            #[inline]
786            #[allow(clippy::string_slice, reason = "XXX")]
787            fn partial_cmp(&self, other: &$ty) -> Option<core::cmp::Ordering> {
788                PartialOrd::partial_cmp(self.as_str(), &other[..])
789            }
790        }
791
792        impl PartialOrd<&$ty> for ArcStr {
793            #[inline]
794            #[allow(clippy::string_slice, reason = "XXX")]
795            fn partial_cmp(&self, other: &&$ty) -> Option<core::cmp::Ordering> {
796                PartialOrd::partial_cmp(self.as_str(), &other[..])
797            }
798        }
799    };
800}
801
802impl_partial_eq_partial_ord_ord!(str);
803impl_partial_eq_partial_ord_ord!(alloc::borrow::Cow<'_, str>);
804impl_partial_eq_partial_ord_ord!(alloc::boxed::Box<str>);
805impl_partial_eq_partial_ord_ord!(alloc::boxed::Box<alloc::string::String>);
806impl_partial_eq_partial_ord_ord!(alloc::string::String);
807impl_partial_eq_partial_ord_ord!(alloc::rc::Rc<str>);
808impl_partial_eq_partial_ord_ord!(alloc::rc::Rc<alloc::string::String>);
809impl_partial_eq_partial_ord_ord!(alloc::sync::Arc<str>);
810impl_partial_eq_partial_ord_ord!(alloc::sync::Arc<alloc::string::String>);
811
812impl ops::Index<ops::RangeFull> for ArcStr {
813    type Output = str;
814
815    #[inline]
816    fn index(&self, _: ops::RangeFull) -> &Self::Output {
817        self.as_str()
818    }
819}
820
821impl ops::Index<ops::Range<usize>> for ArcStr {
822    type Output = str;
823
824    #[inline]
825    /// Returns a string slice containing the specified range of chars in this
826    /// [`ArcStr`].
827    ///
828    /// ## Notes
829    ///
830    /// The range indices here are in terms of chars, not bytes! This is
831    /// different from the standard behavior of string slicing in Rust, but it
832    /// is more intuitive in non-ASCII contexts. If you want to slice by
833    /// byte indices, you can use `as_str()` to get a `&str` and then
834    /// slice that.
835    ///
836    /// ## Examples
837    ///
838    /// ```rust
839    /// use fastring::ArcStr;
840    ///
841    /// let s = ArcStr::new("Hello, 世界!");
842    /// # assert_eq!(&s[0..0], "");
843    /// # assert_eq!(&s[0..1], "H");
844    /// assert_eq!(&s[0..5], "Hello");
845    /// assert_eq!(&s[7..9], "世界");
846    /// # assert_eq!(&s[7..10], "世界!");
847    /// ```
848    fn index(&self, range: ops::Range<usize>) -> &Self::Output {
849        assert!(
850            range.start <= range.end,
851            "invalid range: start index must not be greater than end index"
852        );
853
854        let this = self.as_str();
855
856        let mut iter = this.chars();
857
858        let mut bs = 0;
859
860        {
861            let mut cs = 0;
862
863            while cs < range.start {
864                let Some(c) = iter.next() else {
865                    #[allow(clippy::panic, reason = "XXX")]
866                    {
867                        panic!("invalid range: the start index is out of bounds");
868                    }
869                };
870
871                bs += c.len_utf8();
872                cs += 1;
873            }
874        }
875
876        if range.start == range.end {
877            return "";
878        }
879
880        let mut be = bs;
881
882        {
883            let mut ce = range.start;
884
885            while ce < range.end {
886                let Some(c) = iter.next() else {
887                    #[allow(clippy::panic, reason = "XXX")]
888                    {
889                        panic!("invalid range: the end index is out of bounds");
890                    }
891                };
892
893                be += c.len_utf8();
894                ce += 1;
895            }
896        }
897
898        #[allow(unsafe_code, reason = "XXX")]
899        unsafe {
900            this.get_unchecked(bs..be)
901        }
902    }
903}
904
905impl ops::Index<ops::RangeFrom<usize>> for ArcStr {
906    type Output = str;
907
908    #[inline]
909    /// Returns a string slice containing the specified range of chars in this
910    /// [`ArcStr`].
911    ///
912    /// ## Notes
913    ///
914    /// The range indices here are in terms of chars, not bytes! This is
915    /// different from the standard behavior of string slicing in Rust, but it
916    /// is more intuitive in non-ASCII contexts. If you want to slice by
917    /// byte indices, you can use `as_str()` to get a `&str` and then
918    /// slice that.
919    ///
920    /// ## Examples
921    ///
922    /// ```rust
923    /// use fastring::ArcStr;
924    ///
925    /// let s = ArcStr::new("Hello, 世界!");
926    ///
927    /// # assert_eq!(&s[0..], "Hello, 世界!");
928    /// # assert_eq!(&s[1..], "ello, 世界!");
929    /// assert_eq!(&s[7..], "世界!");
930    /// assert_eq!(&s[8..], "界!");
931    /// # assert_eq!(&s[9..], "!");
932    /// ```
933    fn index(&self, range: ops::RangeFrom<usize>) -> &Self::Output {
934        let this = self.as_str();
935
936        let mut iter = this.chars();
937
938        let mut bs = 0;
939
940        {
941            let mut cs = 0;
942
943            while cs < range.start {
944                let Some(c) = iter.next() else {
945                    #[allow(clippy::panic, reason = "XXX")]
946                    {
947                        panic!("invalid range: the start index is out of bounds");
948                    }
949                };
950
951                bs += c.len_utf8();
952                cs += 1;
953            }
954        }
955
956        #[allow(unsafe_code, reason = "XXX")]
957        unsafe {
958            this.get_unchecked(bs..)
959        }
960    }
961}
962
963impl ops::Index<ops::RangeTo<usize>> for ArcStr {
964    type Output = str;
965
966    #[inline]
967    /// Returns a string slice containing the specified range of chars in this
968    /// [`ArcStr`].
969    ///
970    /// ## Notes
971    ///
972    /// The range indices here are in terms of chars, not bytes! This is
973    /// different from the standard behavior of string slicing in Rust, but it
974    /// is more intuitive in non-ASCII contexts. If you want to slice by
975    /// byte indices, you can use `as_str()` to get a `&str` and then
976    /// slice that.
977    ///
978    /// ## Examples
979    ///
980    /// ```rust
981    /// use fastring::ArcStr;
982    ///
983    /// let s = ArcStr::new("Hello, 世界!");
984    ///
985    /// # assert_eq!(&s[..0], "");
986    /// # assert_eq!(&s[..1], "H");
987    /// assert_eq!(&s[..5], "Hello");
988    /// # assert_eq!(&s[..7], "Hello, ");
989    /// assert_eq!(&s[..8], "Hello, 世");
990    /// assert_eq!(&s[..9], "Hello, 世界");
991    /// # assert_eq!(&s[..10], "Hello, 世界!");
992    /// ```
993    fn index(&self, range: ops::RangeTo<usize>) -> &Self::Output {
994        if range.end == 0 {
995            return "";
996        }
997
998        let this = self.as_str();
999
1000        let mut iter = this.chars();
1001
1002        let mut be = 0;
1003
1004        {
1005            let mut ce = 0;
1006
1007            while ce < range.end {
1008                let Some(c) = iter.next() else {
1009                    #[allow(clippy::panic, reason = "XXX")]
1010                    {
1011                        panic!("invalid range: the end index is out of bounds");
1012                    }
1013                };
1014
1015                be += c.len_utf8();
1016                ce += 1;
1017            }
1018        }
1019
1020        #[allow(unsafe_code, reason = "XXX")]
1021        unsafe {
1022            this.get_unchecked(..be)
1023        }
1024    }
1025}
1026
1027impl ops::Index<ops::RangeInclusive<usize>> for ArcStr {
1028    type Output = str;
1029
1030    #[inline]
1031    /// Returns a string slice containing the specified range of chars in this
1032    /// [`ArcStr`].
1033    ///
1034    /// ## Notes
1035    ///
1036    /// The range indices here are in terms of chars, not bytes! This is
1037    /// different from the standard behavior of string slicing in Rust, but it
1038    /// is more intuitive in non-ASCII contexts. If you want to slice by
1039    /// byte indices, you can use `as_str()` to get a `&str` and then
1040    /// slice that.
1041    ///
1042    /// ## Examples
1043    ///
1044    /// ```rust
1045    /// use fastring::ArcStr;
1046    ///
1047    /// let s = ArcStr::new("Hello, 世界!");
1048    ///
1049    /// # assert_eq!(&s[0..=0], "H");
1050    /// assert_eq!(&s[0..=4], "Hello");
1051    /// assert_eq!(&s[7..=8], "世界");
1052    /// # assert_eq!(&s[7..=9], "世界!");
1053    /// ```
1054    fn index(&self, range: ops::RangeInclusive<usize>) -> &Self::Output {
1055        assert!(
1056            range.start() <= range.end(),
1057            "invalid range: the start index is greater than the end index"
1058        );
1059
1060        let this = self.as_str();
1061
1062        let mut iter = this.chars();
1063
1064        let mut bs = 0;
1065
1066        {
1067            let mut cs = 0;
1068
1069            while cs < *range.start() {
1070                let Some(c) = iter.next() else {
1071                    #[allow(clippy::panic, reason = "XXX")]
1072                    {
1073                        panic!("invalid range: the start index is out of bounds");
1074                    }
1075                };
1076
1077                bs += c.len_utf8();
1078                cs += 1;
1079            }
1080        }
1081
1082        let mut be = bs;
1083
1084        {
1085            let mut ce = *range.start();
1086
1087            while ce <= *range.end() {
1088                let Some(c) = iter.next() else {
1089                    #[allow(clippy::panic, reason = "XXX")]
1090                    {
1091                        panic!("invalid range: the end index is out of bounds");
1092                    }
1093                };
1094
1095                be += c.len_utf8();
1096                ce += 1;
1097            }
1098        }
1099
1100        #[allow(unsafe_code, reason = "XXX")]
1101        unsafe {
1102            this.get_unchecked(bs..be)
1103        }
1104    }
1105}
1106
1107impl ops::Index<ops::RangeToInclusive<usize>> for ArcStr {
1108    type Output = str;
1109
1110    #[inline]
1111    /// Returns a string slice containing the specified range of chars in this
1112    /// [`ArcStr`].
1113    ///
1114    /// ## Notes
1115    ///
1116    /// The range indices here are in terms of chars, not bytes! This is
1117    /// different from the standard behavior of string slicing in Rust, but it
1118    /// is more intuitive in non-ASCII contexts. If you want to slice by
1119    /// byte indices, you can use `as_str()` to get a `&str` and then
1120    /// slice that.
1121    ///
1122    /// ## Examples
1123    ///
1124    /// ```rust
1125    /// use fastring::ArcStr;
1126    ///
1127    /// let s = ArcStr::new("Hello, 世界!");
1128    ///
1129    /// # assert_eq!(&s[..=0], "H");
1130    /// assert_eq!(&s[..=4], "Hello");
1131    /// assert_eq!(&s[..=6], "Hello, ");
1132    /// assert_eq!(&s[..=7], "Hello, 世");
1133    /// assert_eq!(&s[..=8], "Hello, 世界");
1134    /// # assert_eq!(&s[..=9], "Hello, 世界!");
1135    /// ```
1136    fn index(&self, range: ops::RangeToInclusive<usize>) -> &Self::Output {
1137        let this = self.as_str();
1138
1139        let mut iter = this.chars();
1140
1141        let mut be = 0;
1142
1143        {
1144            let mut ce = 0;
1145
1146            while ce <= range.end {
1147                let Some(c) = iter.next() else {
1148                    #[allow(clippy::panic, reason = "XXX")]
1149                    {
1150                        panic!("invalid range: the end index is out of bounds");
1151                    }
1152                };
1153
1154                be += c.len_utf8();
1155                ce += 1;
1156            }
1157        }
1158
1159        #[allow(unsafe_code, reason = "XXX")]
1160        unsafe {
1161            this.get_unchecked(..be)
1162        }
1163    }
1164}
1165
1166impl ops::Index<usize> for ArcStr {
1167    type Output = str;
1168
1169    #[inline]
1170    /// Returns a string slice containing the specified char in this
1171    /// [`ArcStr`].
1172    ///
1173    /// ## Notes
1174    ///
1175    /// The index here is in terms of chars, not bytes! This is different from
1176    /// the standard behavior of string indexing in Rust, but it is more
1177    /// intuitive in non-ASCII contexts. If you want to index by byte indices,
1178    /// you can use `as_str()` to get a `&str` and then index that.
1179    ///
1180    /// ## Examples
1181    ///
1182    /// ```rust
1183    /// use fastring::ArcStr;
1184    ///
1185    /// let s = ArcStr::new("Hello, 世界!");
1186    ///
1187    /// assert_eq!(&s[0], "H");
1188    /// assert_eq!(&s[1], "e");
1189    /// assert_eq!(&s[2], "l");
1190    /// assert_eq!(&s[3], "l");
1191    /// assert_eq!(&s[4], "o");
1192    /// assert_eq!(&s[5], ",");
1193    /// assert_eq!(&s[6], " ");
1194    /// assert_eq!(&s[7], "世");
1195    /// assert_eq!(&s[8], "界");
1196    /// assert_eq!(&s[9], "!");
1197    /// ```
1198    fn index(&self, index: usize) -> &Self::Output {
1199        let this = self.as_str();
1200
1201        let mut iter = this.chars();
1202
1203        let mut bs = 0;
1204        let mut be = iter
1205            .next()
1206            .expect("invalid index: indexing into empty string")
1207            .len_utf8();
1208
1209        {
1210            let mut ce = 1;
1211
1212            while ce <= index {
1213                let Some(c) = iter.next() else {
1214                    #[allow(clippy::panic, reason = "XXX")]
1215                    {
1216                        panic!("invalid index: the index is out of bounds");
1217                    }
1218                };
1219
1220                bs = be;
1221                be += c.len_utf8();
1222                ce += 1;
1223            }
1224        }
1225
1226        #[allow(unsafe_code, reason = "XXX")]
1227        unsafe {
1228            this.get_unchecked(bs..be)
1229        }
1230    }
1231}
1232
#[cfg(feature = "serde")]
impl serde::Serialize for ArcStr {
    #[inline]
    /// Serializes this [`ArcStr`] as a plain string.
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        // Go straight to the serializer's string primitive; this is exactly
        // what `str::serialize` does.
        serializer.serialize_str(self.as_str())
    }
}
1240
#[cfg(feature = "serde")]
#[allow(clippy::elidable_lifetime_names, reason = "XXX")]
impl<'de> serde::Deserialize<'de> for ArcStr {
    #[inline]
    fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        // Accepts any string input and builds an `ArcStr` from it.
        struct StrVisitor;

        impl<'de> serde::de::Visitor<'de> for StrVisitor {
            type Value = ArcStr;

            #[inline]
            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("string")
            }

            #[inline]
            fn visit_str<E: serde::de::Error>(self, v: &str) -> Result<Self::Value, E> {
                Ok(ArcStr::new(v))
            }
        }

        deserializer.deserialize_str(StrVisitor)
    }
}
1266
#[allow(missing_debug_implementations, reason = "XXX")]
#[doc(hidden)]
#[repr(C, align(8))]
/// Caveat on the static/refcount fields: the flag bit ("is_static") indicates
/// if we're located in static data (as with empty string). The flag being
/// false means we are a normal arc-ed string.
///
/// We do this by keeping a flag bit in `lf` to indicate which case we're in,
/// and maintaining the invariant that if we're a static instance **we may
/// never access `.rf` as an atomic in any way or produce a reference to the
/// atomic-flavored `ArcInner` pointing to our data**.
///
/// This is more subtle than you might think, since AFAIK we're not legally
/// allowed to create such a reference until we're 100% sure it's nonstatic,
/// and prior to determining it, we are forced to work entirely from behind a
/// raw pointer...
///
/// That said, a bit of this hoop jumping might be not required in the future,
/// but for now what we're doing works and is apparently sound:
/// <https://github.com/rust-lang/unsafe-code-guidelines/issues/246>
pub struct ArcInner<RF = AtomicUsize, const N: usize = 0> {
    /// The length of the data.
    ///
    /// The flag bit is used to indicate if the data is *static*.
    lf: FlaggedUint<usize>,

    /// The (atomic) reference count.
    ///
    /// The flag bit is used to indicate if the data is *static*, including the
    /// circumstance that we have leaked the allocation.
    ///
    /// ## Caveat
    ///
    /// While `ArcStr` claims to hold a pointer to an `ArcInner<AtomicUsize>`,
    /// for the static case we actually are using a pointer to an
    /// `ArcInner<usize, N>`. These have almost identical layouts,
    /// except the static one contains an explicit trailing array, and does
    /// not have an `AtomicUsize`. The issue is: We kind of want the static
    /// ones to not have any interior mutability, so that `const`s can use
    /// them, and so that they may be stored in read-only memory.
    ///
    /// When `lf.flag` is true, this field is not actually a valid
    /// [`AtomicUsize`]: doing atomic reading against read-only memory is
    /// technically undefined behavior (UB).
    rf: FlaggedUint<RF>,

    /// The string data.
    ///
    /// - In the static case, this is actually a trailing array.
    /// - In the heap-allocated case, this is just the start of the data, and
    ///   the actual length is stored in `lf`.
    data: [u8; N],
}
1321
impl<RF, const N: usize> ArcInner<RF, N> {
    #[inline(always)]
    /// Reads a copy of the field `lf`.
    ///
    /// Only `lf` is touched, and only through the raw pointer; no reference
    /// to `rf` is ever formed, so this is usable on both the static and the
    /// heap-allocated flavor.
    const fn lf(this: NonNull<Self>) -> FlaggedUint<usize> {
        #[allow(unsafe_code, reason = "`this` is non-null; field `lf` is initialized.")]
        unsafe {
            (*this.as_ptr()).lf
        }
    }

    #[inline(always)]
    /// Gets a pointer to the start of the string data.
    ///
    /// The pointer is derived via `&raw const`, so no intermediate reference
    /// to the `data` array (whose declared length `N` may be smaller than the
    /// actual trailing allocation) is created.
    const fn data(this: NonNull<Self>) -> *const MaybeUninit<u8> {
        #[allow(unsafe_code, reason = "XXX")]
        unsafe {
            (&raw const (*this.as_ptr()).data).cast()
        }
    }
}
1341
impl<const N: usize> ArcInner<AtomicUsize, N> {
    #[allow(unsafe_code, reason = "See below.")]
    #[inline(always)]
    /// Gets a reference to the field `rf`.
    ///
    /// # Safety
    ///
    /// The caller must ensure that:
    ///
    /// - the returned reference's lifetime does not outlive the [`ArcInner`]
    ///   instance;
    /// - the [`ArcInner`] instance is not a static one.
    const unsafe fn rf<'a>(this: NonNull<Self>) -> &'a FlaggedUint<AtomicUsize> {
        // SAFETY (per the contract above): the instance is non-static, so
        // `rf` really is a live `AtomicUsize` and a shared reference to it
        // may be formed.
        #[allow(unsafe_code, reason = "`this` is non-null; field `rf` is initialized.")]
        unsafe {
            &(*this.as_ptr()).rf
        }
    }
}
1361
1362impl<const N: usize> ArcInner<usize, N> {
1363    #[doc(hidden)]
1364    /// Not a public API. Use [`arc!()`](crate::arc) instead.
1365    pub const fn constified(data: [u8; N]) -> Self {
1366        Self {
1367            lf: FlaggedUint::<usize>::unchecked_new(true, N),
1368            rf: FlaggedUint::<usize>::unchecked_new(true, 1),
1369            data,
1370        }
1371    }
1372}
1373
1374impl ArcInner {
1375    #[allow(
1376        unsafe_code,
1377        reason = "The caller must ensure that the given `initializer` will write `len` bytes of \
1378                  valid UTF-8 encoded string into the provided buffer"
1379    )]
1380    #[inline]
1381    unsafe fn try_new_with<F>(
1382        len: usize,
1383        zeroed: bool,
1384        initializer: F,
1385    ) -> Result<NonNull<Self>, Option<Layout>>
1386    where
1387        F: FnOnce(&mut [MaybeUninit<u8>]),
1388    {
1389        #[allow(unsafe_code, reason = "XXX")]
1390        let this = unsafe { Self::try_allocate(len, zeroed) }?;
1391
1392        #[allow(unsafe_code, reason = "XXX")]
1393        initializer(unsafe {
1394            slice::from_raw_parts_mut(
1395                this.as_ptr()
1396                    .cast::<MaybeUninit<u8>>()
1397                    .add(offset_of!(Self, data)),
1398                len,
1399            )
1400        });
1401
1402        Ok(this)
1403    }
1404
1405    #[allow(unsafe_code, reason = "XXX")]
1406    /// Allocates memory for an [`ArcInner`] with exact `capacity` reserved for
1407    /// string data.
1408    ///
1409    /// The field `lf` is set to `capacity`, while the field `rf` is set to 1.
1410    /// Both flags are set to false.
1411    ///
1412    /// # Safety
1413    ///
1414    /// The caller must ensure that all reserved capacity is properly
1415    /// initialized before the `ArcInner` is used in any way.
1416    unsafe fn try_allocate(capacity: usize, zeroed: bool) -> Result<NonNull<Self>, Option<Layout>> {
1417        let Ok(layout) =
1418            Layout::from_size_align(offset_of!(Self, data) + capacity, align_of::<Self>())
1419        else {
1420            return Err(None);
1421        };
1422
1423        #[allow(
1424            unsafe_code,
1425            reason = "Allocating memory according to the given layout"
1426        )]
1427        let ptr = unsafe {
1428            if zeroed {
1429                alloc::alloc::alloc_zeroed(layout)
1430            } else {
1431                alloc::alloc::alloc(layout)
1432            }
1433        };
1434
1435        #[allow(clippy::cast_ptr_alignment, reason = "XXX")]
1436        let Some(ptr) = NonNull::new(ptr.cast::<Self>()) else {
1437            return Err(Some(layout));
1438        };
1439
1440        #[allow(unsafe_code, reason = "Initializing fields `lf` / `rf`")]
1441        unsafe {
1442            (&raw mut ((*ptr.as_ptr()).lf))
1443                .write(FlaggedUint::<usize>::unchecked_new(false, capacity));
1444            (&raw mut ((*ptr.as_ptr()).rf))
1445                .write(FlaggedUint::<AtomicUsize>::unchecked_new(false, 1));
1446        };
1447
1448        Ok(ptr)
1449    }
1450
1451    #[allow(unsafe_code, reason = "XXX")]
1452    #[inline(never)]
1453    /// Destroys the allocated `ArcInner`.
1454    unsafe fn destroy(this: NonNull<Self>) {
1455        let lf = Self::lf(this);
1456
1457        debug_assert!(
1458            !lf.flag(),
1459            "implementation bug: cannot destroy static `ArcInner`"
1460        );
1461
1462        #[allow(
1463            unsafe_code,
1464            reason = "Deallocating memory according to how it was allocated"
1465        )]
1466        unsafe {
1467            alloc::alloc::dealloc(
1468                this.as_ptr().cast(),
1469                Layout::from_size_align_unchecked(
1470                    offset_of!(Self, data) + lf.value(),
1471                    align_of::<Self>(),
1472                ),
1473            );
1474        };
1475    }
1476}
1477
#[repr(transparent)]
#[derive(Clone, Copy)]
/// A `usize` whose MSB is used as a flag.
///
/// `V` is either a plain `usize` or an `AtomicUsize`; `repr(transparent)`
/// keeps each flavor layout-identical to its payload, which the static /
/// heap-allocated `ArcInner` variants rely on.
struct FlaggedUint<V = usize> {
    // Raw bits: the MSB is the flag, the remaining bits are the uint value.
    value: V,
}
1484
1485impl FlaggedUint {
1486    /// The maximum value of the uint part.
1487    const MAX: usize = Self::MSB - 1;
1488    /// The flag bit, which is the MSB of the `usize`.
1489    const MSB: usize = 1 << (usize::BITS - 1);
1490
1491    // #[inline(always)]
1492    // const fn new(flag: bool, value: usize) -> Option<Self> {
1493    //     if value <= Self::MAX {
1494    //         Some(Self::unchecked_new(flag, value))
1495    //     } else {
1496    //         None
1497    //     }
1498    // }
1499
1500    #[inline(always)]
1501    /// Creates a new [`Flagged`] without checking if `value` is within
1502    /// bounds.
1503    const fn unchecked_new(flag: bool, value: usize) -> Self {
1504        debug_assert!(value <= Self::MAX);
1505
1506        Self { value }.flagged(flag)
1507    }
1508
1509    #[inline(always)]
1510    const fn value(self) -> usize {
1511        self.value & Self::MAX
1512    }
1513
1514    #[inline(always)]
1515    const fn flag(self) -> bool {
1516        (self.value & Self::MSB) != 0
1517    }
1518
1519    #[inline(always)]
1520    const fn flagged(mut self, flag: bool) -> Self {
1521        if flag {
1522            self.value |= Self::MSB;
1523        } else {
1524            self.value &= !Self::MSB;
1525        }
1526
1527        self
1528    }
1529}
1530
1531impl FlaggedUint<AtomicUsize> {
1532    // #[inline(always)]
1533    // const fn new(flag: bool, value: usize) -> Option<Self> {
1534    //     if value <= Flagged::MAX {
1535    //         Some(Self::unchecked_new(flag, value))
1536    //     } else {
1537    //         None
1538    //     }
1539    // }
1540
1541    #[inline(always)]
1542    /// Creates a new [`Flagged`] without checking if `value` is within
1543    /// bounds.
1544    const fn unchecked_new(flag: bool, value: usize) -> Self {
1545        debug_assert!(value <= FlaggedUint::MAX);
1546
1547        Self {
1548            value: AtomicUsize::new(FlaggedUint::<usize>::unchecked_new(flag, value).value),
1549        }
1550    }
1551
1552    #[inline(always)]
1553    /// Reads a snapshot.
1554    fn load(&self, order: Ordering) -> FlaggedUint<usize> {
1555        FlaggedUint::<usize> {
1556            value: self.value.load(order),
1557        }
1558    }
1559
1560    // #[inline(always)]
1561    // /// Stores a new value.
1562    // fn store(&self, val: Flagged, order: Ordering) {
1563    //     self.value.store(val.value, order);
1564    // }
1565
1566    #[inline(always)]
1567    /// Increments the value by 1, returning the previous value.
1568    fn increment(&self, order: Ordering) -> FlaggedUint {
1569        FlaggedUint {
1570            value: self.value.fetch_add(1, order),
1571        }
1572    }
1573
1574    #[inline(always)]
1575    /// Decrements the value by 1, returning the previous value.
1576    fn decrement(&self, order: Ordering) -> FlaggedUint {
1577        FlaggedUint {
1578            value: self.value.fetch_sub(1, order),
1579        }
1580    }
1581}
1582
#[cold]
#[inline(never)]
/// Reports an allocation failure and never returns.
///
/// `Some(layout)` means the allocator itself failed for that layout;
/// `None` means no layout could even be computed (capacity overflow).
fn handle_alloc_error<T>(layout: Option<Layout>) -> T {
    if let Some(layout) = layout {
        alloc::alloc::handle_alloc_error(layout)
    } else {
        #[allow(clippy::panic, reason = "XXX")]
        {
            // TODO: panic or abort?
            panic!("unlikely: capacity overflow");
        }
    }
}
1597
1598#[doc(hidden)]
1599/// For sanity check.
1600pub const fn arc_inner_layout_check<const N: usize>() {
1601    assert!(align_of::<ArcInner<AtomicUsize, N>>() == align_of::<ArcInner>());
1602    assert!(align_of::<ArcInner>() >= 8);
1603
1604    assert!(offset_of!(ArcInner<AtomicUsize, N>, lf) == offset_of!(ArcInner, lf),);
1605    assert!(offset_of!(ArcInner<AtomicUsize, N>, rf) == offset_of!(ArcInner, rf),);
1606    assert!(offset_of!(ArcInner<AtomicUsize, N>, data) == offset_of!(ArcInner, data),);
1607}
1608
#[cfg(all(test, not(loom)))]
mod tests {
    use alloc::borrow::Cow;
    use alloc::boxed::Box;
    use alloc::rc::Rc;
    use alloc::string::String;
    use alloc::sync::Arc;
    use alloc::vec::Vec;
    use alloc::{format, vec};
    use std::collections::{BTreeMap, HashMap};

    use super::{ArcStr, arc_inner_layout_check};

    // Evaluated at compile time: a layout mismatch for any of these trailing
    // array sizes fails the build, not just a test run.
    const _: () = {
        arc_inner_layout_check::<0>();
        arc_inner_layout_check::<1>();
        arc_inner_layout_check::<2>();
        arc_inner_layout_check::<3>();
        arc_inner_layout_check::<4>();
        arc_inner_layout_check::<5>();
        arc_inner_layout_check::<6>();
        arc_inner_layout_check::<7>();
        arc_inner_layout_check::<8>();
        arc_inner_layout_check::<15>();
        arc_inner_layout_check::<16>();
        arc_inner_layout_check::<31>();
        arc_inner_layout_check::<32>();
        arc_inner_layout_check::<63>();
        arc_inner_layout_check::<64>();
        arc_inner_layout_check::<128>();
        arc_inner_layout_check::<1024>();
        arc_inner_layout_check::<4095>();
        arc_inner_layout_check::<4096>();
    };

    // Smoke test for `Default`, `FromStr`, `AsRef<str>`, and `as_bytes`.
    #[test]
    fn test_loose_ends() {
        assert_eq!(ArcStr::default(), "");
        assert_eq!("abc".parse::<ArcStr>().unwrap(), "abc");

        let abc_arc = ArcStr::from("abc");
        let abc_str: &str = abc_arc.as_ref();
        let abc_bytes: &[u8] = abc_arc.as_bytes();

        assert_eq!(abc_str, "abc");
        assert_eq!(abc_bytes, b"abc");
    }

    // Round-trips many clones (static, small, heap-allocated) through
    // `into_raw` / `from_raw`; running under Miri catches refcount leaks.
    #[allow(unsafe_code, reason = "XXX")]
    #[test]
    fn test_from_into_raw() {
        let foo = vec![
            ArcStr::default(),
            ArcStr::new("1234"),
            ArcStr::new(format!("test {}", 1)),
        ]
        .into_iter()
        .cycle()
        .take(100)
        .collect::<Vec<ArcStr>>();

        let bar = foo
            .iter()
            .map(|s| ArcStr::into_raw(s.clone()))
            .collect::<Vec<_>>();

        // The raw pointers must keep their strings alive past this drop.
        drop(foo);

        let barfoo = bar
            .iter()
            .map(|s| unsafe { ArcStr::from_raw(*s) })
            .collect::<Vec<_>>();

        let foobar = [
            ArcStr::default(),
            ArcStr::from("1234"),
            ArcStr::from(format!("test {}", 1)),
        ]
        .iter()
        .cloned()
        .cycle()
        .take(100)
        .collect::<Vec<_>>();

        assert_eq!(barfoo, foobar);

        drop(barfoo);
    }

    // `arc!()` must also work with a large `include_str!` literal in a const.
    #[test]
    fn test_static_include_str() {
        const APACHE: ArcStr = arc!(include_str!("../LICENSE-APACHE"));
        assert!(APACHE.len_bytes() > 10000);
        assert!(APACHE.trim_start().starts_with("Apache License"));
        assert!(
            APACHE
                .trim_end()
                .ends_with("limitations under the License.")
        );
    }

    // Inherent `as_str` / `len_*` / `is_empty` sanity checks.
    #[test]
    fn test_inherent_overrides() {
        let s = ArcStr::from("abc");
        assert_eq!(s.as_str(), "abc");

        let a = ArcStr::from("foo");
        assert_eq!(a.len_chars(), 3);
        assert_eq!(a.len_bytes(), 3);

        assert!(!ArcStr::from("foo").is_empty());
        assert!(ArcStr::empty().is_empty());
    }

    // Clones and drops strings at many refcounts; exercises both the
    // heap-allocated path and the no-op static (`arc!`) path.
    #[test]
    fn test_clone() {
        // Miri is slow; use a smaller iteration count there.
        let count = if cfg!(miri) { 64 } else { 256 };

        for count in 0..count {
            {
                let string = ArcStr::new("");
                drop(vec![string; count]);
            }

            {
                let string = ArcStr::new("foobar");
                drop(vec![string; count]);
            }

            {
                let literal = arc!("");
                drop(vec![literal; count]);
            }

            {
                let literal = arc!("foobar");
                drop(vec![literal; count]);
            }
        }

        drop(vec![ArcStr::empty(); count]);
    }

    // `Display` must print the raw string for both static and heap strings.
    #[test]
    fn test_fmt() {
        macro_rules! test {
            ($text:expr) => {
                assert_eq!(format!("{}", arc!($text)), $text);
                assert_eq!(format!("{}", ArcStr::new($text)), $text);
            };
        }

        test!("");
        test!("foobar");
        test!("Hello, 世界");
    }

    // `From` conversions from the common owned/shared string types.
    #[test]
    fn test_from() {
        macro_rules! test {
            ($expected:expr) => {
                assert_eq!(ArcStr::from(Cow::from($expected)), $expected);
                assert_eq!(ArcStr::from(Box::<str>::from($expected)), $expected);
                assert_eq!(ArcStr::from(String::from($expected)), $expected);
                assert_eq!(ArcStr::from(Rc::<str>::from($expected)), $expected);
                assert_eq!(ArcStr::from(Arc::<str>::from($expected)), $expected);
            };
        }

        test!("");
        test!("foobar");
    }

    // `Ord` must order by string contents, matching `str`'s ordering.
    #[test]
    fn test_ord() {
        let mut arr = [ArcStr::new("foo"), ArcStr::new("bar"), ArcStr::new("baz")];

        // Sort the array in-place.
        arr.sort();

        assert_eq!(
            &arr,
            &[ArcStr::new("bar"), ArcStr::new("baz"), ArcStr::new("foo")]
        );
    }

    // `PartialEq` against every supported string-ish type, by value and by
    // reference on both sides.
    #[test]
    fn test_partial_eq() {
        macro_rules! test {
            ($this:expr, $expected:expr) => {
                assert_eq!($this, $expected);
                assert_eq!(&$this, $expected);
                assert_eq!($this, &$expected);
                assert_eq!(&$this, &$expected);

                assert_eq!($this, Cow::from($expected));
                assert_eq!(&$this, Cow::from($expected));
                assert_eq!($this, &Cow::from($expected));
                assert_eq!(&$this, &Cow::from($expected));

                assert_eq!($this, Box::<str>::from($expected));
                assert_eq!(&$this, Box::<str>::from($expected));
                assert_eq!($this, &Box::<str>::from($expected));
                assert_eq!(&$this, &Box::<str>::from($expected));

                assert_eq!($this, String::from($expected));
                assert_eq!(&$this, String::from($expected));
                assert_eq!($this, &String::from($expected));
                assert_eq!(&$this, &String::from($expected));

                assert_eq!($this, Rc::<str>::from($expected));
                assert_eq!(&$this, Rc::<str>::from($expected));
                assert_eq!($this, &Rc::<str>::from($expected));
                assert_eq!(&$this, &Rc::<str>::from($expected));

                assert_eq!($this, Arc::<str>::from($expected));
                assert_eq!(&$this, Arc::<str>::from($expected));
                assert_eq!($this, &Arc::<str>::from($expected));
                assert_eq!(&$this, &Arc::<str>::from($expected));
            };
        }

        let this = ArcStr::new("");
        test!(this, "");

        let this = ArcStr::new("foobar");
        test!(this, "foobar");

        let this = arc!("");
        test!(this, "");

        let this = arc!("foobar");
        test!(this, "foobar");
    }

    // `Hash` + `Borrow<str>` must allow `&str` lookups in a `HashMap`.
    #[test]
    fn test_hash_map() {
        let mut m = HashMap::new();
        for i in 0..100 {
            let prev = m.insert(ArcStr::new(format!("key {i}")), i);
            assert_eq!(prev, None);
        }
        for i in 0..100 {
            let key = format!("key {i}");
            let search = key.as_str();
            assert_eq!(m[search], i);
            assert_eq!(m.remove(search), Some(i));
        }
    }

    // `Ord` + `Borrow<str>` must allow `&str` lookups in a `BTreeMap`.
    #[test]
    fn test_btree_map() {
        let mut m = BTreeMap::new();

        for i in 0..100 {
            let prev = m.insert(ArcStr::new(format!("key {i}")), i);
            assert_eq!(prev, None);
        }

        for i in 0..100 {
            let s = format!("key {i}");
            assert_eq!(m.remove(s.as_str()), Some(i));
        }
    }

    // Serde round-trip: serialize as a plain string, deserialize from
    // borrowed, owned, and transient string tokens alike.
    #[cfg(feature = "serde")]
    #[test]
    fn test_serde() {
        use serde_test::{Token, assert_de_tokens, assert_tokens};

        let teststr = ArcStr::from("test test 123 456");

        assert_tokens(&teststr, &[Token::BorrowedStr("test test 123 456")]);
        assert_tokens(&teststr.clone(), &[Token::BorrowedStr("test test 123 456")]);
        assert_tokens(&ArcStr::default(), &[Token::BorrowedStr("")]);

        let checks = &[
            [Token::Str("123")],
            [Token::BorrowedStr("123")],
            [Token::String("123")],
        ];
        for check in checks {
            assert_de_tokens(&ArcStr::from("123"), check);
        }
    }
}
1895
#[cfg(all(test, loom))]
mod loomtest {
    use loom::sync::Arc;
    use loom::thread;

    use super::ArcStr;

    // Two threads cloning the same `ArcStr` concurrently: every loom
    // interleaving must keep both clones pointing at the same allocation.
    #[test]
    fn cloning_threads() {
        loom::model(|| {
            let a = ArcStr::new("abcdefgh");
            let addr = a.as_ptr() as usize;

            let a1 = Arc::new(a);
            let a2 = Arc::clone(&a1);

            let t1 = thread::spawn(move || {
                let b: ArcStr = (*a1).clone();
                assert_eq!(b.as_ptr() as usize, addr);
            });
            let t2 = thread::spawn(move || {
                let b: ArcStr = (*a2).clone();
                assert_eq!(b.as_ptr() as usize, addr);
            });

            t1.join().unwrap();
            t2.join().unwrap();
        });
    }

    // Two threads dropping clones of the same strings in every possible
    // interleaving; exercises the decrement-then-destroy path for races.
    #[test]
    fn drop_timing() {
        loom::model(|| {
            let a1 = alloc::vec![
                ArcStr::from("s1"),
                ArcStr::from("s2"),
                ArcStr::from("s3"),
                ArcStr::from("s4"),
            ];
            let a2 = a1.clone();

            let t1 = thread::spawn(move || {
                let mut a1 = a1;
                while let Some(s) = a1.pop() {
                    assert!(s.starts_with("s"));
                }
            });
            let t2 = thread::spawn(move || {
                let mut a2 = a2;
                while let Some(s) = a2.pop() {
                    assert!(s.starts_with("s"));
                }
            });

            t1.join().unwrap();
            t2.join().unwrap();
        });
    }

    // NOTE(review): disabled — appears to await a public `leak()` API on
    // `ArcStr`; re-enable once that method lands.
    // #[test]
    // fn leak_drop() {
    //     loom::model(|| {
    //         let a1 = ArcStr::from("foo");
    //         let a2 = a1.clone();

    //         let t1 = thread::spawn(move || {
    //             drop(a1);
    //         });
    //         let t2 = thread::spawn(move || a2.leak());
    //         t1.join().unwrap();
    //         let leaked: &'static str = t2.join().unwrap();
    //         assert_eq!(leaked, "foo");
    //     });
    // }
}