hybrid_rc/lib.rs

1/*!
2 * Thread-safe hybrid reference counting pointers
3 *
4 * Loosely based on the algorithm described in
5 * ["Biased reference counting: minimizing atomic operations in garbage collection"][doi:10.1145/3243176.3243195]
 * by Jiho Choi et al., but adapted to Rust's type system and its lack of a managed runtime
7 * environment.
8 *
9 * The type `HybridRc<T, State>` provides thread-safe shared ownership of a value of type `T`
10 * allocated on the heap, just like `std::sync::Arc<T>` does. The main difference is that one
11 * thread at a time can use non-atomic reference counting for better performance. That means that
12 * `HybridRc` is especially suited for workloads where one thread accesses the shared value
13 * significantly more often than others.
14 *
 * There are two variants of [`HybridRc`]:
16 * - `HybridRc<T, `[`Local`]`>` (type aliased as [`Rc`]): very fast but only usable on one thread.
17 * - `HybridRc<T, `[`Shared`]`>` (type aliased as [`Arc`]): slower but universally usable.
18 *
 * Instances of both variants are convertible into each other. In particular, an `Rc` can always
 * be converted into an `Arc` using [`HybridRc::to_shared(&rc)`] or [`.into()`].
21 *
22 * An `Arc` on the other hand can only be converted into an `Rc` using [`HybridRc::to_local(&arc)`]
23 * or [`.try_into()`] if no other thread has `Rc`s for the same value. The thread holding `Rc`s to
24 * a value is called the "owner thread". Once all `Rc`s are dropped, the shared value becomes
25 * ownerless again.
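 *
 * For example (a brief sketch of the two conversions described above):
 * ```
 * use hybrid_rc::{Rc, Arc};
 *
 * let local = Rc::new(42);
 * let shared: Arc<i32> = Rc::to_shared(&local);
 *
 * // This thread still holds `Rc`s, so it is the owner thread and may convert back:
 * let local_again = Arc::to_local(&shared).expect("no other thread holds Rcs");
 * assert_eq!(*local_again, 42);
 * ```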
26 *
 * `HybridRc` is designed as a drop-in replacement for `std::sync::Arc` and `std::rc::Rc`, so,
 * except for the conversion functionality outlined above, its usage is similar to that of these
 * and other smart pointers.
30 *
31 * # Thread Safety
32 *
 * `HybridRc` uses two separate reference counters - one modified non-atomically and one using
 * atomic operations - and keeps track of an owner thread that is allowed to modify the "local"
 * reference counter. This makes it thread-safe, while exempting one thread from the cost of
 * atomic operations, which are more expensive than ordinary memory accesses.
37 *
38 * # `no_std` Support
39 *
40 * This crate provides limited support for `no_std` environments. In this mode `Arc::to_local()` and
41 * `Weak::upgrade_local()` only succeed if no `Rc` exists on *any* thread, as threads cannot be
42 * reliably identified without `std`.
43 *
 * To enable `no_std` mode, disable the default-enabled `std` feature in `Cargo.toml`. A global
 * allocator is required.
46 *
47 * ```toml
48 * [dependencies]
49 * hybrid-rc = { version = "…", default-features = false }
50 * ```
51 *
52 * # Examples
53 *
54 * Multiple threads need a reference to a shared value while one thread needs to clone references
55 * to the value significantly more often than the others.
56 * ```
57 * use hybrid_rc::{Rc, Arc};
58 * use std::thread;
59 * use std::sync::mpsc::channel;
60 *
61 * # type SomeComplexType = std::collections::BinaryHeap<()>;
62 * # fn expensive_computation<T>(x: impl AsRef<T>, i: i32) -> i32 { let _ = x.as_ref(); i }
63 * # fn do_something<T>(x: impl AsRef<T>, _i: i32) { let _ = x.as_ref(); }
64 * # fn main() -> Result<(), Box<dyn std::any::Any + Send + 'static>> {
65 * let local = Rc::new(SomeComplexType::new());
66 * let (sender, receiver) = channel();
67 *
 * // Spawn threads for multiple expensive computations
69 * for i in 1..=4 {
70 * 	let sender = sender.clone();
71 * 	let shared = Rc::to_shared(&local);
72 * 	thread::spawn(move || {
73 * 		sender.send(expensive_computation(shared, i));
74 * 	});
75 * }
76 *
77 * // Do something that needs single-thread reference counting
78 * for i in 1..=1000 {
79 * 	do_something(local.clone(), i);
80 * }
81 *
82 * // Collect expensive computation results
83 * for i in 1..=4 {
84 * 	println!("{:?}", receiver.recv().unwrap());
85 * }
86 * # Ok(())
87 * # }
88 * ```
89 *
90 * A library wants to give library consumers flexibility for multithreading but also internally
91 * have the performance of `std::rc::Rc` for e.g. a complex tree structure that is mutated on
92 * the main thread.
93 * ```
94 * use hybrid_rc::Rc;
95 * use std::thread;
96 *
97 * # fn get_local_hybridrc_from_some_library() -> Rc<()> { Rc::default() }
98 * # fn do_something(_: &()) { }
99 * # fn main() -> Result<(), Box<dyn std::any::Any + Send + 'static>> {
100 * let reference = get_local_hybridrc_from_some_library();
101 * let shared = Rc::to_shared(&reference);
102 *
103 * // do the work in another thread
104 * let worker = thread::spawn(move || {
105 * 	do_something(&*shared);
106 * });
107 *
108 * // Do something useful with the library
109 *
110 * worker.join()?;
111 * # Ok(())
112 * # }
113 * ```
114 *
115 * [`HybridRc::to_shared(&rc)`]: HybridRc::to_shared
116 * [`HybridRc::to_local(&arc)`]: HybridRc::to_local
117 * [`.into()`]: HybridRc#impl-From<HybridRc<T%2C%20Local>>
118 * [`.try_into()`]: HybridRc#impl-TryFrom<HybridRc<T%2C%20Shared>>
119 * [doi:10.1145/3243176.3243195]: https://dl.acm.org/doi/10.1145/3243176.3243195
120 */
121
122#![cfg_attr(not(feature = "std"), no_std)]
123#![deny(unsafe_op_in_unsafe_fn)]
124
125extern crate alloc;
126use alloc::alloc::Layout;
127use alloc::borrow::{Cow, ToOwned};
128use alloc::boxed::Box;
129use alloc::string::String;
130use alloc::vec::Vec;
131use core::any::Any;
132use core::borrow::Borrow;
133use core::cell::Cell;
134use core::convert::Infallible;
135use core::convert::TryFrom;
136use core::hash::{Hash, Hasher};
137use core::marker::PhantomData;
138use core::ops::Deref;
139#[cfg(not(feature = "std"))]
140use core::panic::{RefUnwindSafe, UnwindSafe};
141use core::pin::Pin;
142use core::ptr::NonNull;
143use core::sync::atomic;
144use core::sync::atomic::Ordering;
145use core::{cmp, fmt, iter, mem, ptr};
146#[cfg(feature = "std")]
147use std::panic::{RefUnwindSafe, UnwindSafe};
148
149mod atomic_thread_id;
150use atomic_thread_id::{AtomicOptionThreadId, ThreadId};
151mod slice_builder;
152use slice_builder::SliceBuilder;
153mod tests;
154mod thread_id;
155
/// Provides a sentinel pointer value for dangling `Weak`s.
157///
158/// This is not NULL to allow optimizations through [`NonNull`] but cannot ever be a valid pointer
159/// to a [`RcBox`].
160#[inline]
161const fn senitel<T>() -> NonNull<T> {
162	unsafe { NonNull::new_unchecked(usize::MAX as *mut T) }
163}
164
165/// Checks if the provided pointer is the [`senitel`]
166#[inline]
167fn is_senitel<T: ?Sized>(ptr: *const T) -> bool {
	ptr.cast::<()>() == senitel::<()>().as_ptr() as *const ()
169}
170
171/// Internal module for non-public definition of `RcState`.
172mod state_trait {
173	use core::fmt::Debug;
174
175	/// Internal trait for type-level enumeration of `Shared` and `Local`.
176	pub trait RcState: Debug {
177		const SHARED: bool;
178	}
179}
180use state_trait::RcState;
181
182/// Marker types for the states of a [`HybridRc`]
183pub mod state {
184	/// Marks a [`HybridRc`] as shared.
185	///
186	/// `HybridRc<_, Shared>` atomically updates the shared reference counter.
187	///
188	/// # See also
189	/// - [`Local`]
190	///
191	/// [`HybridRc`]: super::HybridRc
192	#[derive(Debug, Clone, Copy)]
193	pub enum Shared {}
194	impl super::RcState for Shared {
195		const SHARED: bool = true;
196	}
197
198	/// Marks a [`HybridRc`] as local.
199	///
200	/// `HybridRc<_, Local>` non-atomically updates the local reference counter.
201	///
202	/// # See also
203	/// - [`Shared`]
204	///
205	/// [`HybridRc`]: super::HybridRc
206	#[derive(Debug, Clone, Copy)]
207	pub enum Local {}
208	impl super::RcState for Local {
209		const SHARED: bool = false;
210	}
211}
212use state::{Local, Shared};
213
214/// An enumeration of possible errors when upgrading a [`Weak`].
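///
/// # Example
/// For example, upgrading a [`Weak`] after the referenced value has been dropped fails with
/// [`ValueDropped`](UpgradeError::ValueDropped):
/// ```
/// use hybrid_rc::{Rc, UpgradeError};
///
/// let strong = Rc::new(42);
/// let weak = Rc::downgrade(&strong);
/// drop(strong);
///
/// assert_eq!(weak.upgrade().err(), Some(UpgradeError::ValueDropped));
/// ```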
215#[derive(Debug, PartialEq, Eq, Clone, Copy)]
216pub enum UpgradeError {
	/// The referenced value was already dropped because no strong references to it exist anymore.
218	ValueDropped,
219	/// The requested action would have created a new [`Rc`] while at least one `Rc` still existed
220	/// on another thread.
221	WrongThread,
222}
223
224impl fmt::Display for UpgradeError {
225	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
226		match *self {
227			Self::ValueDropped => f.write_str("value was already dropped"),
228			Self::WrongThread => {
229				f.write_str("tried to get a local reference while another thread was the owner")
230			}
231		}
232	}
233}
234
235#[cfg(feature = "std")]
236impl std::error::Error for UpgradeError {}
237
238impl From<Infallible> for UpgradeError {
239	fn from(x: Infallible) -> UpgradeError {
240		match x {}
241	}
242}
243
244/// The `AllocError` error indicates an allocation failure when using `try_new()` etc.
245///
246/// Will become a type alias for [`std::alloc::AllocError`] once that is stabilized.
247#[derive(Copy, Clone, PartialEq, Eq, Debug)]
248pub struct AllocError;
249
250impl fmt::Display for AllocError {
251	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
252		f.write_str("memory allocation failed")
253	}
254}
255
256#[cfg(feature = "std")]
257impl std::error::Error for AllocError {}
258
259impl From<Infallible> for AllocError {
260	fn from(_: Infallible) -> AllocError {
261		unreachable!();
262	}
263}
264
265/// Reimplementation of `ptr::set_ptr_value` as long as that one is unstable
266///
267/// Constructs a new pointer to `addr_ptr` with the metadata and type of `meta_ptr`.
268#[inline]
269fn set_ptr_value<T: ?Sized, U>(mut meta_ptr: *const T, addr_ptr: *mut U) -> *mut T {
270	let thin = (&mut meta_ptr as *mut *const T).cast::<*const u8>();
	// Safety: In case of a thin pointer, this operation is identical
272	// to a simple assignment. In case of a fat pointer, with the current
273	// fat pointer layout implementation, the first field of such a
274	// pointer is always the data pointer, which is likewise assigned.
275	unsafe { *thin = addr_ptr.cast() };
276
277	meta_ptr as *mut T
278}
279
280/// Metadata part of a shared allocation.
281struct RcMeta {
282	/// Id for the thread which may use local references
283	owner: AtomicOptionThreadId,
284	/// Strong local reference count
285	strong_local: Cell<usize>,
286	/// Strong shared reference count (+ 1 for all strong local references combined)
287	strong_shared: atomic::AtomicUsize,
288
289	/// Weak reference count (+ 1 for all strong references combined)
290	///
291	/// If `usize::MAX`, the ability to downgrade strong pointers is temporarily locked to avoid
292	/// races in `get_mut()`.
293	weak: atomic::AtomicUsize,
294}
295
296/// Heap struct for shared allocations of `T`.
297///
298/// `repr(C)` to future-proof against possible layout optimizations which could interfere with
299/// `[into|from]_raw()` of transmutable data types.
300#[repr(C)]
301struct RcBox<T: ?Sized> {
302	meta: RcMeta,
303	data: T,
304}
305
306impl<T: ?Sized> RcBox<T> {
307	/// Deallocates an `RcBox`
308	///
309	/// `meta` will be dropped, but `data` must have already been dropped in place.
310	///
311	/// # Safety
312	/// The allocation must have been previously allocated with `RcBox::allocate_*()`.
313	#[inline]
314	unsafe fn dealloc(ptr: NonNull<RcBox<T>>) {
315		unsafe { ptr::addr_of_mut!((*ptr.as_ptr()).meta).drop_in_place() };
316		let layout = Layout::for_value(unsafe { ptr.as_ref() });
317		unsafe { alloc::alloc::dealloc(ptr.as_ptr().cast(), layout) };
318	}
319
320	/// Tries to allocate an `RcBox` for a possibly dynamically sized value
321	///
322	/// Size and alignment of `example` are used for allocation and if `example` is a fat reference
323	/// the pointer metadata is copied to the resulting pointer.
324	///
	/// Returns a pointer to the new allocation on success, or the memory layout that could not
	/// be allocated if the allocation failed.
327	#[inline]
328	fn try_allocate_for_val(
329		meta: RcMeta,
330		example: &T,
331		zeroed: bool,
332	) -> Result<NonNull<RcBox<T>>, Layout> {
333		let layout = Layout::new::<RcBox<()>>();
334		let layout = layout
335			.extend(Layout::for_value(example))
336			.map_err(|_| layout)?
337			.0
338			.pad_to_align();
339
		// Allocate memory
		let ptr = unsafe {
			if zeroed {
				alloc::alloc::alloc_zeroed(layout)
			} else {
				alloc::alloc::alloc(layout)
			}
		}
		.cast::<RcBox<()>>();

		if ptr.is_null() {
			// Allocation failed
			return Err(layout);
		}

		// Write RcMeta fields
		// Safety: Freshly allocated and non-null, so valid to write to.
		unsafe { ptr::addr_of_mut!((*ptr).meta).write(meta) };

		// Combine metadata from `example` with new memory
		let result = set_ptr_value(example, ptr);

		// `result` cannot be null here; keep the fallible conversion nonetheless.
		NonNull::new(result as *mut RcBox<T>).ok_or(layout)
358	}
359
360	/// Allocates an `RcBox` for a possibly dynamically sized value
361	///
362	/// Size and alignment of `example` are used for allocation and if `example` is a fat reference
363	/// the pointer metadata is copied to the resulting pointer.
364	///
365	/// Returns a mutable pointer on success.
366	///
367	/// # Panics
368	/// Panics or aborts if the allocation failed.
369	#[inline]
370	fn allocate_for_val(meta: RcMeta, example: &T, zeroed: bool) -> NonNull<RcBox<T>> {
371		match Self::try_allocate_for_val(meta, example, zeroed) {
372			Ok(result) => result,
373			Err(layout) => alloc::alloc::handle_alloc_error(layout),
374		}
375	}
376
377	/// Get the pointer to a `RcBox<T>` from a pointer to the data
378	///
379	/// # Safety
380	///
381	/// The pointer must point to (and have valid metadata for) the data part of a previously
382	/// valid instance of `RcBox<T>` and it must not be dangling.
383	#[inline]
384	unsafe fn ptr_from_data_ptr(ptr: *const T) -> *const RcBox<T> {
385		// Calculate layout of RcBox<T> without `data` tail, but including padding
386		let base_layout = Layout::new::<RcBox<()>>();
387		// Safety: covered by the safety contract above
388		let value_alignment = mem::align_of_val(unsafe { &*ptr });
389		let value_offset_layout =
390			Layout::from_size_align(0, value_alignment).expect("invalid memory layout");
391		let layout = base_layout
392			.extend(value_offset_layout)
393			.expect("invalid memory layout")
394			.0;
395
396		// Move pointer to point to the start of the original RcBox<T>
397		// Safety: covered by the safety contract above
398		let rcbox = unsafe { ptr.cast::<u8>().offset(-(layout.size() as isize)) };
399		set_ptr_value(ptr, rcbox as *mut u8) as *const RcBox<T>
400	}
401}
402
403impl<T> RcBox<T> {
404	/// Tries to allocate an `RcBox`
405	///
	/// Returns a pointer to the new allocation on success, or the memory layout that could not
	/// be allocated if the allocation failed.
408	#[inline]
409	fn try_allocate(meta: RcMeta) -> Result<NonNull<RcBox<mem::MaybeUninit<T>>>, Layout> {
410		let layout = Layout::new::<RcBox<T>>();
411
412		let ptr = unsafe { alloc::alloc::alloc(layout) }.cast::<RcBox<mem::MaybeUninit<T>>>();
413		if ptr.is_null() {
414			Err(layout)
415		} else {
416			unsafe { ptr::addr_of_mut!((*ptr).meta).write(meta) };
417			Ok(unsafe { NonNull::new_unchecked(ptr) })
418		}
419	}
420
421	/// Allocates an `RcBox`
422	///
	/// Returns a pointer to the new allocation on success.
424	///
425	/// # Panics
426	/// Panics or aborts if the allocation failed.
427	#[inline]
428	fn allocate(meta: RcMeta) -> NonNull<RcBox<mem::MaybeUninit<T>>> {
429		match Self::try_allocate(meta) {
430			Ok(result) => result,
431			Err(layout) => alloc::alloc::handle_alloc_error(layout),
432		}
433	}
434
435	/// Tries to allocate an `RcBox` for a slice.
436	///
437	/// Returns a mutable reference with arbitrary lifetime on success and the memory layout that
438	/// could not be allocated if the allocation failed or the layout calculation overflowed.
439	#[inline]
440	fn try_allocate_slice<'a>(
441		meta: RcMeta,
442		len: usize,
443		zeroed: bool,
444	) -> Result<&'a mut RcBox<[mem::MaybeUninit<T>]>, Layout> {
445		// Calculate memory layout
446		let layout = Layout::new::<RcBox<[T; 0]>>();
447		let payload_layout = Layout::array::<T>(len).map_err(|_| layout)?;
448		let layout = layout
449			.extend(payload_layout)
450			.map_err(|_| layout)?
451			.0
452			.pad_to_align();
453
454		// Allocate memory
455		let ptr = unsafe {
456			if zeroed {
457				alloc::alloc::alloc_zeroed(layout)
458			} else {
459				alloc::alloc::alloc(layout)
460			}
461		};
462
		// Build a fat pointer
		// The intermediate `[MaybeUninit<u8>]` slice pointer *should* be sound
465		let ptr = ptr::slice_from_raw_parts_mut(ptr.cast::<mem::MaybeUninit<u8>>(), len)
466			as *mut RcBox<[mem::MaybeUninit<T>]>;
467
468		if ptr.is_null() {
469			// Allocation failed
470			Err(layout)
471		} else {
472			// Initialize metadata field and return result
473			unsafe { ptr::addr_of_mut!((*ptr).meta).write(meta) };
474			Ok(unsafe { ptr.as_mut().unwrap() })
475		}
476	}
477
478	/// Allocates an `RcBox` for a slice
479	///
480	/// Returns a mutable reference with arbitrary lifetime on success.
481	///
482	/// # Panics
483	/// Panics or aborts if the allocation failed or the memory layout calculation overflowed.
484	#[inline]
485	fn allocate_slice<'a>(
486		meta: RcMeta,
487		len: usize,
488		zeroed: bool,
489	) -> &'a mut RcBox<[mem::MaybeUninit<T>]> {
490		match Self::try_allocate_slice(meta, len, zeroed) {
491			Ok(result) => result,
492			Err(layout) => alloc::alloc::handle_alloc_error(layout),
493		}
494	}
495}
496
497impl<T> RcBox<mem::MaybeUninit<T>> {
498	/// Converts to a mutable reference without the `MaybeUninit` wrapper.
499	///
500	/// # Safety
501	/// The payload must have been fully initialized or this causes immediate undefined behaviour.
502	#[inline]
503	unsafe fn assume_init(&mut self) -> &mut RcBox<T> {
504		unsafe { (self as *mut Self).cast::<RcBox<T>>().as_mut() }.unwrap()
505	}
506}
507
508impl<T> RcBox<[mem::MaybeUninit<T>]> {
509	/// Converts to a mutable reference without the `MaybeUninit` wrapper.
510	///
511	/// # Safety
512	/// The payload slice must have been fully initialized or this causes immediate undefined
513	/// behaviour.
514	#[inline]
515	unsafe fn assume_init(&mut self) -> &mut RcBox<[T]> {
516		unsafe { (self as *mut _ as *mut RcBox<[T]>).as_mut() }.unwrap()
517	}
518}
519
520impl RcMeta {
521	/// Increments the local reference counter unconditionally.
522	///
523	/// *Only safe to use on the owner thread and as long as at least one local reference exists.*
524	///
525	/// # Panics
526	/// Panics if the counter overflowed.
527	#[inline(always)]
528	fn inc_strong_local(&self) {
529		let counter = self.strong_local.get();
530
531		if counter == usize::MAX {
532			panic!("reference counter overflow");
533		}
534
535		self.strong_local.set(counter + 1);
536	}
537
	/// Increments the local reference counter.
	///
	/// Also adjusts the shared reference counter if necessary.
	///
	/// Fails if this would resurrect an already dropped value.
544	///
545	/// *Only safe to use on the owner thread.*
546	///
547	/// # Panics
548	/// Panics if one of the counters overflowed.
549	#[inline]
550	fn try_inc_strong_local(&self) -> Result<(), ()> {
551		let counter = self.strong_local.get();
552
553		if counter == usize::MAX {
554			panic!("reference counter overflow");
555		} else if counter == 0 {
556			self.try_inc_strong_shared()?;
557		}
558
559		self.strong_local.set(counter + 1);
560		Ok(())
561	}
562
563	/// Decrements the local reference counter.
564	///
	/// Also adjusts the shared reference counter and the `owner` if necessary.
567	///
568	/// Returns **true** if no strong references remain at all.
569	///
570	/// *Only safe to use on the owner thread.*
571	///
572	/// # Panics
573	/// Panics if the shared reference counter was already zero.
574	#[inline(always)]
575	fn dec_strong_local(&self) -> bool {
576		let counter = self.strong_local.get();
577		self.strong_local.set(counter - 1);
578		if counter == 1 {
579			self.remove_last_local_reference()
580		} else {
581			false
582		}
583	}
584
585	/// Decrements the shared counter and sets the `owner` to `None`.
586	///
587	/// Used internally by `dec_strong_local()`
588	///
589	/// # Panics
590	/// Panics if the counter was already zero.
591	fn remove_last_local_reference(&self) -> bool {
592		let old_shared = self.strong_shared.fetch_sub(1, Ordering::Release);
593		if old_shared == 0 {
594			panic!("reference counter underflow");
595		}
596		self.owner.store(None, Ordering::Release);
597		old_shared == 1
598	}
599
600	/// Increments the shared reference counter unconditionally.
601	///
602	/// *Only safe to use as long as at least one shared reference exists.*
603	///
604	/// # Panics
605	/// Panics if the counter overflowed.
606	#[inline]
607	fn inc_strong_shared(&self) {
608		let old_counter = self.strong_shared.fetch_add(1, Ordering::Relaxed);
609		if old_counter == usize::MAX {
610			panic!("reference counter overflow");
611		}
612	}
613
	/// Increments the shared reference counter.
	///
	/// Fails if this would resurrect an already dropped value.
619	///
620	/// # Panics
621	/// Panics if the counter overflowed.
622	#[inline]
623	fn try_inc_strong_shared(&self) -> Result<(), ()> {
624		self.strong_shared
625			.fetch_update(
626				Ordering::Relaxed,
627				Ordering::Relaxed,
628				|old_counter| match old_counter {
629					0 => None,
630					usize::MAX => panic!("reference counter overflow"),
631					_ => Some(old_counter + 1),
632				},
633			)
634			.map(|_| ())
635			.map_err(|_| ())
636	}
637
638	/// Decrements the shared reference counter.
639	///
640	/// Returns **true** if no strong references remain at all.
641	///
642	/// # Panics
643	/// Panics if the counter was already zero.
644	#[inline]
645	fn dec_strong_shared(&self) -> bool {
646		let old_counter = self.strong_shared.fetch_sub(1, Ordering::Release);
647		if old_counter == 0 {
648			panic!("reference counter underflow");
649		}
650		old_counter == 1 && self.owner.load(Ordering::Relaxed).is_none()
651	}
652
653	/// Increments the weak reference counter.
654	///
655	/// # Panics
656	/// Panics if the counter overflowed or was already zero.
657	#[inline]
658	fn inc_weak(&self) {
659		const MAX_COUNT: usize = usize::MAX - 1;
660		let mut counter = self.weak.load(Ordering::Relaxed);
661
662		// CAS loop
663		loop {
664			match counter {
665				usize::MAX => {
666					core::hint::spin_loop();
667					counter = self.weak.load(Ordering::Relaxed);
668					continue;
669				}
670				MAX_COUNT => panic!("weak counter overflow"),
671				0 => panic!("BUG: weak resurrection of dead counted reference"),
672				_ => {
673					let result = self.weak.compare_exchange_weak(
674						counter,
675						counter + 1,
676						Ordering::Acquire,
677						Ordering::Relaxed,
678					);
679					match result {
680						Ok(_) => break,
681						Err(old) => counter = old,
682					}
683				}
684			}
685		}
686	}
687
688	/// Increments the weak reference counter (without a spin loop).
689	///
690	/// # Panics
691	/// Panics if the counter is locked, overflowed or was already zero.
692	#[inline]
693	fn inc_weak_nolock(&self) {
694		const MAX_COUNT: usize = usize::MAX - 1;
695		match self.weak.fetch_add(1, Ordering::Relaxed) {
696			usize::MAX => panic!("BUG: weak counter locked"),
697			MAX_COUNT => panic!("weak counter overflow"),
698			0 => panic!("BUG: weak resurrection of dead counted reference"),
699			_ => (),
700		}
701	}
702
703	/// Decrements the weak reference counter.
704	///
705	/// Returns **true** if the counter reached zero.
706	///
707	/// # Panics
708	/// Panics if the counter was already zero.
709	#[inline]
710	fn dec_weak(&self) -> bool {
711		let old_counter = self.weak.fetch_sub(1, Ordering::Release);
712		if old_counter == 0 {
713			panic!("weak counter underflow");
714		}
715		old_counter == 1
716	}
717
718	/// Checks if there is only one unique reference.
719	///
720	/// If `is_local` is true, it is assumed that we can access the local reference counter
721	///
722	/// Temporarily locks the weak reference counter to prevent race conditions.
723	#[inline]
724	fn has_unique_ref(&self, is_local: bool) -> bool {
725		let result =
726			self.weak
727				.compare_exchange(1, usize::MAX, Ordering::Acquire, Ordering::Relaxed);
728		if result.is_ok() {
729			let mut count = self.strong_shared.load(Ordering::Acquire);
730
731			if count == 1 {
732				let owner = self.owner.load(Ordering::Relaxed);
733				match owner {
734					None => {}
735					Some(tid) if is_local || tid == ThreadId::current_thread() => {
736						count = self.strong_local.get();
737					}
738					Some(_) => {
739						count = 2;
740					}
741				}
742			}
743
744			self.weak.store(1, Ordering::Release);
745
746			count == 1
747		} else {
748			false
749		}
750	}
751}
752
753/// A hybrid reference-counting pointer.
754///
755/// - [`HybridRc<T, Shared>`][Arc] behaves mostly like [`std::sync::Arc`]
756/// - [`HybridRc<T, Local>`][Rc] behaves mostly like [`std::rc::Rc`].
757///
758/// See the [module-level documentation][crate] for more details.
759///
760/// The inherent methods of `HybridRc` are all associated functions, which means that you have to
761/// call them as e.g. [`HybridRc::get_mut(&mut x)`] instead of `x.get_mut()`. This avoids conflicts
762/// with methods of the inner type `T`.
763///
764/// [`HybridRc::get_mut(&mut x)`]: Self::get_mut
765#[must_use]
766pub struct HybridRc<T: ?Sized, State: RcState> {
767	ptr: NonNull<RcBox<T>>,
768	phantom: PhantomData<State>,
769	phantom2: PhantomData<RcBox<T>>,
770}
771
772/// Type alias for a local reference counting pointer.
773///
774/// Provided to ease migrating from [`std::rc::Rc`].
775///
776/// See the [module-level documentation][crate] for more details.
777///
778/// The inherent methods of `Rc` are all associated functions, which means that you have to call
779/// them as e.g. [`Rc::to_shared(&x)`] instead of `x.to_shared()`. This avoids conflicts with
780/// methods of the inner type `T`.
781///
782/// [`Rc::to_shared(&x)`]: Self::to_shared
783pub type Rc<T> = HybridRc<T, Local>;
784
785/// Type alias for a shared reference counting pointer.
786///
787/// Provided to ease migrating from [`std::sync::Arc`].
788///
789/// See the [module-level documentation] for more details.
790///
791/// The inherent methods of `Arc` are all associated functions, which means that you have to call
792/// them as e.g. [`Arc::to_local(&x)`] instead of `x.to_local()`. This avoids conflicts with
793/// methods of the inner type `T`.
794///
795/// [`Arc::to_local(&x)`]: Self::to_local
796/// [module-level documentation]: crate
797pub type Arc<T> = HybridRc<T, Shared>;
798
799impl<T: ?Sized, State: RcState> HybridRc<T, State> {
800	/// Creates a new `HybridRc` from a pointer to a shared allocation.
801	///
802	/// The reference counters must have been updated by the caller.
803	#[inline(always)]
804	fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
805		Self {
806			ptr,
807			phantom: PhantomData,
808			phantom2: PhantomData,
809		}
810	}
811
812	/// Provides a reference to the inner value.
813	#[inline(always)]
814	fn data(&self) -> &T {
815		// Safety: as long as one HybridRc or Weak for this item exists, the memory stays allocated.
816		unsafe { &(*self.ptr.as_ptr()).data }
817	}
818
819	/// Provides a reference to the shared metadata.
820	#[inline(always)]
821	fn meta(&self) -> &RcMeta {
822		// Safety: as long as one HybridRc or Weak for this item exists, the memory stays allocated.
823		unsafe { &(*self.ptr.as_ptr()).meta }
824	}
825
826	/// Provides a reference to the inner `HybridRc` of a `Pin<HybridRc<T>>`
827	///
828	/// # Safety
829	/// The caller must ensure that the reference is not used to move the value out of self.
830	#[inline(always)]
831	unsafe fn pin_get_ref(this: &Pin<Self>) -> &Self {
832		// SAFETY: Pin is repr(transparent) and by contract the caller doesn't use the reference
833		// to move the value.
834		unsafe { &*(this as *const Pin<Self>).cast::<Self>() }
835	}
836
837	/// Returns a mutable reference to the value, without checking for uniqueness.
838	///
839	/// # See also
840	/// - [`get_mut()`], which is safe.
841	///
842	/// # Safety
843	/// No other `HybridRc` or [`Weak`] for the same value must be dereferenced for the duration of
844	/// the returned borrow.
845	///
846	/// # Example
847	/// ```
848	/// use hybrid_rc::Rc;
849	///
850	/// let mut a = Rc::new([1, 2, 3]);
851	/// // We know that there can't be any other references yet, so getting a mutable reference
852	/// // is safe here:
853	/// let mut_ref = unsafe { Rc::get_mut_unchecked(&mut a) };
854	/// mut_ref[0] = 42;
855	///
856	/// assert_eq!(a[..], [42, 2, 3]);
857	/// ```
858	/// [`get_mut()`]: Self::get_mut
859	#[must_use]
860	#[inline]
861	pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
862		unsafe { &mut (*this.ptr.as_ptr()).data }
863	}
864
865	/// Returns a mutable reference to the value, iff the value is not shared
866	/// with another `HybridRc` or [`Weak`].
867	///
868	/// Returns `None` otherwise.
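	///
	/// # Example
	/// A short illustration of both the success and the failure case:
	/// ```
	/// use hybrid_rc::Rc;
	///
	/// let mut value = Rc::new(42);
	/// *Rc::get_mut(&mut value).unwrap() += 1;
	///
	/// let other = Rc::clone(&value);
	/// // The value is shared now, so `get_mut()` returns `None`.
	/// assert!(Rc::get_mut(&mut value).is_none());
	///
	/// drop(other);
	/// assert_eq!(*value, 43);
	/// ```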
869	#[must_use]
870	#[inline]
871	pub fn get_mut(this: &mut Self) -> Option<&mut T> {
872		if this.meta().has_unique_ref(!State::SHARED) {
873			unsafe { Some(Self::get_mut_unchecked(this)) }
874		} else {
875			None
876		}
877	}
878
879	/// Provides a raw pointer to the referenced value
880	///
881	/// The counts are not affected in any way and the `HybridRc` is not consumed.
	/// The pointer is valid for as long as at least one `HybridRc` for the value exists.
883	#[must_use]
884	#[inline]
885	pub fn as_ptr(this: &Self) -> *const T {
886		let ptr = this.ptr.as_ptr();
887
		// Safety: Necessary for `from_raw()`, retains provenance.
889		// Besides that, does basically the same thing as `data()` or `get_mut_unchecked()`.
890		unsafe { ptr::addr_of_mut!((*ptr).data) }
891	}
892
893	/// Consumes the `HybridRc<T, State>`, returning the wrapped pointer.
894	///
895	/// To avoid a memory leak the pointer must be converted back to a `HybridRc` using
896	/// [`HybridRc<T, State>::from_raw()`].
897	#[must_use = "Memory will leak if the result is not used"]
898	pub fn into_raw(this: Self) -> *const T {
899		let ptr = Self::as_ptr(&this);
900		mem::forget(this);
901		ptr
902	}
903
904	/// Reconstructs a `HybridRc<T, State>` from a raw pointer.
905	///
906	/// Creates a `HybridRc<T, State>` from a pointer that has been previously returned by
907	/// a call to [`into_raw()`].
908	///
909	/// # Safety
910	///
	/// The raw pointer must have been previously returned by a call to
	/// [`HybridRc<T, State>::into_raw()`][`into_raw()`] for the same `State` *and* the same `T`
	/// or another compatible type that has the same size and alignment. The latter case amounts
	/// to [`mem::transmute()`] and is likely to produce undefined behaviour if not handled
	/// correctly.
915	///
916	/// The value must not have been dropped yet.
917	///
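	/// # Example
	/// A round trip through [`into_raw()`] and `from_raw()`:
	/// ```
	/// use hybrid_rc::Rc;
	///
	/// let rc = Rc::new(42);
	/// let ptr = Rc::into_raw(rc);
	///
	/// // Safety: `ptr` was just returned by `into_raw()` and the value is still alive.
	/// let rc = unsafe { Rc::from_raw(ptr) };
	/// assert_eq!(*rc, 42);
	/// ```
	///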
918	/// [`into_raw()`]: Self::into_raw
919	pub unsafe fn from_raw(ptr: *const T) -> Self {
920		// Safety: covered by the safety contract for this function
921		let box_ptr = unsafe { RcBox::<T>::ptr_from_data_ptr(ptr) };
922
923		Self::from_inner(NonNull::new(box_ptr as *mut _).expect("invalid pointer"))
924	}
925
926	/// Creates a new [`Weak`] for the referenced value.
927	///
928	/// # Example
929	/// ```
930	/// use hybrid_rc::{Rc, Weak};
931	///
932	/// let strong = Rc::new(42i32);
933	/// let weak = Rc::downgrade(&strong);
934	///
935	/// assert_eq!(Rc::as_ptr(&strong), Weak::as_ptr(&weak));
936	/// ```
937	#[inline]
938	pub fn downgrade(this: &Self) -> Weak<T> {
939		this.meta().inc_weak();
940		Weak { ptr: this.ptr }
941	}
942
943	/// Creates a new [`PinWeak`] for the referenced value.
944	///
945	/// # Example
946	/// ```
947	/// use hybrid_rc::{Rc, Weak};
948	///
949	/// let strong = Rc::pin(42i32);
950	/// let weak = Rc::downgrade_pin(&strong);
951	/// ```
952	#[inline]
953	pub fn downgrade_pin(this: &Pin<Self>) -> PinWeak<T> {
954		// Safety: We are not moving anything and we don't expose a non-pinned pointer.
955		let this = unsafe { Self::pin_get_ref(this) };
956		PinWeak(Self::downgrade(this))
957	}
958
959	/// Checks if two `HybridRc`s point to the same allocation.
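	///
	/// # Example
	/// Two clones share an allocation; separately created `HybridRc`s do not:
	/// ```
	/// use hybrid_rc::Rc;
	///
	/// let a = Rc::new(5);
	/// let b = Rc::clone(&a);
	/// let c = Rc::new(5);
	///
	/// assert!(Rc::ptr_eq(&a, &b));
	/// assert!(!Rc::ptr_eq(&a, &c));
	/// ```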
960	#[inline]
961	pub fn ptr_eq<S: RcState>(this: &Self, other: &HybridRc<T, S>) -> bool {
962		this.ptr.as_ptr() == other.ptr.as_ptr()
963	}
964
965	/// Checks if two pinned `HybridRc`s point to the same allocation.
966	#[inline]
967	pub fn ptr_eq_pin<S: RcState>(this: &Pin<Self>, other: &Pin<HybridRc<T, S>>) -> bool {
968		// SAFETY: we are not moving anything and we don't expose any pointers.
969		let this = unsafe { Self::pin_get_ref(this) };
970		let other = unsafe { HybridRc::<T, S>::pin_get_ref(other) };
971		this.ptr.as_ptr() == other.ptr.as_ptr()
972	}
973
974	/// Gets the approximate number of strong pointers to the inner value.
975	///
976	/// As shared pointers cannot access the local reference counter, `Arc::strong_count()` only
977	/// provides a lower bound on the reference count at the moment of the call.
978	///
979	/// Please also understand that, if the count is greater than one, another thread might change
980	/// the count at any time, including potentially between calling this method and acting on the
981	/// result.
982	///
983	/// # Examples
984	///
985	/// ```
986	/// use hybrid_rc::{Rc, Arc};
987	///
988	/// let reference = Rc::new(42);
989	/// let _2nd_ref = Rc::clone(&reference);
990	/// let shared_ref = Rc::to_shared(&reference);
991	/// let _2nd_shared_ref = Arc::clone(&shared_ref);
992	///
993	/// assert_eq!(Rc::strong_count(&reference), 4);
994	/// // shared_ref only knows the count of shared references and that there is at least one
995	/// // local reference, so it will show 3 instead of 4:
996	/// assert_eq!(Arc::strong_count(&shared_ref), 3);
997	/// ```
998	#[inline]
999	pub fn strong_count(this: &Self) -> usize {
1000		let meta = this.meta();
1001		meta.strong_shared.load(Ordering::SeqCst)
1002			+ if State::SHARED {
1003				0
1004			} else {
1005				meta.strong_local.get() - 1
1006			}
1007	}
1008
1009	/// Gets the approximate number of strong pointers to the pinned inner value.
1010	///
1011	#[inline]
1012	pub fn strong_count_pin(this: &Pin<Self>) -> usize {
1013		// SAFETY: We are not moving anything and we don't expose any pointers.
1014		let this = unsafe { Self::pin_get_ref(this) };
1015		Self::strong_count(this)
1016	}
1017
1018	/// Gets the number of [`Weak`] pointers to this allocation.
1019	///
1020	/// Please understand that another thread may change the weak count at any time, including
1021	/// potentially between calling this method and acting on the result.
1022	///
1023	/// # Examples
1024	///
1025	/// ```
1026	/// use hybrid_rc::{Rc, Weak};
1027	///
1028	/// let reference = Rc::new(42);
1029	/// let weak = Rc::downgrade(&reference);
1030	/// let _weak_2 = weak.clone();
1031	///
1032	/// assert_eq!(Rc::weak_count(&reference), 2);
1033	/// ```
1034	#[inline]
1035	pub fn weak_count(this: &Self) -> usize {
1036		match this.meta().weak.load(Ordering::SeqCst) {
1037			// Lock value => there were zero weak references apart from the implicit one.
1038			usize::MAX => 0,
1039			count => count - 1,
1040		}
1041	}
1042
1043	/// Gets the number of [`PinWeak`] pointers to the pinned inner value.
1044	///
1045	#[inline]
1046	pub fn weak_count_pin(this: &Pin<Self>) -> usize {
1047		// SAFETY: We are not moving anything and we don't expose any pointers.
1048		let this = unsafe { Self::pin_get_ref(this) };
1049		Self::weak_count(this)
1050	}
1051
1052	// Constructs an `RcMeta` structure for a new `HybridRc` allocation
1053	#[inline]
1054	fn build_new_meta() -> RcMeta {
1055		RcMeta {
1056			owner: if State::SHARED {
1057				None.into()
1058			} else {
1059				ThreadId::current_thread().into()
1060			},
1061			strong_local: Cell::new(if State::SHARED { 0 } else { 1 }),
1062			strong_shared: 1.into(),
1063			weak: 1.into(),
1064		}
1065	}
1066
1067	/// Drops the contained value and also drops the shared `RcBox` if there are no other `Weak`
1068	/// references.
1069	///
1070	/// # Safety
	/// Only safe to use in `drop()` or a consuming function after verifying that no other strong
	/// reference exists. Otherwise, dereferencing the `HybridRc` after calling this WILL cause
	/// undefined behaviour, and even dropping it MAY cause undefined behaviour.
1074	unsafe fn drop_contents_and_maybe_box(&mut self) {
1075		// Safety: only called if this was the last strong reference
1076		unsafe {
1077			ptr::drop_in_place(Self::get_mut_unchecked(self));
1078		}
1079
1080		if self.meta().dec_weak() {
1081			// Safety: only called if this was the last (weak) reference
1082			unsafe {
1083				RcBox::dealloc(self.ptr);
1084			}
1085		}
1086	}
1087}
1088
1089impl<T, State: RcState> HybridRc<T, State> {
	/// Creates a new `HybridRc`, moving `data` into a reference-counted allocation.
1091	///
1092	/// If `State` is `Local`, the shared value is initially owned by the calling thread, so
1093	/// for another thread to assume ownership [`to_shared()`] must be used and all `Rc`s for
1094	/// the value must be dropped.
1095	///
1096	/// If `State` is `Shared`, initially the shared value has no owner thread, so any thread may
1097	/// call [`to_local()`] to assume ownership.
1098	///
1099	/// # Examples
1100	/// ```
1101	/// use hybrid_rc::Rc;
1102	///
1103	/// let rc = Rc::new(42i32);
1104	/// ```
1105	/// ```compile_fail
1106	/// # let rc = hybrid_rc::Rc::new(42i32);
1107	/// // Cannot be used in another thread without using rc.to_shared()
1108	/// std::thread::spawn(move || *rc).join(); // does not compile
1109	/// ```
1110	///
1111	/// ```
1112	/// use hybrid_rc::Arc;
1113	/// # fn main() -> Result<(), Box<dyn std::any::Any + Send + 'static>> {
1114	///
1115	/// let arc = Arc::new(42i32);
1116	///
1117	/// std::thread::spawn(move || assert!(*arc == 42)).join()?;
1118	/// # Ok(())
1119	/// # }
1120	/// ```
1121	///
1122	/// [`to_shared()`]: Self::to_shared
1123	/// [`to_local()`]: Self::to_local
1124	#[inline]
1125	pub fn new(data: T) -> Self {
1126		let mut inner = RcBox::allocate(Self::build_new_meta());
1127		let inner = unsafe { inner.as_mut() };
1128		inner.data.write(data);
1129		Self::from_inner(unsafe { inner.assume_init() }.into())
1130	}
1131
1132	/// Creates a new `HybridRc` with uninitialized contents.
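	///
	/// # Example
	/// A short sketch of deferred initialization:
	/// ```
	/// use hybrid_rc::Rc;
	///
	/// let mut value = Rc::<i32>::new_uninit();
	///
	/// // Deferred initialization:
	/// Rc::get_mut(&mut value).unwrap().write(42);
	///
	/// // Safety: the value has just been initialized.
	/// let value = unsafe { value.assume_init() };
	/// assert_eq!(*value, 42);
	/// ```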
1133	#[inline]
1134	pub fn new_uninit() -> HybridRc<mem::MaybeUninit<T>, State> {
1135		let inner = RcBox::allocate(Self::build_new_meta());
1136		HybridRc::from_inner(inner)
1137	}
1138
1139	/// Creates a new `HybridRc` with uninitialized contents, with the memory being filled with
1140	/// 0 bytes.
1141	///
1142	/// See [`MaybeUninit::zeroed()`] for examples of correct and incorrect usage of this method.
1143	///
1144	/// [`MaybeUninit::zeroed()`]: mem::MaybeUninit::zeroed
1145	#[inline]
1146	pub fn new_zeroed() -> HybridRc<mem::MaybeUninit<T>, State> {
1147		let mut inner = RcBox::allocate(Self::build_new_meta());
1148		unsafe { inner.as_mut() }.data = mem::MaybeUninit::zeroed();
1149		HybridRc::from_inner(inner)
1150	}
1151
1152	/// Creates a new `HybridRc` with a possibly cyclic reference.
1153	///
1154	/// For this a reference to a [`Weak`] is passed to the closure that – after this function
1155	/// returns – will point to the new value itself. Attempting to upgrade the weak reference
1156	/// before `new_cyclic` returns will result in a `ValueDropped` error. However, the weak
1157	/// reference may be cloned freely and stored for use at a later time.
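	///
	/// # Example
	/// For example, a (hypothetical) node type that stores a weak reference to itself:
	/// ```
	/// use hybrid_rc::{Rc, Weak};
	///
	/// struct Node {
	/// 	self_weak: Weak<Node>,
	/// 	value: i32,
	/// }
	///
	/// let node = Rc::new_cyclic(|weak| Node {
	/// 	self_weak: weak.clone(),
	/// 	value: 42,
	/// });
	///
	/// assert_eq!(node.value, 42);
	/// // The stored weak reference points back to `node` itself.
	/// assert!(Rc::ptr_eq(&node, &node.self_weak.upgrade_local().unwrap()));
	/// ```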
1158	#[inline]
1159	pub fn new_cyclic(data_fn: impl FnOnce(&Weak<T>) -> T) -> HybridRc<T, State> {
1160		// Construct metadata for an initially non-upgradable RcBox
1161		let meta = RcMeta {
1162			owner: if State::SHARED {
1163				None.into()
1164			} else {
1165				ThreadId::current_thread().into()
1166			},
1167			strong_local: Cell::new(0),
1168			strong_shared: 0.into(),
1169			weak: 1.into(),
1170		};
1171
1172		// Allocate memory (uninitialized)
1173		let inner = RcBox::<T>::allocate(meta);
1174
1175		// Construct `Weak`
1176		let weak: Weak<T> = Weak { ptr: NonNull::from(inner).cast() };
1177
1178		// Run data function, keeping the ownership of the weak reference.
1179		let data = data_fn(&weak);
1180
1181		// Initialize data in our box
		// Not creating an intermediate `&mut` of the whole box, so as not to invalidate the
		// weak pointer under Stacked Borrows rules.
1184		unsafe { &mut *ptr::addr_of_mut!((*inner.as_ptr()).data) }.write(data);
1185
1186		// Don't run `Weak`s destructor. The value we just initialized should keep existing and we
1187		// need a weak count of 1 for the strong reference that we are currently constructing.
1188		mem::forget(weak);
1189
1190		// Fix the reference counts
1191		{
1192			let meta = unsafe { &*ptr::addr_of!((*inner.as_ptr()).meta) };
1193			if !State::SHARED {
1194				meta.inc_strong_local()
1195			}
1196			// Must be at least `Release`, so that all threads see the initialized data before
1197			// they can observe a non-zero reference count.
1198			meta.strong_shared.fetch_add(1, Ordering::Release);
1199		}
1200
1201		Self::from_inner(inner.cast())
1202	}
1203
1204	/// Creates a new `Pin<HybridRc<T>>`. If `T` does not implement `Unpin`, then `data` will be
1205	/// pinned in memory and unable to be moved.
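	///
	/// # Example
	/// ```
	/// use hybrid_rc::Rc;
	///
	/// let pinned = Rc::pin(42i32);
	/// assert_eq!(*pinned, 42);
	/// ```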
1206	#[inline]
1207	pub fn pin(data: T) -> Pin<Self> {
1208		unsafe { Pin::new_unchecked(Self::new(data)) }
1209	}
1210
	/// Tries to create a new `HybridRc`, moving `data` into a reference-counted allocation.
1212	///
1213	/// # Errors
1214	/// Will drop `data` and return `Err(`[`AllocError`]`)` if the allocation fails.
1215	///
1216	/// Please note that the global allocator on some systems may instead abort the process if an
1217	/// allocation failure happens.
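	///
	/// # Example
	/// ```
	/// use hybrid_rc::Rc;
	/// # fn main() -> Result<(), hybrid_rc::AllocError> {
	///
	/// let rc = Rc::try_new(42)?;
	/// assert_eq!(*rc, 42);
	/// # Ok(())
	/// # }
	/// ```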
1218	#[inline]
1219	pub fn try_new(data: T) -> Result<Self, AllocError> {
1220		let mut inner = RcBox::try_allocate(Self::build_new_meta()).map_err(|_| AllocError)?;
1221		let inner = unsafe { inner.as_mut() };
1222		inner.data.write(data);
1223		Ok(Self::from_inner(unsafe { inner.assume_init() }.into()))
1224	}
1225
1226	/// Tries to construct a new `HybridRc` with uninitialized contents.
1227	///
1228	/// # Errors
1229	/// Will return `Err(`[`AllocError`]`)` if the allocation fails.
1230	///
1231	/// Please note that the global allocator on some systems may instead abort the process if an
1232	/// allocation failure happens.
1233	#[inline]
1234	pub fn try_new_uninit() -> Result<HybridRc<mem::MaybeUninit<T>, State>, AllocError> {
1235		let inner = RcBox::try_allocate(Self::build_new_meta()).map_err(|_| AllocError)?;
1236		Ok(HybridRc::from_inner(inner.into()))
1237	}
1238
1239	/// Tries to construct a new `HybridRc` with uninitialized contents, with the memory being
1240	/// filled with 0 bytes.
1241	///
1242	/// See [`MaybeUninit::zeroed()`] for examples of correct and incorrect usage of this method.
1243	///
1244	/// # Errors
1245	/// Will return `Err(`[`AllocError`]`)` if the allocation fails.
1246	///
1247	/// Please note that the global allocator on some systems may instead abort the process if an
1248	/// allocation failure happens.
1249	///
1250	/// [`MaybeUninit::zeroed()`]: mem::MaybeUninit::zeroed
1251	#[inline]
1252	pub fn try_new_zeroed() -> Result<HybridRc<mem::MaybeUninit<T>, State>, AllocError> {
1253		let mut inner = RcBox::try_allocate(Self::build_new_meta()).map_err(|_| AllocError)?;
1254		unsafe { inner.as_mut() }.data = mem::MaybeUninit::zeroed();
1255		Ok(HybridRc::from_inner(inner))
1256	}
1257
1258	/// Returns the inner value, if this `HybridRc` is the only strong reference to it.
1259	///
1260	/// Any outstanding [`Weak`] references won't be able to upgrade anymore when this succeeds.
1261	///
1262	/// # Errors
1263	/// If this is not the only strong reference to the shared value, an [`Err`] is returned with
1264	/// the same `HybridRc` that was passed in.
1265	///
1266	/// # Examples
1267	///
1268	/// ```
1269	/// use hybrid_rc::Rc;
1270	///
1271	/// let reference = Rc::new(42);
1272	/// let weak = Rc::downgrade(&reference);
1273	///
1274	/// let value = Rc::try_unwrap(reference).unwrap();
1275	/// assert_eq!(value, 42);
1276	/// assert!(weak.upgrade().is_err()); // Weaks cannot upgrade anymore.
1277	/// ```
1278	#[inline]
1279	pub fn try_unwrap(this: Self) -> Result<T, Self> {
1280		if State::SHARED {
1281			Self::try_unwrap_internal(this)
1282		} else {
1283			// If we may access the local counter, first check and decrement that one.
1284			let local_count = this.meta().strong_local.get();
1285			if local_count == 1 {
1286				this.meta().strong_local.set(0);
1287				match Self::try_unwrap_internal(this) {
1288					Ok(result) => Ok(result),
1289					Err(this) => {
1290						this.meta().strong_local.set(local_count);
1291						Err(this)
1292					}
1293				}
1294			} else {
1295				Err(this)
1296			}
1297		}
1298	}
1299
1300	/// Returns the inner value, if this `HybridRc` is the only strong reference to it, assuming
1301	/// that there are no (other) local references to the value.
1302	///
1303	/// Used internally by `try_unwrap()`.
1304	#[inline]
1305	fn try_unwrap_internal(this: Self) -> Result<T, Self> {
1306		let meta = this.meta();
		// There is one implicit shared reference for all local references combined, so if there
		// are no other local references (or we are a shared `HybridRc`) and the shared counter
		// is 1, we are the only strong reference left.
1310		if meta
1311			.strong_shared
1312			.compare_exchange(1, 0, Ordering::AcqRel, Ordering::Relaxed)
1313			.is_err()
1314		{
1315			Err(this)
1316		} else {
1317			// Relaxed should be enough, as `strong_shared` already hit 0, so no more
1318			// Weak upgrading is possible.
1319			meta.owner.store(None, Ordering::Relaxed);
1320
1321			let copy = unsafe { ptr::read(Self::as_ptr(&this)) };
1322
1323			// Make a weak pointer to clean up the remaining implicit weak reference
1324			let _weak = Weak { ptr: this.ptr };
1325			mem::forget(this);
1326
1327			Ok(copy)
1328		}
1329	}
1330}
1331
1332impl<T, State: RcState> HybridRc<[T], State> {
1333	/// Creates a new reference-counted slice with uninitialized contents.
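	///
	/// # Example
	/// A short sketch of deferred initialization of a slice:
	/// ```
	/// use hybrid_rc::Rc;
	///
	/// let mut values = Rc::<[u32]>::new_uninit_slice(3);
	///
	/// // Deferred initialization:
	/// let data = Rc::get_mut(&mut values).unwrap();
	/// data[0].write(1);
	/// data[1].write(2);
	/// data[2].write(3);
	///
	/// // Safety: all elements have just been initialized.
	/// let values = unsafe { values.assume_init() };
	/// assert_eq!(*values, [1, 2, 3]);
	/// ```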
1334	#[inline]
1335	pub fn new_uninit_slice(len: usize) -> HybridRc<[mem::MaybeUninit<T>], State> {
1336		let inner = RcBox::allocate_slice(Self::build_new_meta(), len, false);
1337		HybridRc::from_inner(inner.into())
1338	}
1339
1340	/// Creates a new reference-counted slice with uninitialized contents, with the memory being
1341	/// filled with 0 bytes.
1342	#[inline]
1343	pub fn new_zeroed_slice(len: usize) -> HybridRc<[mem::MaybeUninit<T>], State> {
1344		let inner = RcBox::allocate_slice(Self::build_new_meta(), len, true);
1345		HybridRc::from_inner(inner.into())
1346	}
1347
1348	/// Copies the contents of a slice into a new `HybridRc`
1349	///
1350	/// # Safety
	/// Either `T` is `Copy` or the caller must guarantee that the source doesn't drop its
1352	/// contents.
1353	#[inline]
1354	unsafe fn copy_from_slice_unchecked(src: &[T]) -> Self {
1355		let len = src.len();
1356		let inner = RcBox::allocate_slice(Self::build_new_meta(), len, false);
1357		let dest = ptr::addr_of_mut!((*inner).data).cast();
1358
1359		// Safety: The freshly allocated `RcBox` can't alias `src` and the payload can be fully
1360		// initialized by copying the slice memory. The copying is also safe as long as the safety
1361		// requirements for calling this are fulfilled.
1362		unsafe {
1363			src.as_ptr().copy_to_nonoverlapping(dest, src.len());
1364			HybridRc::from_inner(inner.assume_init().into())
1365		}
1366	}
1367}
1368
1369impl<T: Copy, State: RcState> HybridRc<[T], State> {
1370	/// Copies the contents of a slice into a new `HybridRc`
1371	///
	/// Optimization for copyable types. Will become deprecated once specialization is stabilized.
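	///
	/// # Example
	/// ```
	/// use hybrid_rc::Rc;
	///
	/// let numbers = [1, 2, 3];
	/// let shared: Rc<[i32]> = Rc::copy_from_slice(&numbers);
	///
	/// assert_eq!(shared[..], [1, 2, 3]);
	/// ```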
1373	#[inline]
1374	pub fn copy_from_slice(src: &[T]) -> Self {
1375		// Safety: `T` is `Copy`.
1376		unsafe { Self::copy_from_slice_unchecked(src) }
1377	}
1378}
1379
1380impl<T: ?Sized> Rc<T> {
1381	/// Creates a new shared reference (`Arc`) for the referenced value.
1382	///
1383	/// # Example
1384	/// ```
1385	/// use hybrid_rc::{Rc, Arc};
1386	/// # fn main() -> Result<(), Box<dyn std::any::Any + Send + 'static>> {
1387	///
1388	/// let local = Rc::new(42i32);
1389	/// let shared = Rc::to_shared(&local);
1390	///
1391	/// // `shared` can be safely transferred to another thread
1392	/// std::thread::spawn(move || assert_eq!(*shared, 42i32)).join()?;
1393	/// # Ok(())
1394	/// # }
1395	/// ```
1396	#[inline]
1397	pub fn to_shared(this: &Self) -> Arc<T> {
1398		this.meta().inc_strong_shared();
1399		Arc::from_inner(this.ptr)
1400	}
1401
1402	/// Creates a new pinned shared reference for the referenced value.
1403	///
1404	/// # Example
1405	/// ```
1406	/// use hybrid_rc::{Rc, Weak};
1407	///
1408	/// let strong = Rc::pin(42i32);
1409	/// let shared = Rc::to_shared_pin(&strong);
1410	/// assert!(Rc::ptr_eq_pin(&strong, &shared));
1411	/// ```
1412	#[inline]
1413	pub fn to_shared_pin(this: &Pin<Self>) -> Pin<Arc<T>> {
1414		// SAFETY: We are not moving anything, we don't expose a non-pinned pointer,
1415		// and we create a Pin-wrapper only for a pinned value.
1416		unsafe {
1417			let this = Self::pin_get_ref(this);
1418			Pin::new_unchecked(Self::to_shared(this))
1419		}
1420	}
1421
	/// Increments the local strong reference count on the `Rc<T>` associated with the given pointer
1423	///
1424	/// Increases the local strong reference count as if a new `Rc` was cloned and kept alive.
1425	/// May panic in the unlikely case the platform-specific maximum for the reference count is
1426	/// reached.
1427	///
1428	/// # Safety
1429	/// The pointer must have been obtained through [`HybridRc<T, Local>::into_raw()`], the value
1430	/// must still be live and have a local strong count of at least 1 when this method is invoked
1431	/// and this call must be performed on the same thread as where the original `Rc` was created.
1432	///
1433	/// [`HybridRc<T, Local>::into_raw()`]: `Rc::into_raw`
1434	#[inline]
1435	pub unsafe fn increment_local_strong_count(ptr: *const T) {
1436		unsafe {
1437			let box_ptr = RcBox::<T>::ptr_from_data_ptr(ptr as *mut T);
1438			(*box_ptr).meta.inc_strong_local();
1439		}
1440	}
1441
	/// Decrements the local strong reference count on the `Rc<T>` associated with the given pointer
1443	///
1444	/// If the local strong reference counter reaches 0, the value is no longer considered owned
1445	/// by the calling thread and if there are no shared strong references to keep the value alive,
1446	/// it will be dropped.
1447	///
1448	/// # Safety
1449	/// The pointer must have been obtained through [`HybridRc<T, Local>::into_raw()`], the value
1450	/// must still be live and have a local strong count of at least 1 when this method is invoked
1451	/// and this call must be performed on the same thread as where the original `Rc` was created.
1452	///
1453	/// [`HybridRc<T, Local>::into_raw()`]: `Rc::into_raw`
1454	#[inline]
1455	pub unsafe fn decrement_local_strong_count(ptr: *const T) {
1456		mem::drop(unsafe { Rc::from_raw(ptr) });
1457	}
1458}
1459
1460impl<T: ?Sized> Arc<T> {
1461	/// Creates a new local reference (`Rc`) for the referenced value.
1462	///
1463	/// Returns `None` if at least one `Rc` already exists on another thread.
1464	///
1465	/// **Note:** In `no_std` environments `None` is returned if at least one `Rc` exists on *any*
1466	/// thread.
1467	///
1468	/// # Example
1469	/// ```
1470	/// use hybrid_rc::{Rc, Arc};
1471	/// # fn main() -> Result<(), Box<dyn std::any::Any + Send + 'static>> {
1472	///
	/// let local = Rc::new(42i32);
	/// let shared = Rc::to_shared(&local);
	/// drop(local);
	///
	/// std::thread::spawn(move || {
	/// 	// No `Rc` exists anymore, so this thread can become the new owner thread:
	/// 	let local = Arc::to_local(&shared).unwrap();
	/// 	assert_eq!(*local, 42i32);
	/// }).join()?;
1478	/// # Ok(())
1479	/// # }
1480	/// ```
1481	#[must_use]
1482	#[inline]
1483	pub fn to_local(this: &Self) -> Option<Rc<T>> {
1484		let meta = this.meta();
1485		let current_thread = ThreadId::current_thread();
1486		let owner = match meta.owner.store_if_none(
1487			Some(current_thread),
1488			Ordering::Acquire,
1489			Ordering::Relaxed,
1490		) {
1491			Ok(_) => None,
1492			Err(owner) => owner,
1493		};
1494
1495		match owner {
1496			None => {
1497				meta.try_inc_strong_local()
1498					.expect("inconsistent reference count (shared == 0)");
1499				Some(Rc::from_inner(this.ptr))
1500			}
1501			Some(v) if v == current_thread => {
1502				meta.inc_strong_local();
1503				Some(Rc::from_inner(this.ptr))
1504			}
1505			Some(_) => None,
1506		}
1507	}
1508
1509	/// Creates a new pinned local reference for the referenced value.
1510	///
1511	/// Returns `None` if at least one `Rc` already exists on another thread.
1512	///
1513	/// **Note:** In `no_std` environments `None` is returned if at least one `Rc` exists on *any*
1514	/// thread.
1515	///
1516	/// # Example
1517	/// ```
1518	/// use hybrid_rc::{Arc, Weak};
1519	///
1520	/// let strong = Arc::pin(42i32);
1521	/// let local = Arc::to_local_pin(&strong).unwrap();
1522	/// assert!(Arc::ptr_eq_pin(&strong, &local));
1523	/// ```
1524	#[must_use]
1525	#[inline]
1526	pub fn to_local_pin(this: &Pin<Self>) -> Option<Pin<Rc<T>>> {
1527		// SAFETY: We are not moving anything, we don't expose a non-pinned pointer,
1528		// and we create a Pin-wrapper only for a pinned value.
1529		unsafe {
1530			let this = Self::pin_get_ref(this);
1531			Some(Pin::new_unchecked(Self::to_local(this)?))
1532		}
1533	}
1534
	/// Increments the shared strong reference count on the `Arc<T>` associated with the given pointer
1536	///
1537	/// Increases the shared strong reference count as if a new `Arc` was cloned and kept alive.
1538	/// May panic in the unlikely case the platform-specific maximum for the reference count is
1539	/// reached.
1540	///
1541	/// # Safety
1542	/// The pointer must have been obtained through [`HybridRc<T, Shared>::into_raw()`] and the
1543	/// value must still be live when this method is invoked.
1544	///
1545	/// [`HybridRc<T, Shared>::into_raw()`]: `Arc::into_raw`
1546	#[inline]
1547	pub unsafe fn increment_shared_strong_count(ptr: *const T) {
1548		unsafe {
1549			let box_ptr = RcBox::<T>::ptr_from_data_ptr(ptr);
1550			(*box_ptr).meta.inc_strong_shared();
1551		}
1552	}
1553
	/// Decrements the shared strong reference count on the `Arc<T>` associated with the given pointer
1555	///
1556	/// If the shared strong reference counter (including the implicit shared reference for local
1557	/// strong references) reaches 0, the value will be dropped.
1558	///
1559	/// # Safety
1560	/// The pointer must have been obtained through [`HybridRc<T, Shared>::into_raw()`] and the
1561	/// value must still be live when this method is invoked.
1562	///
1563	/// [`HybridRc<T, Shared>::into_raw()`]: `Arc::into_raw`
1564	#[inline]
1565	pub unsafe fn decrement_shared_strong_count(ptr: *const T) {
1566		mem::drop(unsafe { Arc::from_raw(ptr) });
1567	}
1568}
1569
1570impl<T: Clone, State: RcState> HybridRc<T, State> {
1571	/// Makes a mutable reference into the given `HybridRc`.
1572	///
1573	/// If there are other strong references to the same value, then `make_mut()` will [`clone`] the
1574	/// inner value to a new allocation to ensure unique ownership.  This is also referred to as
1575	/// clone-on-write.
1576	///
1577	/// However, if there are no other strong references to this allocation, but some [`Weak`]
1578	/// pointers, then the [`Weak`]s will be disassociated and the inner value will not be cloned.
1579	///
1580	/// See also: [`get_mut()`], which will fail rather than cloning the inner value
	/// or disassociating [`Weak`]s.
1582	///
1583	/// [`clone`]: Clone::clone
1584	/// [`get_mut()`]: HybridRc::get_mut
1585	///
1586	/// # Example
1587	///
1588	/// ```
1589	/// use hybrid_rc::Rc;
1590	///
1591	/// let mut reference = Rc::new(42);
1592	///
1593	/// *Rc::make_mut(&mut reference) += 2;          // Won't clone anything
1594	/// let mut reference_2 = Rc::clone(&reference); // Won't clone inner value
1595	/// *Rc::make_mut(&mut reference) += 1;         // Clones inner data
1596	/// *Rc::make_mut(&mut reference) *= 2;        // Won't clone anything
1597	/// *Rc::make_mut(&mut reference_2) /= 4;       // Won't clone anything
1598	///
1599	/// // Now `reference` and `reference_2` point to different allocations.
1600	/// assert_eq!(*reference, 90);
1601	/// assert_eq!(*reference_2, 11);
1602	/// ```
1603	#[inline]
1604	pub fn make_mut(this: &mut Self) -> &mut T {
1605		let meta = this.meta();
1606		if State::SHARED {
1607			Self::make_mut_internal(this, false)
1608		} else {
1609			let local_count = meta.strong_local.get();
1610			Self::make_mut_internal(this, local_count > 1)
1611		}
1612	}
1613
1614	/// Makes a mutable reference into the given `HybridRc`, assuming that only the shared strong
1615	/// counter needs to be checked.
1616	///
1617	/// If `force_clone` is true, the counters are ignored and uniqueness will always be ensured
1618	/// by cloning the shared allocation.
1619	///
1620	/// Used internally by `make_mut()`.
1621	#[inline]
1622	fn make_mut_internal(this: &mut Self, force_clone: bool) -> &mut T {
1623		let meta = this.meta();
1624		// There is one implicit shared reference representing all local references, so if there are
1625		// no other local references (or we are a shared reference) and the shared counter is 1, we
1626		// are the only strong reference left.
1627		if force_clone
1628			|| meta
1629				.strong_shared
1630				.compare_exchange(1, 0, Ordering::Acquire, Ordering::Relaxed)
1631				.is_err()
1632		{
1633			// Clone the allocation and make `this` point to the new clone
1634			let mut donor = this.clone_allocation();
1635			mem::swap(&mut this.ptr, &mut donor.ptr);
1636		} else {
1637			// Check if there are Weak references left.
1638			// Relaxed suffices: if this races with a dropping Weak, we merely miss an
1639			// optimization; the code remains sound.
1640			if meta.weak.load(Ordering::Relaxed) != 1 {
1641				// Acts as a guard to decrement the weak counter
1642				let _weak = Weak { ptr: this.ptr };
1643
1644				// Steal the payload data
1645				let mut donor = Self::new_uninit();
1646				unsafe {
1647					let uninit = HybridRc::get_mut_unchecked(&mut donor);
1648					uninit.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
1649					let donor = donor.assume_init();
1650					this.ptr = donor.ptr;
1651					mem::forget(donor);
1652				}
1653			} else {
1654				// There were no Weak references, so we are the unique reference. Bump the counter
1655				// back up.
1656				meta.strong_shared.store(1, Ordering::Release);
1657			}
1658		}
1659
1660		// Safe, because by now we hold the only reference to the allocation in `this.ptr`: either
1661		// we did to begin with, or we got it by swapping in a fresh clone or by stealing the payload.
1662		unsafe { Self::get_mut_unchecked(this) }
1663	}
1664
1665	/// Clones the shared allocation and returns a `HybridRc` pointing to the clone.
1666	#[inline]
1667	fn clone_allocation(&self) -> Self {
1668		let mut result = Self::new_uninit();
1669		let uninit = unsafe { HybridRc::get_mut_unchecked(&mut result) };
1670		uninit.write((*self.data()).clone());
1671		unsafe { result.assume_init() }
1672	}
1673}
1674
1675impl<T, State: RcState> HybridRc<mem::MaybeUninit<T>, State> {
1676	/// Assumes the value is initialized and converts to `HybridRc<T, State>`.
1677	///
1678	/// # Safety
1679	///
1680	/// You need to provide the same guarantees as for [`MaybeUninit::assume_init()`].
1681	/// Calling this when the value is not yet fully initialized causes immediate undefined
1682	/// behavior.
1683	///
1684	/// # Examples
1685	///
1686	/// ```
1687	/// use hybrid_rc::Rc;
1688	///
1689	/// let mut reference = Rc::<i64>::new_uninit();
1690	///
1691	/// let reference = unsafe {
1692	/// 	// Deferred initialization
1693	/// 	Rc::get_mut_unchecked(&mut reference).as_mut_ptr().write(1337);
1694	///     reference.assume_init()
1695	/// };
1696	///
1697	/// assert_eq!(*reference, 1337)
1698	/// ```
1699	///
1700	/// [`MaybeUninit::assume_init()`]: mem::MaybeUninit::assume_init
1701	#[inline]
1702	pub unsafe fn assume_init(self) -> HybridRc<T, State> {
1703		HybridRc::from_inner(mem::ManuallyDrop::new(self).ptr.cast())
1704	}
1705}
1706
1707impl<T, State: RcState> HybridRc<[mem::MaybeUninit<T>], State> {
1708	/// Assumes the values are initialized and converts to `HybridRc<[T], State>`.
1709	///
1710	/// # Safety
1711	///
1712	/// You need to provide the same guarantees as for [`MaybeUninit::assume_init()`].
1713	/// Calling this when the whole slice is not yet fully initialized causes immediate undefined
1714	/// behavior.
1715	///
1716	/// [`MaybeUninit::assume_init()`]: mem::MaybeUninit::assume_init
1717	#[inline]
1718	pub unsafe fn assume_init(self) -> HybridRc<[T], State> {
1719		HybridRc::from_inner(unsafe {
1720			mem::ManuallyDrop::new(self)
1721				.ptr
1722				.as_mut()
1723				.assume_init()
1724				.into()
1725		})
1726	}
1727}
1728
1729impl<State: RcState> HybridRc<dyn Any, State> {
1730	/// Tries to downcast the `HybridRc<dyn Any, _>` to a concrete type.
1731	///
1732	/// # Errors
1733	/// If the downcast fails, the original `HybridRc` is returned in `Err`.
1734	///
1735	/// # Example
1736	///
1737	/// ```
1738	/// use std::any::Any;
1739	/// use std::mem::drop;
1740	/// use hybrid_rc::Rc;
1741	///
1742	/// let value = 42i32;
1743	/// let concrete = Rc::new(value);
1744	/// let any: Rc<dyn Any> = Rc::into(concrete);
1745	///
1746	/// let any = any.downcast::<String>().unwrap_err();
1747	///
1748	/// assert_eq!(*any.downcast::<i32>().unwrap(), 42);
1749	/// ```
1750	#[inline]
1751	pub fn downcast<T: Any>(self) -> Result<HybridRc<T, State>, Self> {
1752		if (*self).is::<T>() {
1753			let ptr = self.ptr.cast::<RcBox<T>>();
1754			mem::forget(self);
1755			Ok(HybridRc::from_inner(ptr))
1756		} else {
1757			Err(self)
1758		}
1759	}
1760}
1761
1762impl<State: RcState> HybridRc<dyn Any + Sync + Send, State> {
1763	/// Tries to downcast the `HybridRc<dyn Any + Sync + Send, _>` to a concrete type.
1764	///
1765	/// # Errors
1766	/// If the downcast fails, the original `HybridRc` is returned in `Err`.
1767	///
1768	/// # Example
1769	///
1770	/// ```
1771	/// use std::any::Any;
1772	/// use std::mem::drop;
1773	/// use hybrid_rc::Rc;
1774	///
1775	/// let value = 42i32;
1776	/// let concrete = Rc::new(value);
1777	/// let any: Rc<dyn Any + Sync + Send> = Rc::into(concrete);
1778	///
1779	/// let any = any.downcast::<String>().unwrap_err();
1780	///
1781	/// assert_eq!(*any.downcast::<i32>().unwrap(), 42);
1782	/// ```
1783	#[inline]
1784	pub fn downcast<T: Any + Sync + Send>(self) -> Result<HybridRc<T, State>, Self> {
1785		if (*self).is::<T>() {
1786			let ptr = self.ptr.cast::<RcBox<T>>();
1787			mem::forget(self);
1788			Ok(HybridRc::from_inner(ptr))
1789		} else {
1790			Err(self)
1791		}
1792	}
1793}
1794
1795impl<T: ?Sized> Clone for HybridRc<T, Local> {
1796	/// Creates another `Rc` for the same value.
1797	///
1798	/// # Example
1799	/// ```
1800	/// use hybrid_rc::Rc;
1801	///
1802	/// let first = Rc::new(42i32);
1803	/// let second = Rc::clone(&first);
1804	///
1805	/// assert_eq!(Rc::as_ptr(&first), Rc::as_ptr(&second));
1806	/// ```
1807	#[inline]
1808	fn clone(&self) -> Self {
1809		self.meta().inc_strong_local();
1810		Self::from_inner(self.ptr)
1811	}
1812}
1813
1814impl<T: ?Sized> Clone for HybridRc<T, Shared> {
1815	/// Creates another `Arc` for the same value.
1816	///
1817	/// # Example
1818	/// ```
1819	/// use hybrid_rc::Arc;
1820	/// # fn main() -> Result<(), Box<dyn std::any::Any + Send + 'static>> {
1821	///
1822	/// let first = Arc::new(42i32);
1823	/// let second = Arc::clone(&first);
1824	///
1825	/// assert_eq!(Arc::as_ptr(&first), Arc::as_ptr(&second));
1826	///
1827	/// let value = std::thread::spawn(move || *second)
1828	///   .join()?;
1829	/// assert_eq!(*first, value);
1830	/// # Ok(())
1831	/// # }
1832	/// ```
1833	#[inline]
1834	fn clone(&self) -> Self {
1835		self.meta().inc_strong_shared();
1836		Self::from_inner(self.ptr)
1837	}
1838}
1839
1840impl<T: ?Sized, State: RcState> Drop for HybridRc<T, State> {
1841	/// Drops the `HybridRc`.
1842	///
1843	/// This will decrement the appropriate reference count depending on `State`. If both strong
1844	/// reference counts reach zero then the only other references (if any) are [`Weak`]. In that
1845	/// case the inner value is dropped.
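	///
	/// # Example
	///
	/// A small illustration; once the last strong reference is gone, `Weak::upgrade()` fails:
	///
	/// ```
	/// use hybrid_rc::Rc;
	///
	/// let strong = Rc::new(String::from("hello"));
	/// let weak = Rc::downgrade(&strong);
	///
	/// drop(strong); // The last strong reference is dropped, so the String is dropped.
	/// assert!(weak.upgrade().is_err());
	/// ```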
1846	#[inline]
1847	fn drop(&mut self) {
1848		let no_more_strong_refs = if State::SHARED {
1849			self.meta().dec_strong_shared()
1850		} else {
1851			self.meta().dec_strong_local()
1852		};
1853
1854		if no_more_strong_refs {
1855			unsafe {
1856				self.drop_contents_and_maybe_box();
1857			}
1858		}
1859	}
1860}
1861
1862// Dereferencing traits
1863
1864impl<T: ?Sized, State: RcState> Deref for HybridRc<T, State> {
1865	type Target = T;
1866
1867	#[inline]
1868	fn deref(&self) -> &T {
1869		self.data()
1870	}
1871}
1872
1873impl<T: ?Sized, State: RcState> Borrow<T> for HybridRc<T, State> {
1874	#[inline]
1875	fn borrow(&self) -> &T {
1876		&**self
1877	}
1878}
1879
1880impl<T: ?Sized, State: RcState> AsRef<T> for HybridRc<T, State> {
1881	#[inline]
1882	fn as_ref(&self) -> &T {
1883		&**self
1884	}
1885}
1886
1887// Safety: T: Sync implies that dereferencing the Arc<T> on multiple threads is sound and T: Send
1888// implies that dropping T on another thread is sound. So T: Sync + Send gives all guarantees we
1889// need to make Arc Sync + Send.
1890unsafe impl<T: ?Sized + Sync + Send> Send for HybridRc<T, Shared> {}
1891unsafe impl<T: ?Sized + Sync + Send> Sync for HybridRc<T, Shared> {}
1892
1893// Unwind safety: A HybridRc can only be UnwindSafe if the inner type is RefUnwindSafe.
1894impl<T: RefUnwindSafe + ?Sized, State: RcState> UnwindSafe for HybridRc<T, State> {}
1895
1896// Unwind safety: An Arc is always RefUnwindSafe because of its use of atomics.
1897impl<T: RefUnwindSafe> RefUnwindSafe for HybridRc<T, Shared> {}
1898
1899// Conversions between different HybridRc variants
1900
1901impl<T: Any + 'static, State: RcState> From<HybridRc<T, State>>
1902	for HybridRc<dyn Any + 'static, State>
1903{
1904	/// Upcasts a `HybridRc<T, State>` into a `HybridRc<dyn Any, State>`
1905	#[inline]
1906	fn from(src: HybridRc<T, State>) -> Self {
1907		let ptr = src.ptr.as_ptr() as *mut RcBox<dyn Any>;
1908		mem::forget(src);
1909		Self::from_inner(unsafe { NonNull::new_unchecked(ptr) })
1910	}
1911}
1912
1913impl<T: Any + Sync + Send + 'static, State: RcState> From<HybridRc<T, State>>
1914	for HybridRc<dyn Any + Sync + Send + 'static, State>
1915{
1916	/// Upcasts a `HybridRc<T, State>` into a `HybridRc<dyn Any + Sync + Send, State>`
1917	#[inline]
1918	fn from(src: HybridRc<T, State>) -> Self {
1919		let ptr = src.ptr.as_ptr() as *mut RcBox<dyn Any + Sync + Send>;
1920		mem::forget(src);
1921		Self::from_inner(unsafe { NonNull::new_unchecked(ptr) })
1922	}
1923}
1924
1925impl<T, State: RcState, const N: usize> From<HybridRc<[T; N], State>> for HybridRc<[T], State> {
1926	/// Converts a `HybridRc<[T; N], State>` into a `HybridRc<[T], State>`
1927	///
1928	/// Workaround for coercion as long as `CoerceUnsized` is unstable.
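	///
	/// # Example
	///
	/// A short illustrative sketch:
	///
	/// ```
	/// use hybrid_rc::Rc;
	///
	/// let array = Rc::new([1, 2, 3]);
	/// let slice: Rc<[i32]> = array.into();
	/// assert_eq!(&slice[..], &[1, 2, 3]);
	/// ```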
1929	#[inline]
1930	fn from(src: HybridRc<[T; N], State>) -> Self {
1931		let ptr = src.ptr.as_ptr() as *mut RcBox<[T]>;
1932		mem::forget(src);
1933		Self::from_inner(unsafe { NonNull::new_unchecked(ptr) })
1934	}
1935}
1936
1937impl<T: ?Sized> From<Rc<T>> for HybridRc<T, Shared> {
1938	/// Converts an `Rc<T>` into an `Arc<T>`.
1939	///
1940	/// See [`to_shared()`].
1941	///
1942	/// [`to_shared()`]: HybridRc::to_shared
1943	#[inline]
1944	fn from(src: Rc<T>) -> Self {
1945		HybridRc::to_shared(&src)
1946	}
1947}
1948
1949impl<T: ?Sized> TryFrom<Arc<T>> for HybridRc<T, Local> {
1950	type Error = Arc<T>;
1951
1952	/// Tries to convert an `Arc<T>` into an `Rc<T>`.
1953	///
1954	/// See [`to_local()`].
1955	///
1956	/// [`to_local()`]: HybridRc::to_local
1957	#[inline]
1958	fn try_from(src: Arc<T>) -> Result<Self, Self::Error> {
1959		match HybridRc::to_local(&src) {
1960			Some(result) => Ok(result),
1961			None => Err(src),
1962		}
1963	}
1964}
1965
1966impl<T, State: RcState, const N: usize> TryFrom<HybridRc<[T], State>> for HybridRc<[T; N], State> {
1967	type Error = HybridRc<[T], State>;
1968
1969	/// Tries to convert a `HybridRc<[T], State>` into a `HybridRc<[T; N], State>`
1970	///
1971	/// Only succeeds if the length matches exactly.
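	///
	/// # Example
	///
	/// A short illustrative sketch:
	///
	/// ```
	/// use std::convert::TryFrom;
	/// use hybrid_rc::Rc;
	///
	/// let slice: Rc<[i32]> = Rc::from(&[1, 2, 3][..]);
	/// let array = Rc::<[i32; 3]>::try_from(slice).unwrap();
	/// assert_eq!(*array, [1, 2, 3]);
	///
	/// let slice: Rc<[i32]> = Rc::from(&[1, 2][..]);
	/// assert!(Rc::<[i32; 3]>::try_from(slice).is_err());
	/// ```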
1972	#[inline]
1973	fn try_from(src: HybridRc<[T], State>) -> Result<Self, Self::Error> {
1974		if src.len() == N {
1975			let ptr = src.ptr.as_ptr().cast();
1976			mem::forget(src);
1977			Ok(Self::from_inner(unsafe { NonNull::new_unchecked(ptr) }))
1978		} else {
1979			Err(src)
1980		}
1981	}
1982}
1983
1984// Conversions into HybridRc
1985
1986impl<T, State: RcState> From<T> for HybridRc<T, State> {
1987	/// Moves a `T` into a `HybridRc<T, State>`
1988	///
1989	/// Equivalent to calling [`HybridRc::new(src)`].
1990	///
1991	/// [`HybridRc::new(src)`]: Self::new
1992	#[inline]
1993	fn from(src: T) -> Self {
1994		Self::new(src)
1995	}
1996}
1997
1998impl<T: Clone, State: RcState> From<&[T]> for HybridRc<[T], State> {
1999	/// Allocate a reference-counted slice and clone the elements of `src` into it.
2000	///
2001	/// # Example
2002	///
2003	/// ```
2004	/// use hybrid_rc::Rc;
2005	///
2006	/// let vecs = [
2007	/// 	vec![1,2,3],
2008	/// 	vec![4,5,6],
2009	/// ];
2010	/// let rc: Rc<[_]> = Rc::from(&vecs[..]);
2011	/// assert_eq!(&rc[..], &vecs);
2012	/// ```
2013	#[inline]
2014	fn from(src: &[T]) -> Self {
2015		let mut builder = SliceBuilder::new(Self::build_new_meta(), src.len());
2016		for item in src {
2017			builder.append(Clone::clone(item));
2018		}
2019		Self::from_inner(builder.finish().into())
2020	}
2021}
2022
2023impl<T, State: RcState> From<Vec<T>> for HybridRc<[T], State> {
2024	/// Allocate a reference-counted slice and move `src`'s items into it.
2025	///
2026	/// # Example
2027	///
2028	/// ```
2029	/// use hybrid_rc::Rc;
2030	///
2031	/// let vec = vec!["a","b","c"];
2032	/// let rc: Rc<[_]> = Rc::from(vec);
2033	/// assert_eq!(&rc[..], &["a", "b", "c"]);
2034	/// ```
2035	#[inline]
2036	fn from(mut src: Vec<T>) -> Self {
2037		unsafe {
2038			let result = HybridRc::<_, State>::copy_from_slice_unchecked(&src[..]);
2039
2040			// Set the length of `src` to 0 so that the moved items are not dropped.
2041			src.set_len(0);
2042
2043			result
2044		}
2045	}
2046}
2047
2048impl<State: RcState> From<&str> for HybridRc<str, State> {
2049	/// Allocate a reference-counted `str` and copy `src` into it.
2050	///
2051	/// # Example
2052	///
2053	/// ```
2054	/// use hybrid_rc::Rc;
2055	///
2056	/// let a: Rc<str> = Rc::from("foobar");
2057	/// assert_eq!(&a[..], "foobar");
2058	/// ```
2059	#[inline]
2060	fn from(src: &str) -> Self {
2061		let bytes = HybridRc::<_, State>::copy_from_slice(src.as_bytes());
2062		let inner = unsafe { (bytes.ptr.as_ptr() as *mut _ as *mut RcBox<str>).as_mut() }.unwrap();
2063		mem::forget(bytes);
2064		Self::from_inner(inner.into())
2065	}
2066}
2067
2068impl<State: RcState> From<String> for HybridRc<str, State> {
2069	/// Allocate a reference-counted `str` and copy `src` into it.
2070	///
2071	/// # Example
2072	///
2073	/// ```
2074	/// use hybrid_rc::Rc;
2075	///
2076	/// let string: String = "foobar".to_owned();
2077	/// let a: Rc<str> = Rc::from(string);
2078	/// assert_eq!(&a[..], "foobar");
2079	/// ```
2080	#[inline]
2081	fn from(src: String) -> Self {
2082		Self::from(&src[..])
2083	}
2084}
2085
2086impl<'a, T: ToOwned + ?Sized, State: RcState> From<Cow<'a, T>> for HybridRc<T, State>
2087where
2088	HybridRc<T, State>: From<&'a T> + From<T::Owned>,
2089{
2090	/// Creates a new `HybridRc<T, State>` from a clone-on-write pointer by copying its content.
2091	///
2092	/// # Example
2093	///
2094	/// ```rust
2095	/// use hybrid_rc::Rc;
2096	/// use std::borrow::Cow;
2097	///
2098	/// let cow: Cow<str> = Cow::Borrowed("foobar");
2099	/// let a: Rc<str> = Rc::from(cow);
2100	/// assert_eq!(&a[..], "foobar");
2101	/// ```
2102	#[inline]
2103	fn from(src: Cow<'a, T>) -> HybridRc<T, State> {
2104		match src {
2105			Cow::Borrowed(value) => HybridRc::from(value),
2106			Cow::Owned(value) => HybridRc::from(value),
2107		}
2108	}
2109}
2110
2111impl<T: ?Sized, State: RcState> From<Box<T>> for HybridRc<T, State> {
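	/// Moves the boxed value into a newly allocated `HybridRc<T, State>`.
	///
	/// # Example
	///
	/// A small illustrative sketch:
	///
	/// ```
	/// use hybrid_rc::Rc;
	///
	/// let boxed: Box<[u8]> = vec![1, 2, 3].into_boxed_slice();
	/// let rc: Rc<[u8]> = Rc::from(boxed);
	/// assert_eq!(&rc[..], &[1, 2, 3]);
	/// ```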
2112	#[inline]
2113	fn from(src: Box<T>) -> HybridRc<T, State> {
2114		let len = mem::size_of_val(&*src);
2115		let inner = RcBox::allocate_for_val(Self::build_new_meta(), &*src, false);
2116		let dest = unsafe { ptr::addr_of_mut!((*inner.as_ptr()).data) }.cast();
2117
2118		// Safety: The freshly allocated `RcBox` can't alias `src` and the payload can be fully
2119		// moved by copying the memory, because it's not Pin<Box<T>>. `allocate_for_val` ensures
2120		// the destination payload buffer is big enough for the value.
2121		unsafe {
2122			(&*src as *const T)
2123				.cast::<u8>()
2124				.copy_to_nonoverlapping(dest, len);
2125		}
2126
2127		// Drop original box without running the destructor
2128		// Safety: This *should* be sound, as ManuallyDrop<T> has the same layout as T.
2129		mem::drop(unsafe { mem::transmute::<Box<T>, Box<mem::ManuallyDrop<T>>>(src) });
2130
2131		HybridRc::from_inner(inner)
2132	}
2133}
2134
2135impl<T, State: RcState> iter::FromIterator<T> for HybridRc<[T], State> {
2136	/// Takes each element in the `Iterator` and collects it into a `HybridRc<[T], State>`.
2137	///
2138	/// # Performance characteristics
2139	///
2140	/// Collection is done by first collecting into a `Vec<T>`.
2141	///
2142	/// This will allocate as many times as needed for constructing the `Vec<T>`
2143	/// and then it will allocate once for turning the `Vec<T>` into the `HybridRc<[T], State>`.
2144	///
2145	/// Once specialization is stabilized, this will be optimized for [`TrustedLen`] iterators.
2146	///
2147	/// [`TrustedLen`]: core::iter::TrustedLen
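	///
	/// # Example
	///
	/// A short illustrative sketch:
	///
	/// ```
	/// use hybrid_rc::Rc;
	///
	/// let rc: Rc<[i32]> = (1..=3).collect();
	/// assert_eq!(&rc[..], &[1, 2, 3]);
	/// ```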
2148	fn from_iter<I: iter::IntoIterator<Item = T>>(iter: I) -> Self {
2149		let vec: Vec<T> = iter.into_iter().collect();
2150		vec.into()
2151	}
2152}
2153
2154// Propagate some useful traits implemented by the inner type
2155
2156impl<T: Default, State: RcState> Default for HybridRc<T, State> {
2157	/// Creates a new `HybridRc`, with the `Default` value for `T`.
2158	#[inline]
2159	fn default() -> Self {
2160		Self::new(Default::default())
2161	}
2162}
2163
2164impl<T: ?Sized + PartialEq, S1: RcState, S2: RcState> PartialEq<HybridRc<T, S2>>
2165	for HybridRc<T, S1>
2166{
2167	/// Equality for `HybridRc`s.
2168	///
2169	/// Two `HybridRc`s are equal if their inner values are equal, independent of if they are
2170	/// stored in the same allocation.
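	///
	/// # Example
	///
	/// A short sketch; the comparison also works across the `Rc`/`Arc` variants:
	///
	/// ```
	/// use hybrid_rc::Rc;
	///
	/// let a = Rc::new(5);
	/// let b = Rc::to_shared(&a); // same value, same allocation
	/// let c = Rc::new(5);        // same value, different allocation
	///
	/// assert!(a == b);
	/// assert!(a == c);
	/// ```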
2171	#[inline]
2172	fn eq(&self, other: &HybridRc<T, S2>) -> bool {
2173		**self == **other
2174	}
2175}
2176
2177impl<T: ?Sized + Eq, State: RcState> Eq for HybridRc<T, State> {}
2178
2179impl<T: ?Sized + Hash, State: RcState> Hash for HybridRc<T, State> {
2180	#[inline]
2181	fn hash<H: Hasher>(&self, state: &mut H) {
2182		Self::data(self).hash(state);
2183	}
2184}
2185
2186impl<T: ?Sized + PartialOrd, S1: RcState, S2: RcState> PartialOrd<HybridRc<T, S2>>
2187	for HybridRc<T, S1>
2188{
2189	/// Partial comparison for `HybridRc`s.
2190	///
2191	/// The two are compared by calling `partial_cmp()` on their inner values.
2192	#[inline]
2193	fn partial_cmp(&self, other: &HybridRc<T, S2>) -> Option<cmp::Ordering> {
2194		(**self).partial_cmp(&**other)
2195	}
2196}
2197
2198impl<T: ?Sized + Ord, State: RcState> Ord for HybridRc<T, State> {
2199	/// Comparison for `HybridRc`s.
2200	///
2201	/// The two are compared by calling `cmp()` on their inner values.
2202	#[inline]
2203	fn cmp(&self, other: &Self) -> cmp::Ordering {
2204		(**self).cmp(&**other)
2205	}
2206}
2207
2208impl<T: ?Sized + fmt::Display, State: RcState> fmt::Display for HybridRc<T, State> {
2209	#[inline]
2210	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2211		fmt::Display::fmt(&Self::data(self), f)
2212	}
2213}
2214
2215impl<T: ?Sized + fmt::Debug, State: RcState> fmt::Debug for HybridRc<T, State> {
2216	#[inline]
2217	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2218		fmt::Debug::fmt(&Self::data(self), f)
2219	}
2220}
2221
2222// `HybridRc` can be formatted as a pointer.
2223impl<T: ?Sized, State: RcState> fmt::Pointer for HybridRc<T, State> {
2224	/// Formats the value using the given formatter.
2225	///
2226	/// If the `#` flag is used, the state (shared/local) is written after the address.
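	///
	/// # Example
	///
	/// A small sketch of the alternate form (the printed address varies between runs):
	///
	/// ```
	/// use hybrid_rc::Rc;
	///
	/// let rc = Rc::new(42);
	/// assert!(format!("{:#p}", rc).ends_with(" [local]"));
	/// ```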
2227	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2228		if f.alternate() {
2229			fmt::Pointer::fmt(&Self::as_ptr(self), f)?;
2230			f.write_str(if State::SHARED {
2231				" [shared]"
2232			} else {
2233				" [local]"
2234			})
2235		} else {
2236			fmt::Pointer::fmt(&Self::as_ptr(self), f)
2237		}
2238	}
2239}
2240
2241/// `HybridRc<T>` is always `Unpin` itself: the value lives on the heap, so moving the
2242/// `HybridRc<T>` doesn't move its content even if `T` is not `Unpin`.
2243///
2244/// This allows unpinning e.g. `Pin<Box<HybridRc<T>>>` but not any `Pin<HybridRc<T>>`!
2245impl<T: ?Sized, State: RcState> Unpin for HybridRc<T, State> {}
2246
2247/// `Weak<T>` represents a non-owning reference to a value managed by a [`HybridRc<T, _>`].
2248/// The value is accessed by calling [`upgrade()`] or [`upgrade_local()`] on `Weak`.
2249///
2250/// `Weak` references are typically used to prevent circular references that would keep
2251/// the shared value alive indefinitely.
2252///
2253/// The typical way to obtain a `Weak<T>` is to call [`HybridRc::downgrade()`].
2254///
2255/// [`upgrade()`]: Weak::upgrade
2256/// [`upgrade_local()`]: Weak::upgrade_local
2257#[must_use]
2258pub struct Weak<T: ?Sized> {
2259	ptr: NonNull<RcBox<T>>,
2260}
2261
2262impl<T: ?Sized> Weak<T> {
2263	/// Accesses the metadata area of the shared allocation.
2264	///
2265	/// `None` for instances created through `Weak::new()`.
2266	#[inline]
2267	fn meta(&self) -> Option<&RcMeta> {
2268		if is_senitel(self.ptr.as_ptr()) {
2269			None
2270		} else {
2271			// Safety: as long as one Rc or Weak
2272			// for this item exists, the memory stays
2273			// allocated.
2274			Some(unsafe { &(*self.ptr.as_ptr()).meta })
2275		}
2276	}
2277
2278	/// Returns a raw pointer to the value referenced by this `Weak<T>`.
2279	///
2280	/// The pointer is valid only if there are some strong references. It may be dangling,
2281	/// unaligned or even null otherwise.
2282	///
2283	/// # Example
2284	/// ```
2285	/// use hybrid_rc::Rc;
2286	///
2287	/// let strong = Rc::new(42i32);
2288	/// let weak = Rc::downgrade(&strong);
2289	/// {
2290	/// 	let pointer = weak.as_ptr();
2291	/// 	// As long as strong is not dropped, the pointer stays valid
2292	/// 	assert_eq!(42, unsafe { *pointer });
2293	/// }
2294	/// drop(strong);
2295	/// {
2296	/// 	// Calling weak.as_ptr() is still safe, but dereferencing it would lead
2297	/// 	// to undefined behaviour.
2298	/// 	let pointer = weak.as_ptr();
2299	/// 	// assert_eq!(42, unsafe { *pointer }); // undefined behaviour
2300	/// }
	/// ```
2301	#[must_use]
2302	#[inline]
2303	pub fn as_ptr(&self) -> *const T {
2304		let ptr: *mut RcBox<T> = self.ptr.as_ptr();
2305
2306		if is_senitel(ptr) {
2307			// If the pointer is dangling, we return the sentinel directly. This cannot be
2308			// a valid payload address, as the payload is at least as aligned as `RcBox` (usize).
2309			ptr as *const T
2310		} else {
2311			// Safety: raw pointer manipulation like in sync::Weak, as the payload may have been
2312			// dropped at this point and to keep provenance.
2313			unsafe { ptr::addr_of_mut!((*ptr).data) }
2314		}
2315	}
2316
2317	/// Attempts to upgrade the Weak pointer to an [`Rc`].
2318	///
2319	/// **Note:** Only one thread can have `Rc`s for a value at any point in time.
2320	/// See [`upgrade()`] to upgrade to an [`Arc`].
2321	///
2322	/// In `no_std` environments this will only succeed if no `Rc` exists on *any* thread.
2323	///
2324	/// # Errors
2325	/// - [`ValueDropped`]: the referenced value has already been dropped.
2326	/// - [`WrongThread`]: another thread currently holds `Rc`s for the value.
2327	///
2328	/// # Example
2329	/// ```
2330	/// use hybrid_rc::{Arc, Rc, Weak, UpgradeError};
2331	/// # fn main() -> Result<(), UpgradeError> {
2332	/// let strong = Arc::new(42i32);
2333	/// let weak = Arc::downgrade(&strong);
2334	///
2335	/// {
2336	/// 	let strong2 = weak.upgrade_local()?;
2337	/// 	assert_eq!(Arc::as_ptr(&strong), Rc::as_ptr(&strong2));
2338	/// }
2339	///
2340	/// std::mem::drop(strong);
2341	///
2342	/// let error = Weak::upgrade_local(&weak).unwrap_err();
2343	/// assert_eq!(error, UpgradeError::ValueDropped);
2344	/// # Ok(())
2345	/// # }
2346	/// ```
2347	///
2348	/// [`upgrade()`]: Weak::upgrade
2349	/// [`ValueDropped`]: UpgradeError::ValueDropped
2350	/// [`WrongThread`]: UpgradeError::WrongThread
2351	#[inline]
2352	pub fn upgrade_local(&self) -> Result<Rc<T>, UpgradeError> {
2353		let meta = self.meta().ok_or(UpgradeError::ValueDropped)?;
2354		let current_thread = ThreadId::current_thread();
2355
2356		let owner = match meta.owner.store_if_none(
2357			Some(current_thread),
2358			Ordering::Acquire,
2359			Ordering::Relaxed,
2360		) {
2361			Ok(_) => None,
2362			Err(owner) => owner,
2363		};
2364
2365		if owner == None || owner == Some(current_thread) {
2366			if meta.try_inc_strong_local().is_ok() {
2367				Ok(HybridRc::<T, Local>::from_inner(self.ptr))
2368			} else {
2369				// Relaxed is enough, as try_inc_strong_local failing means that
2370				// the value was already dropped.
2371				meta.owner.store(None, Ordering::Relaxed);
2372				Err(UpgradeError::ValueDropped)
2373			}
2374		} else {
2375			Err(UpgradeError::WrongThread)
2376		}
2377	}
2378
2379	/// Attempts to upgrade the Weak pointer to an [`Arc`].
2380	///
2381	/// Also see [`upgrade_local()`] to upgrade to an [`Rc`].
2382	///
2383	/// # Errors
2384	/// - [`ValueDropped`]: the referenced value has already been dropped.
2385	///
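	/// # Example
	///
	/// A short sketch analogous to the `upgrade_local()` example:
	///
	/// ```
	/// use hybrid_rc::{Arc, UpgradeError};
	/// # fn main() -> Result<(), UpgradeError> {
	/// let strong = Arc::new(42i32);
	/// let weak = Arc::downgrade(&strong);
	///
	/// let strong2 = weak.upgrade()?;
	/// assert_eq!(Arc::as_ptr(&strong), Arc::as_ptr(&strong2));
	///
	/// drop(strong);
	/// drop(strong2);
	/// assert_eq!(weak.upgrade().unwrap_err(), UpgradeError::ValueDropped);
	/// # Ok(())
	/// # }
	/// ```
	///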
2386	/// [`upgrade_local()`]: Weak::upgrade_local
2387	/// [`ValueDropped`]: UpgradeError::ValueDropped
2388	#[inline]
2389	pub fn upgrade(&self) -> Result<Arc<T>, UpgradeError> {
2390		let meta = self.meta().ok_or(UpgradeError::ValueDropped)?;
2391		meta.try_inc_strong_shared()
2392			.map_err(|_| UpgradeError::ValueDropped)?;
2393		Ok(HybridRc::<T, Shared>::from_inner(self.ptr))
2394	}
2395
2396	/// Gets a lower bound on the number of strong pointers to the inner value.
2397	///
2398	/// If `self` was created using [`Weak::new`], this will return 0.
2399	///
2400	/// Please understand that another thread might change the count at any time, including
2401	/// potentially between calling this method and acting on the result.
2402	///
2403	/// # Examples
2404	///
2405	/// ```
2406	/// use hybrid_rc::{Arc, Rc, Weak};
2407	///
2408	/// let reference = Rc::new(42);
2409	/// let _2nd_ref = Rc::clone(&reference);
2410	/// let shared_ref = Rc::to_shared(&reference);
2411	/// let _2nd_shared_ref = Arc::clone(&shared_ref);
2412	/// let weak = Rc::downgrade(&reference);
2413	///
2414	/// // `strong_count` only sees the shared counter, which counts the shared references plus
2415	/// // one for all local references together, so it will show 3 instead of 4:
2416	/// assert_eq!(Weak::strong_count(&weak), 3);
2417	/// ```
2418	#[inline]
2419	pub fn strong_count(&self) -> usize {
2420		if let Some(meta) = self.meta() {
2421			meta.strong_shared.load(Ordering::SeqCst)
2422		} else {
2423			0
2424		}
2425	}
2426
2427	/// Gets the number of [`Weak`] pointers to this allocation.
2428	///
2429	/// Please understand that another thread may change the count at any time, including
2430	/// potentially between calling this method and acting on the result. Also there might be
2431	/// off-by-one errors when other threads concurrently upgrade or downgrade pointers.
2432	///
2433	/// # Examples
2434	///
2435	/// ```
2436	/// use hybrid_rc::{Rc, Weak};
2437	///
2438	/// let reference = Rc::new(42);
2439	/// let weak = Rc::downgrade(&reference);
2440	/// let _weak_2 = weak.clone();
2441	///
2442	/// assert_eq!(Weak::weak_count(&weak), 2);
2443	/// ```
2444	#[inline]
2445	pub fn weak_count(&self) -> usize {
2446		if let Some(meta) = self.meta() {
2447			let weak = meta.weak.load(Ordering::SeqCst);
2448			if weak == usize::MAX {
2449				0
2450			} else if meta.strong_shared.load(Ordering::SeqCst) > 0 {
2451				weak - 1
2452			} else {
2453				weak
2454			}
2455		} else {
2456			0
2457		}
2458	}
2459}
2460
2461impl<T> Weak<T> {
2462	/// Constructs a dummy `Weak<T>`, without allocating any memory.
2463	///
2464	/// Trying to upgrade the result will always result in a [`ValueDropped`] error.
2465	///
2466	/// [`ValueDropped`]: UpgradeError::ValueDropped
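	///
	/// # Example
	///
	/// A minimal sketch:
	///
	/// ```
	/// use hybrid_rc::{Weak, UpgradeError};
	///
	/// let empty: Weak<i32> = Weak::new();
	/// assert_eq!(empty.upgrade().unwrap_err(), UpgradeError::ValueDropped);
	/// ```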
2467	pub fn new() -> Weak<T> {
2468		Self { ptr: senitel() }
2469	}
2470}
2471
2472impl<T: ?Sized> fmt::Debug for Weak<T> {
2473	#[inline]
2474	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2475		write!(f, "(Weak)")
2476	}
2477}
2478
2479impl<T: ?Sized> fmt::Pointer for Weak<T> {
2480	/// Formats the value using the given formatter.
2481	///
2482	/// If the `#` flag is used, the state (weak) is written after the address.
2483	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2484		if f.alternate() {
2485			fmt::Pointer::fmt(&Self::as_ptr(self), f)?;
2486			f.write_str(" [weak]")
2487		} else {
2488			fmt::Pointer::fmt(&Self::as_ptr(self), f)
2489		}
2490	}
2491}
2492
2493impl<T> Default for Weak<T> {
2494	/// Constructs a dummy `Weak<T>`, without allocating any memory.
2495	///
2496	/// See [`Weak<T>::new()`].
2497	#[inline]
2498	fn default() -> Self {
2499		Self::new()
2500	}
2501}
2502
2503impl<T: ?Sized> Clone for Weak<T> {
2504	/// Creates another `Weak` reference for the same value.
2505	///
2506	/// # Example
2507	/// ```
2508	/// use hybrid_rc::{Rc, Weak};
2509	///
2510	/// let strong = Rc::new(42i32);
2511	/// let weak = Rc::downgrade(&strong);
2512	/// let weak2 = Weak::clone(&weak);
2513	///
2514	/// assert_eq!(weak.as_ptr(), weak2.as_ptr());
2515	/// ```
2516	#[inline]
2517	fn clone(&self) -> Self {
2518		if let Some(meta) = self.meta() {
2519			// We can ignore the lock in Weak::clone() as the counter is only locked by HybridRc
2520			// when there are no Weak instances (meta.weak == 1).
2521			meta.inc_weak_nolock();
2522		}
2523		Self { ptr: self.ptr }
2524	}
2525}
2526
2527impl<T: ?Sized> Drop for Weak<T> {
2528	/// Drops the `Weak` reference.
2529	///
2530	/// Once all `HybridRc` and `Weak` references to a shared value are dropped, the shared
2531	/// allocation is fully released.
2532	#[inline]
2533	fn drop(&mut self) {
2534		if let Some(meta) = self.meta() {
2535			let last_reference = meta.dec_weak();
2536			if last_reference {
2537				unsafe {
2538					// Safety: only called if this was the last (weak) reference
2539					RcBox::dealloc(self.ptr);
2540				}
2541			}
2542		}
2543	}
2544}
2545
2546// Safety: Like for Arc<T> T: Send + Sync gives all guarantees we need to make Weak Send + Sync.
2547unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> {}
2548unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> {}
2549
2550/// `PinWeak<T>` represents a non-owning reference to a pinned value managed by a
2551/// [`Pin`]`<`[`HybridRc<T, _>`]`>`.
2552///
2553/// The typical way to obtain a `PinWeak<T>` is to call [`HybridRc::downgrade_pin()`].
2554///
2555/// See [`Weak<T>`] for more information about weak references.
2556///
2557/// [`upgrade()`]: PinWeak::upgrade
2558/// [`upgrade_local()`]: PinWeak::upgrade_local
2559#[repr(transparent)]
2560pub struct PinWeak<T: ?Sized>(Weak<T>);
2561
2562impl<T: ?Sized> PinWeak<T> {
2563	/// Attempts to upgrade the pinned weak pointer to a pinned [`Rc`].
2564	///
2565	/// See [`Weak::upgrade_local()`] for more information.
2566	///
2567	/// # Errors
2568	/// - [`ValueDropped`]: the referenced value has already been dropped.
2569	/// - [`WrongThread`]: another thread currently holds `Rc`s for the value.
2570	///
2571	/// [`ValueDropped`]: UpgradeError::ValueDropped
2572	/// [`WrongThread`]: UpgradeError::WrongThread
2573	#[inline]
2574	pub fn upgrade_local(&self) -> Result<Pin<Rc<T>>, UpgradeError> {
2575		Ok(unsafe { Pin::new_unchecked(self.0.upgrade_local()?) })
2576	}
2577
2578	/// Attempts to upgrade the pinned weak pointer to a pinned [`Arc`].
2579	///
2580	/// See [`Weak::upgrade()`] for more information.
2581	///
2582	/// # Errors
2583	/// - [`ValueDropped`]: the referenced value has already been dropped.
2584	///
2585	/// [`ValueDropped`]: UpgradeError::ValueDropped
2586	#[inline]
2587	pub fn upgrade(&self) -> Result<Pin<Arc<T>>, UpgradeError> {
2588		Ok(unsafe { Pin::new_unchecked(self.0.upgrade()?) })
2589	}
2590
2591	/// Gets a lower bound on the number of strong pointers to the inner value.
2592	///
2593	/// See [`Weak::strong_count()`] for more information.
2594	#[inline]
2595	pub fn strong_count(&self) -> usize {
2596		self.0.strong_count()
2597	}
2598
2599	/// Gets the number of [`Weak`] pointers to this allocation.
2600	///
2601	/// See [`Weak::weak_count()`] for more information.
2602	#[inline]
2603	pub fn weak_count(&self) -> usize {
2604		self.0.weak_count()
2605	}
2606
2607	/// Transforms this `PinWeak<T>` into a [`Weak<T>`]
2608	///
2609	/// # Safety
2610	/// This function is unsafe. You must guarantee that you will continue to treat the `Weak` as
2611	/// pinned after you call this function. Failing to maintain the pinning invariants is a
2612	/// violation of the API contract and may lead to undefined behavior in later (safe) operations.
2613	///
2614	/// If the underlying data is [`Unpin`], [`PinWeak::into_inner()`] should be used instead.
2615	#[inline]
2616	pub unsafe fn into_inner_unchecked(self) -> Weak<T> {
2617		self.0
2618	}
2619}
2620
2621impl<T> PinWeak<T> {
2622	/// Constructs a dummy `PinWeak<T>`, without allocating any memory.
2623	///
2624	/// Trying to upgrade the result will always result in a [`ValueDropped`] error.
2625	///
2626	/// [`ValueDropped`]: UpgradeError::ValueDropped
2627	pub fn new() -> PinWeak<T> {
2628		Self(Weak::new())
2629	}
2630}
2631
2632impl<T: ?Sized + Unpin> PinWeak<T> {
2633	/// Transforms this `PinWeak<T>` into a [`Weak<T>`]
2634	///
2635	/// This requires that the data inside the shared allocation is [`Unpin`], so that we
2636	/// can ignore the pinning invariants when unwrapping it.
2637	#[inline]
2638	pub fn into_inner(self) -> Weak<T> {
2639		self.0
2640	}
2641}
2642
2643impl<T: ?Sized> fmt::Debug for PinWeak<T> {
2644	#[inline]
2645	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2646		write!(f, "Pin<(Weak)>")
2647	}
2648}
2649
2650impl<T: ?Sized> fmt::Pointer for PinWeak<T> {
2651	/// Formats the value using the given formatter.
2652	///
2653	/// If the `#` flag is used, the state (weak) is written after the address.
2654	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2655		if f.alternate() {
2656			fmt::Pointer::fmt(&self.0.as_ptr(), f)?;
2657			f.write_str(" [weak]")
2658		} else {
2659			fmt::Pointer::fmt(&self.0.as_ptr(), f)
2660		}
2661	}
2662}
2663
2664impl<T: ?Sized> Clone for PinWeak<T> {
2665	/// Creates another pinned weak reference for the same value.
2666	///
2667	/// See [`Weak::clone()`] for more information.
2668	#[inline]
2669	fn clone(&self) -> Self {
2670		Self(self.0.clone())
2671	}
2672}
2673
2674impl<T> Default for PinWeak<T> {
2675	/// Constructs a dummy `PinWeak<T>`, without allocating any memory.
2676	///
2677	/// See [`PinWeak<T>::new()`].
2678	#[inline]
2679	fn default() -> Self {
2680		Self::new()
2681	}
2682}
2683
2684// Safety: Like for Weak<T> T: Send + Sync gives all guarantees we need to make PinWeak Send + Sync.
2685unsafe impl<T: ?Sized + Sync + Send> Send for PinWeak<T> {}
2686unsafe impl<T: ?Sized + Sync + Send> Sync for PinWeak<T> {}