1use core::{
2 alloc::Layout,
3 borrow::Borrow,
4 fmt::{self, Debug, Display, Pointer},
5 hash::{Hash, Hasher},
6 marker::PhantomData,
7 ops::Deref,
8 ptr::NonNull,
9};
10
11use crate::{
12 Finalization,
13 barrier::{Unlock, Write},
14 collect::Collect,
15 context::{Collection, Mutation},
16 gc_weak::GcWeak,
17 static_collect::Static,
18 types::{GcBox, GcBoxHeader, GcBoxInner, GcColor, Invariant},
19};
20
/// A garbage-collected pointer to a value of type `T`, usable for the arena
/// lifetime `'gc`.
///
/// `Gc` is `Copy` and pointer-sized: it points at the `GcBoxInner` allocation
/// that holds the GC header followed by the value (see `Gc::from_ptr` /
/// `Gc::as_ptr` for the layout math).
pub struct Gc<'gc, T: ?Sized + 'gc> {
    // Pointer to the full allocation (header + value), never null.
    pub(crate) ptr: NonNull<GcBoxInner<T>>,
    // Zero-sized marker tying this pointer to its arena's `'gc` lifetime
    // (the `Invariant` name suggests it also makes `'gc` invariant).
    pub(crate) _invariant: Invariant<'gc>,
}
30
31impl<'gc, T: Debug + ?Sized + 'gc> Debug for Gc<'gc, T> {
32 fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
33 fmt::Debug::fmt(&**self, fmt)
34 }
35}
36
37impl<'gc, T: ?Sized + 'gc> Pointer for Gc<'gc, T> {
38 fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
39 fmt::Pointer::fmt(&Gc::as_ptr(*self), fmt)
40 }
41}
42
43impl<'gc, T: Display + ?Sized + 'gc> Display for Gc<'gc, T> {
44 fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
45 fmt::Display::fmt(&**self, fmt)
46 }
47}
48
// `Gc` is just a pointer plus a zero-sized marker, so it is freely copyable
// regardless of whether `T` is.
impl<'gc, T: ?Sized + 'gc> Copy for Gc<'gc, T> {}
50
51impl<'gc, T: ?Sized + 'gc> Clone for Gc<'gc, T> {
52 #[inline]
53 fn clone(&self) -> Gc<'gc, T> {
54 *self
55 }
56}
57
// Tracing a `Gc` reports the pointed-to allocation to the collector as
// reachable.
unsafe impl<'gc, T: ?Sized + 'gc> Collect for Gc<'gc, T> {
    #[inline]
    fn trace(&self, cc: &Collection) {
        unsafe {
            // Erase the concrete `T` so the collector can treat all boxes
            // uniformly before queueing this one for marking.
            cc.trace(GcBox::erase(self.ptr));
        }
    }
}
66
67impl<'gc, T: ?Sized + 'gc> Deref for Gc<'gc, T> {
68 type Target = T;
69
70 #[inline]
71 fn deref(&self) -> &T {
72 unsafe { &self.ptr.as_ref().value }
73 }
74}
75
76impl<'gc, T: ?Sized + 'gc> AsRef<T> for Gc<'gc, T> {
77 #[inline]
78 fn as_ref(&self) -> &T {
79 unsafe { &self.ptr.as_ref().value }
80 }
81}
82
83impl<'gc, T: ?Sized + 'gc> Borrow<T> for Gc<'gc, T> {
84 #[inline]
85 fn borrow(&self) -> &T {
86 unsafe { &self.ptr.as_ref().value }
87 }
88}
89
90impl<'gc, T: Collect + 'gc> Gc<'gc, T> {
91 #[inline]
92 pub fn new(mc: &Mutation<'gc>, t: T) -> Gc<'gc, T> {
93 Gc {
94 ptr: mc.allocate(t),
95 _invariant: PhantomData,
96 }
97 }
98}
99
impl<'gc, T: 'static> Gc<'gc, T> {
    /// Allocates a `'static` value without requiring `T: Collect`.
    ///
    /// The value is stored wrapped in `Static` and the resulting pointer is
    /// cast back to plain `T`. NOTE(review): soundness of the cast relies on
    /// `Static<T>` having the same layout as `T` (presumably
    /// `#[repr(transparent)]`) — confirm in `static_collect`.
    #[inline]
    pub fn new_static(mc: &Mutation<'gc>, t: T) -> Gc<'gc, T> {
        let p = Gc::new(mc, Static(t));
        unsafe { Gc::cast::<T>(p) }
    }
}
125
impl<'gc, T: ?Sized + 'gc> Gc<'gc, T> {
    /// Reinterprets this pointer as pointing to a value of type `U`.
    ///
    /// # Safety
    /// The caller must guarantee that the stored value is actually valid when
    /// viewed at type `U` (compatible layout and validity invariants).
    #[inline]
    pub unsafe fn cast<U: 'gc>(this: Gc<'gc, T>) -> Gc<'gc, U> {
        Gc {
            ptr: NonNull::cast(this.ptr),
            _invariant: PhantomData,
        }
    }

    /// Type-erases this pointer to `Gc<'gc, ()>`, e.g. for collector
    /// bookkeeping that does not care about the concrete `T`.
    #[inline]
    pub fn erase(this: Gc<'gc, T>) -> Gc<'gc, ()> {
        unsafe { Gc::cast(this) }
    }

    /// Rebuilds a `Gc` from a raw pointer to the stored value (the kind of
    /// pointer `Gc::as_ptr` returns).
    ///
    /// # Safety
    /// `ptr` must point at the value field of a live `GcBoxInner<T>` valid
    /// for `'gc`; the header is recovered by walking backwards from the value
    /// by its offset inside the allocation.
    #[inline]
    pub unsafe fn from_ptr(ptr: *const T) -> Gc<'gc, T> {
        unsafe {
            // Recompute the value's offset within the allocation: the layout
            // is `GcBoxHeader` followed (with any padding) by the value.
            // `Layout::for_value` handles unsized `T` via the fat pointer.
            let layout = Layout::new::<GcBoxHeader>();
            let (_, header_offset) = layout.extend(Layout::for_value(&*ptr)).unwrap();
            // Negate the offset to step back from the value to the box start.
            let header_offset = -(header_offset as isize);
            let ptr = (ptr as *mut T).byte_offset(header_offset) as *mut GcBoxInner<T>;
            Gc {
                ptr: NonNull::new_unchecked(ptr),
                _invariant: PhantomData,
            }
        }
    }
}
168
impl<'gc, T: Unlock + ?Sized + 'gc> Gc<'gc, T> {
    /// Unlocks the stored value for mutation after triggering a write
    /// barrier for this allocation.
    #[inline]
    pub fn unlock(self, mc: &Mutation<'gc>) -> &'gc T::Unlocked {
        // The barrier must run *before* unlocked access is handed out.
        Gc::write(mc, self);
        // SAFETY: NOTE(review): the barrier above is assumed to satisfy the
        // contract `unlock_unchecked` requires — see `barrier::Unlock`.
        unsafe { self.as_ref().unlock_unchecked() }
    }
}
178
impl<'gc, T: ?Sized + 'gc> Gc<'gc, T> {
    /// Borrows the stored value for the entire `'gc` lifetime (unlike
    /// `Deref`, which only borrows for as long as the `Gc` is borrowed).
    #[inline]
    pub fn as_ref(self: Gc<'gc, T>) -> &'gc T {
        // SAFETY: the allocation is assumed to stay live for all of `'gc`.
        unsafe { &self.ptr.as_ref().value }
    }

    /// Creates a weak pointer to the same allocation; on its own it does not
    /// keep the value alive.
    #[inline]
    pub fn downgrade(this: Gc<'gc, T>) -> GcWeak<'gc, T> {
        GcWeak { inner: this }
    }

    /// Runs the backward write barrier for this allocation and returns a
    /// `Write` wrapper as evidence that the barrier was invoked.
    #[inline]
    pub fn write(mc: &Mutation<'gc>, gc: Self) -> &'gc Write<T> {
        unsafe {
            // Notify the collector *before* any mutation can take place.
            mc.backward_barrier(Gc::erase(gc), None);
            // SAFETY: the barrier call above is what `Write::assume` expects
            // the caller to have performed.
            Write::assume(gc.as_ref())
        }
    }

    /// Returns `true` when both pointers refer to the same value.
    #[inline]
    pub fn ptr_eq(this: Gc<'gc, T>, other: Gc<'gc, T>) -> bool {
        // `core::ptr::eq` compares addresses and, for unsized `T`,
        // the pointer metadata as well.
        core::ptr::eq(Gc::as_ptr(this), Gc::as_ptr(other))
    }

    /// Returns a raw pointer to the stored value (not to the GC header);
    /// `Gc::from_ptr` inverts this.
    #[inline]
    pub fn as_ptr(gc: Gc<'gc, T>) -> *const T {
        unsafe {
            let inner = gc.ptr.as_ptr();
            // `addr_of!` projects to the (possibly unsized) value field
            // without materializing an intermediate reference.
            core::ptr::addr_of!((*inner).value) as *const T
        }
    }

    /// During finalization, reports whether this value was left unmarked in
    /// the current cycle (and so is about to be freed unless resurrected).
    #[inline]
    pub fn is_dead(_: &Finalization<'gc>, gc: Gc<'gc, T>) -> bool {
        let inner = unsafe { gc.ptr.as_ref() };
        // White / weak-white colors mean "not reached" at this point.
        matches!(inner.header.color(), GcColor::White | GcColor::WhiteWeak)
    }

    /// During finalization, marks this value as live again so the current
    /// collection cycle will not free it.
    #[inline]
    pub fn resurrect(fc: &Finalization<'gc>, gc: Gc<'gc, T>) {
        unsafe {
            fc.resurrect(GcBox::erase(gc.ptr));
        }
    }
}
256
257impl<'gc, T: PartialEq + ?Sized + 'gc> PartialEq for Gc<'gc, T> {
258 fn eq(&self, other: &Self) -> bool {
259 (**self).eq(other)
260 }
261
262 fn ne(&self, other: &Self) -> bool {
263 (**self).ne(other)
264 }
265}
266
// `Gc<T>` is `Eq` whenever the pointed-to `T` is.
impl<'gc, T: Eq + ?Sized + 'gc> Eq for Gc<'gc, T> {}
268
269impl<'gc, T: PartialOrd + ?Sized + 'gc> PartialOrd for Gc<'gc, T> {
270 fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
271 (**self).partial_cmp(other)
272 }
273
274 fn le(&self, other: &Self) -> bool {
275 (**self).le(other)
276 }
277
278 fn lt(&self, other: &Self) -> bool {
279 (**self).lt(other)
280 }
281
282 fn ge(&self, other: &Self) -> bool {
283 (**self).ge(other)
284 }
285
286 fn gt(&self, other: &Self) -> bool {
287 (**self).gt(other)
288 }
289}
290
291impl<'gc, T: Ord + ?Sized + 'gc> Ord for Gc<'gc, T> {
292 fn cmp(&self, other: &Self) -> core::cmp::Ordering {
293 (**self).cmp(other)
294 }
295}
296
297impl<'gc, T: Hash + ?Sized + 'gc> Hash for Gc<'gc, T> {
298 fn hash<H: Hasher>(&self, state: &mut H) {
299 (**self).hash(state)
300 }
301}