use core::{
  mem::{self, MaybeUninit},
  ptr::{self, NonNull},
};

use super::*;

#[derive(Debug)]
enum Kind<T> {
  Slot(MaybeUninit<T>),
  Inline(NonNull<T>),
  Dangling(NonNull<T>),
}

impl<T> Default for Kind<T> {
  fn default() -> Self {
    if mem::size_of::<T>() == 0 {
      Kind::Dangling(NonNull::dangling())
    } else if mem::needs_drop::<T>() {
      Kind::Slot(MaybeUninit::uninit())
    } else {
      Kind::Inline(NonNull::dangling())
    }
  }
}

#[derive(Debug)]
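/// An owned `T` allocated in the arena. Unless it is detached, dropping this handle drops the value (if needed) and deallocates its memory.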
#[must_use = "The `T` is uninitialized and must be initialized by `write` before it is used, if `T` is not a zero-sized type."]
pub struct Owned<T, A: Allocator> {
  kind: Kind<T>,
  arena: A,
  detached: bool,
  pub(super) allocated: Meta,
}

unsafe impl<T, A: Allocator + Send> Send for Owned<T, A> {}
unsafe impl<T, A: Allocator + Sync> Sync for Owned<T, A> {}

impl<T, A: Allocator> crate::Buffer for Owned<T, A> {
  #[inline]
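  /// Returns the size of the memory region holding the `T`, in bytes.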
  fn capacity(&self) -> usize {
    self.allocated.ptr_size as usize
  }

  #[inline]
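  /// Returns the offset of the `T` within the arena, in bytes.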
  fn offset(&self) -> usize {
    self.allocated.ptr_offset as usize
  }

  #[inline]
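  /// Returns the size of the entire underlying allocation, in bytes.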
  fn buffer_capacity(&self) -> usize {
    self.allocated.memory_size as usize
  }

  #[inline]
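  /// Returns the offset of the entire underlying allocation within the arena, in bytes.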
  fn buffer_offset(&self) -> usize {
    self.allocated.memory_offset as usize
  }

  #[inline]
  unsafe fn detach(&mut self) {
    self.detached = true;
  }

  #[cfg(all(feature = "memmap", not(target_family = "wasm")))]
  fn flush(&self) -> std::io::Result<()> {
    self.arena.flush_range(
      self.allocated.ptr_offset as usize,
      self.allocated.ptr_size as usize,
    )
  }

  #[cfg(all(feature = "memmap", not(target_family = "wasm")))]
  fn flush_async(&self) -> std::io::Result<()> {
    // Flush the same `T` range as `flush`, matching the `RefMut` implementation.
    self.arena.flush_async_range(
      self.allocated.ptr_offset as usize,
      self.allocated.ptr_size as usize,
    )
  }
}

impl<T, A: Allocator> Owned<T, A> {
  #[inline]
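  /// Writes `value` into the allocated memory. For zero-sized types this is a no-op.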
  pub fn write(&mut self, value: T) {
    match &mut self.kind {
      Kind::Slot(slot) => unsafe {
        slot.as_mut_ptr().write(value);
      },
      Kind::Inline(ptr) => unsafe {
        ptr.as_ptr().write(value);
      },
      // `T` is a zero-sized type, so there is nothing to write.
      Kind::Dangling(_) => {}
    }
  }

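  /// Returns a shared reference to the value.
  ///
  /// # Safety
  /// - The value must have been initialized via `write` if `T` is not a zero-sized type.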
  pub unsafe fn as_ref(&self) -> &T {
    unsafe {
      match &self.kind {
        Kind::Slot(slot) => slot.as_ptr().as_ref().unwrap(),
        Kind::Inline(ptr) => ptr.as_ref(),
        Kind::Dangling(val) => val.as_ref(),
      }
    }
  }

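  /// Returns an exclusive reference to the value.
  ///
  /// # Safety
  /// - The value must have been initialized via `write` if `T` is not a zero-sized type.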
  pub unsafe fn as_mut(&mut self) -> &mut T {
    unsafe {
      match &mut self.kind {
        Kind::Slot(slot) => slot.as_mut_ptr().as_mut().unwrap(),
        Kind::Inline(ptr) => ptr.as_mut(),
        Kind::Dangling(val) => val.as_mut(),
      }
    }
  }

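  /// Returns a raw pointer to the (possibly uninitialized) value.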
  pub fn as_mut_ptr(&mut self) -> NonNull<T> {
    match &mut self.kind {
      // The pointer is derived from a reference to the slot, so it is never null.
      Kind::Slot(slot) => unsafe { NonNull::new_unchecked(slot.as_mut_ptr()) },
      Kind::Inline(ptr) => *ptr,
      Kind::Dangling(val) => *val,
    }
  }
}

impl<T, A: Allocator> Drop for Owned<T, A> {
  fn drop(&mut self) {
    match &mut self.kind {
      Kind::Slot(slot) => {
        if !self.detached {
          unsafe {
            if mem::needs_drop::<T>() {
              // The slot pointer comes from a reference, so it is never null.
              ptr::drop_in_place(slot.as_mut_ptr());
            }
          }
          unsafe {
            // SAFETY: the memory was allocated from this arena and has not been detached.
            self
              .arena
              .dealloc(self.allocated.memory_offset, self.allocated.memory_size);
          }
        }
      }
      Kind::Inline(_) => {
        if !self.detached {
          unsafe {
            // SAFETY: the memory was allocated from this arena and has not been detached.
            self
              .arena
              .dealloc(self.allocated.memory_offset, self.allocated.memory_size);
          }
        }
      }
      Kind::Dangling(_) => {}
    }
  }
}

#[derive(Debug)]
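/// A mutable reference to a `T` allocated in the arena. Unless it is detached, dropping this handle drops the value (if needed) and deallocates its memory.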
#[must_use = "The `T` is uninitialized and must be initialized by `write` before it is used, if `T` is not a zero-sized type."]
pub struct RefMut<'a, T, A: Allocator> {
  kind: Kind<T>,
  arena: &'a A,
  detached: bool,
  pub(super) allocated: Meta,
}

impl<T, A: Allocator> crate::Buffer for RefMut<'_, T, A> {
  #[inline]
  fn capacity(&self) -> usize {
    self.allocated.ptr_size as usize
  }

  #[inline]
  fn offset(&self) -> usize {
    self.allocated.ptr_offset as usize
  }

  #[inline]
  fn buffer_capacity(&self) -> usize {
    self.allocated.memory_size as usize
  }

  #[inline]
  fn buffer_offset(&self) -> usize {
    self.allocated.memory_offset as usize
  }

  #[inline]
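  /// Detaches the value from the arena: after this call, dropping this `RefMut`
  /// will neither drop the `T` nor deallocate its memory.
  ///
  /// # Safety
  /// - The caller becomes responsible for the value and for its memory.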
  unsafe fn detach(&mut self) {
    self.detached = true;
  }

  #[cfg(all(feature = "memmap", not(target_family = "wasm")))]
  fn flush(&self) -> std::io::Result<()> {
    self.arena.flush_range(
      self.allocated.ptr_offset as usize,
      self.allocated.ptr_size as usize,
    )
  }

  #[cfg(all(feature = "memmap", not(target_family = "wasm")))]
  fn flush_async(&self) -> std::io::Result<()> {
    self.arena.flush_async_range(
      self.allocated.ptr_offset as usize,
      self.allocated.ptr_size as usize,
    )
  }
}

impl<'a, T, A: Allocator> RefMut<'a, T, A> {
  #[inline]
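  /// Writes `value` into the allocated memory. For zero-sized types this is a no-op.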
  pub fn write(&mut self, value: T) {
    match &mut self.kind {
      Kind::Slot(slot) => unsafe {
        slot.as_mut_ptr().write(value);
      },
      Kind::Inline(ptr) => unsafe {
        ptr.as_ptr().write(value);
      },
      // `T` is a zero-sized type, so there is nothing to write.
      Kind::Dangling(_) => {}
    }
  }

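  /// Returns a shared reference to the value.
  ///
  /// # Safety
  /// - The value must have been initialized via `write` if `T` is not a zero-sized type.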
  pub unsafe fn as_ref(&self) -> &T {
    unsafe {
      match &self.kind {
        Kind::Slot(slot) => slot.as_ptr().as_ref().unwrap(),
        Kind::Inline(ptr) => ptr.as_ref(),
        Kind::Dangling(val) => val.as_ref(),
      }
    }
  }

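  /// Returns an exclusive reference to the value.
  ///
  /// # Safety
  /// - The value must have been initialized via `write` if `T` is not a zero-sized type.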
  pub unsafe fn as_mut(&mut self) -> &mut T {
    unsafe {
      match &mut self.kind {
        Kind::Slot(slot) => slot.as_mut_ptr().as_mut().unwrap(),
        Kind::Inline(ptr) => ptr.as_mut(),
        Kind::Dangling(val) => val.as_mut(),
      }
    }
  }

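  /// Returns a raw pointer to the (possibly uninitialized) value.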
  pub fn as_mut_ptr(&mut self) -> NonNull<T> {
    match &mut self.kind {
      // The pointer is derived from a reference to the slot, so it is never null.
      Kind::Slot(slot) => unsafe { NonNull::new_unchecked(slot.as_mut_ptr()) },
      Kind::Inline(ptr) => *ptr,
      Kind::Dangling(val) => *val,
    }
  }

  #[inline]
  pub(super) fn new(slot: MaybeUninit<T>, allocated: Meta, arena: &'a A) -> Self {
    Self {
      kind: Kind::Slot(slot),
      arena,
      detached: false,
      allocated,
    }
  }

  #[inline]
  pub(super) fn new_inline(value: NonNull<T>, allocated: Meta, arena: &'a A) -> Self {
    Self {
      kind: Kind::Inline(value),
      arena,
      detached: false,
      allocated,
    }
  }

  #[inline]
  pub(super) fn new_zst(arena: &'a A) -> Self {
    Self {
      kind: Kind::Dangling(NonNull::dangling()),
      allocated: Meta::null(arena.raw_ptr() as _),
      arena,
      detached: false,
    }
  }

  #[allow(clippy::wrong_self_convention)]
  #[inline]
  pub(super) fn to_owned(&mut self) -> Owned<T, A> {
    // Detach `self` so its `Drop` no longer frees the memory; responsibility for
    // dropping the value and deallocating moves to the returned `Owned`.
    self.detached = true;

    Owned {
      arena: self.arena.clone(),
      kind: mem::take(&mut self.kind),
      detached: false,
      allocated: self.allocated,
    }
  }
}

impl<T, A: Allocator> Drop for RefMut<'_, T, A> {
  fn drop(&mut self) {
    match &mut self.kind {
      Kind::Slot(slot) => {
        if !self.detached {
          unsafe {
            if mem::needs_drop::<T>() {
              // The slot pointer comes from a reference, so it is never null.
              ptr::drop_in_place(slot.as_mut_ptr());
            }
          }
          unsafe {
            // SAFETY: the memory was allocated from this arena and has not been detached.
            self
              .arena
              .dealloc(self.allocated.memory_offset, self.allocated.memory_size);
          }
        }
      }
      Kind::Inline(_) => {
        if !self.detached {
          unsafe {
            // SAFETY: the memory was allocated from this arena and has not been detached.
            self
              .arena
              .dealloc(self.allocated.memory_offset, self.allocated.memory_size);
          }
        }
      }
      Kind::Dangling(_) => {}
    }
  }
}