// rarena_allocator/bytes.rs

use core::{ops, ptr::NonNull};

pub use dbutils::{error::InsufficientBuffer, leb128::DecodeVarintError};
use either::Either;

use super::*;

/// An owned buffer allocated by the ARENA.
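///
/// Internally the buffer either owns a handle to the ARENA it was allocated
/// from (`Either::Left`) or holds a dangling pointer standing in for the
/// zero-sized null buffer (`Either::Right`).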
pub struct BytesMut<A: Allocator> {
  /// The source ARENA, or a dangling pointer for the zero-sized null buffer.
  arena: Either<A, NonNull<u8>>,
  /// When `true`, the allocation is not returned to the ARENA on drop.
  detach: bool,
  /// The current length of the buffer; at most `allocated.ptr_size`.
  len: usize,
  /// Allocation metadata: the offsets and sizes of this buffer within the ARENA.
  allocated: Meta,
}

impl<A: Allocator> core::fmt::Debug for BytesMut<A> {
  fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
    f.debug_struct("BytesMut")
      .field("len", &self.len)
      .field("meta", &self.allocated)
      .finish()
  }
}

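// SAFETY: `BytesMut` has exclusive access to its region of the ARENA, and the
// `NonNull` variant is only ever a dangling pointer for the null buffer, so
// sending/sharing is sound whenever the allocator itself is `Send`/`Sync`.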
unsafe impl<A: Allocator + Send> Send for BytesMut<A> {}
unsafe impl<A: Allocator + Sync> Sync for BytesMut<A> {}

impl<A: Allocator> ops::Deref for BytesMut<A> {
  type Target = [u8];

  #[inline]
  fn deref(&self) -> &Self::Target {
    match self.arena {
      // SAFETY: The buffer is allocated by the ARENA, and the len and offset are valid.
      Either::Left(ref arena) => unsafe { arena.get_bytes(self.offset(), self.len) },
      Either::Right(_) => &[],
    }
  }
}

impl<A: Allocator> ops::DerefMut for BytesMut<A> {
  #[inline]
  fn deref_mut(&mut self) -> &mut Self::Target {
    let offset = self.offset();
    match self.arena {
      // SAFETY: The buffer is allocated by the ARENA, and the len and offset are valid.
      Either::Left(ref mut arena) => unsafe { arena.get_bytes_mut(offset, self.len) },
      Either::Right(_) => &mut [],
    }
  }
}

impl<A: Allocator> AsRef<[u8]> for BytesMut<A> {
  #[inline]
  fn as_ref(&self) -> &[u8] {
    self
  }
}

impl<A: Allocator> AsMut<[u8]> for BytesMut<A> {
  #[inline]
  fn as_mut(&mut self) -> &mut [u8] {
    self
  }
}

impl_write!(BytesMut<A>);

impl<A: Allocator> crate::Buffer for BytesMut<A> {
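  // Note: `ptr_offset`/`ptr_size` describe the usable region handed to the
  // caller, while `memory_offset`/`memory_size` cover the whole underlying
  // allocation, which is the span returned to the ARENA on drop.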
  #[inline]
  fn capacity(&self) -> usize {
    self.allocated.ptr_size as usize
  }

  #[inline]
  fn offset(&self) -> usize {
    self.allocated.ptr_offset as usize
  }

  #[inline]
  fn buffer_offset(&self) -> usize {
    self.allocated.memory_offset as usize
  }

  #[inline]
  fn buffer_capacity(&self) -> usize {
    self.allocated.memory_size as usize
  }

  #[inline]
  unsafe fn detach(&mut self) {
    self.detach = true;
  }

  #[cfg(all(feature = "memmap", not(target_family = "wasm")))]
  fn flush(&self) -> std::io::Result<()> {
    match self.arena.as_ref() {
      Either::Left(arena) => arena.flush_range(
        self.allocated.ptr_offset as usize,
        self.allocated.ptr_size as usize,
      ),
      Either::Right(_) => Ok(()),
    }
  }

  #[cfg(all(feature = "memmap", not(target_family = "wasm")))]
  fn flush_async(&self) -> std::io::Result<()> {
    match self.arena.as_ref() {
      Either::Left(arena) => arena.flush_async_range(
        self.allocated.ptr_offset as usize,
        self.allocated.ptr_size as usize,
      ),
      Either::Right(_) => Ok(()),
    }
  }
}

impl<A: Allocator> BytesMut<A> {
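  // The macros below generate the buffer read/write helpers for fixed-width
  // integers, LEB128-encoded varints, raw byte slices, and alignment.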
  impl_bytes_mut_utils!(8);
  impl_bytes_mut_utils!(u16, u32, u64, usize, u128, i16, i32, i64, isize, i128);
  impl_bytes_mut_utils!(leb(u16, u32, u64, u128, i16, i32, i64, i128));
  impl_bytes_mut_utils!(slice);
  impl_bytes_mut_utils!(align);

  impl_bytes_utils!(8);
  impl_bytes_utils!(u16, u32, u64, usize, u128, i16, i32, i64, isize, i128);
  impl_bytes_utils!(leb(u16, u32, u64, u128, i16, i32, i64, i128));
  impl_bytes_utils!(slice);

  /// Returns the mutable pointer to the buffer.
  #[inline]
  pub fn as_mut_ptr(&mut self) -> *mut u8 {
    let offset = self.offset();
    // SAFETY: The buffer is allocated by the ARENA, and the offset is valid.
    match self.arena.as_mut() {
      Either::Left(arena) => unsafe { arena.get_pointer_mut(offset) },
      Either::Right(ptr) => ptr.as_ptr(),
    }
  }

  /// Returns the pointer to the buffer.
  #[inline]
  pub fn as_ptr(&self) -> *const u8 {
    // SAFETY: The buffer is allocated by the ARENA, and the offset is valid.
    match self.arena.as_ref() {
      Either::Left(arena) => unsafe { arena.get_pointer(self.offset()) },
      Either::Right(ptr) => ptr.as_ptr(),
    }
  }

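  /// Creates the zero-sized null buffer, which is not backed by any ARENA
  /// allocation; the given `parent_ptr` is forwarded to `Meta::null`.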
  #[inline]
  pub(super) fn null(parent_ptr: *const u8) -> Self {
    Self {
      arena: Either::Right(NonNull::dangling()),
      len: 0,
      allocated: Meta::null(parent_ptr),
      detach: false,
    }
  }

  #[inline]
  fn buffer(&self) -> &[u8] {
    match self.arena {
      // SAFETY: The buffer is allocated by the ARENA, and the len and offset are valid.
      Either::Left(ref arena) => unsafe { arena.get_bytes(self.offset(), self.capacity()) },
      Either::Right(_) => &[],
    }
  }

  #[inline]
  fn buffer_mut(&mut self) -> &mut [u8] {
    let offset = self.offset();
    let cap = self.capacity();
    match self.arena {
      // SAFETY: The buffer is allocated by the ARENA, and the len and offset are valid.
      Either::Left(ref mut arena) => unsafe { arena.get_bytes_mut(offset, cap) },
      Either::Right(_) => &mut [],
    }
  }
}

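// Dropping a `BytesMut` returns its memory to the ARENA, unless the buffer was
// detached or is the zero-sized null buffer.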
impl<A: Allocator> Drop for BytesMut<A> {
  #[inline]
  fn drop(&mut self) {
    match self.arena {
      Either::Left(_) if self.detach => {}
      // SAFETY: `offset` and `offset + size` are in bounds of the ARENA.
      Either::Left(ref mut arena) => unsafe {
        let _ = arena.dealloc(self.allocated.memory_offset, self.allocated.memory_size);
      },
      Either::Right(_) => {}
    }
  }
}

/// A buffer allocated by the ARENA.
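///
/// Unlike [`BytesMut`], this type borrows the ARENA rather than owning a
/// handle to it, so the buffer cannot outlive the allocator it came from.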
pub struct BytesRefMut<'a, A: Allocator> {
  arena: &'a A,
  len: usize,
  pub(super) allocated: Meta,
  pub(super) detach: bool,
}

impl<A: Allocator> core::fmt::Debug for BytesRefMut<'_, A> {
  fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
    f.debug_struct("BytesRefMut")
      .field("len", &self.len)
      .field("meta", &self.allocated)
      .finish()
  }
}

impl<A: Allocator> ops::Deref for BytesRefMut<'_, A> {
  type Target = [u8];

  #[inline]
  fn deref(&self) -> &Self::Target {
    if self.allocated.ptr_size == 0 {
      return &[];
    }

    // SAFETY: The buffer is allocated by the ARENA, and the len and offset are valid.
    unsafe { self.arena.get_bytes(self.offset(), self.len) }
  }
}

impl<A: Allocator> ops::DerefMut for BytesRefMut<'_, A> {
  #[inline]
  fn deref_mut(&mut self) -> &mut Self::Target {
    if self.allocated.ptr_size == 0 {
      return &mut [];
    }

    // SAFETY: The buffer is allocated by the ARENA, and the len and offset are valid.
    unsafe { self.arena.get_bytes_mut(self.offset(), self.len) }
  }
}

impl<A: Allocator> AsRef<[u8]> for BytesRefMut<'_, A> {
  #[inline]
  fn as_ref(&self) -> &[u8] {
    self
  }
}

impl<A: Allocator> AsMut<[u8]> for BytesRefMut<'_, A> {
  #[inline]
  fn as_mut(&mut self) -> &mut [u8] {
    self
  }
}

impl_write!(BytesRefMut<'a, A>);

impl<A: Allocator> crate::Buffer for BytesRefMut<'_, A> {
  #[inline]
  fn capacity(&self) -> usize {
    self.allocated.ptr_size as usize
  }

  #[inline]
  fn offset(&self) -> usize {
    self.allocated.ptr_offset as usize
  }

  #[inline]
  fn buffer_offset(&self) -> usize {
    self.allocated.memory_offset as usize
  }

  #[inline]
  fn buffer_capacity(&self) -> usize {
    self.allocated.memory_size as usize
  }

  #[inline]
  unsafe fn detach(&mut self) {
    self.detach = true;
  }

  #[cfg(all(feature = "memmap", not(target_family = "wasm")))]
  fn flush(&self) -> std::io::Result<()> {
    self.arena.flush_range(
      self.allocated.ptr_offset as usize,
      self.allocated.ptr_size as usize,
    )
  }

  #[cfg(all(feature = "memmap", not(target_family = "wasm")))]
  fn flush_async(&self) -> std::io::Result<()> {
    self.arena.flush_async_range(
      self.allocated.ptr_offset as usize,
      self.allocated.ptr_size as usize,
    )
  }
}

impl<'a, A: Allocator> BytesRefMut<'a, A> {
  impl_bytes_mut_utils!(8);
  impl_bytes_mut_utils!(u16, u32, u64, usize, u128, i16, i32, i64, isize, i128);
  impl_bytes_mut_utils!(leb(u16, u32, u64, u128, i16, i32, i64, i128));
  impl_bytes_mut_utils!(slice);
  impl_bytes_mut_utils!(align);

  impl_bytes_utils!(8);
  impl_bytes_utils!(u16, u32, u64, usize, u128, i16, i32, i64, isize, i128);
  impl_bytes_utils!(leb(u16, u32, u64, u128, i16, i32, i64, i128));
  impl_bytes_utils!(slice);

  /// Returns the mutable pointer to the buffer.
  #[inline]
  pub fn as_mut_ptr(&mut self) -> *mut u8 {
    // SAFETY: The buffer is allocated by the ARENA, and the offset is valid.
    unsafe { self.arena.get_pointer_mut(self.offset()) }
  }

  /// Returns the pointer to the buffer.
  #[inline]
  pub fn as_ptr(&self) -> *const u8 {
    // SAFETY: The buffer is allocated by the ARENA, and the offset is valid.
    unsafe { self.arena.get_pointer(self.offset()) }
  }

  /// Returns the length of the buffer.
  #[inline]
  pub const fn len(&self) -> usize {
    self.len
  }

  /// Returns `true` if the buffer is empty.
  #[inline]
  pub const fn is_empty(&self) -> bool {
    self.len == 0
  }

  /// Returns the remaining capacity.
  #[inline]
  pub const fn remaining(&self) -> usize {
    self.allocated.ptr_size as usize - self.len
  }

  /// SAFETY: the offsets and sizes in `allocated` must be in bounds of `arena`.
  #[inline]
  pub(super) unsafe fn new(arena: &'a A, allocated: Meta) -> Self {
    Self {
      arena,
      len: 0,
      allocated,
      detach: false,
    }
  }

  #[inline]
  pub(super) fn null(arena: &'a A) -> Self {
    Self {
      allocated: Meta::null(arena.raw_ptr() as _),
      arena,
      len: 0,
      detach: false,
    }
  }

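  /// Converts this borrowed buffer into an owned [`BytesMut`] backed by a
  /// clone of the ARENA handle, detaching `self` so that only the returned
  /// value frees the allocation on drop.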
  #[allow(clippy::wrong_self_convention)]
  #[inline]
  pub(super) fn to_owned(&mut self) -> BytesMut<A> {
    if self.allocated.memory_size == 0 {
      return BytesMut::null(self.arena.raw_ptr() as _);
    }
    self.detach = true;

    BytesMut {
      arena: Either::Left(self.arena.clone()),
      len: self.len,
      allocated: self.allocated,
      detach: false,
    }
  }

  #[inline]
  fn buffer(&self) -> &[u8] {
    if self.allocated.ptr_size == 0 {
      return &[];
    }

    // SAFETY: The buffer is allocated by the ARENA, and the len and offset are valid.
    unsafe { self.arena.get_bytes(self.offset(), self.capacity()) }
  }

  #[inline]
  fn buffer_mut(&mut self) -> &mut [u8] {
    if self.allocated.ptr_size == 0 {
      return &mut [];
    }

    // SAFETY: The buffer is allocated by the ARENA, and the len and offset are valid.
    unsafe { self.arena.get_bytes_mut(self.offset(), self.capacity()) }
  }
}

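// Dropping a `BytesRefMut` returns its memory to the ARENA, unless the buffer
// was detached (e.g. by `to_owned`).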
impl<A: Allocator> Drop for BytesRefMut<'_, A> {
  #[inline]
  fn drop(&mut self) {
    if self.detach {
      return;
    }

    // SAFETY: `offset` and `offset + size` are in bounds of the ARENA.
    unsafe {
      self
        .arena
        .dealloc(self.allocated.memory_offset, self.allocated.memory_size);
    }
  }
}