deno_core/arena/
mod.rs

// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
mod raw_arena;
mod shared_arena;
mod shared_atomic_arena;
mod unique_arena;

use std::alloc::handle_alloc_error;
use std::alloc::Layout;
use std::ptr::NonNull;

pub use raw_arena::*;
pub use shared_arena::*;
pub use shared_atomic_arena::*;
pub use unique_arena::*;

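/// Offsets `ptr` forwards by `offset` bytes and casts the result to
/// `NonNull<U>`.
///
/// # Safety
///
/// The caller must guarantee that the offset stays within the same allocated
/// object, so that the resulting pointer is valid and non-null.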
const unsafe fn ptr_byte_add<T, U>(
  ptr: NonNull<T>,
  offset: usize,
) -> NonNull<U> {
  NonNull::new_unchecked((ptr.as_ptr() as *mut u8).add(offset) as _)
}

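/// Offsets `ptr` backwards by `offset` bytes and casts the result to
/// `NonNull<U>`.
///
/// # Safety
///
/// The caller must guarantee that the offset stays within the same allocated
/// object, so that the resulting pointer is valid and non-null.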
const unsafe fn ptr_byte_sub<T, U>(
  ptr: NonNull<T>,
  offset: usize,
) -> NonNull<U> {
  NonNull::new_unchecked((ptr.as_ptr() as *mut u8).sub(offset) as _)
}

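/// Allocates uninitialized memory described by `layout` and returns it as a
/// `NonNull<T>`, diverging via `handle_alloc_error` if the allocator fails.
/// `T` must not be zero-sized.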
#[inline(always)]
fn alloc_layout<T>(layout: Layout) -> NonNull<T> {
  // Layout of size zero is UB
  assert!(std::mem::size_of::<T>() > 0);
  let alloc = unsafe { std::alloc::alloc(layout) } as *mut _;
  let Some(alloc) = NonNull::new(alloc) else {
    handle_alloc_error(layout);
  };
  alloc
}

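/// Allocates uninitialized memory for a single `T` using `Layout::new::<T>()`,
/// diverging via `handle_alloc_error` if the allocator fails. `T` must not be
/// zero-sized.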
#[inline(always)]
fn alloc<T>() -> NonNull<T> {
  // Layout of size zero is UB
  assert!(std::mem::size_of::<T>() > 0);
  let alloc = unsafe { std::alloc::alloc(Layout::new::<T>()) } as *mut _;
  let Some(alloc) = NonNull::new(alloc) else {
    handle_alloc_error(Layout::new::<T>());
  };
  alloc
}
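
// A minimal usage sketch of the helpers above: it allocates a `u64`, walks the
// allocation byte-wise with `ptr_byte_add`/`ptr_byte_sub`, and frees it again.
// The test name and constants are illustrative only.
#[cfg(test)]
mod tests {
  use super::*;

  #[test]
  fn alloc_write_and_byte_offset() {
    // Allocate uninitialized storage for one u64 and initialize it.
    let ptr: std::ptr::NonNull<u64> = alloc::<u64>();
    unsafe {
      ptr.as_ptr().write(0x1122_3344_5566_7788);

      // Step 4 bytes into the same allocation and reinterpret as u32;
      // the bytes were initialized by the write above.
      let half: std::ptr::NonNull<u32> = ptr_byte_add(ptr, 4);
      let _upper: u32 = half.as_ptr().read();

      // Stepping back by the same offset recovers the original pointer.
      let back: std::ptr::NonNull<u64> = ptr_byte_sub(half, 4);
      assert_eq!(back.as_ptr(), ptr.as_ptr());

      // Deallocate with the same layout `alloc::<u64>()` used.
      std::alloc::dealloc(
        ptr.as_ptr() as *mut u8,
        std::alloc::Layout::new::<u64>(),
      );
    }
  }
}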