// v8/shared_array_buffer.rs

1// Copyright 2019-2021 the Deno authors. All rights reserved. MIT license.
2
3use std::ffi::c_void;
4
5use crate::BackingStore;
6use crate::BackingStoreDeleterCallback;
7use crate::Local;
8use crate::SharedArrayBuffer;
9use crate::isolate::RealIsolate;
10use crate::scope::GetIsolate;
11use crate::scope::PinScope;
12use crate::support::SharedRef;
13use crate::support::UniqueRef;
14
// FFI declarations for the C++ glue functions wrapping the corresponding
// `v8::SharedArrayBuffer` methods. The safe wrappers below are the only
// callers and are responsible for upholding pointer validity.
unsafe extern "C" {
  // Allocate a new SharedArrayBuffer of `byte_length` bytes in `isolate`.
  // A null result is surfaced as `None` by `SharedArrayBuffer::new`.
  fn v8__SharedArrayBuffer__New__with_byte_length(
    isolate: *mut RealIsolate,
    byte_length: usize,
  ) -> *const SharedArrayBuffer;
  // Create a SharedArrayBuffer over an existing shared backing store.
  fn v8__SharedArrayBuffer__New__with_backing_store(
    isolate: *mut RealIsolate,
    backing_store: *const SharedRef<BackingStore>,
  ) -> *const SharedArrayBuffer;
  // Byte length of the buffer; `this` must be a valid pointer.
  fn v8__SharedArrayBuffer__ByteLength(this: *const SharedArrayBuffer)
  -> usize;
  // Shared handle to the buffer's backing store.
  fn v8__SharedArrayBuffer__GetBackingStore(
    this: *const SharedArrayBuffer,
  ) -> SharedRef<BackingStore>;
  // Allocate a standalone backing store of `byte_length` bytes via the
  // isolate's array buffer allocator.
  fn v8__SharedArrayBuffer__NewBackingStore__with_byte_length(
    isolate: *mut RealIsolate,
    byte_length: usize,
  ) -> *mut BackingStore;
  // Wrap caller-owned memory in a backing store; `deleter` is invoked with
  // (`data`, `byte_length`, `deleter_data`) when the store is destroyed
  // (see `drop_rawable` in `new_backing_store_from_bytes`).
  fn v8__SharedArrayBuffer__NewBackingStore__with_data(
    data: *mut c_void,
    byte_length: usize,
    deleter: BackingStoreDeleterCallback,
    deleter_data: *mut c_void,
  ) -> *mut BackingStore;
}
40
41impl SharedArrayBuffer {
42  /// Create a new SharedArrayBuffer. Allocate |byte_length| bytes.
43  /// Allocated memory will be owned by a created SharedArrayBuffer and
44  /// will be deallocated when it is garbage-collected,
45  /// unless the object is externalized.
46  #[inline(always)]
47  pub fn new<'s>(
48    scope: &PinScope<'s, '_>,
49    byte_length: usize,
50  ) -> Option<Local<'s, SharedArrayBuffer>> {
51    unsafe {
52      scope.cast_local(|sd| {
53        v8__SharedArrayBuffer__New__with_byte_length(
54          sd.get_isolate_ptr(),
55          byte_length,
56        )
57      })
58    }
59  }
60
61  #[inline(always)]
62  pub fn with_backing_store<'s>(
63    scope: &PinScope<'s, '_>,
64    backing_store: &SharedRef<BackingStore>,
65  ) -> Local<'s, SharedArrayBuffer> {
66    unsafe {
67      scope.cast_local(|sd| {
68        v8__SharedArrayBuffer__New__with_backing_store(
69          sd.get_isolate_ptr(),
70          backing_store,
71        )
72      })
73    }
74    .unwrap()
75  }
76
77  /// Data length in bytes.
78  #[inline(always)]
79  pub fn byte_length(&self) -> usize {
80    unsafe { v8__SharedArrayBuffer__ByteLength(self) }
81  }
82
83  /// Get a shared pointer to the backing store of this array buffer. This
84  /// pointer coordinates the lifetime management of the internal storage
85  /// with any live ArrayBuffers on the heap, even across isolates. The embedder
86  /// should not attempt to manage lifetime of the storage through other means.
87  #[inline(always)]
88  pub fn get_backing_store(&self) -> SharedRef<BackingStore> {
89    unsafe { v8__SharedArrayBuffer__GetBackingStore(self) }
90  }
91
  /// Returns a new standalone BackingStore that is allocated using the array
  /// buffer allocator of the isolate. The result can be later passed to
  /// SharedArrayBuffer::New.
  ///
  /// If the allocator returns nullptr, then the function may cause GCs in the
  /// given isolate and re-try the allocation. If GCs do not help, then the
  /// function will crash with an out-of-memory error.
  #[inline(always)]
  pub fn new_backing_store(
    scope: &PinScope<'_, '_>,
    byte_length: usize,
  ) -> UniqueRef<BackingStore> {
    // SAFETY: the scope provides a live isolate pointer; the returned raw
    // pointer's ownership is transferred into the `UniqueRef`.
    unsafe {
      UniqueRef::from_raw(
        v8__SharedArrayBuffer__NewBackingStore__with_byte_length(
          scope.get_isolate_ptr(),
          byte_length,
        ),
      )
    }
  }
113
114  /// Returns a new standalone BackingStore that takes over the ownership of
115  /// the given buffer.
116  ///
117  /// The destructor of the BackingStore frees owned buffer memory.
118  ///
119  /// The result can be later passed to SharedArrayBuffer::New. The raw pointer
120  /// to the buffer must not be passed again to any V8 API function.
121  ///
122  /// Not available in Sandbox Mode, see new_backing_store_from_bytes for a potential alternative
123  #[inline(always)]
124  #[cfg(not(feature = "v8_enable_sandbox"))]
125  pub fn new_backing_store_from_boxed_slice(
126    data: Box<[u8]>,
127  ) -> UniqueRef<BackingStore> {
128    Self::new_backing_store_from_bytes(data)
129  }
130
131  /// Returns a new standalone BackingStore that takes over the ownership of
132  /// the given buffer.
133  ///
134  /// The destructor of the BackingStore frees owned buffer memory.
135  ///
136  /// The result can be later passed to SharedArrayBuffer::New. The raw pointer
137  /// to the buffer must not be passed again to any V8 API function.
138  ///
139  /// Not available in Sandbox Mode, see new_backing_store_from_bytes for a potential alternative
140  #[inline(always)]
141  #[cfg(not(feature = "v8_enable_sandbox"))]
142  pub fn new_backing_store_from_vec(data: Vec<u8>) -> UniqueRef<BackingStore> {
143    Self::new_backing_store_from_bytes(data)
144  }
145
  /// Returns a new standalone BackingStore backed by a container that dereferences
  /// to a mutable slice of bytes. The object is dereferenced once, and the resulting slice's
  /// memory is used for the lifetime of the buffer.
  ///
  /// This method may be called with most single-ownership containers that implement `AsMut<[u8]>`, including
  /// `Box<[u8]>`, and `Vec<u8>`. This will also support most other mutable bytes containers (including `bytes::BytesMut`),
  /// though these buffers will need to be boxed to manage ownership of memory.
  ///
  /// Not available in sandbox mode. Sandbox mode requires data to be allocated
  /// within the sandbox's address space. Within sandbox mode, consider the below alternatives:
  ///
  /// 1. consider using new_backing_store and BackingStore::data() followed by doing a std::ptr::copy to copy the data into a BackingStore.
  /// 2. If you truly do have data that is allocated inside the sandbox address space, consider using the unsafe new_backing_store_from_ptr API
  ///
  /// ```
  /// // Vector of bytes
  /// let backing_store = v8::SharedArrayBuffer::new_backing_store_from_bytes(vec![1, 2, 3]);
  /// // Boxed slice of bytes
  /// let boxed_slice: Box<[u8]> = vec![1, 2, 3].into_boxed_slice();
  /// let backing_store = v8::SharedArrayBuffer::new_backing_store_from_bytes(boxed_slice);
  /// // BytesMut from bytes crate
  /// let backing_store = v8::SharedArrayBuffer::new_backing_store_from_bytes(Box::new(bytes::BytesMut::new()));
  /// ```
  #[cfg(not(feature = "v8_enable_sandbox"))]
  #[inline(always)]
  pub fn new_backing_store_from_bytes<T>(
    mut bytes: T,
  ) -> UniqueRef<BackingStore>
  where
    T: crate::array_buffer::sealed::Rawable,
  {
    // Capture the length before ownership of `bytes` is given away.
    let len = bytes.byte_len();

    // Split the container into the owning pointer (kept alive as
    // `deleter_data`) and the raw slice pointer handed to V8.
    let (ptr, slice) = T::into_raw(bytes);

    // Deleter called by V8 when the backing store dies; reconstructs and
    // drops the original container so its memory is freed exactly once.
    unsafe extern "C" fn drop_rawable<
      T: crate::array_buffer::sealed::Rawable,
    >(
      _ptr: *mut c_void,
      len: usize,
      data: *mut c_void,
    ) {
      // SAFETY: We know that data is a raw T from above
      unsafe { T::drop_raw(data as _, len) }
    }

    // SAFETY: We are extending the lifetime of a slice, but we're locking away the box that we
    // derefed from so there's no way to get another mutable reference.
    unsafe {
      Self::new_backing_store_from_ptr(
        slice as _,
        len,
        drop_rawable::<T>,
        ptr as _,
      )
    }
  }
203
  /// Returns a new standalone BackingStore backed by given ptr.
  ///
  /// The `deleter_callback` is invoked with (`data_ptr`, `byte_length`,
  /// `deleter_data`) when the backing store is destroyed.
  ///
  /// # Safety
  ///
  /// This API consumes raw pointers so is inherently
  /// unsafe. Usually you should use new_backing_store_from_boxed_slice.
  /// The caller must ensure `data_ptr` stays valid for `byte_length` bytes
  /// until `deleter_callback` runs, and must not pass the pointer to any
  /// other V8 API afterwards.
  ///
  /// WARNING: Using sandbox mode has extra limitations that may cause crashes
  /// or memory safety violations if this API is used incorrectly:
  ///
  /// 1. Sandbox mode requires data to be allocated within the sandbox's address space.
  /// 2. It is very easy to cause memory safety errors when using this API with sandbox mode
  #[inline(always)]
  pub unsafe fn new_backing_store_from_ptr(
    data_ptr: *mut c_void,
    byte_length: usize,
    deleter_callback: BackingStoreDeleterCallback,
    deleter_data: *mut c_void,
  ) -> UniqueRef<BackingStore> {
    // SAFETY: the caller upholds the pointer/length contract documented
    // above; ownership of the returned store moves into the `UniqueRef`.
    unsafe {
      UniqueRef::from_raw(v8__SharedArrayBuffer__NewBackingStore__with_data(
        data_ptr,
        byte_length,
        deleter_callback,
        deleter_data,
      ))
    }
  }
230}