rusty_v8/
array_buffer.rs

1// Copyright 2019-2021 the Deno authors. All rights reserved. MIT license.
2
3use std::cell::Cell;
4use std::ffi::c_void;
5use std::ops::Deref;
6use std::ptr::null_mut;
7use std::slice;
8
9use crate::support::long;
10use crate::support::Opaque;
11use crate::support::Shared;
12use crate::support::SharedPtrBase;
13use crate::support::SharedRef;
14use crate::support::UniquePtr;
15use crate::support::UniqueRef;
16use crate::ArrayBuffer;
17use crate::HandleScope;
18use crate::Isolate;
19use crate::Local;
20
// Raw bindings into the crate's C++ glue layer. Each `v8__*` function wraps a
// single V8 API call (the name encodes the C++ path, e.g.
// `v8__ArrayBuffer__Detach` -> `v8::ArrayBuffer::Detach`). The
// `std__shared_ptr__*` functions expose the std::shared_ptr operations that
// Rust needs to manage `BackingStore` and `Allocator` lifetimes across FFI.
extern "C" {
  // v8::ArrayBuffer::Allocator: construction and destruction.
  fn v8__ArrayBuffer__Allocator__NewDefaultAllocator() -> *mut Allocator;
  fn v8__ArrayBuffer__Allocator__NewRustAllocator(
    handle: *const c_void,
    vtable: *const RustAllocatorVtable<c_void>,
  ) -> *mut Allocator;
  fn v8__ArrayBuffer__Allocator__DELETE(this: *mut Allocator);
  // v8::ArrayBuffer: creation and instance methods.
  fn v8__ArrayBuffer__New__with_byte_length(
    isolate: *mut Isolate,
    byte_length: usize,
  ) -> *const ArrayBuffer;
  fn v8__ArrayBuffer__New__with_backing_store(
    isolate: *mut Isolate,
    backing_store: *const SharedRef<BackingStore>,
  ) -> *const ArrayBuffer;
  fn v8__ArrayBuffer__Detach(this: *const ArrayBuffer);
  fn v8__ArrayBuffer__IsDetachable(this: *const ArrayBuffer) -> bool;
  fn v8__ArrayBuffer__ByteLength(this: *const ArrayBuffer) -> usize;
  fn v8__ArrayBuffer__GetBackingStore(
    this: *const ArrayBuffer,
  ) -> SharedRef<BackingStore>;
  // Standalone BackingStore constructors (v8::ArrayBuffer::NewBackingStore).
  fn v8__ArrayBuffer__NewBackingStore__with_byte_length(
    isolate: *mut Isolate,
    byte_length: usize,
  ) -> *mut BackingStore;
  fn v8__ArrayBuffer__NewBackingStore__with_data(
    data: *mut c_void,
    byte_length: usize,
    deleter: BackingStoreDeleterCallback,
    deleter_data: *mut c_void,
  ) -> *mut BackingStore;

  // v8::BackingStore accessors and destructor.
  fn v8__BackingStore__Data(this: *const BackingStore) -> *mut c_void;
  fn v8__BackingStore__ByteLength(this: *const BackingStore) -> usize;
  fn v8__BackingStore__IsShared(this: *const BackingStore) -> bool;
  fn v8__BackingStore__DELETE(this: *mut BackingStore);

  // std::shared_ptr<v8::BackingStore> support.
  fn std__shared_ptr__v8__BackingStore__COPY(
    ptr: *const SharedPtrBase<BackingStore>,
  ) -> SharedPtrBase<BackingStore>;
  fn std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(
    unique_ptr: UniquePtr<BackingStore>,
  ) -> SharedPtrBase<BackingStore>;
  fn std__shared_ptr__v8__BackingStore__get(
    ptr: *const SharedPtrBase<BackingStore>,
  ) -> *mut BackingStore;
  fn std__shared_ptr__v8__BackingStore__reset(
    ptr: *mut SharedPtrBase<BackingStore>,
  );
  fn std__shared_ptr__v8__BackingStore__use_count(
    ptr: *const SharedPtrBase<BackingStore>,
  ) -> long;

  // std::shared_ptr<v8::ArrayBuffer::Allocator> support.
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__COPY(
    ptr: *const SharedPtrBase<Allocator>,
  ) -> SharedPtrBase<Allocator>;
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__CONVERT__std__unique_ptr(
    unique_ptr: UniquePtr<Allocator>,
  ) -> SharedPtrBase<Allocator>;
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__get(
    ptr: *const SharedPtrBase<Allocator>,
  ) -> *mut Allocator;
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__reset(
    ptr: *mut SharedPtrBase<Allocator>,
  );
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__use_count(
    ptr: *const SharedPtrBase<Allocator>,
  ) -> long;
}
90
/// A thread-safe allocator that V8 uses to allocate |ArrayBuffer|'s memory.
/// The allocator is a global V8 setting. It has to be set via
/// Isolate::CreateParams.
///
/// Memory allocated through this allocator by V8 is accounted for as external
/// memory by V8. Note that V8 keeps track of the memory for all internalized
/// |ArrayBuffer|s. Responsibility for tracking external memory (using
/// Isolate::AdjustAmountOfExternalAllocatedMemory) is handed over to the
/// embedder upon externalization and taken over upon internalization (creating
/// an internalized buffer from an existing buffer).
///
/// Note that it is unsafe to call back into V8 from any of the allocator
/// functions.
///
/// This is called v8::ArrayBuffer::Allocator in C++. Rather than use the
/// namespace array_buffer, which will contain only the Allocator we opt in Rust
/// to allow it to live in the top level: v8::Allocator
#[repr(C)]
#[derive(Debug)]
// Opaque handle: instances are only ever created and destroyed through the
// C++ bindings above; Rust never inspects the contents.
pub struct Allocator(Opaque);
111
/// A wrapper around the V8 Allocator class.
///
/// The function pointers mirror the virtual methods of
/// `v8::ArrayBuffer::Allocator`; the C++ side dispatches through this table,
/// passing back the opaque `handle` given to `new_rust_allocator`.
#[repr(C)]
pub struct RustAllocatorVtable<T> {
  // Allocate `len` bytes. NOTE(review): per the v8::ArrayBuffer::Allocator
  // contract this variant should return zero-initialized memory — confirm
  // against v8.h.
  pub allocate: unsafe extern "C" fn(handle: &T, len: usize) -> *mut c_void,
  // Allocate `len` bytes without initializing them.
  pub allocate_uninitialized:
    unsafe extern "C" fn(handle: &T, len: usize) -> *mut c_void,
  // Free a block of `len` bytes previously returned by one of the allocate
  // callbacks.
  pub free: unsafe extern "C" fn(handle: &T, data: *mut c_void, len: usize),
  // Resize an existing allocation from `old_length` to `new_length` bytes.
  pub reallocate: unsafe extern "C" fn(
    handle: &T,
    data: *mut c_void,
    old_length: usize,
    new_length: usize,
  ) -> *mut c_void,
  // Invoked when the C++ Allocator object is destroyed; responsible for
  // releasing `handle` (see `test_rust_allocator` for the expected pattern).
  pub drop: unsafe extern "C" fn(handle: *const T),
}
127
// `Shared` plumbing for `Allocator`: every operation is a thin delegation to
// the corresponding `std::shared_ptr<v8::ArrayBuffer::Allocator>` function
// exported by the C++ glue layer, so shared-pointer bookkeeping stays on the
// C++ side.
impl Shared for Allocator {
  fn clone(ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__COPY(ptr) }
  }
  fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self> {
    unsafe {
      std__shared_ptr__v8__ArrayBuffer__Allocator__CONVERT__std__unique_ptr(
        unique_ptr,
      )
    }
  }
  fn get(ptr: &SharedPtrBase<Self>) -> *const Self {
    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__get(ptr) }
  }
  fn reset(ptr: &mut SharedPtrBase<Self>) {
    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__reset(ptr) }
  }
  fn use_count(ptr: &SharedPtrBase<Self>) -> long {
    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__use_count(ptr) }
  }
}
149
/// malloc/free based convenience allocator.
///
/// Wraps `v8::ArrayBuffer::Allocator::NewDefaultAllocator`; the returned
/// `UniqueRef` owns the C++ object (freed via `Allocator`'s `Drop` impl).
pub fn new_default_allocator() -> UniqueRef<Allocator> {
  unsafe {
    UniqueRef::from_raw(v8__ArrayBuffer__Allocator__NewDefaultAllocator())
  }
}
156
157/// Creates an allocator managed by Rust code.
158///
159/// Marked `unsafe` because the caller must ensure that `handle` is valid and matches what `vtable` expects.
160pub unsafe fn new_rust_allocator<T: Sized + Send + Sync + 'static>(
161  handle: *const T,
162  vtable: &'static RustAllocatorVtable<T>,
163) -> UniqueRef<Allocator> {
164  UniqueRef::from_raw(v8__ArrayBuffer__Allocator__NewRustAllocator(
165    handle as *const c_void,
166    vtable as *const RustAllocatorVtable<T>
167      as *const RustAllocatorVtable<c_void>,
168  ))
169}
170
// Verifies that destroying a Rust-backed allocator fires the vtable's `drop`
// callback and releases the `Arc` handle that was transferred to it.
#[test]
fn test_rust_allocator() {
  use std::sync::atomic::{AtomicUsize, Ordering};
  use std::sync::Arc;

  // Only `drop` is expected to run in this test; the allocation callbacks
  // panic if V8 ever invokes them.
  unsafe extern "C" fn allocate(_: &AtomicUsize, _: usize) -> *mut c_void {
    unimplemented!()
  }
  unsafe extern "C" fn allocate_uninitialized(
    _: &AtomicUsize,
    _: usize,
  ) -> *mut c_void {
    unimplemented!()
  }
  unsafe extern "C" fn free(_: &AtomicUsize, _: *mut c_void, _: usize) {
    unimplemented!()
  }
  unsafe extern "C" fn reallocate(
    _: &AtomicUsize,
    _: *mut c_void,
    _: usize,
    _: usize,
  ) -> *mut c_void {
    unimplemented!()
  }
  // Reclaims the Arc reference that was handed over via `Arc::into_raw`
  // (releasing its refcount when `arc` goes out of scope) and records that
  // the callback ran by storing 42.
  unsafe extern "C" fn drop(x: *const AtomicUsize) {
    let arc = Arc::from_raw(x);
    arc.store(42, Ordering::SeqCst);
  }

  let retval = Arc::new(AtomicUsize::new(0));

  let vtable: &'static RustAllocatorVtable<AtomicUsize> =
    &RustAllocatorVtable {
      allocate,
      allocate_uninitialized,
      free,
      reallocate,
      drop,
    };
  // The returned UniqueRef is not bound, so the allocator is dropped
  // immediately, which must tear down the C++ object and run `drop` above.
  unsafe { new_rust_allocator(Arc::into_raw(retval.clone()), vtable) };
  assert_eq!(retval.load(Ordering::SeqCst), 42);
  // Only our local `retval` reference remains: the callback's Arc clone was
  // released.
  assert_eq!(Arc::strong_count(&retval), 1);
}
215
// Smoke test: constructing (and immediately dropping) the default allocator
// must round-trip through the FFI boundary without crashing.
#[test]
fn test_default_allocator() {
  new_default_allocator();
}
220
impl Drop for Allocator {
  fn drop(&mut self) {
    // Hand the pointer back to C++ so the v8::ArrayBuffer::Allocator object
    // is destroyed there (matching how it was created).
    unsafe { v8__ArrayBuffer__Allocator__DELETE(self) };
  }
}
226
/// Signature of the callback V8 invokes to release the memory of a
/// `BackingStore` created via `v8__ArrayBuffer__NewBackingStore__with_data`.
/// `data` and `byte_length` describe the buffer being released;
/// `deleter_data` is the opaque pointer supplied at creation time.
pub type BackingStoreDeleterCallback = unsafe extern "C" fn(
  data: *mut c_void,
  byte_length: usize,
  deleter_data: *mut c_void,
);
232
/// Deleter for backing stores whose memory originated from a Rust
/// `Box<[u8]>` (see `ArrayBuffer::new_backing_store_from_boxed_slice`). V8
/// invokes it with the original data pointer and length once the backing
/// store is no longer referenced.
pub unsafe extern "C" fn backing_store_deleter_callback(
  data: *mut c_void,
  byte_length: usize,
  _deleter_data: *mut c_void,
) {
  // Rebuild the fat pointer to the original `[u8]` allocation so the memory
  // is freed with the same layout it was allocated with. The previous code
  // reconstructed a thin `Box<c_void>`, which deallocates with a 1-byte
  // layout — undefined behavior for any buffer that was allocated as a
  // `Box<[u8]>` of a different size.
  let slice_ptr =
    std::ptr::slice_from_raw_parts_mut(data as *mut u8, byte_length);
  drop(Box::from_raw(slice_ptr));
}
241
/// A wrapper around the backing store (i.e. the raw memory) of an array buffer.
/// See a document linked in http://crbug.com/v8/9908 for more information.
///
/// The allocation and destruction of backing stores is generally managed by
/// V8. Clients should always use standard C++ memory ownership types (i.e.
/// std::unique_ptr and std::shared_ptr) to manage lifetimes of backing stores
/// properly, since V8 internal objects may alias backing stores.
///
/// This object does not keep the underlying |ArrayBuffer::Allocator| alive by
/// default. Use Isolate::CreateParams::array_buffer_allocator_shared when
/// creating the Isolate to make it hold a reference to the allocator itself.
#[repr(C)]
#[derive(Debug)]
// Opaque storage: six pointer-sized words, presumably matching the size and
// alignment of the C++ `v8::BackingStore` object — TODO confirm against v8.h.
// Rust never reads these words directly; all access goes through the FFI.
pub struct BackingStore([usize; 6]);
256
// SAFETY(review): assumes a BackingStore may be moved to another thread — the
// accessors here only forward to V8, and V8 documents backing stores as
// shareable across isolates; confirm against the v8::BackingStore docs.
unsafe impl Send for BackingStore {}
258
259impl BackingStore {
260  /// Return a pointer to the beginning of the memory block for this backing
261  /// store. The pointer is only valid as long as this backing store object
262  /// lives.
263  pub fn data(&self) -> *mut c_void {
264    unsafe { v8__BackingStore__Data(self as *const _ as *mut Self) }
265  }
266
267  /// The length (in bytes) of this backing store.
268  pub fn byte_length(&self) -> usize {
269    unsafe { v8__BackingStore__ByteLength(self) }
270  }
271
272  /// Indicates whether the backing store was created for an ArrayBuffer or
273  /// a SharedArrayBuffer.
274  pub fn is_shared(&self) -> bool {
275    unsafe { v8__BackingStore__IsShared(self) }
276  }
277}
278
impl Deref for BackingStore {
  // `Cell<u8>` rather than plain `u8`: the buffer contents can be mutated
  // through other aliases (presumably by V8/JavaScript) while this borrow is
  // live — TODO confirm against the embedder guidelines.
  type Target = [Cell<u8>];

  /// Returns a `[Cell<u8>]` slice referencing the data in the backing store.
  fn deref(&self) -> &Self::Target {
    use std::ptr::NonNull;
    // `self.data()` will return a null pointer if the backing store has
    // length 0, and it's UB to create even an empty slice from a null pointer.
    let data = NonNull::new(self.data() as *mut Cell<u8>)
      .unwrap_or_else(NonNull::dangling);
    let len = self.byte_length();
    unsafe { slice::from_raw_parts(data.as_ptr(), len) }
  }
}
293
impl Drop for BackingStore {
  fn drop(&mut self) {
    // Hand the pointer back to C++ so the v8::BackingStore is destroyed on
    // the side that allocated it.
    unsafe { v8__BackingStore__DELETE(self) };
  }
}
299
// `Shared` plumbing for `BackingStore`: each operation delegates to the
// matching `std::shared_ptr<v8::BackingStore>` function exported by the C++
// glue layer, keeping refcount bookkeeping on the C++ side.
impl Shared for BackingStore {
  fn clone(ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
    unsafe { std__shared_ptr__v8__BackingStore__COPY(ptr) }
  }
  fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self> {
    unsafe {
      std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(unique_ptr)
    }
  }
  fn get(ptr: &SharedPtrBase<Self>) -> *const Self {
    unsafe { std__shared_ptr__v8__BackingStore__get(ptr) }
  }
  fn reset(ptr: &mut SharedPtrBase<Self>) {
    unsafe { std__shared_ptr__v8__BackingStore__reset(ptr) }
  }
  fn use_count(ptr: &SharedPtrBase<Self>) -> long {
    unsafe { std__shared_ptr__v8__BackingStore__use_count(ptr) }
  }
}
319
impl ArrayBuffer {
  /// Create a new ArrayBuffer. Allocate |byte_length| bytes.
  /// Allocated memory will be owned by a created ArrayBuffer and
  /// will be deallocated when it is garbage-collected,
  /// unless the object is externalized.
  pub fn new<'s>(
    scope: &mut HandleScope<'s>,
    byte_length: usize,
  ) -> Local<'s, ArrayBuffer> {
    unsafe {
      scope.cast_local(|sd| {
        v8__ArrayBuffer__New__with_byte_length(
          sd.get_isolate_ptr(),
          byte_length,
        )
      })
    }
    // NOTE(review): the unwrap assumes this constructor never yields an empty
    // handle (presumably V8 aborts on allocation failure instead of returning
    // null) — confirm against the v8::ArrayBuffer::New documentation.
    .unwrap()
  }

  /// Create a new ArrayBuffer over an existing, possibly shared, backing
  /// store. The resulting buffer shares ownership of `backing_store` with
  /// every other holder of the same `SharedRef`.
  pub fn with_backing_store<'s>(
    scope: &mut HandleScope<'s>,
    backing_store: &SharedRef<BackingStore>,
  ) -> Local<'s, ArrayBuffer> {
    unsafe {
      scope.cast_local(|sd| {
        v8__ArrayBuffer__New__with_backing_store(
          sd.get_isolate_ptr(),
          backing_store,
        )
      })
    }
    // NOTE(review): same empty-handle assumption as in `new` — TODO confirm.
    .unwrap()
  }

  /// Data length in bytes.
  pub fn byte_length(&self) -> usize {
    unsafe { v8__ArrayBuffer__ByteLength(self) }
  }

  /// Returns true if this ArrayBuffer may be detached.
  pub fn is_detachable(&self) -> bool {
    unsafe { v8__ArrayBuffer__IsDetachable(self) }
  }

  /// Detaches this ArrayBuffer and all its views (typed arrays).
  /// Detaching sets the byte length of the buffer and all typed arrays to zero,
  /// preventing JavaScript from ever accessing underlying backing store.
  /// ArrayBuffer should have been externalized and must be detachable.
  ///
  /// Calling this on a non-detachable buffer is a silent no-op (see below).
  pub fn detach(&self) {
    // V8 terminates when the ArrayBuffer is not detachable. Non-detachable
    // buffers are buffers that are in use by WebAssembly or asm.js.
    if self.is_detachable() {
      unsafe { v8__ArrayBuffer__Detach(self) }
    }
  }

  /// Get a shared pointer to the backing store of this array buffer. This
  /// pointer coordinates the lifetime management of the internal storage
  /// with any live ArrayBuffers on the heap, even across isolates. The embedder
  /// should not attempt to manage lifetime of the storage through other means.
  pub fn get_backing_store(&self) -> SharedRef<BackingStore> {
    unsafe { v8__ArrayBuffer__GetBackingStore(self) }
  }

  /// Returns a new standalone BackingStore that is allocated using the array
  /// buffer allocator of the isolate. The result can be later passed to
  /// ArrayBuffer::New.
  ///
  /// If the allocator returns nullptr, then the function may cause GCs in the
  /// given isolate and re-try the allocation. If GCs do not help, then the
  /// function will crash with an out-of-memory error.
  ///
  /// Note: despite its name, the `scope` parameter is a plain `&mut Isolate`;
  /// no handle scope is needed since no local handles are created.
  pub fn new_backing_store(
    scope: &mut Isolate,
    byte_length: usize,
  ) -> UniqueRef<BackingStore> {
    unsafe {
      UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore__with_byte_length(
        scope,
        byte_length,
      ))
    }
  }

  /// Returns a new standalone BackingStore that takes over the ownership of
  /// the given buffer.
  ///
  /// The destructor of the BackingStore frees owned buffer memory.
  ///
  /// The result can be later passed to ArrayBuffer::New. The raw pointer
  /// to the buffer must not be passed again to any V8 API function.
  pub fn new_backing_store_from_boxed_slice(
    data: Box<[u8]>,
  ) -> UniqueRef<BackingStore> {
    // The length must be captured before the box is dismantled: casting the
    // fat `*mut [u8]` from `Box::into_raw` to a thin `*mut c_void` discards
    // it. Ownership of the allocation passes to V8, which later calls
    // `backing_store_deleter_callback` with this pointer/length pair.
    let byte_length = data.len();
    let data_ptr = Box::into_raw(data) as *mut c_void;
    unsafe {
      UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore__with_data(
        data_ptr,
        byte_length,
        backing_store_deleter_callback,
        null_mut(),
      ))
    }
  }
}