// Copyright 2019-2021 the Deno authors. All rights reserved. MIT license.

3use std::cell::Cell;
4use std::ffi::c_void;
5use std::ops::Deref;
6use std::ptr::NonNull;
7use std::ptr::null;
8use std::slice;
9
10use crate::ArrayBuffer;
11use crate::DataView;
12use crate::Isolate;
13use crate::Local;
14use crate::Value;
15use crate::isolate::RealIsolate;
16use crate::scope::PinScope;
17use crate::support::MaybeBool;
18use crate::support::Opaque;
19use crate::support::Shared;
20use crate::support::SharedPtrBase;
21use crate::support::SharedRef;
22use crate::support::UniquePtr;
23use crate::support::UniqueRef;
24use crate::support::long;
25
// Raw FFI bindings into the C++ glue layer. Each `v8__*` symbol wraps a
// method of the corresponding v8 C++ class; the `std__shared_ptr__*` symbols
// wrap `std::shared_ptr` operations for types that V8 hands out by shared
// pointer. Signatures must stay byte-for-byte in sync with the C++ side.
unsafe extern "C" {
  // v8::ArrayBuffer::Allocator
  fn v8__ArrayBuffer__Allocator__NewDefaultAllocator() -> *mut Allocator;
  fn v8__ArrayBuffer__Allocator__DELETE(this: *mut Allocator);

  // v8::ArrayBuffer
  fn v8__ArrayBuffer__New__with_byte_length(
    isolate: *mut RealIsolate,
    byte_length: usize,
  ) -> *const ArrayBuffer;
  fn v8__ArrayBuffer__New__with_backing_store(
    isolate: *mut RealIsolate,
    backing_store: *const SharedRef<BackingStore>,
  ) -> *const ArrayBuffer;
  fn v8__ArrayBuffer__Detach(
    this: *const ArrayBuffer,
    key: *const Value,
  ) -> MaybeBool;
  fn v8__ArrayBuffer__SetDetachKey(this: *const ArrayBuffer, key: *const Value);
  fn v8__ArrayBuffer__Data(this: *const ArrayBuffer) -> *mut c_void;
  fn v8__ArrayBuffer__IsDetachable(this: *const ArrayBuffer) -> bool;
  fn v8__ArrayBuffer__WasDetached(this: *const ArrayBuffer) -> bool;
  fn v8__ArrayBuffer__ByteLength(this: *const ArrayBuffer) -> usize;
  fn v8__ArrayBuffer__GetBackingStore(
    this: *const ArrayBuffer,
  ) -> SharedRef<BackingStore>;
  fn v8__ArrayBuffer__NewBackingStore__with_byte_length(
    isolate: *mut RealIsolate,
    byte_length: usize,
  ) -> *mut BackingStore;
  fn v8__ArrayBuffer__NewBackingStore__with_data(
    data: *mut c_void,
    byte_length: usize,
    deleter: BackingStoreDeleterCallback,
    deleter_data: *mut c_void,
  ) -> *mut BackingStore;

  // v8::BackingStore
  fn v8__BackingStore__Data(this: *const BackingStore) -> *mut c_void;
  fn v8__BackingStore__ByteLength(this: *const BackingStore) -> usize;
  fn v8__BackingStore__IsShared(this: *const BackingStore) -> bool;
  fn v8__BackingStore__IsResizableByUserJavaScript(
    this: *const BackingStore,
  ) -> bool;
  fn v8__BackingStore__DELETE(this: *mut BackingStore);

  // v8::DataView
  fn v8__DataView__New(
    arraybuffer: *const ArrayBuffer,
    byte_offset: usize,
    length: usize,
  ) -> *const DataView;

  // std::shared_ptr<v8::BackingStore>
  fn std__shared_ptr__v8__BackingStore__COPY(
    ptr: *const SharedPtrBase<BackingStore>,
  ) -> SharedPtrBase<BackingStore>;
  fn std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(
    unique_ptr: UniquePtr<BackingStore>,
  ) -> SharedPtrBase<BackingStore>;
  fn std__shared_ptr__v8__BackingStore__get(
    ptr: *const SharedPtrBase<BackingStore>,
  ) -> *mut BackingStore;
  fn std__shared_ptr__v8__BackingStore__reset(
    ptr: *mut SharedPtrBase<BackingStore>,
  );
  fn std__shared_ptr__v8__BackingStore__use_count(
    ptr: *const SharedPtrBase<BackingStore>,
  ) -> long;

  // std::shared_ptr<v8::ArrayBuffer::Allocator>
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__COPY(
    ptr: *const SharedPtrBase<Allocator>,
  ) -> SharedPtrBase<Allocator>;
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__CONVERT__std__unique_ptr(
    unique_ptr: UniquePtr<Allocator>,
  ) -> SharedPtrBase<Allocator>;
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__get(
    ptr: *const SharedPtrBase<Allocator>,
  ) -> *mut Allocator;
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__reset(
    ptr: *mut SharedPtrBase<Allocator>,
  );
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__use_count(
    ptr: *const SharedPtrBase<Allocator>,
  ) -> long;
}
105
// Rust allocator feature is only available in non-sandboxed mode: sandbox
// mode requires ArrayBuffer data to live inside the sandbox's address space
// (see the notes on `new_backing_store_from_bytes`), which a foreign
// allocator cannot guarantee.
#[cfg(not(feature = "v8_enable_sandbox"))]
unsafe extern "C" {
  // Constructs a C++ allocator that forwards every call through `vtable`,
  // passing `handle` back as the first argument.
  fn v8__ArrayBuffer__Allocator__NewRustAllocator(
    handle: *const c_void,
    vtable: *const RustAllocatorVtable<c_void>,
  ) -> *mut Allocator;
}
114
/// A thread-safe allocator that V8 uses to allocate |ArrayBuffer|'s memory.
/// The allocator is a global V8 setting. It has to be set via
/// Isolate::CreateParams.
///
/// Memory allocated through this allocator by V8 is accounted for as external
/// memory by V8. Note that V8 keeps track of the memory for all internalized
/// |ArrayBuffer|s. Responsibility for tracking external memory (using
/// Isolate::AdjustAmountOfExternalAllocatedMemory) is handed over to the
/// embedder upon externalization and taken over upon internalization (creating
/// an internalized buffer from an existing buffer).
///
/// Note that it is unsafe to call back into V8 from any of the allocator
/// functions.
///
/// This is called v8::ArrayBuffer::Allocator in C++. Rather than use the
/// namespace array_buffer, which will contain only the Allocator we opt in Rust
/// to allow it to live in the top level: v8::Allocator
#[repr(C)]
#[derive(Debug)]
// `Opaque` marks this as a type Rust never constructs or inspects directly;
// instances are only ever handled behind pointers returned by the C++ side.
pub struct Allocator(Opaque);
135
/// Table of callbacks through which V8 drives an allocator implemented in
/// Rust (see [`new_rust_allocator`]). Every callback receives the opaque
/// `handle` that was registered alongside the vtable.
#[cfg(not(feature = "v8_enable_sandbox"))]
#[repr(C)]
pub struct RustAllocatorVtable<T> {
  /// Allocate `len` bytes — presumably zero-initialized, mirroring
  /// `v8::ArrayBuffer::Allocator::Allocate`; confirm against v8.h.
  pub allocate: unsafe extern "C" fn(handle: &T, len: usize) -> *mut c_void,
  /// Allocate `len` bytes without initializing them.
  pub allocate_uninitialized:
    unsafe extern "C" fn(handle: &T, len: usize) -> *mut c_void,
  /// Free a `len`-byte block previously returned by an allocate callback.
  pub free: unsafe extern "C" fn(handle: &T, data: *mut c_void, len: usize),
  /// Release the `handle` itself when the allocator is destroyed.
  pub drop: unsafe extern "C" fn(handle: *const T),
}
146
147impl Shared for Allocator {
148  fn clone(ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
149    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__COPY(ptr) }
150  }
151  fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self> {
152    unsafe {
153      std__shared_ptr__v8__ArrayBuffer__Allocator__CONVERT__std__unique_ptr(
154        unique_ptr,
155      )
156    }
157  }
158  fn get(ptr: &SharedPtrBase<Self>) -> *const Self {
159    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__get(ptr) }
160  }
161  fn reset(ptr: &mut SharedPtrBase<Self>) {
162    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__reset(ptr) }
163  }
164  fn use_count(ptr: &SharedPtrBase<Self>) -> long {
165    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__use_count(ptr) }
166  }
167}
168
169/// malloc/free based convenience allocator.
170#[inline(always)]
171pub fn new_default_allocator() -> UniqueRef<Allocator> {
172  unsafe {
173    UniqueRef::from_raw(v8__ArrayBuffer__Allocator__NewDefaultAllocator())
174  }
175}
176
177/// Creates an allocator managed by Rust code.
178///
179/// Marked `unsafe` because the caller must ensure that `handle` is valid and matches what `vtable` expects.
180///
181/// Not usable in sandboxed mode
182#[inline(always)]
183#[cfg(not(feature = "v8_enable_sandbox"))]
184pub unsafe fn new_rust_allocator<T: Sized + Send + Sync + 'static>(
185  handle: *const T,
186  vtable: &'static RustAllocatorVtable<T>,
187) -> UniqueRef<Allocator> {
188  unsafe {
189    UniqueRef::from_raw(v8__ArrayBuffer__Allocator__NewRustAllocator(
190      handle as *const c_void,
191      vtable as *const RustAllocatorVtable<T>
192        as *const RustAllocatorVtable<c_void>,
193    ))
194  }
195}
196
#[test]
#[cfg(not(feature = "v8_enable_sandbox"))]
fn test_rust_allocator() {
  use std::sync::Arc;
  use std::sync::atomic::{AtomicUsize, Ordering};

  // The allocation callbacks are never exercised in this test; it only
  // verifies that dropping the allocator invokes the vtable's `drop`
  // callback and releases the handle.
  unsafe extern "C" fn allocate(_: &AtomicUsize, _: usize) -> *mut c_void {
    unimplemented!()
  }
  unsafe extern "C" fn allocate_uninitialized(
    _: &AtomicUsize,
    _: usize,
  ) -> *mut c_void {
    unimplemented!()
  }
  unsafe extern "C" fn free(_: &AtomicUsize, _: *mut c_void, _: usize) {
    unimplemented!()
  }
  unsafe extern "C" fn drop(x: *const AtomicUsize) {
    unsafe {
      // Reclaim the Arc reference that was handed to the allocator and
      // record that this callback ran by storing a sentinel value.
      let arc = Arc::from_raw(x);
      arc.store(42, Ordering::SeqCst);
    }
  }

  let retval = Arc::new(AtomicUsize::new(0));

  let vtable: &'static RustAllocatorVtable<AtomicUsize> =
    &RustAllocatorVtable {
      allocate,
      allocate_uninitialized,
      free,
      drop,
    };
  // The returned UniqueRef is dropped immediately, which must run `drop`.
  unsafe { new_rust_allocator(Arc::into_raw(retval.clone()), vtable) };
  // Sentinel written by the drop callback…
  assert_eq!(retval.load(Ordering::SeqCst), 42);
  // …and the Arc reference given to the allocator was released.
  assert_eq!(Arc::strong_count(&retval), 1);
}
235
#[test]
fn test_default_allocator() {
  // Set up the process-wide V8 platform, then check that constructing the
  // default allocator succeeds.
  crate::V8::initialize_platform(
    crate::new_default_platform(0, false).make_shared(),
  );
  crate::V8::initialize();
  new_default_allocator();
}
244
impl Drop for Allocator {
  fn drop(&mut self) {
    // Destroys the underlying C++ allocator object.
    unsafe { v8__ArrayBuffer__Allocator__DELETE(self) };
  }
}
250
/// Callback V8 invokes when a BackingStore created via
/// [`ArrayBuffer::new_backing_store_from_ptr`] is destroyed. It receives the
/// data pointer, the byte length, and the opaque `deleter_data` that was
/// registered with the store, and is responsible for freeing the memory.
pub type BackingStoreDeleterCallback = unsafe extern "C" fn(
  data: *mut c_void,
  byte_length: usize,
  deleter_data: *mut c_void,
);
256
#[cfg(not(feature = "v8_enable_sandbox"))]
pub(crate) mod sealed {
  /// Sealed conversion trait for containers whose memory can be handed off
  /// to a V8 BackingStore and later reclaimed by the deleter callback.
  pub trait Rawable {
    /// Size of the contained data, in bytes.
    fn byte_len(&mut self) -> usize;
    /// Consume the container, returning `(owner pointer, data pointer)`.
    /// The owner pointer is what `drop_raw` later reconstructs.
    fn into_raw(self) -> (*const (), *const u8);
    /// Rebuild and drop the container from the owner pointer produced by
    /// `into_raw`. `size` is the data length in bytes.
    unsafe fn drop_raw(ptr: *const (), size: usize);
  }
}
265
// Implements `sealed::Rawable` for `Box<[$ty]>` and `Vec<$ty>` of a primitive
// element type, converting the container into a thin pointer so it can be
// handed to V8 as backing-store memory and reconstituted in the deleter.
//
// Note on units: `byte_len`/`drop_raw` traffic in BYTES (that is what the V8
// deleter callback reports), while `slice_from_raw_parts_mut` takes an
// ELEMENT count — so `drop_raw` must divide by the element size. The previous
// code passed the byte count straight through, which reconstructed an
// oversized `Box<[$ty]>` (and thus deallocated with the wrong layout) for
// every element type wider than one byte.
#[cfg(not(feature = "v8_enable_sandbox"))]
macro_rules! rawable {
  ($ty:ty) => {
    impl sealed::Rawable for Box<[$ty]> {
      // Slice length in bytes: element count times element size.
      fn byte_len(&mut self) -> usize {
        self.as_mut().len() * std::mem::size_of::<$ty>()
      }

      fn into_raw(mut self) -> (*const (), *const u8) {
        // Thin the fat pointer; ownership is recovered later in `drop_raw`.
        let ptr = self.as_mut_ptr();
        std::mem::forget(self);
        (ptr as _, ptr as _)
      }

      unsafe fn drop_raw(ptr: *const (), len: usize) {
        // Fatten the thin pointer. `len` is in bytes; convert it back to an
        // element count before rebuilding the boxed slice.
        _ = unsafe {
          Self::from_raw(std::ptr::slice_from_raw_parts_mut(
            ptr as _,
            len / std::mem::size_of::<$ty>(),
          ))
        };
      }
    }

    impl sealed::Rawable for Vec<$ty> {
      fn byte_len(&mut self) -> usize {
        Vec::<$ty>::len(self) * std::mem::size_of::<$ty>()
      }

      // `into_raw` below converts the Vec into a boxed slice first, so
      // reconstruction delegates to the Box<[$ty]> impl.
      unsafe fn drop_raw(ptr: *const (), size: usize) {
        unsafe {
          <Box<[$ty]> as sealed::Rawable>::drop_raw(ptr, size);
        }
      }

      fn into_raw(self) -> (*const (), *const u8) {
        self.into_boxed_slice().into_raw()
      }
    }
  };
}
306
// Rawable implementations for Vec<T>/Box<[T]> over every primitive numeric
// element type.
#[cfg(not(feature = "v8_enable_sandbox"))]
rawable!(u8);
#[cfg(not(feature = "v8_enable_sandbox"))]
rawable!(u16);
#[cfg(not(feature = "v8_enable_sandbox"))]
rawable!(u32);
#[cfg(not(feature = "v8_enable_sandbox"))]
rawable!(u64);
#[cfg(not(feature = "v8_enable_sandbox"))]
rawable!(i8);
#[cfg(not(feature = "v8_enable_sandbox"))]
rawable!(i16);
#[cfg(not(feature = "v8_enable_sandbox"))]
rawable!(i32);
#[cfg(not(feature = "v8_enable_sandbox"))]
rawable!(i64);
#[cfg(not(feature = "v8_enable_sandbox"))]
rawable!(f32);
#[cfg(not(feature = "v8_enable_sandbox"))]
rawable!(f64);
327
// Rawable for any boxed container that exposes its contents as `&mut [u8]`
// (e.g. `Box<bytes::BytesMut>`); the box itself is the reclaimable owner.
#[cfg(not(feature = "v8_enable_sandbox"))]
impl<T: Sized> sealed::Rawable for Box<T>
where
  T: AsMut<[u8]>,
{
  fn byte_len(&mut self) -> usize {
    // First `as_mut` derefs the Box, second is T's AsMut<[u8]>.
    self.as_mut().as_mut().len()
  }

  fn into_raw(mut self) -> (*const (), *const u8) {
    // Grab the data pointer before giving up ownership of the box.
    let data = self.as_mut().as_mut().as_mut_ptr();
    // `Self::into_raw` resolves to the inherent `Box::into_raw` — inherent
    // associated functions take precedence over trait methods of the same
    // name — so this is NOT a recursive call into this trait impl.
    let ptr = Self::into_raw(self);
    (ptr as _, data)
  }

  unsafe fn drop_raw(ptr: *const (), _len: usize) {
    unsafe {
      // Likewise, `Self::from_raw` is the inherent `Box::from_raw`; the
      // length is unused because Box<T> carries no slice metadata.
      _ = Self::from_raw(ptr as _);
    }
  }
}
349
/// A wrapper around the backing store (i.e. the raw memory) of an array buffer.
/// See a document linked in http://crbug.com/v8/9908 for more information.
///
/// The allocation and destruction of backing stores is generally managed by
/// V8. Clients should always use standard C++ memory ownership types (i.e.
/// std::unique_ptr and std::shared_ptr) to manage lifetimes of backing stores
/// properly, since V8 internal objects may alias backing stores.
///
/// This object does not keep the underlying |ArrayBuffer::Allocator| alive by
/// default. Use Isolate::CreateParams::array_buffer_allocator_shared when
/// creating the Isolate to make it hold a reference to the allocator itself.
#[repr(C)]
#[derive(Debug)]
// Opaque inline storage — presumably sized to match the C++ v8::BackingStore
// object so it can be held by value; never inspected from Rust.
// NOTE(review): confirm the size stays in sync with the C++ side.
pub struct BackingStore([usize; 6]);

// SAFETY: NOTE(review) — assumed sound on the premise that v8::BackingStore
// may be transferred between threads; confirm against the V8 documentation.
unsafe impl Send for BackingStore {}
366
367impl BackingStore {
368  /// Return a pointer to the beginning of the memory block for this backing
369  /// store. The pointer is only valid as long as this backing store object
370  /// lives.
371  ///
372  /// Might return `None` if the backing store has zero length.
373  #[inline(always)]
374  pub fn data(&self) -> Option<NonNull<c_void>> {
375    let raw_ptr =
376      unsafe { v8__BackingStore__Data(self as *const _ as *mut Self) };
377    NonNull::new(raw_ptr)
378  }
379
380  /// The length (in bytes) of this backing store.
381  #[inline(always)]
382  pub fn byte_length(&self) -> usize {
383    unsafe { v8__BackingStore__ByteLength(self) }
384  }
385
386  /// Indicates whether the backing store was created for an ArrayBuffer or
387  /// a SharedArrayBuffer.
388  #[inline(always)]
389  pub fn is_shared(&self) -> bool {
390    unsafe { v8__BackingStore__IsShared(self) }
391  }
392
393  /// Indicates whether the backing store was created for a resizable ArrayBuffer
394  /// or a growable SharedArrayBuffer, and thus may be resized by user
395  /// JavaScript code.
396  #[inline(always)]
397  pub fn is_resizable_by_user_javascript(&self) -> bool {
398    unsafe { v8__BackingStore__IsResizableByUserJavaScript(self) }
399  }
400}
401
impl Deref for BackingStore {
  type Target = [Cell<u8>];

  /// Returns a `[Cell<u8>]` slice referencing the data in the backing store.
  /// NOTE(review): the element type is `Cell<u8>` rather than `u8`,
  /// presumably because the bytes can be mutated through other aliases while
  /// the slice is held — confirm.
  #[inline]
  fn deref(&self) -> &Self::Target {
    // We use a dangling pointer if `self.data()` returns None because it's UB
    // to create even an empty slice from a null pointer.
    let data = self
      .data()
      .unwrap_or_else(NonNull::dangling)
      .cast::<Cell<u8>>();
    let len = self.byte_length();
    unsafe { slice::from_raw_parts(data.as_ptr(), len) }
  }
}
418
impl Drop for BackingStore {
  #[inline]
  fn drop(&mut self) {
    // Destroys the C++ BackingStore, whose destructor frees any owned buffer
    // memory (running the registered deleter when one was supplied).
    unsafe { v8__BackingStore__DELETE(self) };
  }
}
425
// Delegate shared_ptr bookkeeping for BackingStore to the C++ runtime.
impl Shared for BackingStore {
  // Bump the shared_ptr reference count, returning a new shared handle.
  #[inline]
  fn clone(ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
    unsafe { std__shared_ptr__v8__BackingStore__COPY(ptr) }
  }
  // Convert exclusive ownership into shared ownership.
  #[inline]
  fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self> {
    unsafe {
      std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(unique_ptr)
    }
  }
  // Borrow the raw pointer stored in the shared_ptr (may be null).
  #[inline]
  fn get(ptr: &SharedPtrBase<Self>) -> *const Self {
    unsafe { std__shared_ptr__v8__BackingStore__get(ptr) }
  }
  // Drop this shared handle's reference.
  #[inline]
  fn reset(ptr: &mut SharedPtrBase<Self>) {
    unsafe { std__shared_ptr__v8__BackingStore__reset(ptr) }
  }
  // Current shared_ptr use count, as reported by the C++ runtime.
  #[inline]
  fn use_count(ptr: &SharedPtrBase<Self>) -> long {
    unsafe { std__shared_ptr__v8__BackingStore__use_count(ptr) }
  }
}
450
451impl ArrayBuffer {
452  /// Create a new ArrayBuffer. Allocate |byte_length| bytes.
453  /// Allocated memory will be owned by a created ArrayBuffer and
454  /// will be deallocated when it is garbage-collected,
455  /// unless the object is externalized.
456  #[inline(always)]
457  pub fn new<'s>(
458    scope: &PinScope<'s, '_, ()>,
459    byte_length: usize,
460  ) -> Local<'s, ArrayBuffer> {
461    unsafe {
462      scope.cast_local(|sd| {
463        v8__ArrayBuffer__New__with_byte_length(
464          sd.get_isolate_ptr(),
465          byte_length,
466        )
467      })
468    }
469    .unwrap()
470  }
471
472  #[inline(always)]
473  pub fn with_backing_store<'s>(
474    scope: &PinScope<'s, '_, ()>,
475    backing_store: &SharedRef<BackingStore>,
476  ) -> Local<'s, ArrayBuffer> {
477    unsafe {
478      scope.cast_local(|sd| {
479        v8__ArrayBuffer__New__with_backing_store(
480          sd.get_isolate_ptr(),
481          backing_store,
482        )
483      })
484    }
485    .unwrap()
486  }
487
488  /// Data length in bytes.
489  #[inline(always)]
490  pub fn byte_length(&self) -> usize {
491    unsafe { v8__ArrayBuffer__ByteLength(self) }
492  }
493
494  /// Returns true if this ArrayBuffer may be detached.
495  #[inline(always)]
496  pub fn is_detachable(&self) -> bool {
497    unsafe { v8__ArrayBuffer__IsDetachable(self) }
498  }
499
500  /// Returns true if this ArrayBuffer was detached.
501  #[inline(always)]
502  pub fn was_detached(&self) -> bool {
503    if self.byte_length() != 0 {
504      return false;
505    }
506    unsafe { v8__ArrayBuffer__WasDetached(self) }
507  }
508
509  /// Detaches this ArrayBuffer and all its views (typed arrays).
510  /// Detaching sets the byte length of the buffer and all typed arrays to zero,
511  /// preventing JavaScript from ever accessing underlying backing store.
512  /// ArrayBuffer should have been externalized and must be detachable. Returns
513  /// `None` if the key didn't pass the `[[ArrayBufferDetachKey]]` check,
514  /// and `Some(true)` otherwise.
515  #[inline(always)]
516  pub fn detach(&self, key: Option<Local<Value>>) -> Option<bool> {
517    // V8 terminates when the ArrayBuffer is not detachable. Non-detachable
518    // buffers are buffers that are in use by WebAssembly or asm.js.
519    if self.is_detachable() {
520      let key = key.map_or(null(), |v| &*v as *const Value);
521      unsafe { v8__ArrayBuffer__Detach(self, key) }.into()
522    } else {
523      Some(true)
524    }
525  }
526
527  /// Sets the `[[ArrayBufferDetachKey]]`.
528  #[inline(always)]
529  pub fn set_detach_key(&self, key: Local<Value>) {
530    unsafe { v8__ArrayBuffer__SetDetachKey(self, &*key) };
531  }
532
533  /// More efficient shortcut for GetBackingStore()->Data().
534  /// The returned pointer is valid as long as the ArrayBuffer is alive.
535  #[inline(always)]
536  pub fn data(&self) -> Option<NonNull<c_void>> {
537    let raw_ptr = unsafe { v8__ArrayBuffer__Data(self) };
538    NonNull::new(raw_ptr)
539  }
540
541  /// Get a shared pointer to the backing store of this array buffer. This
542  /// pointer coordinates the lifetime management of the internal storage
543  /// with any live ArrayBuffers on the heap, even across isolates. The embedder
544  /// should not attempt to manage lifetime of the storage through other means.
545  #[inline(always)]
546  pub fn get_backing_store(&self) -> SharedRef<BackingStore> {
547    unsafe { v8__ArrayBuffer__GetBackingStore(self) }
548  }
549
550  /// Returns a new standalone BackingStore that is allocated using the array
551  /// buffer allocator of the isolate. The result can be later passed to
552  /// ArrayBuffer::New.
553  ///
554  /// If the allocator returns nullptr, then the function may cause GCs in the
555  /// given isolate and re-try the allocation. If GCs do not help, then the
556  /// function will crash with an out-of-memory error.
557  #[inline(always)]
558  pub fn new_backing_store(
559    scope: &mut Isolate,
560    byte_length: usize,
561  ) -> UniqueRef<BackingStore> {
562    unsafe {
563      UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore__with_byte_length(
564        (*scope).as_real_ptr(),
565        byte_length,
566      ))
567    }
568  }
569
570  /// Returns a new standalone BackingStore that takes over the ownership of
571  /// the given buffer.
572  ///
573  /// The destructor of the BackingStore frees owned buffer memory.
574  ///
575  /// The result can be later passed to ArrayBuffer::New. The raw pointer
576  /// to the buffer must not be passed again to any V8 API function.
577  ///
578  /// Not available in Sandbox Mode, see new_backing_store_from_bytes for a potential alternative
579  #[inline(always)]
580  #[cfg(not(feature = "v8_enable_sandbox"))]
581  pub fn new_backing_store_from_boxed_slice(
582    data: Box<[u8]>,
583  ) -> UniqueRef<BackingStore> {
584    Self::new_backing_store_from_bytes(data)
585  }
586
587  /// Returns a new standalone BackingStore that takes over the ownership of
588  /// the given buffer.
589  ///
590  /// The destructor of the BackingStore frees owned buffer memory.
591  ///
592  /// The result can be later passed to ArrayBuffer::New. The raw pointer
593  /// to the buffer must not be passed again to any V8 API function.
594  ///
595  /// Not available in Sandbox Mode, see new_backing_store_from_bytes for a potential alternative
596  #[inline(always)]
597  #[cfg(not(feature = "v8_enable_sandbox"))]
598  pub fn new_backing_store_from_vec(data: Vec<u8>) -> UniqueRef<BackingStore> {
599    Self::new_backing_store_from_bytes(data)
600  }
601
602  /// Returns a new standalone BackingStore backed by a container that dereferences
603  /// to a mutable slice of bytes. The object is dereferenced once, and the resulting slice's
604  /// memory is used for the lifetime of the buffer.
605  ///
606  /// This method may be called with most single-ownership containers that implement `AsMut<[u8]>`, including
607  /// `Box<[u8]>`, and `Vec<u8>`. This will also support most other mutable bytes containers (including `bytes::BytesMut`),
608  /// though these buffers will need to be boxed to manage ownership of memory.
609  ///
610  /// Not available in sandbox mode. Sandbox mode requires data to be allocated
611  /// within the sandbox's address space. Within sandbox mode, consider the below alternatives
612  ///
613  /// 1. consider using new_backing_store and BackingStore::data() followed by doing a std::ptr::copy to copy the data into a BackingStore.
614  /// 2. If you truly do have data that is allocated inside the sandbox address space, consider using the unsafe new_backing_store_from_ptr API
615  ///
616  /// ```
617  /// // Vector of bytes
618  /// let backing_store = v8::ArrayBuffer::new_backing_store_from_bytes(vec![1, 2, 3]);
619  /// // Boxes slice of bytes
620  /// let boxed_slice: Box<[u8]> = vec![1, 2, 3].into_boxed_slice();
621  /// let backing_store = v8::ArrayBuffer::new_backing_store_from_bytes(boxed_slice);
622  /// // BytesMut from bytes crate
623  /// let backing_store = v8::ArrayBuffer::new_backing_store_from_bytes(Box::new(bytes::BytesMut::new()));
624  /// ```
625  #[inline(always)]
626  #[cfg(not(feature = "v8_enable_sandbox"))]
627  pub fn new_backing_store_from_bytes<T>(
628    mut bytes: T,
629  ) -> UniqueRef<BackingStore>
630  where
631    T: sealed::Rawable,
632  {
633    let len = bytes.byte_len();
634
635    let (ptr, slice) = T::into_raw(bytes);
636
637    unsafe extern "C" fn drop_rawable<T: sealed::Rawable>(
638      _ptr: *mut c_void,
639      len: usize,
640      data: *mut c_void,
641    ) {
642      // SAFETY: We know that data is a raw T from above
643      unsafe { T::drop_raw(data as _, len) }
644    }
645
646    // SAFETY: We are extending the lifetime of a slice, but we're locking away the box that we
647    // derefed from so there's no way to get another mutable reference.
648    unsafe {
649      Self::new_backing_store_from_ptr(
650        slice as _,
651        len,
652        drop_rawable::<T>,
653        ptr as _,
654      )
655    }
656  }
657
658  /// Returns a new standalone BackingStore backed by given ptr.
659  ///
660  /// SAFETY: This API consumes raw pointers so is inherently
661  /// unsafe. Usually you should use new_backing_store_from_boxed_slice.
662  ///
663  /// WARNING: Using sandbox mode has extra limitations that may cause crashes
664  /// or memory safety violations if this API is used incorrectly:
665  ///
666  /// 1. Sandbox mode requires data to be allocated within the sandbox's address space.
667  /// 2. It is very easy to cause memory safety errors when using this API with sandbox mode
668  #[inline(always)]
669  pub unsafe fn new_backing_store_from_ptr(
670    data_ptr: *mut c_void,
671    byte_length: usize,
672    deleter_callback: BackingStoreDeleterCallback,
673    deleter_data: *mut c_void,
674  ) -> UniqueRef<BackingStore> {
675    unsafe {
676      UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore__with_data(
677        data_ptr,
678        byte_length,
679        deleter_callback,
680        deleter_data,
681      ))
682    }
683  }
684}
685
686impl DataView {
687  /// Returns a new DataView.
688  #[inline(always)]
689  pub fn new<'s>(
690    scope: &PinScope<'s, '_, ()>,
691    arraybuffer: Local<'s, ArrayBuffer>,
692    byte_offset: usize,
693    length: usize,
694  ) -> Local<'s, DataView> {
695    unsafe {
696      scope
697        .cast_local(|_| v8__DataView__New(&*arraybuffer, byte_offset, length))
698    }
699    .unwrap()
700  }
701}