1use std::cell::Cell;
4use std::ffi::c_void;
5use std::ops::Deref;
6use std::ptr::NonNull;
7use std::ptr::null;
8use std::slice;
9
10use crate::ArrayBuffer;
11use crate::DataView;
12use crate::Isolate;
13use crate::Local;
14use crate::Value;
15use crate::isolate::RealIsolate;
16use crate::scope::PinScope;
17use crate::support::MaybeBool;
18use crate::support::Opaque;
19use crate::support::Shared;
20use crate::support::SharedPtrBase;
21use crate::support::SharedRef;
22use crate::support::UniquePtr;
23use crate::support::UniqueRef;
24use crate::support::long;
25
// Raw FFI bindings, implemented on the C++ side of the crate.
// Naming convention: `v8__<Class>__<Method>` wraps the corresponding V8 C++
// API entry point; `std__shared_ptr__<T>__<op>` wraps `std::shared_ptr<T>`
// operations for types whose reference count lives in C++.
unsafe extern "C" {
  fn v8__ArrayBuffer__Allocator__NewDefaultAllocator() -> *mut Allocator;
  fn v8__ArrayBuffer__Allocator__DELETE(this: *mut Allocator);
  fn v8__ArrayBuffer__New__with_byte_length(
    isolate: *mut RealIsolate,
    byte_length: usize,
  ) -> *const ArrayBuffer;
  fn v8__ArrayBuffer__New__with_backing_store(
    isolate: *mut RealIsolate,
    backing_store: *const SharedRef<BackingStore>,
  ) -> *const ArrayBuffer;
  fn v8__ArrayBuffer__Detach(
    this: *const ArrayBuffer,
    key: *const Value,
  ) -> MaybeBool;
  fn v8__ArrayBuffer__SetDetachKey(this: *const ArrayBuffer, key: *const Value);
  fn v8__ArrayBuffer__Data(this: *const ArrayBuffer) -> *mut c_void;
  fn v8__ArrayBuffer__IsDetachable(this: *const ArrayBuffer) -> bool;
  fn v8__ArrayBuffer__WasDetached(this: *const ArrayBuffer) -> bool;
  fn v8__ArrayBuffer__ByteLength(this: *const ArrayBuffer) -> usize;
  fn v8__ArrayBuffer__GetBackingStore(
    this: *const ArrayBuffer,
  ) -> SharedRef<BackingStore>;
  fn v8__ArrayBuffer__NewBackingStore__with_byte_length(
    isolate: *mut RealIsolate,
    byte_length: usize,
  ) -> *mut BackingStore;
  fn v8__ArrayBuffer__NewBackingStore__with_data(
    data: *mut c_void,
    byte_length: usize,
    deleter: BackingStoreDeleterCallback,
    deleter_data: *mut c_void,
  ) -> *mut BackingStore;
  fn v8__BackingStore__Data(this: *const BackingStore) -> *mut c_void;
  fn v8__BackingStore__ByteLength(this: *const BackingStore) -> usize;
  fn v8__BackingStore__IsShared(this: *const BackingStore) -> bool;
  fn v8__BackingStore__IsResizableByUserJavaScript(
    this: *const BackingStore,
  ) -> bool;
  fn v8__BackingStore__DELETE(this: *mut BackingStore);

  fn v8__DataView__New(
    arraybuffer: *const ArrayBuffer,
    byte_offset: usize,
    length: usize,
  ) -> *const DataView;

  // `std::shared_ptr<v8::BackingStore>` plumbing.
  fn std__shared_ptr__v8__BackingStore__COPY(
    ptr: *const SharedPtrBase<BackingStore>,
  ) -> SharedPtrBase<BackingStore>;
  fn std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(
    unique_ptr: UniquePtr<BackingStore>,
  ) -> SharedPtrBase<BackingStore>;
  fn std__shared_ptr__v8__BackingStore__get(
    ptr: *const SharedPtrBase<BackingStore>,
  ) -> *mut BackingStore;
  fn std__shared_ptr__v8__BackingStore__reset(
    ptr: *mut SharedPtrBase<BackingStore>,
  );
  fn std__shared_ptr__v8__BackingStore__use_count(
    ptr: *const SharedPtrBase<BackingStore>,
  ) -> long;

  // `std::shared_ptr<v8::ArrayBuffer::Allocator>` plumbing.
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__COPY(
    ptr: *const SharedPtrBase<Allocator>,
  ) -> SharedPtrBase<Allocator>;
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__CONVERT__std__unique_ptr(
    unique_ptr: UniquePtr<Allocator>,
  ) -> SharedPtrBase<Allocator>;
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__get(
    ptr: *const SharedPtrBase<Allocator>,
  ) -> *mut Allocator;
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__reset(
    ptr: *mut SharedPtrBase<Allocator>,
  );
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__use_count(
    ptr: *const SharedPtrBase<Allocator>,
  ) -> long;
}
105
// The custom Rust-backed allocator is unavailable when the V8 sandbox is
// enabled (sandboxed array-buffer memory must come from V8's own allocator).
#[cfg(not(feature = "v8_enable_sandbox"))]
unsafe extern "C" {
  fn v8__ArrayBuffer__Allocator__NewRustAllocator(
    handle: *const c_void,
    vtable: *const RustAllocatorVtable<c_void>,
  ) -> *mut Allocator;
}
114
/// Opaque handle to a C++ `v8::ArrayBuffer::Allocator`.
///
/// Only ever used behind a pointer / `UniqueRef` / `SharedRef`; the actual
/// object lives on the C++ heap and is deleted via FFI in `Drop`.
#[repr(C)]
#[derive(Debug)]
pub struct Allocator(Opaque);
135
/// Table of callbacks backing an allocator created with
/// [`new_rust_allocator`]. Each callback receives the opaque `handle` that
/// was supplied at creation time; `drop` is invoked when the C++ allocator is
/// destroyed and is the point at which the handle's ownership is released.
///
/// `#[repr(C)]` because the C++ side reads this struct field-by-field; the
/// layout must not change.
#[cfg(not(feature = "v8_enable_sandbox"))]
#[repr(C)]
pub struct RustAllocatorVtable<T> {
  pub allocate: unsafe extern "C" fn(handle: &T, len: usize) -> *mut c_void,
  pub allocate_uninitialized:
    unsafe extern "C" fn(handle: &T, len: usize) -> *mut c_void,
  pub free: unsafe extern "C" fn(handle: &T, data: *mut c_void, len: usize),
  pub drop: unsafe extern "C" fn(handle: *const T),
}
146
// Allows `Allocator` to be held in a `SharedRef`/`SharedPtr`. Every method
// delegates to the C++ `std::shared_ptr<v8::ArrayBuffer::Allocator>` helpers,
// so the reference count is managed entirely on the C++ side.
impl Shared for Allocator {
  fn clone(ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__COPY(ptr) }
  }
  fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self> {
    unsafe {
      std__shared_ptr__v8__ArrayBuffer__Allocator__CONVERT__std__unique_ptr(
        unique_ptr,
      )
    }
  }
  fn get(ptr: &SharedPtrBase<Self>) -> *const Self {
    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__get(ptr) }
  }
  fn reset(ptr: &mut SharedPtrBase<Self>) {
    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__reset(ptr) }
  }
  fn use_count(ptr: &SharedPtrBase<Self>) -> long {
    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__use_count(ptr) }
  }
}
168
169#[inline(always)]
171pub fn new_default_allocator() -> UniqueRef<Allocator> {
172 unsafe {
173 UniqueRef::from_raw(v8__ArrayBuffer__Allocator__NewDefaultAllocator())
174 }
175}
176
/// Creates an allocator that forwards all of V8's array-buffer allocation
/// requests to the given vtable of Rust callbacks.
///
/// `handle` is passed, untouched, as the first argument of every vtable
/// callback. `vtable.drop` receives it when the allocator is destroyed, so
/// the handle typically carries ownership (e.g. a pointer obtained from
/// `Arc::into_raw` — see `test_rust_allocator` below).
///
/// # Safety
///
/// The caller must ensure `handle` stays valid for the entire lifetime of the
/// returned allocator, and that the vtable callbacks uphold the allocation
/// contract the C++ side expects (NOTE(review): those exact requirements are
/// not visible in this file — confirm against the C++ binding).
#[inline(always)]
#[cfg(not(feature = "v8_enable_sandbox"))]
pub unsafe fn new_rust_allocator<T: Sized + Send + Sync + 'static>(
  handle: *const T,
  vtable: &'static RustAllocatorVtable<T>,
) -> UniqueRef<Allocator> {
  unsafe {
    UniqueRef::from_raw(v8__ArrayBuffer__Allocator__NewRustAllocator(
      handle as *const c_void,
      // Erase `T`: the vtable layout is identical for every `T` because all
      // fields refer to the handle only by pointer/reference.
      vtable as *const RustAllocatorVtable<T>
        as *const RustAllocatorVtable<c_void>,
    ))
  }
}
196
// Verifies that dropping a Rust-backed allocator invokes the vtable's `drop`
// callback exactly once, releasing ownership of the handle.
#[test]
#[cfg(not(feature = "v8_enable_sandbox"))]
fn test_rust_allocator() {
  use std::sync::Arc;
  use std::sync::atomic::{AtomicUsize, Ordering};

  // Allocation callbacks are never expected to run in this test; the
  // allocator is created and immediately dropped.
  unsafe extern "C" fn allocate(_: &AtomicUsize, _: usize) -> *mut c_void {
    unimplemented!()
  }
  unsafe extern "C" fn allocate_uninitialized(
    _: &AtomicUsize,
    _: usize,
  ) -> *mut c_void {
    unimplemented!()
  }
  unsafe extern "C" fn free(_: &AtomicUsize, _: *mut c_void, _: usize) {
    unimplemented!()
  }
  // Reconstitutes the Arc that `Arc::into_raw` leaked below, and leaves a
  // marker value so the test can observe that this ran.
  unsafe extern "C" fn drop(x: *const AtomicUsize) {
    unsafe {
      let arc = Arc::from_raw(x);
      arc.store(42, Ordering::SeqCst);
    }
  }

  let retval = Arc::new(AtomicUsize::new(0));

  let vtable: &'static RustAllocatorVtable<AtomicUsize> =
    &RustAllocatorVtable {
      allocate,
      allocate_uninitialized,
      free,
      drop,
    };
  // The allocator is dropped at the end of this statement, which must call
  // `drop` above (observed via the marker)...
  unsafe { new_rust_allocator(Arc::into_raw(retval.clone()), vtable) };
  assert_eq!(retval.load(Ordering::SeqCst), 42);
  // ...and must have released the extra strong reference created by
  // `retval.clone()`.
  assert_eq!(Arc::strong_count(&retval), 1);
}
235
// Smoke test: constructing (and immediately dropping) the default allocator
// must not crash. V8 is initialized first because the allocator lives on the
// C++ side.
#[test]
fn test_default_allocator() {
  crate::V8::initialize_platform(
    crate::new_default_platform(0, false).make_shared(),
  );
  crate::V8::initialize();
  new_default_allocator();
}
244
impl Drop for Allocator {
  // Deletes the C++ `v8::ArrayBuffer::Allocator` this handle owns. Only runs
  // for uniquely-owned allocators (e.g. via `UniqueRef`); shared ones are
  // released through the `Shared` impl above.
  fn drop(&mut self) {
    unsafe { v8__ArrayBuffer__Allocator__DELETE(self) };
  }
}
250
/// Callback invoked by V8 when a backing store created with
/// [`ArrayBuffer::new_backing_store_from_ptr`] is garbage-collected: it
/// receives the data pointer, its length in bytes, and the opaque
/// `deleter_data` supplied at creation time, and must free the memory.
pub type BackingStoreDeleterCallback = unsafe extern "C" fn(
  data: *mut c_void,
  byte_length: usize,
  deleter_data: *mut c_void,
);
256
// Sealed trait for buffer types that can be decomposed into raw parts and
// handed to V8 as a backing store, then reconstituted and dropped later.
#[cfg(not(feature = "v8_enable_sandbox"))]
pub(crate) mod sealed {
  pub trait Rawable {
    // Length of the buffer contents in *bytes* (not elements).
    fn byte_len(&mut self) -> usize;
    // Leaks `self`, returning (opaque ownership pointer, pointer to the
    // first byte of the data). The two may or may not coincide.
    fn into_raw(self) -> (*const (), *const u8);
    // Reconstitutes and drops a value previously leaked by `into_raw`.
    // `size` is the byte length reported by `byte_len`.
    unsafe fn drop_raw(ptr: *const (), size: usize);
  }
}
265
266#[cfg(not(feature = "v8_enable_sandbox"))]
267macro_rules! rawable {
268 ($ty:ty) => {
269 impl sealed::Rawable for Box<[$ty]> {
270 fn byte_len(&mut self) -> usize {
271 self.as_mut().len() * std::mem::size_of::<$ty>()
272 }
273
274 fn into_raw(mut self) -> (*const (), *const u8) {
275 let ptr = self.as_mut_ptr();
277 std::mem::forget(self);
278 (ptr as _, ptr as _)
279 }
280
281 unsafe fn drop_raw(ptr: *const (), len: usize) {
282 _ = unsafe {
284 Self::from_raw(std::ptr::slice_from_raw_parts_mut(ptr as _, len))
285 };
286 }
287 }
288
289 impl sealed::Rawable for Vec<$ty> {
290 fn byte_len(&mut self) -> usize {
291 Vec::<$ty>::len(self) * std::mem::size_of::<$ty>()
292 }
293
294 unsafe fn drop_raw(ptr: *const (), size: usize) {
295 unsafe {
296 <Box<[$ty]> as sealed::Rawable>::drop_raw(ptr, size);
297 }
298 }
299
300 fn into_raw(self) -> (*const (), *const u8) {
301 self.into_boxed_slice().into_raw()
302 }
303 }
304 };
305}
306
307#[cfg(not(feature = "v8_enable_sandbox"))]
308rawable!(u8);
309#[cfg(not(feature = "v8_enable_sandbox"))]
310rawable!(u16);
311#[cfg(not(feature = "v8_enable_sandbox"))]
312rawable!(u32);
313#[cfg(not(feature = "v8_enable_sandbox"))]
314rawable!(u64);
315#[cfg(not(feature = "v8_enable_sandbox"))]
316rawable!(i8);
317#[cfg(not(feature = "v8_enable_sandbox"))]
318rawable!(i16);
319#[cfg(not(feature = "v8_enable_sandbox"))]
320rawable!(i32);
321#[cfg(not(feature = "v8_enable_sandbox"))]
322rawable!(i64);
323#[cfg(not(feature = "v8_enable_sandbox"))]
324rawable!(f32);
325#[cfg(not(feature = "v8_enable_sandbox"))]
326rawable!(f64);
327
// `Rawable` for any boxed value that exposes its contents as a mutable byte
// slice (e.g. `Box<[u8; N]>` via `AsMut<[u8]>`). Unlike the slice impls
// above, the ownership pointer (the box) and the data pointer can differ.
#[cfg(not(feature = "v8_enable_sandbox"))]
impl<T: Sized> sealed::Rawable for Box<T>
where
  T: AsMut<[u8]>,
{
  fn byte_len(&mut self) -> usize {
    // First `as_mut` derefs the box, second goes through `AsMut<[u8]>`.
    self.as_mut().as_mut().len()
  }

  fn into_raw(mut self) -> (*const (), *const u8) {
    let data = self.as_mut().as_mut().as_mut_ptr();
    // NOTE: `Self::into_raw` resolves to the *inherent* `Box::into_raw`
    // (inherent associated functions take precedence over this trait
    // method), leaking the box and yielding the ownership pointer.
    let ptr = Self::into_raw(self);
    (ptr as _, data)
  }

  unsafe fn drop_raw(ptr: *const (), _len: usize) {
    // The byte length is not needed: `Box<T>` is sized, so the pointer alone
    // carries the full layout.
    unsafe {
      _ = Self::from_raw(ptr as _);
    }
  }
}
349
/// Opaque handle to a C++ `v8::BackingStore`: the memory (data pointer +
/// byte length) behind an `ArrayBuffer`/`SharedArrayBuffer`.
///
/// The `[usize; 6]` payload only reserves storage matching the C++ object's
/// size; the fields are never interpreted from Rust — all access goes through
/// the FFI accessors below.
#[repr(C)]
#[derive(Debug)]
pub struct BackingStore([usize; 6]);
364
// SAFETY: presumed sound because V8's `BackingStore` is designed to be passed
// between threads (e.g. for transferring buffers across isolates) — the Rust
// side never touches its internals directly. NOTE(review): confirm against
// the V8 BackingStore documentation.
unsafe impl Send for BackingStore {}
366
impl BackingStore {
  /// Returns a pointer to the start of the buffer, or `None` when the C++
  /// side reports a null data pointer (e.g. an empty backing store).
  #[inline(always)]
  pub fn data(&self) -> Option<NonNull<c_void>> {
    let raw_ptr =
      unsafe { v8__BackingStore__Data(self as *const _ as *mut Self) };
    NonNull::new(raw_ptr)
  }

  /// Length of the buffer in bytes.
  #[inline(always)]
  pub fn byte_length(&self) -> usize {
    unsafe { v8__BackingStore__ByteLength(self) }
  }

  /// Whether this backing store belongs to a `SharedArrayBuffer` (i.e. may
  /// be accessed from multiple threads).
  #[inline(always)]
  pub fn is_shared(&self) -> bool {
    unsafe { v8__BackingStore__IsShared(self) }
  }

  /// Whether JavaScript code can resize this buffer (resizable
  /// `ArrayBuffer` / growable `SharedArrayBuffer`).
  #[inline(always)]
  pub fn is_resizable_by_user_javascript(&self) -> bool {
    unsafe { v8__BackingStore__IsResizableByUserJavaScript(self) }
  }
}
401
402impl Deref for BackingStore {
403 type Target = [Cell<u8>];
404
405 #[inline]
407 fn deref(&self) -> &Self::Target {
408 let data = self
411 .data()
412 .unwrap_or_else(NonNull::dangling)
413 .cast::<Cell<u8>>();
414 let len = self.byte_length();
415 unsafe { slice::from_raw_parts(data.as_ptr(), len) }
416 }
417}
418
impl Drop for BackingStore {
  // Deletes the C++ `v8::BackingStore` when the last Rust owner goes away.
  #[inline]
  fn drop(&mut self) {
    unsafe { v8__BackingStore__DELETE(self) };
  }
}
425
// Allows `BackingStore` to be held in a `SharedRef`/`SharedPtr`; every method
// delegates to the C++ `std::shared_ptr<v8::BackingStore>` helpers, so the
// reference count is managed on the C++ side.
impl Shared for BackingStore {
  #[inline]
  fn clone(ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
    unsafe { std__shared_ptr__v8__BackingStore__COPY(ptr) }
  }
  #[inline]
  fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self> {
    unsafe {
      std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(unique_ptr)
    }
  }
  #[inline]
  fn get(ptr: &SharedPtrBase<Self>) -> *const Self {
    unsafe { std__shared_ptr__v8__BackingStore__get(ptr) }
  }
  #[inline]
  fn reset(ptr: &mut SharedPtrBase<Self>) {
    unsafe { std__shared_ptr__v8__BackingStore__reset(ptr) }
  }
  #[inline]
  fn use_count(ptr: &SharedPtrBase<Self>) -> long {
    unsafe { std__shared_ptr__v8__BackingStore__use_count(ptr) }
  }
}
450
impl ArrayBuffer {
  /// Creates a new `ArrayBuffer` of `byte_length` bytes, allocated with the
  /// isolate's array-buffer allocator.
  #[inline(always)]
  pub fn new<'s>(
    scope: &PinScope<'s, '_, ()>,
    byte_length: usize,
  ) -> Local<'s, ArrayBuffer> {
    unsafe {
      scope.cast_local(|sd| {
        v8__ArrayBuffer__New__with_byte_length(
          sd.get_isolate_ptr(),
          byte_length,
        )
      })
    }
    // Presumed infallible on the C++ side (no empty-handle path is handled
    // here) — TODO confirm against the binding.
    .unwrap()
  }

  /// Creates an `ArrayBuffer` over an existing backing store. The buffer
  /// shares ownership of the store (the `SharedRef` count is bumped on the
  /// C++ side).
  #[inline(always)]
  pub fn with_backing_store<'s>(
    scope: &PinScope<'s, '_, ()>,
    backing_store: &SharedRef<BackingStore>,
  ) -> Local<'s, ArrayBuffer> {
    unsafe {
      scope.cast_local(|sd| {
        v8__ArrayBuffer__New__with_backing_store(
          sd.get_isolate_ptr(),
          backing_store,
        )
      })
    }
    .unwrap()
  }

  /// Length of the buffer contents in bytes.
  #[inline(always)]
  pub fn byte_length(&self) -> usize {
    unsafe { v8__ArrayBuffer__ByteLength(self) }
  }

  /// Whether this buffer may be detached (transferred).
  #[inline(always)]
  pub fn is_detachable(&self) -> bool {
    unsafe { v8__ArrayBuffer__IsDetachable(self) }
  }

  /// Whether this buffer has already been detached.
  #[inline(always)]
  pub fn was_detached(&self) -> bool {
    // Fast path: a buffer with a non-zero length cannot have been detached
    // (detaching empties the buffer), so the FFI call is skipped.
    if self.byte_length() != 0 {
      return false;
    }
    unsafe { v8__ArrayBuffer__WasDetached(self) }
  }

  /// Detaches this buffer, optionally checking `key` against the detach key
  /// set via [`Self::set_detach_key`].
  ///
  /// Returns `Some(true)` on success and `Some(true)` unconditionally for
  /// non-detachable buffers (the guard below avoids calling into V8 for
  /// those); `None`/`Some(false)` come from the `MaybeBool` conversion when
  /// the C++ call does not succeed — presumably when the key check fails.
  #[inline(always)]
  pub fn detach(&self, key: Option<Local<Value>>) -> Option<bool> {
    if self.is_detachable() {
      // No key is represented as a null pointer on the C++ side.
      let key = key.map_or(null(), |v| &*v as *const Value);
      unsafe { v8__ArrayBuffer__Detach(self, key) }.into()
    } else {
      Some(true)
    }
  }

  /// Sets the detach key that future [`Self::detach`] calls must present.
  #[inline(always)]
  pub fn set_detach_key(&self, key: Local<Value>) {
    unsafe { v8__ArrayBuffer__SetDetachKey(self, &*key) };
  }

  /// Pointer to the start of the buffer contents, or `None` if V8 reports a
  /// null data pointer (e.g. empty or detached buffer).
  #[inline(always)]
  pub fn data(&self) -> Option<NonNull<c_void>> {
    let raw_ptr = unsafe { v8__ArrayBuffer__Data(self) };
    NonNull::new(raw_ptr)
  }

  /// Returns a shared handle to this buffer's backing store.
  #[inline(always)]
  pub fn get_backing_store(&self) -> SharedRef<BackingStore> {
    unsafe { v8__ArrayBuffer__GetBackingStore(self) }
  }

  /// Allocates a new zero-initialized backing store of `byte_length` bytes
  /// using the isolate's allocator, not yet attached to any `ArrayBuffer`.
  #[inline(always)]
  pub fn new_backing_store(
    scope: &mut Isolate,
    byte_length: usize,
  ) -> UniqueRef<BackingStore> {
    unsafe {
      UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore__with_byte_length(
        (*scope).as_real_ptr(),
        byte_length,
      ))
    }
  }

  /// Creates a backing store that takes ownership of `data`; convenience
  /// wrapper over [`Self::new_backing_store_from_bytes`].
  #[inline(always)]
  #[cfg(not(feature = "v8_enable_sandbox"))]
  pub fn new_backing_store_from_boxed_slice(
    data: Box<[u8]>,
  ) -> UniqueRef<BackingStore> {
    Self::new_backing_store_from_bytes(data)
  }

  /// Creates a backing store that takes ownership of `data`; convenience
  /// wrapper over [`Self::new_backing_store_from_bytes`].
  #[inline(always)]
  #[cfg(not(feature = "v8_enable_sandbox"))]
  pub fn new_backing_store_from_vec(data: Vec<u8>) -> UniqueRef<BackingStore> {
    Self::new_backing_store_from_bytes(data)
  }

  /// Creates a backing store that takes ownership of an arbitrary `Rawable`
  /// buffer (`Vec<_>`/`Box<[_]>` of numeric types, or `Box<T: AsMut<[u8]>>`).
  /// The buffer is leaked into raw parts now and reconstituted/dropped by the
  /// deleter callback when V8 releases the store.
  #[inline(always)]
  #[cfg(not(feature = "v8_enable_sandbox"))]
  pub fn new_backing_store_from_bytes<T>(
    mut bytes: T,
  ) -> UniqueRef<BackingStore>
  where
    T: sealed::Rawable,
  {
    // `len` is the length in *bytes*; it is handed back to `drop_raw`
    // through the deleter callback below.
    let len = bytes.byte_len();

    // `ptr` is the opaque ownership token, `slice` points at the data.
    let (ptr, slice) = T::into_raw(bytes);

    // Deleter invoked by V8: reconstitutes the original container from the
    // ownership token (passed as `deleter_data`) and drops it.
    unsafe extern "C" fn drop_rawable<T: sealed::Rawable>(
      _ptr: *mut c_void,
      len: usize,
      data: *mut c_void,
    ) {
      unsafe { T::drop_raw(data as _, len) }
    }

    unsafe {
      Self::new_backing_store_from_ptr(
        slice as _,
        len,
        drop_rawable::<T>,
        ptr as _,
      )
    }
  }

  /// Creates a backing store over caller-managed memory. `deleter_callback`
  /// is invoked with (`data_ptr`, `byte_length`, `deleter_data`) when V8 no
  /// longer needs the memory.
  ///
  /// # Safety
  ///
  /// `data_ptr` must be valid for reads/writes of `byte_length` bytes until
  /// the deleter runs, and the deleter must correctly release it.
  #[inline(always)]
  pub unsafe fn new_backing_store_from_ptr(
    data_ptr: *mut c_void,
    byte_length: usize,
    deleter_callback: BackingStoreDeleterCallback,
    deleter_data: *mut c_void,
  ) -> UniqueRef<BackingStore> {
    unsafe {
      UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore__with_data(
        data_ptr,
        byte_length,
        deleter_callback,
        deleter_data,
      ))
    }
  }
}
685
impl DataView {
  /// Creates a `DataView` over `length` bytes of `arraybuffer`, starting at
  /// `byte_offset`.
  ///
  /// NOTE(review): offset/length are forwarded to V8 unchecked here;
  /// out-of-range values are presumably rejected (or fatal) on the C++ side —
  /// confirm against the binding.
  #[inline(always)]
  pub fn new<'s>(
    scope: &PinScope<'s, '_, ()>,
    arraybuffer: Local<'s, ArrayBuffer>,
    byte_offset: usize,
    length: usize,
  ) -> Local<'s, DataView> {
    unsafe {
      scope
        .cast_local(|_| v8__DataView__New(&*arraybuffer, byte_offset, length))
    }
    .unwrap()
  }
}