use std::cell::Cell;
use std::ffi::c_void;
use std::ops::Deref;
use std::ptr::NonNull;
use std::ptr::null;
use std::slice;

use crate::ArrayBuffer;
use crate::DataView;
use crate::Isolate;
use crate::Local;
use crate::Value;
use crate::isolate::RealIsolate;
use crate::scope::PinScope;
use crate::support::MaybeBool;
use crate::support::Opaque;
use crate::support::Shared;
use crate::support::SharedPtrBase;
use crate::support::SharedRef;
use crate::support::UniquePtr;
use crate::support::UniqueRef;
use crate::support::long;

unsafe extern "C" {
  fn v8__ArrayBuffer__Allocator__NewDefaultAllocator() -> *mut Allocator;
  fn v8__ArrayBuffer__Allocator__NewRustAllocator(
    handle: *const c_void,
    vtable: *const RustAllocatorVtable<c_void>,
  ) -> *mut Allocator;
  fn v8__ArrayBuffer__Allocator__DELETE(this: *mut Allocator);
  fn v8__ArrayBuffer__New__with_byte_length(
    isolate: *mut RealIsolate,
    byte_length: usize,
  ) -> *const ArrayBuffer;
  fn v8__ArrayBuffer__New__with_backing_store(
    isolate: *mut RealIsolate,
    backing_store: *const SharedRef<BackingStore>,
  ) -> *const ArrayBuffer;
  fn v8__ArrayBuffer__Detach(
    this: *const ArrayBuffer,
    key: *const Value,
  ) -> MaybeBool;
  fn v8__ArrayBuffer__SetDetachKey(this: *const ArrayBuffer, key: *const Value);
  fn v8__ArrayBuffer__Data(this: *const ArrayBuffer) -> *mut c_void;
  fn v8__ArrayBuffer__IsDetachable(this: *const ArrayBuffer) -> bool;
  fn v8__ArrayBuffer__WasDetached(this: *const ArrayBuffer) -> bool;
  fn v8__ArrayBuffer__ByteLength(this: *const ArrayBuffer) -> usize;
  fn v8__ArrayBuffer__GetBackingStore(
    this: *const ArrayBuffer,
  ) -> SharedRef<BackingStore>;
  fn v8__ArrayBuffer__NewBackingStore__with_byte_length(
    isolate: *mut RealIsolate,
    byte_length: usize,
  ) -> *mut BackingStore;
  fn v8__ArrayBuffer__NewBackingStore__with_data(
    data: *mut c_void,
    byte_length: usize,
    deleter: BackingStoreDeleterCallback,
    deleter_data: *mut c_void,
  ) -> *mut BackingStore;

  fn v8__BackingStore__Data(this: *const BackingStore) -> *mut c_void;
  fn v8__BackingStore__ByteLength(this: *const BackingStore) -> usize;
  fn v8__BackingStore__IsShared(this: *const BackingStore) -> bool;
  fn v8__BackingStore__IsResizableByUserJavaScript(
    this: *const BackingStore,
  ) -> bool;
  fn v8__BackingStore__DELETE(this: *mut BackingStore);

  fn v8__DataView__New(
    arraybuffer: *const ArrayBuffer,
    byte_offset: usize,
    length: usize,
  ) -> *const DataView;

  fn std__shared_ptr__v8__BackingStore__COPY(
    ptr: *const SharedPtrBase<BackingStore>,
  ) -> SharedPtrBase<BackingStore>;
  fn std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(
    unique_ptr: UniquePtr<BackingStore>,
  ) -> SharedPtrBase<BackingStore>;
  fn std__shared_ptr__v8__BackingStore__get(
    ptr: *const SharedPtrBase<BackingStore>,
  ) -> *mut BackingStore;
  fn std__shared_ptr__v8__BackingStore__reset(
    ptr: *mut SharedPtrBase<BackingStore>,
  );
  fn std__shared_ptr__v8__BackingStore__use_count(
    ptr: *const SharedPtrBase<BackingStore>,
  ) -> long;

  fn std__shared_ptr__v8__ArrayBuffer__Allocator__COPY(
    ptr: *const SharedPtrBase<Allocator>,
  ) -> SharedPtrBase<Allocator>;
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__CONVERT__std__unique_ptr(
    unique_ptr: UniquePtr<Allocator>,
  ) -> SharedPtrBase<Allocator>;
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__get(
    ptr: *const SharedPtrBase<Allocator>,
  ) -> *mut Allocator;
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__reset(
    ptr: *mut SharedPtrBase<Allocator>,
  );
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__use_count(
    ptr: *const SharedPtrBase<Allocator>,
  ) -> long;
}

/// A wrapper around V8's `ArrayBuffer::Allocator`, the embedder-provided
/// allocator used for the backing memory of `ArrayBuffer`s.
#[repr(C)]
#[derive(Debug)]
pub struct Allocator(Opaque);

/// Vtable of callbacks backing an [`Allocator`] implemented in Rust; see
/// [`new_rust_allocator`]. Per V8's `ArrayBuffer::Allocator` contract,
/// `allocate` must return zero-initialized memory, `allocate_uninitialized`
/// may leave the memory uninitialized, `free` releases a prior allocation,
/// and `drop` destroys the handle when the allocator itself is dropped.
#[repr(C)]
pub struct RustAllocatorVtable<T> {
  pub allocate: unsafe extern "C" fn(handle: &T, len: usize) -> *mut c_void,
  pub allocate_uninitialized:
    unsafe extern "C" fn(handle: &T, len: usize) -> *mut c_void,
  pub free: unsafe extern "C" fn(handle: &T, data: *mut c_void, len: usize),
  pub drop: unsafe extern "C" fn(handle: *const T),
}

impl Shared for Allocator {
  fn clone(ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__COPY(ptr) }
  }
  fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self> {
    unsafe {
      std__shared_ptr__v8__ArrayBuffer__Allocator__CONVERT__std__unique_ptr(
        unique_ptr,
      )
    }
  }
  fn get(ptr: &SharedPtrBase<Self>) -> *const Self {
    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__get(ptr) }
  }
  fn reset(ptr: &mut SharedPtrBase<Self>) {
    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__reset(ptr) }
  }
  fn use_count(ptr: &SharedPtrBase<Self>) -> long {
    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__use_count(ptr) }
  }
}

/// Creates V8's default `ArrayBuffer::Allocator`.
#[inline(always)]
pub fn new_default_allocator() -> UniqueRef<Allocator> {
  unsafe {
    UniqueRef::from_raw(v8__ArrayBuffer__Allocator__NewDefaultAllocator())
  }
}

/// Creates an allocator whose callbacks are provided by Rust code.
///
/// # Safety
///
/// `handle` must stay valid until the vtable's `drop` callback is invoked,
/// and the vtable functions must uphold V8's `ArrayBuffer::Allocator`
/// contract (in particular, `allocate` must return zero-initialized memory).
#[inline(always)]
pub unsafe fn new_rust_allocator<T: Sized + Send + Sync + 'static>(
  handle: *const T,
  vtable: &'static RustAllocatorVtable<T>,
) -> UniqueRef<Allocator> {
  unsafe {
    UniqueRef::from_raw(v8__ArrayBuffer__Allocator__NewRustAllocator(
      handle as *const c_void,
      vtable as *const RustAllocatorVtable<T>
        as *const RustAllocatorVtable<c_void>,
    ))
  }
}

#[test]
fn test_rust_allocator() {
  use std::sync::Arc;
  use std::sync::atomic::{AtomicUsize, Ordering};

  unsafe extern "C" fn allocate(_: &AtomicUsize, _: usize) -> *mut c_void {
    unimplemented!()
  }
  unsafe extern "C" fn allocate_uninitialized(
    _: &AtomicUsize,
    _: usize,
  ) -> *mut c_void {
    unimplemented!()
  }
  unsafe extern "C" fn free(_: &AtomicUsize, _: *mut c_void, _: usize) {
    unimplemented!()
  }
  unsafe extern "C" fn drop(x: *const AtomicUsize) {
    unsafe {
      let arc = Arc::from_raw(x);
      arc.store(42, Ordering::SeqCst);
    }
  }

  let retval = Arc::new(AtomicUsize::new(0));

  let vtable: &'static RustAllocatorVtable<AtomicUsize> =
    &RustAllocatorVtable {
      allocate,
      allocate_uninitialized,
      free,
      drop,
    };
  // The allocator is dropped immediately, which must run the vtable's `drop`
  // callback: it reclaims the Arc and stores 42.
  unsafe { new_rust_allocator(Arc::into_raw(retval.clone()), vtable) };
  assert_eq!(retval.load(Ordering::SeqCst), 42);
  assert_eq!(Arc::strong_count(&retval), 1);
}

#[test]
fn test_default_allocator() {
  new_default_allocator();
}

impl Drop for Allocator {
  fn drop(&mut self) {
    unsafe { v8__ArrayBuffer__Allocator__DELETE(self) };
  }
}

/// Deleter callback passed to [`ArrayBuffer::new_backing_store_from_ptr`]; V8
/// invokes it when the backing store is destroyed so the embedder can free
/// `data`.
pub type BackingStoreDeleterCallback = unsafe extern "C" fn(
  data: *mut c_void,
  byte_length: usize,
  deleter_data: *mut c_void,
);

pub(crate) mod sealed {
  /// Conversion between an owned byte container and the raw pointers handed
  /// to V8: `into_raw` leaks the container and returns (owner pointer for the
  /// deleter, data pointer); `drop_raw` reclaims it once V8 is done.
  pub trait Rawable {
    /// Length of the contents in bytes (not elements).
    fn byte_len(&mut self) -> usize;
    fn into_raw(self) -> (*const (), *const u8);
    unsafe fn drop_raw(ptr: *const (), size: usize);
  }
}

macro_rules! rawable {
  ($ty:ty) => {
    impl sealed::Rawable for Box<[$ty]> {
      fn byte_len(&mut self) -> usize {
        self.as_mut().len() * std::mem::size_of::<$ty>()
      }

      fn into_raw(mut self) -> (*const (), *const u8) {
        let ptr = self.as_mut_ptr();
        // Leak the slice; ownership is reclaimed in `drop_raw`.
        std::mem::forget(self);
        (ptr as _, ptr as _)
      }

      unsafe fn drop_raw(ptr: *const (), len: usize) {
        // `len` is a byte length; convert it back to an element count before
        // reconstructing the boxed slice.
        _ = unsafe {
          Self::from_raw(std::ptr::slice_from_raw_parts_mut(
            ptr as _,
            len / std::mem::size_of::<$ty>(),
          ))
        };
      }
    }

    impl sealed::Rawable for Vec<$ty> {
      fn byte_len(&mut self) -> usize {
        Vec::<$ty>::len(self) * std::mem::size_of::<$ty>()
      }

      unsafe fn drop_raw(ptr: *const (), size: usize) {
        unsafe {
          <Box<[$ty]> as sealed::Rawable>::drop_raw(ptr, size);
        }
      }

      fn into_raw(self) -> (*const (), *const u8) {
        <Box<[$ty]> as sealed::Rawable>::into_raw(self.into_boxed_slice())
      }
    }
  };
}

rawable!(u8);
rawable!(u16);
rawable!(u32);
rawable!(u64);
rawable!(i8);
rawable!(i16);
rawable!(i32);
rawable!(i64);
rawable!(f32);
rawable!(f64);

impl<T: Sized> sealed::Rawable for Box<T>
where
  T: AsMut<[u8]>,
{
  fn byte_len(&mut self) -> usize {
    self.as_mut().as_mut().len()
  }

  fn into_raw(mut self) -> (*const (), *const u8) {
    let data = self.as_mut().as_mut().as_mut_ptr();
    // The inherent `Box::into_raw` takes precedence over the trait method
    // here: it leaks the box and yields the owner pointer for the deleter.
    let ptr = Self::into_raw(self);
    (ptr as _, data)
  }

  unsafe fn drop_raw(ptr: *const (), _len: usize) {
    unsafe {
      _ = Self::from_raw(ptr as _);
    }
  }
}

/// A wrapper around the backing store (i.e. the raw memory) of an
/// `ArrayBuffer`. A backing store can be shared between multiple buffers and
/// can outlive the JavaScript objects that reference it.
#[repr(C)]
#[derive(Debug)]
pub struct BackingStore([usize; 6]);

unsafe impl Send for BackingStore {}

impl BackingStore {
  /// Returns a pointer to the start of the backing memory, or `None` for an
  /// empty backing store.
  #[inline(always)]
  pub fn data(&self) -> Option<NonNull<c_void>> {
    let raw_ptr =
      unsafe { v8__BackingStore__Data(self as *const _ as *mut Self) };
    NonNull::new(raw_ptr)
  }

  /// The length (in bytes) of this backing store.
  #[inline(always)]
  pub fn byte_length(&self) -> usize {
    unsafe { v8__BackingStore__ByteLength(self) }
  }

  /// Indicates whether the backing store was created for a
  /// `SharedArrayBuffer`.
  #[inline(always)]
  pub fn is_shared(&self) -> bool {
    unsafe { v8__BackingStore__IsShared(self) }
  }

  /// Indicates whether the backing store belongs to a resizable `ArrayBuffer`
  /// or growable `SharedArrayBuffer`, and thus may be resized by user
  /// JavaScript code.
  #[inline(always)]
  pub fn is_resizable_by_user_javascript(&self) -> bool {
    unsafe { v8__BackingStore__IsResizableByUserJavaScript(self) }
  }
}

impl Deref for BackingStore {
  type Target = [Cell<u8>];

  // The contents are exposed as `Cell<u8>` because JavaScript (or another
  // view of the same memory) may mutate the bytes at any time.
  #[inline]
  fn deref(&self) -> &Self::Target {
    let data = self
      .data()
      .unwrap_or_else(NonNull::dangling)
      .cast::<Cell<u8>>();
    let len = self.byte_length();
    unsafe { slice::from_raw_parts(data.as_ptr(), len) }
  }
}
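
// NOTE: illustrative test added as a usage sketch; it is not part of the
// original test set in this file. It assumes that creating a `BackingStore`
// from external bytes does not require an initialized isolate or platform.
#[test]
fn test_backing_store_from_vec_roundtrip() {
  let store = ArrayBuffer::new_backing_store_from_vec(vec![1u8, 2, 3]);
  assert_eq!(store.byte_length(), 3);
  assert!(!store.is_shared());
  // The `Deref` impl exposes the contents as a `[Cell<u8>]` slice.
  assert_eq!(store[0].get(), 1);
  assert_eq!(store[2].get(), 3);
}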

impl Drop for BackingStore {
  #[inline]
  fn drop(&mut self) {
    unsafe { v8__BackingStore__DELETE(self) };
  }
}

impl Shared for BackingStore {
  #[inline]
  fn clone(ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
    unsafe { std__shared_ptr__v8__BackingStore__COPY(ptr) }
  }
  #[inline]
  fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self> {
    unsafe {
      std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(unique_ptr)
    }
  }
  #[inline]
  fn get(ptr: &SharedPtrBase<Self>) -> *const Self {
    unsafe { std__shared_ptr__v8__BackingStore__get(ptr) }
  }
  #[inline]
  fn reset(ptr: &mut SharedPtrBase<Self>) {
    unsafe { std__shared_ptr__v8__BackingStore__reset(ptr) }
  }
  #[inline]
  fn use_count(ptr: &SharedPtrBase<Self>) -> long {
    unsafe { std__shared_ptr__v8__BackingStore__use_count(ptr) }
  }
}

impl ArrayBuffer {
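  /// Creates a new `ArrayBuffer` of `byte_length` bytes, allocated with the
  /// isolate's `ArrayBuffer::Allocator`.
  ///
  /// # Example
  ///
  /// Illustrative sketch (isolate and handle-scope construction elided; the
  /// `scope` binding is assumed to exist):
  ///
  /// ```ignore
  /// let buf = v8::ArrayBuffer::new(scope, 1024);
  /// assert_eq!(buf.byte_length(), 1024);
  /// ```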
  #[inline(always)]
  pub fn new<'s>(
    scope: &PinScope<'s, '_, ()>,
    byte_length: usize,
  ) -> Local<'s, ArrayBuffer> {
    unsafe {
      scope.cast_local(|sd| {
        v8__ArrayBuffer__New__with_byte_length(
          sd.get_isolate_ptr(),
          byte_length,
        )
      })
    }
    .unwrap()
  }

  /// Creates a new `ArrayBuffer` over an existing `BackingStore`, which may
  /// also back other buffers.
  #[inline(always)]
  pub fn with_backing_store<'s>(
    scope: &PinScope<'s, '_, ()>,
    backing_store: &SharedRef<BackingStore>,
  ) -> Local<'s, ArrayBuffer> {
    unsafe {
      scope.cast_local(|sd| {
        v8__ArrayBuffer__New__with_backing_store(
          sd.get_isolate_ptr(),
          backing_store,
        )
      })
    }
    .unwrap()
  }

  /// The length (in bytes) of this `ArrayBuffer`.
  #[inline(always)]
  pub fn byte_length(&self) -> usize {
    unsafe { v8__ArrayBuffer__ByteLength(self) }
  }

  /// Returns `true` if this `ArrayBuffer` may be detached.
  #[inline(always)]
  pub fn is_detachable(&self) -> bool {
    unsafe { v8__ArrayBuffer__IsDetachable(self) }
  }

  /// Returns `true` if this `ArrayBuffer` has already been detached.
  #[inline(always)]
  pub fn was_detached(&self) -> bool {
    // Fast path: a buffer with a non-zero byte length cannot be detached.
    if self.byte_length() != 0 {
      return false;
    }
    unsafe { v8__ArrayBuffer__WasDetached(self) }
  }

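  /// Detaches this `ArrayBuffer`, releasing its backing store and setting its
  /// byte length to zero. If a detach key has been set, the matching `key`
  /// must be supplied; otherwise detaching fails and `None` is returned.
  /// Buffers that are not detachable are left unchanged.
  ///
  /// # Example
  ///
  /// Illustrative sketch (scope setup elided):
  ///
  /// ```ignore
  /// let buf = v8::ArrayBuffer::new(scope, 16);
  /// if buf.is_detachable() {
  ///   buf.detach(None).unwrap();
  ///   assert!(buf.was_detached());
  ///   assert_eq!(buf.byte_length(), 0);
  /// }
  /// ```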
  #[inline(always)]
  pub fn detach(&self, key: Option<Local<Value>>) -> Option<bool> {
    // V8 aborts the process when `Detach()` is called on a buffer that is not
    // detachable, so guard against that here.
    if self.is_detachable() {
      let key = key.map_or(null(), |v| &*v as *const Value);
      unsafe { v8__ArrayBuffer__Detach(self, key) }.into()
    } else {
      Some(true)
    }
  }

  /// Sets the detach key: later calls to [`detach`](Self::detach) must pass
  /// the same value to succeed.
  #[inline(always)]
  pub fn set_detach_key(&self, key: Local<Value>) {
    unsafe { v8__ArrayBuffer__SetDetachKey(self, &*key) };
  }

  /// Returns a pointer to the buffer's contents, or `None` if the pointer is
  /// null (e.g. for an empty or detached buffer).
  #[inline(always)]
  pub fn data(&self) -> Option<NonNull<c_void>> {
    let raw_ptr = unsafe { v8__ArrayBuffer__Data(self) };
    NonNull::new(raw_ptr)
  }

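  /// Returns the `BackingStore`, shared with V8, that holds this buffer's
  /// memory.
  ///
  /// # Example
  ///
  /// Illustrative sketch (scope setup elided):
  ///
  /// ```ignore
  /// let buf = v8::ArrayBuffer::new(scope, 8);
  /// let store = buf.get_backing_store();
  /// assert_eq!(store.byte_length(), 8);
  /// ```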
  #[inline(always)]
  pub fn get_backing_store(&self) -> SharedRef<BackingStore> {
    unsafe { v8__ArrayBuffer__GetBackingStore(self) }
  }

  /// Allocates a new `BackingStore` of `byte_length` bytes using the
  /// isolate's `ArrayBuffer::Allocator`.
  #[inline(always)]
  pub fn new_backing_store(
    scope: &mut Isolate,
    byte_length: usize,
  ) -> UniqueRef<BackingStore> {
    unsafe {
      UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore__with_byte_length(
        (*scope).as_real_ptr(),
        byte_length,
      ))
    }
  }

  /// Creates a `BackingStore` that takes ownership of the given boxed slice.
  #[inline(always)]
  pub fn new_backing_store_from_boxed_slice(
    data: Box<[u8]>,
  ) -> UniqueRef<BackingStore> {
    Self::new_backing_store_from_bytes(data)
  }

  /// Creates a `BackingStore` that takes ownership of the given `Vec<u8>`.
  #[inline(always)]
  pub fn new_backing_store_from_vec(data: Vec<u8>) -> UniqueRef<BackingStore> {
    Self::new_backing_store_from_bytes(data)
  }

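  /// Creates a `BackingStore` from any owned byte container supported by this
  /// module (`Vec<T>` and `Box<[T]>` for the primitive numeric types, or
  /// `Box<T>` where `T: AsMut<[u8]>`). The container is leaked and released
  /// again when V8 drops the backing store.
  ///
  /// # Example
  ///
  /// Illustrative sketch (no isolate is taken by this constructor):
  ///
  /// ```ignore
  /// let store = v8::ArrayBuffer::new_backing_store_from_bytes(vec![0u16; 4]);
  /// assert_eq!(store.byte_length(), 8); // 4 elements * 2 bytes each
  /// ```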
  #[inline(always)]
  pub fn new_backing_store_from_bytes<T>(
    mut bytes: T,
  ) -> UniqueRef<BackingStore>
  where
    T: sealed::Rawable,
  {
    let len = bytes.byte_len();

    let (ptr, slice) = T::into_raw(bytes);

    // Deleter invoked by V8: `data` is the owner pointer returned by
    // `into_raw`, and `len` is the byte length of the store.
    unsafe extern "C" fn drop_rawable<T: sealed::Rawable>(
      _ptr: *mut c_void,
      len: usize,
      data: *mut c_void,
    ) {
      unsafe { T::drop_raw(data as _, len) }
    }

    unsafe {
      Self::new_backing_store_from_ptr(
        slice as _,
        len,
        drop_rawable::<T>,
        ptr as _,
      )
    }
  }

  /// Creates a `BackingStore` over caller-managed memory.
  ///
  /// # Safety
  ///
  /// `data_ptr` must point to `byte_length` bytes that remain valid until
  /// `deleter_callback` is invoked with `deleter_data`.
  #[inline(always)]
  pub unsafe fn new_backing_store_from_ptr(
    data_ptr: *mut c_void,
    byte_length: usize,
    deleter_callback: BackingStoreDeleterCallback,
    deleter_data: *mut c_void,
  ) -> UniqueRef<BackingStore> {
    unsafe {
      UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore__with_data(
        data_ptr,
        byte_length,
        deleter_callback,
        deleter_data,
      ))
    }
  }
}

impl DataView {
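  /// Creates a new `DataView` over `length` bytes of `arraybuffer`, starting
  /// at `byte_offset`.
  ///
  /// # Example
  ///
  /// Illustrative sketch (scope setup elided):
  ///
  /// ```ignore
  /// let buf = v8::ArrayBuffer::new(scope, 16);
  /// let view = v8::DataView::new(scope, buf, 4, 8);
  /// assert_eq!(view.byte_length(), 8);
  /// ```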
  #[inline(always)]
  pub fn new<'s>(
    scope: &PinScope<'s, '_, ()>,
    arraybuffer: Local<'s, ArrayBuffer>,
    byte_offset: usize,
    length: usize,
  ) -> Local<'s, DataView> {
    unsafe {
      scope
        .cast_local(|_| v8__DataView__New(&*arraybuffer, byte_offset, length))
    }
    .unwrap()
  }
}