1use std::cell::Cell;
4use std::ffi::c_void;
5use std::ops::Deref;
6use std::ptr::null_mut;
7use std::slice;
8
9use crate::support::long;
10use crate::support::Opaque;
11use crate::support::Shared;
12use crate::support::SharedPtrBase;
13use crate::support::SharedRef;
14use crate::support::UniquePtr;
15use crate::support::UniqueRef;
16use crate::ArrayBuffer;
17use crate::HandleScope;
18use crate::Isolate;
19use crate::Local;
20
// Raw bindings to the C++ glue layer. Every function here is implemented on
// the C++ side of this crate; Rust only ever passes pointers to the opaque
// V8 objects or to the `std::shared_ptr`/`std::unique_ptr` wrapper types
// declared in the `support` module.
extern "C" {
  // Heap-allocates V8's built-in ArrayBuffer::Allocator.
  fn v8__ArrayBuffer__Allocator__NewDefaultAllocator() -> *mut Allocator;
  // Wraps a Rust-provided (handle, vtable) pair in a C++ Allocator subclass
  // that forwards every allocation callback to the vtable.
  fn v8__ArrayBuffer__Allocator__NewRustAllocator(
    handle: *const c_void,
    vtable: *const RustAllocatorVtable<c_void>,
  ) -> *mut Allocator;
  // Invokes `delete` on the C++ allocator object.
  fn v8__ArrayBuffer__Allocator__DELETE(this: *mut Allocator);
  // Creates an ArrayBuffer with a freshly allocated backing store of
  // `byte_length` bytes.
  fn v8__ArrayBuffer__New__with_byte_length(
    isolate: *mut Isolate,
    byte_length: usize,
  ) -> *const ArrayBuffer;
  // Creates an ArrayBuffer over an existing (shared) backing store.
  fn v8__ArrayBuffer__New__with_backing_store(
    isolate: *mut Isolate,
    backing_store: *const SharedRef<BackingStore>,
  ) -> *const ArrayBuffer;
  fn v8__ArrayBuffer__Detach(this: *const ArrayBuffer);
  fn v8__ArrayBuffer__IsDetachable(this: *const ArrayBuffer) -> bool;
  fn v8__ArrayBuffer__ByteLength(this: *const ArrayBuffer) -> usize;
  // Returns a new shared_ptr reference to the buffer's backing store.
  fn v8__ArrayBuffer__GetBackingStore(
    this: *const ArrayBuffer,
  ) -> SharedRef<BackingStore>;
  // Allocates a standalone backing store of `byte_length` bytes.
  fn v8__ArrayBuffer__NewBackingStore__with_byte_length(
    isolate: *mut Isolate,
    byte_length: usize,
  ) -> *mut BackingStore;
  // Wraps caller-owned memory in a backing store; `deleter` is invoked with
  // (data, byte_length, deleter_data) when the store is destroyed.
  fn v8__ArrayBuffer__NewBackingStore__with_data(
    data: *mut c_void,
    byte_length: usize,
    deleter: BackingStoreDeleterCallback,
    deleter_data: *mut c_void,
  ) -> *mut BackingStore;

  fn v8__BackingStore__Data(this: *const BackingStore) -> *mut c_void;
  fn v8__BackingStore__ByteLength(this: *const BackingStore) -> usize;
  fn v8__BackingStore__IsShared(this: *const BackingStore) -> bool;
  fn v8__BackingStore__DELETE(this: *mut BackingStore);

  // std::shared_ptr<v8::BackingStore> support: copy/convert/get/reset and
  // use_count, mirrored one-to-one from the C++ shared_ptr API.
  fn std__shared_ptr__v8__BackingStore__COPY(
    ptr: *const SharedPtrBase<BackingStore>,
  ) -> SharedPtrBase<BackingStore>;
  fn std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(
    unique_ptr: UniquePtr<BackingStore>,
  ) -> SharedPtrBase<BackingStore>;
  fn std__shared_ptr__v8__BackingStore__get(
    ptr: *const SharedPtrBase<BackingStore>,
  ) -> *mut BackingStore;
  fn std__shared_ptr__v8__BackingStore__reset(
    ptr: *mut SharedPtrBase<BackingStore>,
  );
  fn std__shared_ptr__v8__BackingStore__use_count(
    ptr: *const SharedPtrBase<BackingStore>,
  ) -> long;

  // Same shared_ptr surface, for v8::ArrayBuffer::Allocator.
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__COPY(
    ptr: *const SharedPtrBase<Allocator>,
  ) -> SharedPtrBase<Allocator>;
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__CONVERT__std__unique_ptr(
    unique_ptr: UniquePtr<Allocator>,
  ) -> SharedPtrBase<Allocator>;
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__get(
    ptr: *const SharedPtrBase<Allocator>,
  ) -> *mut Allocator;
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__reset(
    ptr: *mut SharedPtrBase<Allocator>,
  );
  fn std__shared_ptr__v8__ArrayBuffer__Allocator__use_count(
    ptr: *const SharedPtrBase<Allocator>,
  ) -> long;
}
90
/// A V8 `ArrayBuffer::Allocator`: the object V8 uses to allocate and free
/// memory for ArrayBuffer backing stores. Instances live on the C++ heap and
/// are only ever handled through pointers on the Rust side, hence the
/// zero-information `Opaque` payload.
// NOTE(review): V8's API docs require allocators to be usable from multiple
// threads — confirm before relying on Send/Sync for this type.
#[repr(C)]
#[derive(Debug)]
pub struct Allocator(Opaque);
111
/// Function-pointer table that lets a pure-Rust allocator implementation
/// back a C++ `Allocator` (see `new_rust_allocator`). The C++ wrapper passes
/// the opaque `handle` supplied at construction time back into every
/// callback. Layout is `#[repr(C)]` because the C++ side reads the table
/// directly.
#[repr(C)]
pub struct RustAllocatorVtable<T> {
  // Allocate `len` zero-initialized bytes.
  pub allocate: unsafe extern "C" fn(handle: &T, len: usize) -> *mut c_void,
  // Allocate `len` bytes without initializing them.
  pub allocate_uninitialized:
    unsafe extern "C" fn(handle: &T, len: usize) -> *mut c_void,
  // Free a block previously returned by one of the allocate callbacks.
  pub free: unsafe extern "C" fn(handle: &T, data: *mut c_void, len: usize),
  // Grow or shrink a block, preserving its contents up to min(old, new).
  pub reallocate: unsafe extern "C" fn(
    handle: &T,
    data: *mut c_void,
    old_length: usize,
    new_length: usize,
  ) -> *mut c_void,
  // Called when the C++ wrapper is destroyed; must release `handle`
  // (demonstrated by `test_rust_allocator` below).
  pub drop: unsafe extern "C" fn(handle: *const T),
}
127
// Enables `SharedRef<Allocator>`/`SharedPtr<Allocator>` by delegating each
// std::shared_ptr operation to the C++ glue functions declared above.
impl Shared for Allocator {
  fn clone(ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__COPY(ptr) }
  }
  fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self> {
    unsafe {
      std__shared_ptr__v8__ArrayBuffer__Allocator__CONVERT__std__unique_ptr(
        unique_ptr,
      )
    }
  }
  fn get(ptr: &SharedPtrBase<Self>) -> *const Self {
    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__get(ptr) }
  }
  fn reset(ptr: &mut SharedPtrBase<Self>) {
    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__reset(ptr) }
  }
  fn use_count(ptr: &SharedPtrBase<Self>) -> long {
    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__use_count(ptr) }
  }
}
149
/// Creates V8's default `ArrayBuffer::Allocator`. The returned `UniqueRef`
/// owns the C++ object and deletes it on drop (see `impl Drop for
/// Allocator`).
pub fn new_default_allocator() -> UniqueRef<Allocator> {
  unsafe {
    UniqueRef::from_raw(v8__ArrayBuffer__Allocator__NewDefaultAllocator())
  }
}
156
/// Creates an `ArrayBuffer::Allocator` whose behavior is implemented in Rust
/// by the given vtable; `handle` is passed back to every callback.
///
/// # Safety
///
/// `handle` must remain valid until `vtable.drop` is invoked with it, and
/// `vtable.drop` is responsible for releasing it. All vtable callbacks must
/// uphold the allocation contract implied by `RustAllocatorVtable` (the
/// vtable itself must be `'static`, which the signature enforces).
pub unsafe fn new_rust_allocator<T: Sized + Send + Sync + 'static>(
  handle: *const T,
  vtable: &'static RustAllocatorVtable<T>,
) -> UniqueRef<Allocator> {
  UniqueRef::from_raw(v8__ArrayBuffer__Allocator__NewRustAllocator(
    handle as *const c_void,
    // Erase `T`: the C++ side only forwards `handle` opaquely, so a
    // `RustAllocatorVtable<c_void>` view of the same table is sound.
    vtable as *const RustAllocatorVtable<T>
      as *const RustAllocatorVtable<c_void>,
  ))
}
170
#[test]
fn test_rust_allocator() {
  use std::sync::atomic::{AtomicUsize, Ordering};
  use std::sync::Arc;

  // The allocation callbacks are stubs: this test never hands the allocator
  // to an isolate, so only the `drop` callback should ever run.
  unsafe extern "C" fn allocate(_: &AtomicUsize, _: usize) -> *mut c_void {
    unimplemented!()
  }
  unsafe extern "C" fn allocate_uninitialized(
    _: &AtomicUsize,
    _: usize,
  ) -> *mut c_void {
    unimplemented!()
  }
  unsafe extern "C" fn free(_: &AtomicUsize, _: *mut c_void, _: usize) {
    unimplemented!()
  }
  unsafe extern "C" fn reallocate(
    _: &AtomicUsize,
    _: *mut c_void,
    _: usize,
    _: usize,
  ) -> *mut c_void {
    unimplemented!()
  }
  // Reclaims the Arc reference leaked via `Arc::into_raw` below and leaves a
  // marker value proving this callback ran.
  unsafe extern "C" fn drop(x: *const AtomicUsize) {
    let arc = Arc::from_raw(x);
    arc.store(42, Ordering::SeqCst);
  }

  let retval = Arc::new(AtomicUsize::new(0));

  let vtable: &'static RustAllocatorVtable<AtomicUsize> =
    &RustAllocatorVtable {
      allocate,
      allocate_uninitialized,
      free,
      reallocate,
      drop,
    };
  // The returned UniqueRef<Allocator> is dropped at the end of this
  // statement, which deletes the C++ wrapper and must call `drop` above.
  unsafe { new_rust_allocator(Arc::into_raw(retval.clone()), vtable) };
  // `drop` ran (stored 42) and released its Arc reference (count back to 1).
  assert_eq!(retval.load(Ordering::SeqCst), 42);
  assert_eq!(Arc::strong_count(&retval), 1);
}
215
#[test]
fn test_default_allocator() {
  // Smoke test: constructing (and immediately dropping) the default
  // allocator must not crash.
  new_default_allocator();
}
220
impl Drop for Allocator {
  // Runs when a UniqueRef<Allocator> (or other owning wrapper) is dropped;
  // deletes the underlying C++ object.
  fn drop(&mut self) {
    unsafe { v8__ArrayBuffer__Allocator__DELETE(self) };
  }
}
226
/// Signature of the deleter invoked when a backing store created with
/// externally owned data (`v8__ArrayBuffer__NewBackingStore__with_data`) is
/// destroyed. `deleter_data` is the opaque pointer supplied alongside the
/// deleter at creation time.
pub type BackingStoreDeleterCallback = unsafe extern "C" fn(
  data: *mut c_void,
  byte_length: usize,
  deleter_data: *mut c_void,
);
232
/// Deleter used by `ArrayBuffer::new_backing_store_from_boxed_slice`: frees
/// the `Box<[u8]>` whose ownership was transferred to V8.
///
/// The pointer handed to V8 was produced by `Box::into_raw(Box<[u8]>)`, so
/// it must be reconstructed as a fat `*mut [u8]` of `byte_length` bytes
/// before being re-boxed — that way the deallocation uses the same layout as
/// the original allocation. The previous `Box::from_raw(data)` built a
/// `Box<c_void>` and therefore freed with a zero-sized layout, which is
/// undefined behavior.
pub unsafe extern "C" fn backing_store_deleter_callback(
  data: *mut c_void,
  byte_length: usize,
  _deleter_data: *mut c_void,
) {
  // `&mut [u8]` coerces to the `*mut [u8]` that `Box::from_raw` expects.
  let slice = slice::from_raw_parts_mut(data as *mut u8, byte_length);
  drop(Box::from_raw(slice));
}
241
/// A wrapper around the raw memory that backs an `ArrayBuffer`. The
/// `[usize; 6]` payload reserves space for the C++ object's fields, which
/// Rust never inspects directly — all access goes through the FFI accessors.
// NOTE(review): the 6-word size must match `sizeof(v8::BackingStore)` in the
// V8 headers this crate is built against — confirm when upgrading V8.
#[repr(C)]
#[derive(Debug)]
pub struct BackingStore([usize; 6]);
256
257unsafe impl Send for BackingStore {}
258
259impl BackingStore {
260 pub fn data(&self) -> *mut c_void {
264 unsafe { v8__BackingStore__Data(self as *const _ as *mut Self) }
265 }
266
267 pub fn byte_length(&self) -> usize {
269 unsafe { v8__BackingStore__ByteLength(self) }
270 }
271
272 pub fn is_shared(&self) -> bool {
275 unsafe { v8__BackingStore__IsShared(self) }
276 }
277}
278
impl Deref for BackingStore {
  // `Cell<u8>` rather than `u8`: the bytes can be mutated through other
  // aliases (e.g. the JavaScript side), so handing out a plain `&[u8]`
  // would wrongly promise immutability.
  type Target = [Cell<u8>];

  fn deref(&self) -> &Self::Target {
    use std::ptr::NonNull;
    // `data()` can return null for an empty store; a well-aligned dangling
    // pointer is the valid stand-in for a zero-length slice.
    let data = NonNull::new(self.data() as *mut Cell<u8>)
      .unwrap_or_else(NonNull::dangling);
    let len = self.byte_length();
    unsafe { slice::from_raw_parts(data.as_ptr(), len) }
  }
}
293
impl Drop for BackingStore {
  // Runs when a UniqueRef<BackingStore> (or other owning wrapper) is
  // dropped; deletes the underlying C++ object.
  fn drop(&mut self) {
    unsafe { v8__BackingStore__DELETE(self) };
  }
}
299
// Enables `SharedRef<BackingStore>`/`SharedPtr<BackingStore>` by delegating
// each std::shared_ptr operation to the C++ glue functions declared above.
impl Shared for BackingStore {
  fn clone(ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
    unsafe { std__shared_ptr__v8__BackingStore__COPY(ptr) }
  }
  fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self> {
    unsafe {
      std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(unique_ptr)
    }
  }
  fn get(ptr: &SharedPtrBase<Self>) -> *const Self {
    unsafe { std__shared_ptr__v8__BackingStore__get(ptr) }
  }
  fn reset(ptr: &mut SharedPtrBase<Self>) {
    unsafe { std__shared_ptr__v8__BackingStore__reset(ptr) }
  }
  fn use_count(ptr: &SharedPtrBase<Self>) -> long {
    unsafe { std__shared_ptr__v8__BackingStore__use_count(ptr) }
  }
}
319
impl ArrayBuffer {
  /// Creates a new ArrayBuffer with a freshly allocated backing store of
  /// `byte_length` bytes, registered in the given handle scope.
  // NOTE(review): the `.unwrap()` assumes the C++ side never returns null
  // here — confirm allocation failure cannot surface as a null Local.
  pub fn new<'s>(
    scope: &mut HandleScope<'s>,
    byte_length: usize,
  ) -> Local<'s, ArrayBuffer> {
    unsafe {
      scope.cast_local(|sd| {
        v8__ArrayBuffer__New__with_byte_length(
          sd.get_isolate_ptr(),
          byte_length,
        )
      })
    }
    .unwrap()
  }

  /// Creates a new ArrayBuffer over an existing backing store; the buffer
  /// takes a shared reference, so the store stays alive as long as either
  /// side holds it.
  pub fn with_backing_store<'s>(
    scope: &mut HandleScope<'s>,
    backing_store: &SharedRef<BackingStore>,
  ) -> Local<'s, ArrayBuffer> {
    unsafe {
      scope.cast_local(|sd| {
        v8__ArrayBuffer__New__with_backing_store(
          sd.get_isolate_ptr(),
          backing_store,
        )
      })
    }
    .unwrap()
  }

  /// Returns the length (in bytes) of this ArrayBuffer's contents.
  pub fn byte_length(&self) -> usize {
    unsafe { v8__ArrayBuffer__ByteLength(self) }
  }

  /// Reports whether this ArrayBuffer can be detached from its backing
  /// store.
  pub fn is_detachable(&self) -> bool {
    unsafe { v8__ArrayBuffer__IsDetachable(self) }
  }

  /// Detaches this ArrayBuffer from its backing store. A no-op for buffers
  /// that are not detachable, so this never triggers a V8 failure on
  /// non-detachable buffers.
  pub fn detach(&self) {
    if self.is_detachable() {
      unsafe { v8__ArrayBuffer__Detach(self) }
    }
  }

  /// Returns a shared reference to this ArrayBuffer's backing store.
  pub fn get_backing_store(&self) -> SharedRef<BackingStore> {
    unsafe { v8__ArrayBuffer__GetBackingStore(self) }
  }

  /// Allocates a standalone backing store of `byte_length` bytes using the
  /// isolate's ArrayBuffer allocator.
  // NOTE(review): the parameter is named `scope` but is actually the
  // isolate; consider whether callers expect a HandleScope here.
  pub fn new_backing_store(
    scope: &mut Isolate,
    byte_length: usize,
  ) -> UniqueRef<BackingStore> {
    unsafe {
      UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore__with_byte_length(
        scope,
        byte_length,
      ))
    }
  }

  /// Wraps an owned byte slice in a backing store without copying. Ownership
  /// of the box transfers to V8; `backing_store_deleter_callback` frees it
  /// when the store is destroyed.
  pub fn new_backing_store_from_boxed_slice(
    data: Box<[u8]>,
  ) -> UniqueRef<BackingStore> {
    let byte_length = data.len();
    let data_ptr = Box::into_raw(data) as *mut c_void;
    unsafe {
      UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore__with_data(
        data_ptr,
        byte_length,
        backing_store_deleter_callback,
        null_mut(),
      ))
    }
  }
}
425}