use std::cell::Cell;
use std::ffi::c_void;
use std::ops::Deref;
use std::ptr::null;
use std::ptr::NonNull;
use std::slice;
use crate::support::long;
use crate::support::MaybeBool;
use crate::support::Opaque;
use crate::support::Shared;
use crate::support::SharedPtrBase;
use crate::support::SharedRef;
use crate::support::UniquePtr;
use crate::support::UniqueRef;
use crate::ArrayBuffer;
use crate::HandleScope;
use crate::Isolate;
use crate::Local;
use crate::Value;
extern "C" {
fn v8__ArrayBuffer__Allocator__NewDefaultAllocator() -> *mut Allocator;
fn v8__ArrayBuffer__Allocator__NewRustAllocator(
handle: *const c_void,
vtable: *const RustAllocatorVtable<c_void>,
) -> *mut Allocator;
fn v8__ArrayBuffer__Allocator__DELETE(this: *mut Allocator);
fn v8__ArrayBuffer__New__with_byte_length(
isolate: *mut Isolate,
byte_length: usize,
) -> *const ArrayBuffer;
fn v8__ArrayBuffer__New__with_backing_store(
isolate: *mut Isolate,
backing_store: *const SharedRef<BackingStore>,
) -> *const ArrayBuffer;
fn v8__ArrayBuffer__Detach(
this: *const ArrayBuffer,
key: *const Value,
) -> MaybeBool;
fn v8__ArrayBuffer__SetDetachKey(this: *const ArrayBuffer, key: *const Value);
fn v8__ArrayBuffer__Data(this: *const ArrayBuffer) -> *mut c_void;
fn v8__ArrayBuffer__IsDetachable(this: *const ArrayBuffer) -> bool;
fn v8__ArrayBuffer__WasDetached(this: *const ArrayBuffer) -> bool;
fn v8__ArrayBuffer__ByteLength(this: *const ArrayBuffer) -> usize;
fn v8__ArrayBuffer__GetBackingStore(
this: *const ArrayBuffer,
) -> SharedRef<BackingStore>;
fn v8__ArrayBuffer__NewBackingStore__with_byte_length(
isolate: *mut Isolate,
byte_length: usize,
) -> *mut BackingStore;
fn v8__ArrayBuffer__NewBackingStore__with_data(
data: *mut c_void,
byte_length: usize,
deleter: BackingStoreDeleterCallback,
deleter_data: *mut c_void,
) -> *mut BackingStore;
fn v8__BackingStore__EmptyBackingStore(shared: bool) -> *mut BackingStore;
fn v8__BackingStore__Data(this: *const BackingStore) -> *mut c_void;
fn v8__BackingStore__ByteLength(this: *const BackingStore) -> usize;
fn v8__BackingStore__IsShared(this: *const BackingStore) -> bool;
fn v8__BackingStore__IsResizableByUserJavaScript(
this: *const BackingStore,
) -> bool;
fn v8__BackingStore__DELETE(this: *mut BackingStore);
fn std__shared_ptr__v8__BackingStore__COPY(
ptr: *const SharedPtrBase<BackingStore>,
) -> SharedPtrBase<BackingStore>;
fn std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(
unique_ptr: UniquePtr<BackingStore>,
) -> SharedPtrBase<BackingStore>;
fn std__shared_ptr__v8__BackingStore__get(
ptr: *const SharedPtrBase<BackingStore>,
) -> *mut BackingStore;
fn std__shared_ptr__v8__BackingStore__reset(
ptr: *mut SharedPtrBase<BackingStore>,
);
fn std__shared_ptr__v8__BackingStore__use_count(
ptr: *const SharedPtrBase<BackingStore>,
) -> long;
fn std__shared_ptr__v8__ArrayBuffer__Allocator__COPY(
ptr: *const SharedPtrBase<Allocator>,
) -> SharedPtrBase<Allocator>;
fn std__shared_ptr__v8__ArrayBuffer__Allocator__CONVERT__std__unique_ptr(
unique_ptr: UniquePtr<Allocator>,
) -> SharedPtrBase<Allocator>;
fn std__shared_ptr__v8__ArrayBuffer__Allocator__get(
ptr: *const SharedPtrBase<Allocator>,
) -> *mut Allocator;
fn std__shared_ptr__v8__ArrayBuffer__Allocator__reset(
ptr: *mut SharedPtrBase<Allocator>,
);
fn std__shared_ptr__v8__ArrayBuffer__Allocator__use_count(
ptr: *const SharedPtrBase<Allocator>,
) -> long;
}
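/// The memory allocator that V8 uses for `ArrayBuffer` backing stores. An
/// instance is normally supplied to an isolate at creation time via its
/// `CreateParams`.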
#[repr(C)]
#[derive(Debug)]
pub struct Allocator(Opaque);
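/// Table of callbacks through which a Rust object can serve as a V8
/// `ArrayBuffer` allocator. Every callback receives the opaque `handle`
/// pointer that was passed to [`new_rust_allocator`]; `drop` is called when
/// the allocator is destroyed and is responsible for releasing that handle.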
#[repr(C)]
pub struct RustAllocatorVtable<T> {
pub allocate: unsafe extern "C" fn(handle: &T, len: usize) -> *mut c_void,
pub allocate_uninitialized:
unsafe extern "C" fn(handle: &T, len: usize) -> *mut c_void,
pub free: unsafe extern "C" fn(handle: &T, data: *mut c_void, len: usize),
pub reallocate: unsafe extern "C" fn(
handle: &T,
data: *mut c_void,
old_length: usize,
new_length: usize,
) -> *mut c_void,
pub drop: unsafe extern "C" fn(handle: *const T),
}
impl Shared for Allocator {
fn clone(ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__COPY(ptr) }
}
fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self> {
unsafe {
std__shared_ptr__v8__ArrayBuffer__Allocator__CONVERT__std__unique_ptr(
unique_ptr,
)
}
}
fn get(ptr: &SharedPtrBase<Self>) -> *const Self {
unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__get(ptr) }
}
fn reset(ptr: &mut SharedPtrBase<Self>) {
unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__reset(ptr) }
}
fn use_count(ptr: &SharedPtrBase<Self>) -> long {
unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__use_count(ptr) }
}
}
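/// Creates V8's default (malloc/free based) `ArrayBuffer` allocator.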
#[inline(always)]
pub fn new_default_allocator() -> UniqueRef<Allocator> {
unsafe {
UniqueRef::from_raw(v8__ArrayBuffer__Allocator__NewDefaultAllocator())
}
}
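/// Creates an `ArrayBuffer` allocator whose behavior is defined by the given
/// Rust vtable. See `test_rust_allocator` below for a worked example.
///
/// # Safety
///
/// `handle` must remain valid until the vtable's `drop` callback has been
/// called, and the callbacks must uphold the usual allocator contract (e.g.
/// `free` and `reallocate` are only passed pointers previously returned by
/// `allocate` or `allocate_uninitialized`).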
#[inline(always)]
pub unsafe fn new_rust_allocator<T: Sized + Send + Sync + 'static>(
handle: *const T,
vtable: &'static RustAllocatorVtable<T>,
) -> UniqueRef<Allocator> {
UniqueRef::from_raw(v8__ArrayBuffer__Allocator__NewRustAllocator(
handle as *const c_void,
vtable as *const RustAllocatorVtable<T>
as *const RustAllocatorVtable<c_void>,
))
}
#[test]
fn test_rust_allocator() {
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
unsafe extern "C" fn allocate(_: &AtomicUsize, _: usize) -> *mut c_void {
unimplemented!()
}
unsafe extern "C" fn allocate_uninitialized(
_: &AtomicUsize,
_: usize,
) -> *mut c_void {
unimplemented!()
}
unsafe extern "C" fn free(_: &AtomicUsize, _: *mut c_void, _: usize) {
unimplemented!()
}
unsafe extern "C" fn reallocate(
_: &AtomicUsize,
_: *mut c_void,
_: usize,
_: usize,
) -> *mut c_void {
unimplemented!()
}
unsafe extern "C" fn drop(x: *const AtomicUsize) {
let arc = Arc::from_raw(x);
arc.store(42, Ordering::SeqCst);
}
let retval = Arc::new(AtomicUsize::new(0));
let vtable: &'static RustAllocatorVtable<AtomicUsize> =
&RustAllocatorVtable {
allocate,
allocate_uninitialized,
free,
reallocate,
drop,
};
  // Constructing the allocator and immediately dropping the returned
  // `UniqueRef` destroys it; the C++ side then invokes the vtable's `drop`
  // callback, which reclaims the `Arc` handle and stores 42.
  unsafe { new_rust_allocator(Arc::into_raw(retval.clone()), vtable) };
assert_eq!(retval.load(Ordering::SeqCst), 42);
assert_eq!(Arc::strong_count(&retval), 1);
}
#[test]
fn test_default_allocator() {
new_default_allocator();
}
impl Drop for Allocator {
fn drop(&mut self) {
unsafe { v8__ArrayBuffer__Allocator__DELETE(self) };
}
}
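/// Callback invoked when a `BackingStore` created from caller-provided memory
/// (see [`ArrayBuffer::new_backing_store_from_ptr`]) is destroyed, giving the
/// embedder a chance to free the underlying buffer.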
pub type BackingStoreDeleterCallback = unsafe extern "C" fn(
data: *mut c_void,
byte_length: usize,
deleter_data: *mut c_void,
);
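/// Crate-private plumbing for [`ArrayBuffer::new_backing_store_from_bytes`]:
/// `Rawable` describes owned byte containers that can be split into an owner
/// pointer (kept so the container can be reconstructed and dropped later) and
/// a pointer to the byte data that is handed to V8.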
pub(crate) mod sealed {
pub trait Rawable<T: ?Sized> {
fn into_raw(self) -> (*const (), *const u8);
unsafe fn drop_raw(ptr: *const (), size: usize);
}
}
impl sealed::Rawable<[u8]> for Vec<u8> {
  unsafe fn drop_raw(ptr: *const (), size: usize) {
    // `into_raw` converts the Vec into a boxed slice, so disposal is
    // delegated to the `Box<[u8]>` implementation.
    <Box<[u8]> as sealed::Rawable<[u8]>>::drop_raw(ptr, size);
  }
fn into_raw(self) -> (*const (), *const u8) {
self.into_boxed_slice().into_raw()
}
}
impl<T: Sized> sealed::Rawable<T> for Box<T>
where
T: AsMut<[u8]>,
{
  fn into_raw(mut self) -> (*const (), *const u8) {
    // Capture the data pointer before giving up ownership of the box.
    let data = self.as_mut().as_mut().as_mut_ptr();
    let ptr = Box::into_raw(self);
    (ptr as _, data)
  }
  unsafe fn drop_raw(ptr: *const (), _len: usize) {
    // SAFETY: `ptr` was produced by `Box::into_raw` in `into_raw` above.
    _ = Self::from_raw(ptr as _);
  }
}
impl sealed::Rawable<[u8]> for Box<[u8]> {
  fn into_raw(mut self) -> (*const (), *const u8) {
    // For a boxed slice the owner pointer and the data pointer coincide; the
    // length is supplied separately when `drop_raw` is called.
    let ptr = self.as_mut_ptr();
    std::mem::forget(self);
    (ptr as _, ptr)
  }
  unsafe fn drop_raw(ptr: *const (), len: usize) {
    // SAFETY: `ptr` and `len` describe the boxed slice leaked in `into_raw`.
    _ = Self::from_raw(std::ptr::slice_from_raw_parts_mut(ptr as _, len));
  }
}
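/// A wrapper around the backing store (i.e. the raw memory) of an
/// `ArrayBuffer` or `SharedArrayBuffer`. The type is opaque on the Rust side;
/// its size matches that of the underlying C++ `v8::BackingStore` object.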
#[repr(C)]
#[derive(Debug)]
pub struct BackingStore([usize; 6]);
unsafe impl Send for BackingStore {}
impl BackingStore {
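  /// Returns a pointer to the beginning of this backing store's memory block,
  /// or `None` if the store is empty. The pointer is only valid while the
  /// backing store is alive.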
#[inline(always)]
  pub fn data(&self) -> Option<NonNull<c_void>> {
    NonNull::new(unsafe { v8__BackingStore__Data(self) })
}
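  /// Returns the length (in bytes) of this backing store.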
#[inline(always)]
pub fn byte_length(&self) -> usize {
unsafe { v8__BackingStore__ByteLength(self) }
}
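  /// Returns `true` if this backing store was created for a
  /// `SharedArrayBuffer`, and `false` if it belongs to an ordinary
  /// `ArrayBuffer`.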
#[inline(always)]
pub fn is_shared(&self) -> bool {
unsafe { v8__BackingStore__IsShared(self) }
}
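  /// Returns `true` if this backing store belongs to a resizable
  /// `ArrayBuffer` or a growable `SharedArrayBuffer`, i.e. if it can be
  /// resized by user JavaScript.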
#[inline(always)]
pub fn is_resizable_by_user_javascript(&self) -> bool {
unsafe { v8__BackingStore__IsResizableByUserJavaScript(self) }
}
}
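// JavaScript can mutate the buffer contents at any time, so the bytes are
// exposed as `[Cell<u8>]` rather than `[u8]`. An empty backing store has no
// data pointer; a dangling, well-aligned pointer is valid for a zero-length
// slice. To copy the contents out, something like
// `backing_store.iter().map(Cell::get).collect::<Vec<u8>>()` can be used.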
impl Deref for BackingStore {
type Target = [Cell<u8>];
fn deref(&self) -> &Self::Target {
let data = self
.data()
.unwrap_or_else(NonNull::dangling)
.cast::<Cell<u8>>();
let len = self.byte_length();
unsafe { slice::from_raw_parts(data.as_ptr(), len) }
}
}
impl Drop for BackingStore {
fn drop(&mut self) {
unsafe { v8__BackingStore__DELETE(self) };
}
}
impl Shared for BackingStore {
fn clone(ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
unsafe { std__shared_ptr__v8__BackingStore__COPY(ptr) }
}
fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self> {
unsafe {
std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(unique_ptr)
}
}
fn get(ptr: &SharedPtrBase<Self>) -> *const Self {
unsafe { std__shared_ptr__v8__BackingStore__get(ptr) }
}
fn reset(ptr: &mut SharedPtrBase<Self>) {
unsafe { std__shared_ptr__v8__BackingStore__reset(ptr) }
}
fn use_count(ptr: &SharedPtrBase<Self>) -> long {
unsafe { std__shared_ptr__v8__BackingStore__use_count(ptr) }
}
}
impl ArrayBuffer {
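  /// Creates a new JavaScript `ArrayBuffer` of `byte_length` bytes. The
  /// memory is allocated by the isolate's array buffer allocator and is owned
  /// by the resulting buffer.
  ///
  /// A minimal usage sketch, assuming the caller has already set up an
  /// isolate and a handle scope (`scope` below):
  ///
  /// ```ignore
  /// let buf = v8::ArrayBuffer::new(scope, 1024);
  /// assert_eq!(buf.byte_length(), 1024);
  /// ```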
#[inline(always)]
pub fn new<'s>(
scope: &mut HandleScope<'s>,
byte_length: usize,
) -> Local<'s, ArrayBuffer> {
unsafe {
scope.cast_local(|sd| {
v8__ArrayBuffer__New__with_byte_length(
sd.get_isolate_ptr(),
byte_length,
)
})
}
.unwrap()
}
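  /// Creates a new `ArrayBuffer` over an existing `BackingStore`. Ownership
  /// of the underlying memory is shared between the JavaScript heap and the
  /// given `SharedRef`.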
#[inline(always)]
pub fn with_backing_store<'s>(
scope: &mut HandleScope<'s>,
backing_store: &SharedRef<BackingStore>,
) -> Local<'s, ArrayBuffer> {
unsafe {
scope.cast_local(|sd| {
v8__ArrayBuffer__New__with_backing_store(
sd.get_isolate_ptr(),
backing_store,
)
})
}
.unwrap()
}
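  /// Creates a new, zero-length `ArrayBuffer` backed by an empty backing
  /// store.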
#[inline(always)]
pub fn empty<'s>(scope: &mut HandleScope<'s>) -> Local<'s, ArrayBuffer> {
    let backing_store = unsafe {
      // `false`: the backing store is for an ordinary, non-shared ArrayBuffer.
      UniqueRef::from_raw(v8__BackingStore__EmptyBackingStore(false))
    };
Self::with_backing_store(scope, &backing_store.make_shared())
}
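  /// Returns the length (in bytes) of this `ArrayBuffer`'s data.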
#[inline(always)]
pub fn byte_length(&self) -> usize {
unsafe { v8__ArrayBuffer__ByteLength(self) }
}
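  /// Returns `true` if this `ArrayBuffer` may be detached.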
#[inline(always)]
pub fn is_detachable(&self) -> bool {
unsafe { v8__ArrayBuffer__IsDetachable(self) }
}
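  /// Returns `true` if this `ArrayBuffer` has already been detached.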
#[inline(always)]
  pub fn was_detached(&self) -> bool {
    // A detached ArrayBuffer always has a byte length of zero, so a non-zero
    // length means the buffer cannot have been detached. This avoids a call
    // into V8 in the common case.
    if self.byte_length() != 0 {
      return false;
    }
    unsafe { v8__ArrayBuffer__WasDetached(self) }
}
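  /// Detaches this `ArrayBuffer` and all its views (typed arrays), setting
  /// their byte length to zero and disconnecting them from the backing store.
  ///
  /// If a detach key has been set with [`Self::set_detach_key`], the same
  /// `key` must be passed here; otherwise detaching fails and `None` is
  /// returned. Buffers that are not detachable are left untouched and
  /// `Some(true)` is returned.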
#[inline(always)]
  pub fn detach(&self, key: Option<Local<Value>>) -> Option<bool> {
    // V8 aborts when `Detach` is called on a buffer that is not detachable
    // (e.g. one in use by WebAssembly), so such buffers are left untouched.
    if self.is_detachable() {
      let key = key.map(|v| &*v as *const Value).unwrap_or(null());
      unsafe { v8__ArrayBuffer__Detach(self, key) }.into()
    } else {
      Some(true)
    }
}
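  /// Sets the detach key for this `ArrayBuffer`. Once set, [`Self::detach`]
  /// only succeeds when called with the same key.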
#[inline(always)]
pub fn set_detach_key(&self, key: Local<Value>) {
unsafe { v8__ArrayBuffer__SetDetachKey(self, &*key) };
}
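  /// Returns a pointer to this `ArrayBuffer`'s data, or `None` if the buffer
  /// has no backing memory. This is a shortcut for going through
  /// [`Self::get_backing_store`] and [`BackingStore::data`].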
#[inline(always)]
pub fn data(&self) -> Option<NonNull<c_void>> {
let raw_ptr = unsafe { v8__ArrayBuffer__Data(self) };
NonNull::new(raw_ptr)
}
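  /// Returns a shared reference to this `ArrayBuffer`'s backing store. The
  /// returned handle keeps the underlying memory alive independently of the
  /// buffer itself.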
#[inline(always)]
pub fn get_backing_store(&self) -> SharedRef<BackingStore> {
unsafe { v8__ArrayBuffer__GetBackingStore(self) }
}
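  /// Allocates a new, standalone `BackingStore` of `byte_length` bytes using
  /// the given isolate's array buffer allocator. The result can later be
  /// wrapped in a buffer with [`Self::with_backing_store`].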
#[inline(always)]
pub fn new_backing_store(
scope: &mut Isolate,
byte_length: usize,
) -> UniqueRef<BackingStore> {
unsafe {
UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore__with_byte_length(
scope,
byte_length,
))
}
}
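  /// Creates a standalone `BackingStore` that takes ownership of the given
  /// boxed slice.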
#[inline(always)]
pub fn new_backing_store_from_boxed_slice(
data: Box<[u8]>,
) -> UniqueRef<BackingStore> {
Self::new_backing_store_from_bytes(data)
}
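  /// Creates a standalone `BackingStore` that takes ownership of the given
  /// `Vec<u8>`.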
#[inline(always)]
pub fn new_backing_store_from_vec(data: Vec<u8>) -> UniqueRef<BackingStore> {
Self::new_backing_store_from_bytes(data)
}
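  /// Creates a standalone `BackingStore` that takes ownership of any owned
  /// byte container supported by the crate-private `Rawable` trait, such as
  /// `Vec<u8>`, `Box<[u8]>`, or `Box<T>` where `T: AsMut<[u8]>`. The
  /// container is dropped once V8 no longer needs the backing store.
  ///
  /// A minimal usage sketch, assuming the caller has already set up an
  /// isolate and a handle scope (`scope` below):
  ///
  /// ```ignore
  /// let store = v8::ArrayBuffer::new_backing_store_from_bytes(vec![1u8, 2, 3])
  ///   .make_shared();
  /// let buf = v8::ArrayBuffer::with_backing_store(scope, &store);
  /// assert_eq!(buf.byte_length(), 3);
  /// ```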
#[inline(always)]
pub fn new_backing_store_from_bytes<T, U>(
mut bytes: T,
) -> UniqueRef<BackingStore>
where
U: ?Sized,
U: AsMut<[u8]>,
T: AsMut<U>,
T: sealed::Rawable<U>,
{
    let len = bytes.as_mut().as_mut().len();
    if len == 0 {
      // An empty container has no stable data pointer to hand to V8, so an
      // empty backing store is returned instead.
      return unsafe {
        UniqueRef::from_raw(v8__BackingStore__EmptyBackingStore(false))
      };
    }
    let (ptr, slice) = T::into_raw(bytes);
    // Deleter passed to V8: rebuilds the original container from the owner
    // pointer (passed as the deleter data) and drops it once V8 is done with
    // the backing store.
    extern "C" fn drop_rawable<T: sealed::Rawable<U>, U: ?Sized>(
      _ptr: *mut c_void,
      len: usize,
      data: *mut c_void,
    ) {
      unsafe { <T as sealed::Rawable<U>>::drop_raw(data as _, len) }
    }
unsafe {
Self::new_backing_store_from_ptr(
slice as _,
len,
drop_rawable::<T, U>,
ptr as _,
)
}
}
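  /// Creates a standalone `BackingStore` over a caller-provided buffer. When
  /// V8 no longer needs the backing store it invokes `deleter_callback` with
  /// `data_ptr`, `byte_length` and `deleter_data`, at which point the caller
  /// must free the memory.
  ///
  /// # Safety
  ///
  /// `data_ptr` must point to a buffer of at least `byte_length` bytes that
  /// remains valid, and is not freed or moved, until the deleter callback has
  /// run.
  ///
  /// A sketch of wrapping a leaked `Box<[u8]>`, with a deleter that
  /// reconstructs and drops it (the names below are illustrative only):
  ///
  /// ```ignore
  /// use std::ffi::c_void;
  ///
  /// unsafe extern "C" fn boxed_slice_deleter(
  ///   data: *mut c_void,
  ///   byte_length: usize,
  ///   _deleter_data: *mut c_void,
  /// ) {
  ///   // Rebuild the Box<[u8]> that was leaked below and drop it.
  ///   let _ = Box::from_raw(std::ptr::slice_from_raw_parts_mut(
  ///     data as *mut u8,
  ///     byte_length,
  ///   ));
  /// }
  ///
  /// let bytes: Box<[u8]> = vec![0u8; 64].into_boxed_slice();
  /// let len = bytes.len();
  /// let ptr = Box::into_raw(bytes) as *mut u8;
  /// let store = unsafe {
  ///   v8::ArrayBuffer::new_backing_store_from_ptr(
  ///     ptr as *mut c_void,
  ///     len,
  ///     boxed_slice_deleter,
  ///     std::ptr::null_mut(),
  ///   )
  /// };
  /// ```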
#[inline(always)]
pub unsafe fn new_backing_store_from_ptr(
data_ptr: *mut c_void,
byte_length: usize,
deleter_callback: BackingStoreDeleterCallback,
deleter_data: *mut c_void,
) -> UniqueRef<BackingStore> {
UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore__with_data(
data_ptr,
byte_length,
deleter_callback,
deleter_data,
))
}
}