use std::cell::Cell;
use std::ffi::c_void;
use std::ops::Deref;
use std::ptr::NonNull;
use std::ptr::null;
use std::slice;
use crate::ArrayBuffer;
use crate::DataView;
use crate::Isolate;
use crate::Local;
use crate::Value;
use crate::isolate::RealIsolate;
use crate::scope::PinScope;
use crate::support::MaybeBool;
use crate::support::Opaque;
use crate::support::Shared;
use crate::support::SharedPtrBase;
use crate::support::SharedRef;
use crate::support::UniquePtr;
use crate::support::UniqueRef;
use crate::support::long;
// Hand-written FFI bindings into the C++ glue layer. Each `v8__*` function
// forwards to the like-named method on the corresponding V8 class; the
// `std__shared_ptr__*` functions manipulate C++ `std::shared_ptr` instances
// from Rust.
unsafe extern "C" {
// v8::ArrayBuffer::Allocator lifecycle.
fn v8__ArrayBuffer__Allocator__NewDefaultAllocator() -> *mut Allocator;
fn v8__ArrayBuffer__Allocator__DELETE(this: *mut Allocator);
// v8::ArrayBuffer constructors and methods.
fn v8__ArrayBuffer__New__with_byte_length(
isolate: *mut RealIsolate,
byte_length: usize,
) -> *const ArrayBuffer;
fn v8__ArrayBuffer__New__with_backing_store(
isolate: *mut RealIsolate,
backing_store: *const SharedRef<BackingStore>,
) -> *const ArrayBuffer;
fn v8__ArrayBuffer__Detach(
this: *const ArrayBuffer,
key: *const Value,
) -> MaybeBool;
fn v8__ArrayBuffer__SetDetachKey(this: *const ArrayBuffer, key: *const Value);
fn v8__ArrayBuffer__Data(this: *const ArrayBuffer) -> *mut c_void;
fn v8__ArrayBuffer__IsDetachable(this: *const ArrayBuffer) -> bool;
fn v8__ArrayBuffer__WasDetached(this: *const ArrayBuffer) -> bool;
fn v8__ArrayBuffer__ByteLength(this: *const ArrayBuffer) -> usize;
fn v8__ArrayBuffer__GetBackingStore(
this: *const ArrayBuffer,
) -> SharedRef<BackingStore>;
// BackingStore constructors; the `with_data` variant takes ownership of the
// caller's memory and a deleter that releases it.
fn v8__ArrayBuffer__NewBackingStore__with_byte_length(
isolate: *mut RealIsolate,
byte_length: usize,
) -> *mut BackingStore;
fn v8__ArrayBuffer__NewBackingStore__with_data(
data: *mut c_void,
byte_length: usize,
deleter: BackingStoreDeleterCallback,
deleter_data: *mut c_void,
) -> *mut BackingStore;
// v8::BackingStore accessors.
fn v8__BackingStore__Data(this: *const BackingStore) -> *mut c_void;
fn v8__BackingStore__ByteLength(this: *const BackingStore) -> usize;
fn v8__BackingStore__IsShared(this: *const BackingStore) -> bool;
fn v8__BackingStore__IsResizableByUserJavaScript(
this: *const BackingStore,
) -> bool;
fn v8__BackingStore__DELETE(this: *mut BackingStore);
fn v8__DataView__New(
arraybuffer: *const ArrayBuffer,
byte_offset: usize,
length: usize,
) -> *const DataView;
// std::shared_ptr<v8::BackingStore> support routines.
fn std__shared_ptr__v8__BackingStore__COPY(
ptr: *const SharedPtrBase<BackingStore>,
) -> SharedPtrBase<BackingStore>;
fn std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(
unique_ptr: UniquePtr<BackingStore>,
) -> SharedPtrBase<BackingStore>;
fn std__shared_ptr__v8__BackingStore__get(
ptr: *const SharedPtrBase<BackingStore>,
) -> *mut BackingStore;
fn std__shared_ptr__v8__BackingStore__reset(
ptr: *mut SharedPtrBase<BackingStore>,
);
fn std__shared_ptr__v8__BackingStore__use_count(
ptr: *const SharedPtrBase<BackingStore>,
) -> long;
// std::shared_ptr<v8::ArrayBuffer::Allocator> support routines.
fn std__shared_ptr__v8__ArrayBuffer__Allocator__COPY(
ptr: *const SharedPtrBase<Allocator>,
) -> SharedPtrBase<Allocator>;
fn std__shared_ptr__v8__ArrayBuffer__Allocator__CONVERT__std__unique_ptr(
unique_ptr: UniquePtr<Allocator>,
) -> SharedPtrBase<Allocator>;
fn std__shared_ptr__v8__ArrayBuffer__Allocator__get(
ptr: *const SharedPtrBase<Allocator>,
) -> *mut Allocator;
fn std__shared_ptr__v8__ArrayBuffer__Allocator__reset(
ptr: *mut SharedPtrBase<Allocator>,
);
fn std__shared_ptr__v8__ArrayBuffer__Allocator__use_count(
ptr: *const SharedPtrBase<Allocator>,
) -> long;
}
// Constructor for an Allocator backed by Rust callbacks. Only available when
// the V8 sandbox is disabled — presumably because sandboxed ArrayBuffer
// memory must come from inside the sandbox region, which an arbitrary Rust
// allocator cannot guarantee; confirm against the C++ glue.
#[cfg(not(feature = "v8_enable_sandbox"))]
unsafe extern "C" {
fn v8__ArrayBuffer__Allocator__NewRustAllocator(
handle: *const c_void,
vtable: *const RustAllocatorVtable<c_void>,
) -> *mut Allocator;
}
/// Opaque Rust handle to a C++ `v8::ArrayBuffer::Allocator`, the object V8
/// uses to allocate and free `ArrayBuffer` backing memory. Construct via
/// [`new_default_allocator`] (or [`new_rust_allocator`] when the sandbox
/// feature is disabled).
#[repr(C)]
#[derive(Debug)]
pub struct Allocator(Opaque);
#[cfg(not(feature = "v8_enable_sandbox"))]
#[repr(C)]
/// Function table a Rust-backed allocator hands to the C++ side. Every
/// callback receives the opaque `handle` that was passed to
/// [`new_rust_allocator`].
pub struct RustAllocatorVtable<T> {
/// Allocates `len` bytes. NOTE(review): V8's `Allocator::Allocate`
/// contract presumably requires zero-initialized memory — confirm against
/// the V8 API docs before relying on it.
pub allocate: unsafe extern "C" fn(handle: &T, len: usize) -> *mut c_void,
/// Allocates `len` bytes without requiring initialization.
pub allocate_uninitialized:
unsafe extern "C" fn(handle: &T, len: usize) -> *mut c_void,
/// Frees a block previously returned by one of the allocate callbacks.
pub free: unsafe extern "C" fn(handle: &T, data: *mut c_void, len: usize),
/// Invoked when the allocator is destroyed; responsible for releasing
/// whatever `handle` refers to.
pub drop: unsafe extern "C" fn(handle: *const T),
}
/// Reference-counting plumbing so an `Allocator` can be held in a
/// `SharedRef`/`SharedPtr` (the Rust view of a C++ `std::shared_ptr`).
/// Every method forwards to C++ glue operating on the underlying
/// `std::shared_ptr<v8::ArrayBuffer::Allocator>`.
///
/// The `#[inline]` markers bring this impl in line with the parallel
/// `impl Shared for BackingStore` in this file.
impl Shared for Allocator {
  /// Copies the shared pointer, bumping the reference count.
  #[inline]
  fn clone(ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__COPY(ptr) }
  }
  /// Converts an owning unique pointer into a shared pointer.
  #[inline]
  fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self> {
    unsafe {
      std__shared_ptr__v8__ArrayBuffer__Allocator__CONVERT__std__unique_ptr(
        unique_ptr,
      )
    }
  }
  /// Returns the raw pointee (null when the shared pointer is empty).
  #[inline]
  fn get(ptr: &SharedPtrBase<Self>) -> *const Self {
    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__get(ptr) }
  }
  /// Drops this handle's reference; the pointee is destroyed with the last
  /// reference.
  #[inline]
  fn reset(ptr: &mut SharedPtrBase<Self>) {
    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__reset(ptr) }
  }
  /// Current `use_count()` of the underlying shared pointer.
  #[inline]
  fn use_count(ptr: &SharedPtrBase<Self>) -> long {
    unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__use_count(ptr) }
  }
}
/// Creates the default `ArrayBuffer::Allocator` provided by V8.
#[inline(always)]
pub fn new_default_allocator() -> UniqueRef<Allocator> {
unsafe {
UniqueRef::from_raw(v8__ArrayBuffer__Allocator__NewDefaultAllocator())
}
}
/// Creates an `ArrayBuffer::Allocator` implemented by Rust callbacks.
///
/// # Safety
/// `handle` must remain valid until the vtable's `drop` callback is invoked
/// with it, and the callbacks in `vtable` must uphold the allocator contract
/// (e.g. `free` is only ever handed pointers produced by the allocate
/// callbacks).
#[inline(always)]
#[cfg(not(feature = "v8_enable_sandbox"))]
pub unsafe fn new_rust_allocator<T: Sized + Send + Sync + 'static>(
handle: *const T,
vtable: &'static RustAllocatorVtable<T>,
) -> UniqueRef<Allocator> {
unsafe {
UniqueRef::from_raw(v8__ArrayBuffer__Allocator__NewRustAllocator(
handle as *const c_void,
// Erase `T`: `RustAllocatorVtable` is `repr(C)` and its layout does not
// depend on `T`, so casting to the `c_void` instantiation is sound.
vtable as *const RustAllocatorVtable<T>
as *const RustAllocatorVtable<c_void>,
))
}
}
#[test]
#[cfg(not(feature = "v8_enable_sandbox"))]
fn test_rust_allocator() {
  use std::sync::Arc;
  use std::sync::atomic::{AtomicUsize, Ordering};
  // None of the allocation hooks should ever run in this test; only the
  // `drop` hook is expected to fire when the allocator is destroyed.
  unsafe extern "C" fn alloc(_: &AtomicUsize, _: usize) -> *mut c_void {
    unimplemented!()
  }
  unsafe extern "C" fn alloc_uninit(
    _: &AtomicUsize,
    _: usize,
  ) -> *mut c_void {
    unimplemented!()
  }
  unsafe extern "C" fn dealloc(_: &AtomicUsize, _: *mut c_void, _: usize) {
    unimplemented!()
  }
  // Reclaims the Arc leaked via `Arc::into_raw` and records that the hook
  // ran by storing a sentinel value before the Arc reference is released.
  unsafe extern "C" fn on_drop(x: *const AtomicUsize) {
    unsafe { Arc::from_raw(x).store(42, Ordering::SeqCst) };
  }
  let counter = Arc::new(AtomicUsize::new(0));
  let vtable: &'static RustAllocatorVtable<AtomicUsize> =
    &RustAllocatorVtable {
      allocate: alloc,
      allocate_uninitialized: alloc_uninit,
      free: dealloc,
      drop: on_drop,
    };
  // The returned `UniqueRef` is dropped immediately, which must invoke the
  // vtable's `drop` hook exactly once.
  unsafe { new_rust_allocator(Arc::into_raw(counter.clone()), vtable) };
  // The hook stored the sentinel and gave its Arc reference back.
  assert_eq!(counter.load(Ordering::SeqCst), 42);
  assert_eq!(Arc::strong_count(&counter), 1);
}
/// Smoke test: initializing the platform and V8, then constructing the
/// default allocator, must not crash.
#[test]
fn test_default_allocator() {
crate::V8::initialize_platform(
crate::new_default_platform(0, false).make_shared(),
);
crate::V8::initialize();
new_default_allocator();
}
impl Drop for Allocator {
/// Destroys the C++ allocator object when the Rust handle is dropped.
fn drop(&mut self) {
unsafe { v8__ArrayBuffer__Allocator__DELETE(self) };
}
}
/// Signature of the deleter invoked when a backing store created with
/// [`ArrayBuffer::new_backing_store_from_ptr`] is destroyed: it receives the
/// data pointer, the byte length, and the caller-supplied `deleter_data`.
pub type BackingStoreDeleterCallback = unsafe extern "C" fn(
data: *mut c_void,
byte_length: usize,
deleter_data: *mut c_void,
);
#[cfg(not(feature = "v8_enable_sandbox"))]
pub(crate) mod sealed {
/// Sealed trait (private module, so outside crates cannot implement it) for
/// owned byte containers that can be dissolved into raw pointers for V8 and
/// later reconstituted and dropped by a backing-store deleter.
pub trait Rawable {
// Length of the container's contents in bytes.
fn byte_len(&mut self) -> usize;
// Consumes the container, returning `(opaque_handle, data_ptr)`: the
// handle is what `drop_raw` later receives; the second pointer is the
// start of the byte data handed to V8.
fn into_raw(self) -> (*const (), *const u8);
// Reconstructs the container from `into_raw`'s handle and drops it.
// `size` is the byte length originally reported by `byte_len` (it is
// forwarded from the `byte_length` V8 passes to the deleter).
unsafe fn drop_raw(ptr: *const (), size: usize);
}
}
// Implements `sealed::Rawable` for `Box<[$ty]>` and `Vec<$ty>` so owned
// buffers of primitive elements can be handed to V8 as backing-store memory
// and reclaimed (and dropped) later by the deleter callback.
#[cfg(not(feature = "v8_enable_sandbox"))]
macro_rules! rawable {
  ($ty:ty) => {
    impl sealed::Rawable for Box<[$ty]> {
      // Content size in bytes (element count * element size).
      fn byte_len(&mut self) -> usize {
        self.as_mut().len() * std::mem::size_of::<$ty>()
      }
      // Leaks the boxed slice. For a boxed slice the box pointer and the
      // element pointer coincide, so the same pointer serves as both the
      // opaque handle and the data pointer.
      fn into_raw(mut self) -> (*const (), *const u8) {
        let ptr = self.as_mut_ptr();
        std::mem::forget(self);
        (ptr as _, ptr as _)
      }
      // Rebuilds the boxed slice and drops it. `len` is a BYTE count (the
      // `byte_length` V8 hands to the deleter), so it must be converted back
      // to an element count; using the byte count directly would rebuild an
      // oversized slice and deallocate with the wrong layout for any element
      // type wider than one byte.
      unsafe fn drop_raw(ptr: *const (), len: usize) {
        _ = unsafe {
          Self::from_raw(std::ptr::slice_from_raw_parts_mut(
            ptr as _,
            len / std::mem::size_of::<$ty>(),
          ))
        };
      }
    }
    impl sealed::Rawable for Vec<$ty> {
      // Content size in bytes.
      fn byte_len(&mut self) -> usize {
        Vec::<$ty>::len(self) * std::mem::size_of::<$ty>()
      }
      // Delegates to the boxed-slice impl; `size` is the same byte count.
      unsafe fn drop_raw(ptr: *const (), size: usize) {
        unsafe {
          <Box<[$ty]> as sealed::Rawable>::drop_raw(ptr, size);
        }
      }
      // Shed excess capacity, then leak as a boxed slice. The method call
      // resolves to the `Rawable` impl above (the inherent `Box::into_raw`
      // is an associated function, not a method).
      fn into_raw(self) -> (*const (), *const u8) {
        self.into_boxed_slice().into_raw()
      }
    }
  };
}
// Primitive element types whose `Box<[T]>` / `Vec<T>` containers may be used
// as ArrayBuffer backing stores (see the `rawable!` macro above).
#[cfg(not(feature = "v8_enable_sandbox"))]
rawable!(u8);
#[cfg(not(feature = "v8_enable_sandbox"))]
rawable!(u16);
#[cfg(not(feature = "v8_enable_sandbox"))]
rawable!(u32);
#[cfg(not(feature = "v8_enable_sandbox"))]
rawable!(u64);
#[cfg(not(feature = "v8_enable_sandbox"))]
rawable!(i8);
#[cfg(not(feature = "v8_enable_sandbox"))]
rawable!(i16);
#[cfg(not(feature = "v8_enable_sandbox"))]
rawable!(i32);
#[cfg(not(feature = "v8_enable_sandbox"))]
rawable!(i64);
#[cfg(not(feature = "v8_enable_sandbox"))]
rawable!(f32);
#[cfg(not(feature = "v8_enable_sandbox"))]
rawable!(f64);
#[cfg(not(feature = "v8_enable_sandbox"))]
// `Rawable` for any boxed value that can expose its contents as a mutable
// byte slice (e.g. `Box<[u8; N]>`). Here the box itself is the opaque handle
// that gets leaked and later reclaimed.
impl<T: Sized> sealed::Rawable for Box<T>
where
T: AsMut<[u8]>,
{
// Byte length of the contents, via the `AsMut<[u8]>` view.
fn byte_len(&mut self) -> usize {
self.as_mut().as_mut().len()
}
// Leaks the box. Note: `Self::into_raw` resolves to the inherent
// `Box::into_raw` (inherent associated functions take precedence over trait
// methods in path resolution), so this does not recurse.
fn into_raw(mut self) -> (*const (), *const u8) {
let data = self.as_mut().as_mut().as_mut_ptr();
let ptr = Self::into_raw(self);
(ptr as _, data)
}
// Rebuilds the box from the leaked pointer and drops it; the length is not
// needed because `Box<T>` knows its own layout.
unsafe fn drop_raw(ptr: *const (), _len: usize) {
unsafe {
_ = Self::from_raw(ptr as _);
}
}
}
#[repr(C)]
#[derive(Debug)]
/// Opaque Rust view of a C++ `v8::BackingStore`: the owned memory behind an
/// `ArrayBuffer`. The `[usize; 6]` payload only reserves space — presumably
/// sized to match the C++ object; verify against the binding layer if the
/// V8 version changes.
pub struct BackingStore([usize; 6]);
// NOTE(review): `Send` is asserted so backing stores can move across threads;
// soundness relies on the C++ `v8::BackingStore` tolerating that — an
// assumption inherited from the binding design, confirm against V8 docs.
unsafe impl Send for BackingStore {}
impl BackingStore {
  /// Byte length of the buffer this store owns.
  #[inline(always)]
  pub fn byte_length(&self) -> usize {
    unsafe { v8__BackingStore__ByteLength(self) }
  }
  /// Pointer to the start of the buffer, or `None` when the C++ side
  /// reports a null data pointer.
  #[inline(always)]
  pub fn data(&self) -> Option<NonNull<c_void>> {
    NonNull::new(unsafe { v8__BackingStore__Data(self) })
  }
  /// Whether this store is shared (see V8's `BackingStore::IsShared`).
  #[inline(always)]
  pub fn is_shared(&self) -> bool {
    unsafe { v8__BackingStore__IsShared(self) }
  }
  /// Whether user JavaScript can resize this store (see V8's
  /// `BackingStore::IsResizableByUserJavaScript`).
  #[inline(always)]
  pub fn is_resizable_by_user_javascript(&self) -> bool {
    unsafe { v8__BackingStore__IsResizableByUserJavaScript(self) }
  }
}
impl Deref for BackingStore {
  type Target = [Cell<u8>];
  /// Views the backing memory as a shared slice of `Cell<u8>`, allowing
  /// byte-level mutation through a shared reference.
  #[inline]
  fn deref(&self) -> &Self::Target {
    // A store without a data pointer is presented as an empty slice; the
    // dangling (well-aligned, non-null) pointer is valid for length zero.
    let base = match self.data() {
      Some(ptr) => ptr.cast::<Cell<u8>>(),
      None => NonNull::dangling(),
    };
    unsafe { slice::from_raw_parts(base.as_ptr(), self.byte_length()) }
  }
}
impl Drop for BackingStore {
/// Releases the underlying C++ `BackingStore` object.
#[inline]
fn drop(&mut self) {
unsafe { v8__BackingStore__DELETE(self) };
}
}
/// Reference-counting plumbing so a `BackingStore` can be held in a
/// `SharedRef`/`SharedPtr`; each method forwards to C++ glue operating on a
/// `std::shared_ptr<v8::BackingStore>`.
impl Shared for BackingStore {
// Copies the shared pointer, bumping the reference count.
#[inline]
fn clone(ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
unsafe { std__shared_ptr__v8__BackingStore__COPY(ptr) }
}
// Converts an owning unique pointer into a shared pointer.
#[inline]
fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self> {
unsafe {
std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(unique_ptr)
}
}
// Returns the raw pointee (null when the shared pointer is empty).
#[inline]
fn get(ptr: &SharedPtrBase<Self>) -> *const Self {
unsafe { std__shared_ptr__v8__BackingStore__get(ptr) }
}
// Drops this handle's reference to the pointee.
#[inline]
fn reset(ptr: &mut SharedPtrBase<Self>) {
unsafe { std__shared_ptr__v8__BackingStore__reset(ptr) }
}
// Current `use_count()` of the underlying shared pointer.
#[inline]
fn use_count(ptr: &SharedPtrBase<Self>) -> long {
unsafe { std__shared_ptr__v8__BackingStore__use_count(ptr) }
}
}
impl ArrayBuffer {
/// Creates a new JavaScript `ArrayBuffer` of `byte_length` bytes.
///
/// Panics if V8 does not return a buffer (the `cast_local` result is
/// unwrapped).
#[inline(always)]
pub fn new<'s>(
scope: &PinScope<'s, '_, ()>,
byte_length: usize,
) -> Local<'s, ArrayBuffer> {
unsafe {
scope.cast_local(|sd| {
v8__ArrayBuffer__New__with_byte_length(
sd.get_isolate_ptr(),
byte_length,
)
})
}
.unwrap()
}
/// Creates an `ArrayBuffer` that uses an existing backing store instead of
/// allocating fresh memory.
#[inline(always)]
pub fn with_backing_store<'s>(
scope: &PinScope<'s, '_, ()>,
backing_store: &SharedRef<BackingStore>,
) -> Local<'s, ArrayBuffer> {
unsafe {
scope.cast_local(|sd| {
v8__ArrayBuffer__New__with_backing_store(
sd.get_isolate_ptr(),
backing_store,
)
})
}
.unwrap()
}
/// Length of the buffer in bytes.
#[inline(always)]
pub fn byte_length(&self) -> usize {
unsafe { v8__ArrayBuffer__ByteLength(self) }
}
/// Whether this buffer can be detached from its backing store.
#[inline(always)]
pub fn is_detachable(&self) -> bool {
unsafe { v8__ArrayBuffer__IsDetachable(self) }
}
/// Whether this buffer has already been detached.
#[inline(always)]
pub fn was_detached(&self) -> bool {
// Fast path that skips the FFI call: a buffer with a non-zero length
// cannot have been detached, since detaching empties the buffer.
if self.byte_length() != 0 {
return false;
}
unsafe { v8__ArrayBuffer__WasDetached(self) }
}
/// Detaches this buffer, optionally presenting `key` to match a detach key
/// installed via [`Self::set_detach_key`].
///
/// A buffer that is not detachable is treated as an immediate success
/// (`Some(true)`) without calling into V8. `None` is the empty `MaybeBool`
/// case — presumably when the detach attempt threw; confirm against the
/// C++ binding.
#[inline(always)]
pub fn detach(&self, key: Option<Local<Value>>) -> Option<bool> {
if self.is_detachable() {
// A missing key is passed to V8 as a null pointer.
let key = key.map_or(null(), |v| &*v as *const Value);
unsafe { v8__ArrayBuffer__Detach(self, key) }.into()
} else {
Some(true)
}
}
/// Installs a key that later calls to [`Self::detach`] must present.
#[inline(always)]
pub fn set_detach_key(&self, key: Local<Value>) {
unsafe { v8__ArrayBuffer__SetDetachKey(self, &*key) };
}
/// Pointer to the buffer's data, or `None` when V8 reports a null pointer.
#[inline(always)]
pub fn data(&self) -> Option<NonNull<c_void>> {
let raw_ptr = unsafe { v8__ArrayBuffer__Data(self) };
NonNull::new(raw_ptr)
}
/// Returns a shared handle to this buffer's backing store.
#[inline(always)]
pub fn get_backing_store(&self) -> SharedRef<BackingStore> {
unsafe { v8__ArrayBuffer__GetBackingStore(self) }
}
/// Allocates a new backing store of `byte_length` bytes via the isolate.
// NOTE(review): the parameter is named `scope` but is really the isolate
// handle; consider renaming in a future API revision.
#[inline(always)]
pub fn new_backing_store(
scope: &mut Isolate,
byte_length: usize,
) -> UniqueRef<BackingStore> {
unsafe {
UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore__with_byte_length(
(*scope).as_real_ptr(),
byte_length,
))
}
}
/// Creates a backing store that takes ownership of `data`.
#[inline(always)]
#[cfg(not(feature = "v8_enable_sandbox"))]
pub fn new_backing_store_from_boxed_slice(
data: Box<[u8]>,
) -> UniqueRef<BackingStore> {
Self::new_backing_store_from_bytes(data)
}
/// Creates a backing store that takes ownership of `data`.
#[inline(always)]
#[cfg(not(feature = "v8_enable_sandbox"))]
pub fn new_backing_store_from_vec(data: Vec<u8>) -> UniqueRef<BackingStore> {
Self::new_backing_store_from_bytes(data)
}
/// Creates a backing store that takes ownership of an arbitrary byte
/// container (`Vec<T>`/`Box<[T]>` of primitives, or a `Box` of anything
/// that is `AsMut<[u8]>`). The container is leaked into raw pointers here
/// and reconstructed and dropped by the deleter V8 invokes when the store
/// is destroyed.
#[inline(always)]
#[cfg(not(feature = "v8_enable_sandbox"))]
pub fn new_backing_store_from_bytes<T>(
mut bytes: T,
) -> UniqueRef<BackingStore>
where
T: sealed::Rawable,
{
let len = bytes.byte_len();
let (ptr, slice) = T::into_raw(bytes);
// Deleter handed to V8: rebuilds the original container from the opaque
// handle (arriving as `deleter_data`) and drops it.
unsafe extern "C" fn drop_rawable<T: sealed::Rawable>(
_ptr: *mut c_void,
len: usize,
data: *mut c_void,
) {
unsafe { T::drop_raw(data as _, len) }
}
unsafe {
Self::new_backing_store_from_ptr(
slice as _,
len,
drop_rawable::<T>,
ptr as _,
)
}
}
/// Creates a backing store over caller-provided memory.
///
/// # Safety
/// `data_ptr` must point to `byte_length` bytes that remain valid until
/// `deleter_callback` is invoked with (`data_ptr`, `byte_length`,
/// `deleter_data`); the deleter is then responsible for releasing them.
#[inline(always)]
pub unsafe fn new_backing_store_from_ptr(
data_ptr: *mut c_void,
byte_length: usize,
deleter_callback: BackingStoreDeleterCallback,
deleter_data: *mut c_void,
) -> UniqueRef<BackingStore> {
unsafe {
UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore__with_data(
data_ptr,
byte_length,
deleter_callback,
deleter_data,
))
}
}
}
impl DataView {
/// Creates a `DataView` over `arraybuffer` starting at `byte_offset` and
/// spanning `length` bytes.
///
/// Panics if V8 does not return a view (the `cast_local` result is
/// unwrapped).
#[inline(always)]
pub fn new<'s>(
scope: &PinScope<'s, '_, ()>,
arraybuffer: Local<'s, ArrayBuffer>,
byte_offset: usize,
length: usize,
) -> Local<'s, DataView> {
unsafe {
scope
.cast_local(|_| v8__DataView__New(&*arraybuffer, byte_offset, length))
}
.unwrap()
}
}