use std::cell::Cell;
use std::ffi::c_void;
use std::ops::Deref;
use std::ptr;
use std::ptr::null;
use std::ptr::null_mut;
use std::ptr::NonNull;
use std::slice;
use crate::support::long;
use crate::support::MaybeBool;
use crate::support::Opaque;
use crate::support::Shared;
use crate::support::SharedPtrBase;
use crate::support::SharedRef;
use crate::support::UniquePtr;
use crate::support::UniqueRef;
use crate::ArrayBuffer;
use crate::HandleScope;
use crate::Isolate;
use crate::Local;
use crate::Value;
extern "C" {
fn v8__ArrayBuffer__Allocator__NewDefaultAllocator() -> *mut Allocator;
fn v8__ArrayBuffer__Allocator__NewRustAllocator(
handle: *const c_void,
vtable: *const RustAllocatorVtable<c_void>,
) -> *mut Allocator;
fn v8__ArrayBuffer__Allocator__DELETE(this: *mut Allocator);
fn v8__ArrayBuffer__New__with_byte_length(
isolate: *mut Isolate,
byte_length: usize,
) -> *const ArrayBuffer;
fn v8__ArrayBuffer__New__with_backing_store(
isolate: *mut Isolate,
backing_store: *const SharedRef<BackingStore>,
) -> *const ArrayBuffer;
fn v8__ArrayBuffer__Detach(
this: *const ArrayBuffer,
key: *const Value,
) -> MaybeBool;
fn v8__ArrayBuffer__SetDetachKey(this: *const ArrayBuffer, key: *const Value);
fn v8__ArrayBuffer__Data(this: *const ArrayBuffer) -> *mut c_void;
fn v8__ArrayBuffer__IsDetachable(this: *const ArrayBuffer) -> bool;
fn v8__ArrayBuffer__WasDetached(this: *const ArrayBuffer) -> bool;
fn v8__ArrayBuffer__ByteLength(this: *const ArrayBuffer) -> usize;
fn v8__ArrayBuffer__GetBackingStore(
this: *const ArrayBuffer,
) -> SharedRef<BackingStore>;
fn v8__ArrayBuffer__NewBackingStore__with_byte_length(
isolate: *mut Isolate,
byte_length: usize,
) -> *mut BackingStore;
fn v8__ArrayBuffer__NewBackingStore__with_data(
data: *mut c_void,
byte_length: usize,
deleter: BackingStoreDeleterCallback,
deleter_data: *mut c_void,
) -> *mut BackingStore;
fn v8__BackingStore__Data(this: *const BackingStore) -> *mut c_void;
fn v8__BackingStore__ByteLength(this: *const BackingStore) -> usize;
fn v8__BackingStore__IsShared(this: *const BackingStore) -> bool;
fn v8__BackingStore__DELETE(this: *mut BackingStore);
fn std__shared_ptr__v8__BackingStore__COPY(
ptr: *const SharedPtrBase<BackingStore>,
) -> SharedPtrBase<BackingStore>;
fn std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(
unique_ptr: UniquePtr<BackingStore>,
) -> SharedPtrBase<BackingStore>;
fn std__shared_ptr__v8__BackingStore__get(
ptr: *const SharedPtrBase<BackingStore>,
) -> *mut BackingStore;
fn std__shared_ptr__v8__BackingStore__reset(
ptr: *mut SharedPtrBase<BackingStore>,
);
fn std__shared_ptr__v8__BackingStore__use_count(
ptr: *const SharedPtrBase<BackingStore>,
) -> long;
fn std__shared_ptr__v8__ArrayBuffer__Allocator__COPY(
ptr: *const SharedPtrBase<Allocator>,
) -> SharedPtrBase<Allocator>;
fn std__shared_ptr__v8__ArrayBuffer__Allocator__CONVERT__std__unique_ptr(
unique_ptr: UniquePtr<Allocator>,
) -> SharedPtrBase<Allocator>;
fn std__shared_ptr__v8__ArrayBuffer__Allocator__get(
ptr: *const SharedPtrBase<Allocator>,
) -> *mut Allocator;
fn std__shared_ptr__v8__ArrayBuffer__Allocator__reset(
ptr: *mut SharedPtrBase<Allocator>,
);
fn std__shared_ptr__v8__ArrayBuffer__Allocator__use_count(
ptr: *const SharedPtrBase<Allocator>,
) -> long;
}
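/// A thread-safe allocator that V8 uses to allocate the backing memory of
/// `ArrayBuffer`s. The allocator is an isolate-wide setting, supplied when
/// the isolate is created.
///
/// Memory allocated through this allocator is accounted for as external
/// memory by V8.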
#[repr(C)]
#[derive(Debug)]
pub struct Allocator(Opaque);
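/// A vtable of `extern "C"` callbacks that lets an `Allocator` be
/// implemented in Rust. `allocate` must return zero-initialized memory while
/// `allocate_uninitialized` may return uninitialized memory; `free` and
/// `reallocate` mirror the corresponding V8 allocator methods, and `drop` is
/// invoked once when the allocator itself is destroyed.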
#[repr(C)]
pub struct RustAllocatorVtable<T> {
pub allocate: unsafe extern "C" fn(handle: &T, len: usize) -> *mut c_void,
pub allocate_uninitialized:
unsafe extern "C" fn(handle: &T, len: usize) -> *mut c_void,
pub free: unsafe extern "C" fn(handle: &T, data: *mut c_void, len: usize),
pub reallocate: unsafe extern "C" fn(
handle: &T,
data: *mut c_void,
old_length: usize,
new_length: usize,
) -> *mut c_void,
pub drop: unsafe extern "C" fn(handle: *const T),
}
impl Shared for Allocator {
fn clone(ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__COPY(ptr) }
}
fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self> {
unsafe {
std__shared_ptr__v8__ArrayBuffer__Allocator__CONVERT__std__unique_ptr(
unique_ptr,
)
}
}
fn get(ptr: &SharedPtrBase<Self>) -> *const Self {
unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__get(ptr) }
}
fn reset(ptr: &mut SharedPtrBase<Self>) {
unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__reset(ptr) }
}
fn use_count(ptr: &SharedPtrBase<Self>) -> long {
unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__use_count(ptr) }
}
}
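/// Creates V8's default `malloc`/`free`-based allocator.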
#[inline(always)]
pub fn new_default_allocator() -> UniqueRef<Allocator> {
unsafe {
UniqueRef::from_raw(v8__ArrayBuffer__Allocator__NewDefaultAllocator())
}
}
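/// Creates an allocator that forwards every request to the given Rust
/// vtable, passing `handle` back as the first argument of each callback.
///
/// # Safety
///
/// The caller must guarantee that `handle` remains valid until the vtable's
/// `drop` callback has run, and that all callbacks are sound to call from
/// any thread.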
#[inline(always)]
pub unsafe fn new_rust_allocator<T: Sized + Send + Sync + 'static>(
handle: *const T,
vtable: &'static RustAllocatorVtable<T>,
) -> UniqueRef<Allocator> {
UniqueRef::from_raw(v8__ArrayBuffer__Allocator__NewRustAllocator(
handle as *const c_void,
vtable as *const RustAllocatorVtable<T>
as *const RustAllocatorVtable<c_void>,
))
}
#[test]
fn test_rust_allocator() {
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
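// None of the allocation callbacks are expected to run here: the allocator
// returned below is dropped immediately, so only the vtable's `drop` is
// exercised. It stores 42 through the shared `AtomicUsize` and releases its
// `Arc` reference, which the assertions at the end verify.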
unsafe extern "C" fn allocate(_: &AtomicUsize, _: usize) -> *mut c_void {
unimplemented!()
}
unsafe extern "C" fn allocate_uninitialized(
_: &AtomicUsize,
_: usize,
) -> *mut c_void {
unimplemented!()
}
unsafe extern "C" fn free(_: &AtomicUsize, _: *mut c_void, _: usize) {
unimplemented!()
}
unsafe extern "C" fn reallocate(
_: &AtomicUsize,
_: *mut c_void,
_: usize,
_: usize,
) -> *mut c_void {
unimplemented!()
}
unsafe extern "C" fn drop(x: *const AtomicUsize) {
let arc = Arc::from_raw(x);
arc.store(42, Ordering::SeqCst);
}
let retval = Arc::new(AtomicUsize::new(0));
let vtable: &'static RustAllocatorVtable<AtomicUsize> =
&RustAllocatorVtable {
allocate,
allocate_uninitialized,
free,
reallocate,
drop,
};
unsafe { new_rust_allocator(Arc::into_raw(retval.clone()), vtable) };
assert_eq!(retval.load(Ordering::SeqCst), 42);
assert_eq!(Arc::strong_count(&retval), 1);
}
#[test]
fn test_default_allocator() {
new_default_allocator();
}
impl Drop for Allocator {
fn drop(&mut self) {
unsafe { v8__ArrayBuffer__Allocator__DELETE(self) };
}
}
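/// Signature of the deleter invoked when a `BackingStore` created from
/// caller-supplied memory is destroyed, giving the embedder a chance to free
/// the underlying allocation.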
pub type BackingStoreDeleterCallback = unsafe extern "C" fn(
data: *mut c_void,
byte_length: usize,
deleter_data: *mut c_void,
);
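/// Deleter for backing stores whose memory is owned by a `Box<[u8]>`: it
/// rebuilds the box from the raw pointer and byte length, then drops it.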
pub unsafe extern "C" fn boxed_slice_deleter_callback(
data: *mut c_void,
byte_length: usize,
_deleter_data: *mut c_void,
) {
let slice_ptr = ptr::slice_from_raw_parts_mut(data as *mut u8, byte_length);
let b = Box::from_raw(slice_ptr);
drop(b);
}
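/// Deleter for backing stores whose memory is owned by a `Vec<u8>`. The
/// vector's capacity is smuggled through `deleter_data` so the `Vec` can be
/// reconstructed exactly as it was leaked.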
pub unsafe extern "C" fn vec_deleter_callback(
data: *mut c_void,
byte_length: usize,
deleter_data: *mut c_void,
) {
let capacity = deleter_data as usize;
drop(Vec::from_raw_parts(data as *mut u8, byte_length, capacity))
}
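/// A wrapper around the backing store (i.e. the raw memory) of an array
/// buffer.
///
/// Backing stores are reference counted; holding a `SharedRef<BackingStore>`
/// keeps the memory alive even after the owning `ArrayBuffer` is garbage
/// collected or detached.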
#[repr(C)]
#[derive(Debug)]
pub struct BackingStore([usize; 6]);
unsafe impl Send for BackingStore {}
impl BackingStore {
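/// Returns a pointer to the beginning of this backing store's memory block,
/// or `None` if the store has no data (e.g. it is empty). The pointer is
/// valid only as long as the backing store is alive.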
#[inline(always)]
pub fn data(&self) -> Option<NonNull<c_void>> {
let raw_ptr = unsafe { v8__BackingStore__Data(self) };
NonNull::new(raw_ptr)
}
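/// The length (in bytes) of this backing store.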
#[inline(always)]
pub fn byte_length(&self) -> usize {
unsafe { v8__BackingStore__ByteLength(self) }
}
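/// Indicates whether the backing store was created for an `ArrayBuffer` or
/// a `SharedArrayBuffer`.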
#[inline(always)]
pub fn is_shared(&self) -> bool {
unsafe { v8__BackingStore__IsShared(self) }
}
}
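/// Exposes the backing store's contents as a `[Cell<u8>]` slice. `Cell` is
/// used because the memory is aliased: JavaScript, or other handles to the
/// same store, may mutate it at any time. An empty store yields an empty
/// slice built from a dangling (but well-aligned) pointer.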
impl Deref for BackingStore {
type Target = [Cell<u8>];
fn deref(&self) -> &Self::Target {
let data = self
.data()
.unwrap_or_else(NonNull::dangling)
.cast::<Cell<u8>>();
let len = self.byte_length();
unsafe { slice::from_raw_parts(data.as_ptr(), len) }
}
}
impl Drop for BackingStore {
fn drop(&mut self) {
unsafe { v8__BackingStore__DELETE(self) };
}
}
impl Shared for BackingStore {
fn clone(ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
unsafe { std__shared_ptr__v8__BackingStore__COPY(ptr) }
}
fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self> {
unsafe {
std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(unique_ptr)
}
}
fn get(ptr: &SharedPtrBase<Self>) -> *const Self {
unsafe { std__shared_ptr__v8__BackingStore__get(ptr) }
}
fn reset(ptr: &mut SharedPtrBase<Self>) {
unsafe { std__shared_ptr__v8__BackingStore__reset(ptr) }
}
fn use_count(ptr: &SharedPtrBase<Self>) -> long {
unsafe { std__shared_ptr__v8__BackingStore__use_count(ptr) }
}
}
impl ArrayBuffer {
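/// Creates a new `ArrayBuffer` of the given length, with zero-initialized
/// memory obtained from the isolate's array buffer allocator.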
#[inline(always)]
pub fn new<'s>(
scope: &mut HandleScope<'s>,
byte_length: usize,
) -> Local<'s, ArrayBuffer> {
unsafe {
scope.cast_local(|sd| {
v8__ArrayBuffer__New__with_byte_length(
sd.get_isolate_ptr(),
byte_length,
)
})
}
.unwrap()
}
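/// Creates a new `ArrayBuffer` over an existing `BackingStore`, without
/// copying its contents; several buffers may share one store.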
#[inline(always)]
pub fn with_backing_store<'s>(
scope: &mut HandleScope<'s>,
backing_store: &SharedRef<BackingStore>,
) -> Local<'s, ArrayBuffer> {
unsafe {
scope.cast_local(|sd| {
v8__ArrayBuffer__New__with_backing_store(
sd.get_isolate_ptr(),
backing_store,
)
})
}
.unwrap()
}
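/// Data length in bytes.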
#[inline(always)]
pub fn byte_length(&self) -> usize {
unsafe { v8__ArrayBuffer__ByteLength(self) }
}
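/// Returns `true` if this `ArrayBuffer` may be detached.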
#[inline(always)]
pub fn is_detachable(&self) -> bool {
unsafe { v8__ArrayBuffer__IsDetachable(self) }
}
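/// Returns `true` if this `ArrayBuffer` has already been detached. A
/// detached buffer always has a byte length of zero, so a non-zero length
/// short-circuits to `false` without crossing the FFI boundary.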
#[inline(always)]
pub fn was_detached(&self) -> bool {
if self.byte_length() != 0 {
return false;
}
unsafe { v8__ArrayBuffer__WasDetached(self) }
}
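/// Detaches this `ArrayBuffer` and all its views (typed arrays), setting
/// their byte lengths to zero. If a detach key has been set, `key` must
/// match it; on a mismatch V8 throws an exception and `None` is returned.
/// Buffers that are not detachable are left untouched and the call reports
/// success.
///
/// A minimal sketch, assuming a live `HandleScope` named `scope`:
///
/// ```ignore
/// let ab = v8::ArrayBuffer::new(scope, 16);
/// assert!(ab.is_detachable());
/// ab.detach(None).unwrap();
/// assert!(ab.was_detached());
/// assert_eq!(ab.byte_length(), 0);
/// ```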
#[inline(always)]
pub fn detach(&self, key: Option<Local<Value>>) -> Option<bool> {
if self.is_detachable() {
let key = key.map(|v| &*v as *const Value).unwrap_or(null());
unsafe { v8__ArrayBuffer__Detach(self, key) }.into()
} else {
Some(true)
}
}
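/// Sets this buffer's detach key; subsequent calls to `detach` must pass a
/// matching key to succeed.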
#[inline(always)]
pub fn set_detach_key(&self, key: Local<Value>) {
unsafe { v8__ArrayBuffer__SetDetachKey(self, &*key) };
}
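/// Returns a pointer to this buffer's data, or `None` if the buffer has no
/// data (for example, when it is empty). This is a more efficient shortcut
/// than going through `get_backing_store`.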
#[inline(always)]
pub fn data(&self) -> Option<NonNull<c_void>> {
let raw_ptr = unsafe { v8__ArrayBuffer__Data(self) };
NonNull::new(raw_ptr)
}
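/// Gets a shared pointer to the backing store of this `ArrayBuffer`; the
/// reference keeps the underlying memory alive independently of the buffer.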
#[inline(always)]
pub fn get_backing_store(&self) -> SharedRef<BackingStore> {
unsafe { v8__ArrayBuffer__GetBackingStore(self) }
}
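/// Returns a new standalone `BackingStore` allocated with the isolate's
/// array buffer allocator. The result can later be passed to
/// `with_backing_store` to construct an `ArrayBuffer`.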
#[inline(always)]
pub fn new_backing_store(
scope: &mut Isolate,
byte_length: usize,
) -> UniqueRef<BackingStore> {
unsafe {
UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore__with_byte_length(
scope,
byte_length,
))
}
}
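/// Returns a new standalone `BackingStore` that takes ownership of the given
/// boxed slice; the memory is released via `boxed_slice_deleter_callback`
/// when the last reference to the store goes away.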
#[inline(always)]
pub fn new_backing_store_from_boxed_slice(
data: Box<[u8]>,
) -> UniqueRef<BackingStore> {
let byte_length = data.len();
let data_ptr = Box::into_raw(data) as *mut c_void;
unsafe {
UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore__with_data(
data_ptr,
byte_length,
boxed_slice_deleter_callback,
null_mut(),
))
}
}
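/// Returns a new standalone `BackingStore` that takes ownership of the given
/// `Vec<u8>`, stashing its capacity in the deleter data so the vector can be
/// dropped correctly later.
///
/// A minimal usage sketch, assuming a live `HandleScope` named `scope` and
/// that the support module's `UniqueRef::make_shared` is available to
/// convert the result into a `SharedRef`:
///
/// ```ignore
/// let store = v8::ArrayBuffer::new_backing_store_from_vec(vec![1, 2, 3])
///   .make_shared();
/// let ab = v8::ArrayBuffer::with_backing_store(scope, &store);
/// assert_eq!(ab.byte_length(), 3);
/// ```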
#[inline(always)]
pub fn new_backing_store_from_vec(
mut data: Vec<u8>,
) -> UniqueRef<BackingStore> {
let byte_length = data.len();
let capacity = data.capacity();
let data_ptr = data.as_mut_ptr() as *mut c_void;
std::mem::forget(data);
unsafe {
UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore__with_data(
data_ptr,
byte_length,
vec_deleter_callback,
capacity as *mut c_void,
))
}
}
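/// Returns a new standalone `BackingStore` backed by caller-supplied memory.
///
/// # Safety
///
/// `data_ptr` must be valid for reads and writes of `byte_length` bytes
/// until `deleter_callback` is invoked, and the deleter must correctly
/// release that memory.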
#[inline(always)]
pub unsafe fn new_backing_store_from_ptr(
data_ptr: *mut c_void,
byte_length: usize,
deleter_callback: BackingStoreDeleterCallback,
deleter_data: *mut c_void,
) -> UniqueRef<BackingStore> {
UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore__with_data(
data_ptr,
byte_length,
deleter_callback,
deleter_data,
))
}
}