use std::fmt::Debug;
use std::marker::PhantomData;
use std::ops::Deref;
use std::ops::DerefMut;
use std::ops::Range;
use super::transl8::FromV8;
/// A primitive element type that can be viewed through a [`V8Slice`] and
/// materialized back into V8 as the matching typed array.
pub trait V8Sliceable: Copy + Clone {
/// The V8 typed-array type corresponding to `Self`
/// (e.g. `v8::Uint8Array` for `u8`).
type V8;
/// Creates a typed-array view over `buf`, starting at `byte_offset` (in
/// bytes) and spanning `length` elements. Returns `None` when V8 fails to
/// create the view.
fn new_buf<'s>(
scope: &mut v8::HandleScope<'s>,
buf: v8::Local<v8::ArrayBuffer>,
byte_offset: usize,
length: usize,
) -> Option<v8::Local<'s, Self::V8>>;
}
impl V8Sliceable for u8 {
type V8 = v8::Uint8Array;
/// Wraps `buf` in a `Uint8Array` view over the given byte offset/length.
fn new_buf<'s>(
scope: &mut v8::HandleScope<'s>,
buf: v8::Local<v8::ArrayBuffer>,
byte_offset: usize,
length: usize,
) -> Option<v8::Local<'s, Self::V8>> {
v8::Uint8Array::new(scope, buf, byte_offset, length)
}
}
impl V8Sliceable for u32 {
type V8 = v8::Uint32Array;
/// Wraps `buf` in a `Uint32Array` view over the given byte offset/length.
fn new_buf<'s>(
scope: &mut v8::HandleScope<'s>,
buf: v8::Local<v8::ArrayBuffer>,
byte_offset: usize,
length: usize,
) -> Option<v8::Local<'s, Self::V8>> {
v8::Uint32Array::new(scope, buf, byte_offset, length)
}
}
impl V8Sliceable for f64 {
type V8 = v8::Float64Array;
/// Wraps `buf` in a `Float64Array` view over the given byte offset/length.
fn new_buf<'s>(
scope: &mut v8::HandleScope<'s>,
buf: v8::Local<v8::ArrayBuffer>,
byte_offset: usize,
length: usize,
) -> Option<v8::Local<'s, Self::V8>> {
v8::Float64Array::new(scope, buf, byte_offset, length)
}
}
/// A typed sub-range of a V8 `BackingStore`. Holds a shared handle to the
/// store, so the underlying memory stays alive for as long as the slice
/// exists. Cloning copies the store handle and the range (it does not copy
/// the buffer contents).
#[derive(Clone)]
pub struct V8Slice<T>
where
T: V8Sliceable,
{
// Shared handle to the underlying V8 backing store.
pub(crate) store: v8::SharedRef<v8::BackingStore>,
// Range in units of `T` ELEMENTS, not bytes — `from_parts` divides the
// incoming byte range by `size_of::<T>()` and `into_parts` multiplies back.
pub(crate) range: Range<usize>,
_phantom: PhantomData<T>,
}
impl<T> Debug for V8Slice<T>
where
  T: V8Sliceable,
{
  /// Renders e.g. `V8Slice(0..16 of 1024 u8)`: the element range, the
  /// backing store's byte length, and the element type name.
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    write!(
      f,
      "V8Slice({:?} of {} {})",
      self.range,
      self.store.len(),
      std::any::type_name::<T>()
    )
  }
}
// SAFETY(review): asserts V8Slice may be moved across threads even though
// v8::SharedRef<v8::BackingStore> is not automatically Send. This relies on
// the backing store's reference counting and lifetime management being
// thread-safe — confirm against the rusty_v8 documentation; note Sync is
// deliberately NOT implemented.
unsafe impl<T> Send for V8Slice<T> where T: V8Sliceable {}
impl<T> V8Slice<T>
where
  T: V8Sliceable,
{
  /// Test-only constructor: allocates a fresh `byte_length`-byte backing
  /// store, initializing the V8 platform once per process and deliberately
  /// leaking the isolate so the store is never torn down under the slice.
  #[cfg(test)]
  fn very_unsafe_new_only_for_test(byte_length: usize) -> Self {
    // One-time, process-wide V8 initialization.
    static V8_ONCE: std::sync::Once = std::sync::Once::new();
    V8_ONCE.call_once(|| {
      let platform =
        v8::new_unprotected_default_platform(0, false).make_shared();
      v8::V8::initialize_platform(platform);
      v8::V8::initialize();
    });
    let mut isolate = v8::Isolate::new(Default::default());
    // SAFETY: the store is freshly allocated and the range covers exactly
    // `0..byte_length`; leaking the isolate (test-only) keeps the store's
    // allocator alive for the remainder of the process.
    unsafe {
      let ptr = v8::ArrayBuffer::new_backing_store(&mut isolate, byte_length);
      std::mem::forget(isolate);
      Self::from_parts(ptr.into(), 0..byte_length)
    }
  }

  /// Creates a `V8Slice` from a backing store and a range expressed in
  /// *bytes*. The byte range is converted (truncating division) into the
  /// range of `T` elements that is stored internally.
  ///
  /// # Safety
  /// The caller must guarantee that `range` lies within the backing store
  /// and that the buffer remains valid for reads/writes of `T` for the
  /// slice's lifetime.
  pub unsafe fn from_parts(
    store: v8::SharedRef<v8::BackingStore>,
    range: Range<usize>,
  ) -> Self {
    Self {
      store,
      range: range.start / std::mem::size_of::<T>()
        ..range.end / std::mem::size_of::<T>(),
      _phantom: PhantomData,
    }
  }

  /// Element count visible through this slice, clamping `range.end` to the
  /// backing store's current size (the store may have shrunk out from under
  /// us). Shared by `as_slice`, `as_slice_mut` and `len` — previously this
  /// logic was duplicated in all three.
  fn clamped_len(&self) -> usize {
    let clamped_end = std::cmp::min(
      self.range.end,
      self.store.len() / std::mem::size_of::<T>(),
    );
    clamped_end.saturating_sub(self.range.start)
  }

  fn as_slice(&self) -> &[T] {
    // A detached or zero-length backing store has no data pointer.
    let Some(ptr) = self.store.data() else {
      return &[];
    };
    let clamped_len = self.clamped_len();
    if clamped_len == 0 {
      // Fixed: was `return &mut [];` — a shared return type takes a shared
      // empty slice (the original only compiled via &mut -> & coercion).
      return &[];
    }
    let ptr = ptr.cast::<T>().as_ptr();
    // SAFETY: `ptr` is non-null and `range.start..range.start + clamped_len`
    // was clamped above to lie within the backing store.
    unsafe {
      let ptr = ptr.add(self.range.start);
      std::slice::from_raw_parts(ptr, clamped_len)
    }
  }

  fn as_slice_mut(&mut self) -> &mut [T] {
    let Some(ptr) = self.store.data() else {
      return &mut [];
    };
    let clamped_len = self.clamped_len();
    if clamped_len == 0 {
      return &mut [];
    }
    let ptr = ptr.cast::<T>().as_ptr();
    // SAFETY: same bounds argument as `as_slice`; `&mut self` provides
    // exclusive access to this view of the store.
    unsafe {
      let ptr = ptr.add(self.range.start);
      std::slice::from_raw_parts_mut(ptr, clamped_len)
    }
  }

  /// Number of `T` elements visible through this slice.
  pub fn len(&self) -> usize {
    self.clamped_len()
  }

  pub fn is_empty(&self) -> bool {
    self.len() == 0
  }

  /// Copies the slice contents into a freshly allocated `Vec<T>`.
  pub fn to_vec(&self) -> Vec<T> {
    self.as_slice().to_vec()
  }

  /// Copies the slice contents into a freshly allocated boxed slice.
  pub fn to_boxed_slice(&self) -> Box<[T]> {
    self.to_vec().into_boxed_slice()
  }

  /// Consumes the slice and materializes it as the matching typed array
  /// (`Uint8Array`, `Uint32Array`, ...) sharing the same backing store.
  pub fn into_v8_local<'a>(
    self,
    scope: &mut v8::HandleScope<'a>,
  ) -> Option<v8::Local<'a, T::V8>> {
    let (store, range) = self.into_parts();
    let buffer = v8::ArrayBuffer::with_backing_store(scope, &store);
    // `range` is in bytes here; `new_buf` takes a byte offset plus a length
    // in elements.
    T::new_buf(
      scope,
      buffer,
      range.start,
      range.len() / std::mem::size_of::<T>(),
    )
  }

  /// Consumes the slice and returns an `ArrayBuffer` over the WHOLE backing
  /// store — the slice's sub-range is intentionally discarded.
  pub fn into_v8_unsliced_arraybuffer_local<'a>(
    self,
    scope: &mut v8::HandleScope<'a>,
  ) -> v8::Local<'a, v8::ArrayBuffer> {
    let (store, _range) = self.into_parts();
    v8::ArrayBuffer::with_backing_store(scope, &store)
  }

  /// Decomposes the slice into its backing store and its range converted
  /// back to *bytes* (the inverse of [`Self::from_parts`]).
  pub fn into_parts(self) -> (v8::SharedRef<v8::BackingStore>, Range<usize>) {
    (
      self.store,
      self.range.start * std::mem::size_of::<T>()
        ..self.range.end * std::mem::size_of::<T>(),
    )
  }

  /// Splits off and returns the FIRST `at` elements; `self` keeps the rest.
  /// Mirrors `bytes::Bytes::split_to`. Panics if `at > self.len()`.
  pub fn split_to(&mut self, at: usize) -> Self {
    let len = self.len();
    assert!(at <= len);
    let offset = self.range.start;
    let mut other = self.clone();
    self.range = offset + at..offset + len;
    other.range = offset..offset + at;
    other
  }

  /// Splits off and returns everything FROM `at` onwards; `self` keeps the
  /// first `at` elements. Mirrors `bytes::Bytes::split_off`. Panics if
  /// `at > self.len()`.
  pub fn split_off(&mut self, at: usize) -> Self {
    let len = self.len();
    assert!(at <= len);
    let offset = self.range.start;
    let mut other = self.clone();
    self.range = offset..offset + at;
    other.range = offset + at..offset + len;
    other
  }

  /// Shortens the slice to at most `len` elements (relative to its current
  /// start); a no-op when `len >= self.len()`.
  pub fn truncate(&mut self, len: usize) {
    let offset = self.range.start;
    self.range.end = std::cmp::min(offset + len, self.range.end)
  }
}
/// Resolves a JS value to its underlying `ArrayBuffer` plus the byte range it
/// covers: a typed-array/DataView contributes its own offset and length, while
/// a plain `ArrayBuffer` covers its full byte length. Any other value yields a
/// `v8::DataError`.
pub(crate) fn to_ranged_buffer<'s>(
  scope: &mut v8::HandleScope<'s>,
  value: v8::Local<v8::Value>,
) -> Result<(v8::Local<'s, v8::ArrayBuffer>, Range<usize>), v8::DataError> {
  if let Ok(view) = v8::Local::<v8::ArrayBufferView>::try_from(value) {
    let start = view.byte_offset();
    let end = start + view.byte_length();
    let backing = view.buffer(scope).ok_or(v8::DataError::NoData {
      expected: "view to have a buffer",
    })?;
    let backing = v8::Local::new(scope, backing);
    return Ok((backing, start..end));
  }
  // Not a view: the value must itself be an ArrayBuffer.
  let raw: v8::Local<v8::ArrayBuffer> = value.try_into()?;
  let raw = v8::Local::new(scope, raw);
  Ok((raw, 0..raw.byte_length()))
}
impl<T> FromV8 for V8Slice<T>
where
T: V8Sliceable,
{
fn from_v8(
scope: &mut v8::HandleScope,
value: v8::Local<v8::Value>,
) -> Result<Self, crate::Error> {
match to_ranged_buffer(scope, value) {
Ok((b, range)) => {
let store = b.get_backing_store();
if store.is_resizable_by_user_javascript() {
Err(crate::Error::ResizableBackingStoreNotSupported)
} else if store.is_shared() {
Err(crate::Error::ExpectedBuffer(value.type_repr()))
} else {
Ok(unsafe { V8Slice::from_parts(store, range) })
}
}
Err(_) => Err(crate::Error::ExpectedBuffer(value.type_repr())),
}
}
}
impl<T> Deref for V8Slice<T>
where
T: V8Sliceable,
{
type Target = [T];
/// Dereferences to the clamped element slice; empty when the backing
/// store has no data pointer.
fn deref(&self) -> &[T] {
self.as_slice()
}
}
impl<T> DerefMut for V8Slice<T>
where
T: V8Sliceable,
{
/// Mutable counterpart of `Deref`; empty when the backing store has no
/// data pointer.
fn deref_mut(&mut self) -> &mut [T] {
self.as_slice_mut()
}
}
impl<T> AsRef<[T]> for V8Slice<T>
where
T: V8Sliceable,
{
/// Borrows the clamped element slice (same view as `Deref`).
fn as_ref(&self) -> &[T] {
self.as_slice()
}
}
impl<T> AsMut<[T]> for V8Slice<T>
where
T: V8Sliceable,
{
/// Mutably borrows the clamped element slice (same view as `DerefMut`).
fn as_mut(&mut self) -> &mut [T] {
self.as_slice_mut()
}
}
#[cfg(test)]
mod tests {
  use super::*;

  /// Allocates a test-only slice holding exactly `len` elements of `T`.
  fn make_slice<T: V8Sliceable>(len: usize) -> V8Slice<T> {
    let byte_length = len * std::mem::size_of::<T>();
    let slice = V8Slice::<T>::very_unsafe_new_only_for_test(byte_length);
    assert_eq!(slice.len(), len);
    slice
  }

  #[test]
  pub fn test_split_off() {
    test_split_off_generic::<u8>();
    test_split_off_generic::<u32>();
  }

  // `split_off` keeps the head in `self` and returns the tail.
  pub fn test_split_off_generic<T: V8Sliceable>() {
    let mut head = make_slice::<T>(1024);
    let mut tail = head.split_off(16);
    assert_eq!(0..16, head.range);
    assert_eq!(16..1024, tail.range);
    let rest = tail.split_off(16);
    assert_eq!(16..32, tail.range);
    assert_eq!(32..1024, rest.range);
  }

  #[test]
  pub fn test_split_to() {
    test_split_to_generic::<u8>();
    test_split_to_generic::<u32>();
  }

  // `split_to` returns the head and keeps the tail in `self`.
  pub fn test_split_to_generic<T: V8Sliceable>() {
    let mut remainder = make_slice::<T>(1024);
    let front = remainder.split_to(16);
    assert_eq!(16..1024, remainder.range);
    assert_eq!(0..16, front.range);
    let next = remainder.split_to(16);
    assert_eq!(32..1024, remainder.range);
    assert_eq!(16..32, next.range);
  }

  #[test]
  pub fn test_truncate() {
    test_truncate_generic::<u8>();
    test_truncate_generic::<u32>();
  }

  pub fn test_truncate_generic<T: V8Sliceable>() {
    let mut slice = make_slice::<T>(1024);
    slice.truncate(16);
    assert_eq!(0..16, slice.range);
  }

  #[test]
  fn test_truncate_after_split() {
    test_truncate_after_split_generic::<u8>();
    test_truncate_after_split_generic::<u32>();
  }

  // Truncation is relative to the slice's current start, not the store.
  pub fn test_truncate_after_split_generic<T: V8Sliceable>() {
    let mut slice = make_slice::<T>(1024);
    _ = slice.split_to(16);
    assert_eq!(16..1024, slice.range);
    slice.truncate(16);
    assert_eq!(16..32, slice.range);
  }
}