1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::fmt::Debug;
use std::marker::PhantomData;
use std::ops::Deref;
use std::ops::DerefMut;
use std::ops::Range;

use super::transl8::FromV8;

/// A type that may be represented as a [`V8Slice`].
pub trait V8Sliceable: Copy + Clone {
  /// The concrete V8 data view type.
  type V8;
  /// Creates a v8 typed-array view of type [`Self::V8`] over `buf`.
  ///
  /// `byte_offset` is the view's starting offset into `buf`, in bytes, while
  /// `length` is an element count (the caller in `V8Slice::into_v8_local`
  /// divides the byte length by `size_of::<T>()` before passing it here).
  /// Returns `None` if the view could not be created.
  fn new_buf<'s>(
    scope: &mut v8::HandleScope<'s>,
    buf: v8::Local<v8::ArrayBuffer>,
    byte_offset: usize,
    length: usize,
  ) -> Option<v8::Local<'s, Self::V8>>;
}

impl V8Sliceable for u8 {
  type V8 = v8::Uint8Array;

  /// Wraps `buf` in a `Uint8Array` view covering the requested range.
  fn new_buf<'s>(
    scope: &mut v8::HandleScope<'s>,
    array_buffer: v8::Local<v8::ArrayBuffer>,
    offset: usize,
    len: usize,
  ) -> Option<v8::Local<'s, Self::V8>> {
    v8::Uint8Array::new(scope, array_buffer, offset, len)
  }
}

impl V8Sliceable for u32 {
  type V8 = v8::Uint32Array;

  /// Wraps `buf` in a `Uint32Array` view covering the requested range.
  fn new_buf<'s>(
    scope: &mut v8::HandleScope<'s>,
    array_buffer: v8::Local<v8::ArrayBuffer>,
    offset: usize,
    len: usize,
  ) -> Option<v8::Local<'s, Self::V8>> {
    v8::Uint32Array::new(scope, array_buffer, offset, len)
  }
}

impl V8Sliceable for f64 {
  type V8 = v8::Float64Array;

  /// Wraps `buf` in a `Float64Array` view covering the requested range.
  fn new_buf<'s>(
    scope: &mut v8::HandleScope<'s>,
    array_buffer: v8::Local<v8::ArrayBuffer>,
    offset: usize,
    len: usize,
  ) -> Option<v8::Local<'s, Self::V8>> {
    v8::Float64Array::new(scope, array_buffer, offset, len)
  }
}

/// A V8Slice encapsulates a slice that's been borrowed from a JavaScript
/// ArrayBuffer object. JavaScript objects can normally be garbage collected,
/// but the existence of a V8Slice inhibits this until it is dropped. It
/// behaves much like an Arc<[u8]>.
///
/// # Cloning
/// Cloning a V8Slice does not clone the contents of the buffer,
/// it creates a new reference to that buffer.
///
/// To actually clone the contents of the buffer do
/// `let copy = Vec::from(&*zero_copy_buf);`
#[derive(Clone)]
pub struct V8Slice<T>
where
  T: V8Sliceable,
{
  // Ref-counted handle keeping the ArrayBuffer's backing memory alive.
  pub(crate) store: v8::SharedRef<v8::BackingStore>,
  // Range within `store` in *element* units of `T`, not bytes
  // (`from_parts` divides the byte range by `size_of::<T>()`).
  pub(crate) range: Range<usize>,
  // Zero-sized marker tying the slice to its element type.
  _phantom: PhantomData<T>,
}

impl<T> Debug for V8Slice<T>
where
  T: V8Sliceable,
{
  /// Renders as `V8Slice(<element range> of <store byte length> <type name>)`.
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    write!(
      f,
      "V8Slice({:?} of {} {})",
      self.range,
      self.store.len(),
      std::any::type_name::<T>()
    )
  }
}

// SAFETY: unsafe trait must have unsafe implementation.
// NOTE(review): this allows a V8Slice (and its v8::SharedRef<v8::BackingStore>)
// to be moved to another thread; soundness presumably relies on the shared
// ref's clone/drop being thread-safe — confirm against the rusty_v8 docs.
// `Sync` is intentionally not implemented here.
unsafe impl<T> Send for V8Slice<T> where T: V8Sliceable {}

impl<T> V8Slice<T>
where
  T: V8Sliceable,
{
  /// Create one of these for testing. We create and forget an isolate here. If we decide to perform more v8-requiring tests,
  /// this code will probably need to be hoisted to another location.
  #[cfg(test)]
  fn very_unsafe_new_only_for_test(byte_length: usize) -> Self {
    static V8_ONCE: std::sync::Once = std::sync::Once::new();

    V8_ONCE.call_once(|| {
      let platform =
        v8::new_unprotected_default_platform(0, false).make_shared();
      v8::V8::initialize_platform(platform);
      v8::V8::initialize();
    });

    let mut isolate = v8::Isolate::new(Default::default());
    // SAFETY: This is not safe in any way whatsoever, but it's only for testing non-buffer functions.
    unsafe {
      let ptr = v8::ArrayBuffer::new_backing_store(&mut isolate, byte_length);
      std::mem::forget(isolate);
      Self::from_parts(ptr.into(), 0..byte_length)
    }
  }

  /// Create a V8Slice from raw parts.
  ///
  /// The incoming `range` is expressed in *bytes*; it is converted to element
  /// units of `T` for internal storage. Note the conversion uses integer
  /// division, so a byte range that is not a multiple of `size_of::<T>()` is
  /// truncated down to whole elements.
  ///
  /// # Safety
  ///
  /// The `range` passed to this function *must* be within the bounds of the backing store, as we may
  /// create a slice from this. The [`v8::BackingStore`] must be valid, and valid for use for the purposes
  /// of this `V8Slice` (ie: the caller must understand the repercussions of using shared/resizable
  /// buffers).
  pub unsafe fn from_parts(
    store: v8::SharedRef<v8::BackingStore>,
    range: Range<usize>,
  ) -> Self {
    Self {
      store,
      range: range.start / std::mem::size_of::<T>()
        ..range.end / std::mem::size_of::<T>(),
      _phantom: PhantomData,
    }
  }

  /// Borrow the slice contents, clamped to the backing store's length
  /// (see [`Self::len`]).
  fn as_slice(&self) -> &[T] {
    // `len()` clamps `range.end` against the backing store, so the raw read
    // below can never run past the end of the store.
    let len = self.len();
    let Some(ptr) = self.store.data() else {
      // Backing stores without a data pointer have nothing to borrow.
      return &[];
    };
    if len == 0 {
      return &[];
    }
    // SAFETY: v8::SharedRef<v8::BackingStore> is similar to Arc<[u8]>,
    // it points to a fixed continuous slice of bytes on the heap.
    // We assume it's initialized and thus safe to read (though may not contain
    // meaningful data).
    // Note that we are likely violating Rust's safety rules here by assuming
    // nobody is mutating this buffer elsewhere, however in practice V8Slices
    // do not have overlapping read/write phases.
    unsafe {
      let ptr = ptr.cast::<T>().as_ptr().add(self.range.start);
      std::slice::from_raw_parts(ptr, len)
    }
  }

  /// Mutably borrow the slice contents, clamped like [`Self::as_slice`].
  fn as_slice_mut(&mut self) -> &mut [T] {
    let len = self.len();
    let Some(ptr) = self.store.data() else {
      return &mut [];
    };
    if len == 0 {
      return &mut [];
    }
    // SAFETY: see `as_slice`. We additionally hold `&mut self`; as noted
    // there, V8Slices do not have overlapping read/write phases in practice.
    unsafe {
      let ptr = ptr.cast::<T>().as_ptr().add(self.range.start);
      std::slice::from_raw_parts_mut(ptr, len)
    }
  }

  /// Returns the underlying length of the range of this slice. If the range of this slice would exceed the range
  /// of the underlying backing store, the range is clamped so that it falls within the underlying backing store's
  /// valid length.
  pub fn len(&self) -> usize {
    // Store length is in bytes; convert to whole elements before clamping.
    let store_len_elements = self.store.len() / std::mem::size_of::<T>();
    std::cmp::min(self.range.end, store_len_elements)
      .saturating_sub(self.range.start)
  }

  /// Returns whether this slice is empty. See `len` for notes about how the length is treated when the range of this
  /// slice exceeds that of the underlying backing store.
  pub fn is_empty(&self) -> bool {
    self.len() == 0
  }

  /// Create a [`Vec<T>`] copy of this slice data.
  pub fn to_vec(&self) -> Vec<T> {
    self.as_slice().to_vec()
  }

  /// Create a [`Box<[T]>`] copy of this slice data.
  pub fn to_boxed_slice(&self) -> Box<[T]> {
    self.to_vec().into_boxed_slice()
  }

  /// Takes this slice and converts it into a strongly-typed v8 array.
  pub fn into_v8_local<'a>(
    self,
    scope: &mut v8::HandleScope<'a>,
  ) -> Option<v8::Local<'a, T::V8>> {
    // `into_parts` yields the range in bytes; `new_buf` takes a byte offset
    // but an element count, hence the division below.
    let (store, range) = self.into_parts();
    let buffer = v8::ArrayBuffer::with_backing_store(scope, &store);
    T::new_buf(
      scope,
      buffer,
      range.start,
      range.len() / std::mem::size_of::<T>(),
    )
  }

  /// Takes this slice and converts it into a strongly-typed v8 array, ignoring the underlying range.
  pub fn into_v8_unsliced_arraybuffer_local<'a>(
    self,
    scope: &mut v8::HandleScope<'a>,
  ) -> v8::Local<'a, v8::ArrayBuffer> {
    let (store, _range) = self.into_parts();
    v8::ArrayBuffer::with_backing_store(scope, &store)
  }

  /// Returns the slice to the parts it came from. The returned range is
  /// expressed in bytes — the inverse of the conversion performed by
  /// [`Self::from_parts`].
  pub fn into_parts(self) -> (v8::SharedRef<v8::BackingStore>, Range<usize>) {
    (
      self.store,
      self.range.start * std::mem::size_of::<T>()
        ..self.range.end * std::mem::size_of::<T>(),
    )
  }

  /// Splits the buffer into two at the given index.
  ///
  /// Afterwards `self` contains elements `[at, len)`, and the returned `V8Slice` contains elements `[0, at)`.
  ///
  /// # Panics
  ///
  /// Panics if `at > len`.
  pub fn split_to(&mut self, at: usize) -> Self {
    let len = self.len();
    assert!(at <= len);
    let offset = self.range.start;
    // Cloning only creates another reference to the same backing store;
    // the two halves then get disjoint sub-ranges of it.
    let mut other = self.clone();
    self.range = offset + at..offset + len;
    other.range = offset..offset + at;
    other
  }

  /// Splits the buffer into two at the given index.
  ///
  /// Afterwards `self` contains elements `[0, at)`, and the returned `V8Slice` contains elements `[at, len)`.
  ///
  /// # Panics
  ///
  /// Panics if `at > len`.
  pub fn split_off(&mut self, at: usize) -> Self {
    let len = self.len();
    assert!(at <= len);
    let offset = self.range.start;
    let mut other = self.clone();
    self.range = offset..offset + at;
    other.range = offset + at..offset + len;
    other
  }

  /// Shortens the buffer, keeping the first `len` elements and dropping the rest.
  ///
  /// If `len` is greater than the buffer's current length, this has no effect.
  pub fn truncate(&mut self, len: usize) {
    // `len` is relative to the current start of the range, and the end can
    // only ever shrink.
    let offset = self.range.start;
    self.range.end = std::cmp::min(offset + len, self.range.end)
  }
}

/// Resolves `value` to its underlying `ArrayBuffer` plus the byte range it
/// covers: for an `ArrayBufferView` this is the view's offset/length window;
/// for a bare `ArrayBuffer` it is the whole buffer. Any other value yields
/// the `try_into` conversion error.
pub(crate) fn to_ranged_buffer<'s>(
  scope: &mut v8::HandleScope<'s>,
  value: v8::Local<v8::Value>,
) -> Result<(v8::Local<'s, v8::ArrayBuffer>, Range<usize>), v8::DataError> {
  match v8::Local::<v8::ArrayBufferView>::try_from(value) {
    Ok(view) => {
      let start = view.byte_offset();
      let end = start + view.byte_length();
      let buffer = view.buffer(scope).ok_or(v8::DataError::NoData {
        expected: "view to have a buffer",
      })?;
      // Recreate the handle in the current scope to avoid lifetime issues.
      let buffer = v8::Local::new(scope, buffer);
      Ok((buffer, start..end))
    }
    Err(_) => {
      let buffer: v8::Local<v8::ArrayBuffer> = value.try_into()?;
      // Recreate the handle in the current scope to avoid lifetime issues.
      let buffer = v8::Local::new(scope, buffer);
      let byte_length = buffer.byte_length();
      Ok((buffer, 0..byte_length))
    }
  }
}

impl<T> FromV8 for V8Slice<T>
where
  T: V8Sliceable,
{
  fn from_v8(
    scope: &mut v8::HandleScope,
    value: v8::Local<v8::Value>,
  ) -> Result<Self, crate::Error> {
    match to_ranged_buffer(scope, value) {
      Ok((b, range)) => {
        let store = b.get_backing_store();
        if store.is_resizable_by_user_javascript() {
          Err(crate::Error::ResizableBackingStoreNotSupported)
        } else if store.is_shared() {
          Err(crate::Error::ExpectedBuffer(value.type_repr()))
        } else {
          // SAFETY: we got these parts from to_ranged_buffer
          Ok(unsafe { V8Slice::from_parts(store, range) })
        }
      }
      Err(_) => Err(crate::Error::ExpectedBuffer(value.type_repr())),
    }
  }
}

impl<T: V8Sliceable> Deref for V8Slice<T> {
  type Target = [T];

  /// Borrows the slice contents; see `as_slice` for the clamping behavior.
  fn deref(&self) -> &Self::Target {
    self.as_slice()
  }
}

impl<T: V8Sliceable> DerefMut for V8Slice<T> {
  /// Mutably borrows the slice contents via `as_slice_mut`.
  fn deref_mut(&mut self) -> &mut Self::Target {
    self.as_slice_mut()
  }
}

impl<T: V8Sliceable> AsRef<[T]> for V8Slice<T> {
  /// Borrows the contents as a plain `&[T]`.
  fn as_ref(&self) -> &[T] {
    self.as_slice()
  }
}

impl<T: V8Sliceable> AsMut<[T]> for V8Slice<T> {
  /// Borrows the contents as a plain `&mut [T]`.
  fn as_mut(&mut self) -> &mut [T] {
    self.as_slice_mut()
  }
}

#[cfg(test)]
mod tests {
  use super::*;

  /// Builds a test slice holding `len` elements of `T`.
  fn make_slice<T: V8Sliceable>(len: usize) -> V8Slice<T> {
    let byte_length = len * std::mem::size_of::<T>();
    let slice = V8Slice::<T>::very_unsafe_new_only_for_test(byte_length);
    assert_eq!(slice.len(), len);
    slice
  }

  // `split_off` keeps the head in `self` and returns the tail.
  fn check_split_off<T: V8Sliceable>() {
    let mut head = make_slice::<T>(1024);
    let mut tail = head.split_off(16);
    assert_eq!(head.range, 0..16);
    assert_eq!(tail.range, 16..1024);
    let rest = tail.split_off(16);
    assert_eq!(tail.range, 16..32);
    assert_eq!(rest.range, 32..1024);
  }

  #[test]
  pub fn test_split_off() {
    check_split_off::<u8>();
    check_split_off::<u32>();
  }

  // `split_to` returns the head and keeps the tail in `self`.
  fn check_split_to<T: V8Sliceable>() {
    let mut slice = make_slice::<T>(1024);
    let head = slice.split_to(16);
    assert_eq!(slice.range, 16..1024);
    assert_eq!(head.range, 0..16);
    let head2 = slice.split_to(16);
    assert_eq!(slice.range, 32..1024);
    assert_eq!(head2.range, 16..32);
  }

  #[test]
  pub fn test_split_to() {
    check_split_to::<u8>();
    check_split_to::<u32>();
  }

  // `truncate` shrinks the range end relative to the range start.
  fn check_truncate<T: V8Sliceable>() {
    let mut slice = make_slice::<T>(1024);
    slice.truncate(16);
    assert_eq!(slice.range, 0..16);
  }

  #[test]
  pub fn test_truncate() {
    check_truncate::<u8>();
    check_truncate::<u32>();
  }

  // After a split, `truncate` is still relative to the (shifted) start.
  fn check_truncate_after_split<T: V8Sliceable>() {
    let mut slice = make_slice::<T>(1024);
    _ = slice.split_to(16);
    assert_eq!(slice.range, 16..1024);
    slice.truncate(16);
    assert_eq!(slice.range, 16..32);
  }

  #[test]
  fn test_truncate_after_split() {
    check_truncate_after_split::<u8>();
    check_truncate_after_split::<u32>();
  }
}