use std::ops::{Deref, Range, RangeBounds};
use std::sync::LazyLock;

use bytemuck::{Pod, Zeroable};
use either::Either;

use crate::storage::SharedStorage;

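/// An immutable, reference-counted view into a contiguous sequence of `T`.
///
/// Cloning a `Buffer` is cheap: it bumps the refcount of the shared storage
/// and copies the pointer/length pair, never the data.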
pub struct Buffer<T> {
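    /// The backing storage, shared between all clones and slices of this buffer.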
    storage: SharedStorage<T>,

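    /// Pointer to the first element of this buffer's view into `storage`.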
    ptr: *const T,

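    /// Number of elements visible through this buffer.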
    length: usize,
}

impl<T> Clone for Buffer<T> {
    fn clone(&self) -> Self {
        Self {
            storage: self.storage.clone(),
            ptr: self.ptr,
            length: self.length,
        }
    }
}

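// SAFETY: `Buffer` is an immutable view; it can be sent and shared across
// threads whenever `&[T]` can, i.e. when `T: Send + Sync`.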
unsafe impl<T: Send + Sync> Sync for Buffer<T> {}
unsafe impl<T: Send + Sync> Send for Buffer<T> {}

impl<T: PartialEq> PartialEq for Buffer<T> {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.deref() == other.deref()
    }
}

impl<T: Eq> Eq for Buffer<T> {}

impl<T: std::hash::Hash> std::hash::Hash for Buffer<T> {
    #[inline]
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.as_slice().hash(state);
    }
}

impl<T: std::fmt::Debug> std::fmt::Debug for Buffer<T> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Debug::fmt(&**self, f)
    }
}

impl<T> Default for Buffer<T> {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl<T> Buffer<T> {
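    /// Creates an empty `Buffer`.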
    #[inline]
    pub const fn new() -> Self {
        Self::from_storage(SharedStorage::empty())
    }

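    /// Creates a `Buffer` that views all of `storage`.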
    pub const fn from_storage(storage: SharedStorage<T>) -> Self {
        let ptr = storage.as_ptr();
        let length = storage.len();
        Buffer {
            storage,
            ptr,
            length,
        }
    }

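    /// Creates a `Buffer` backed by a `'static` slice, without copying.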
    pub fn from_static(data: &'static [T]) -> Self {
        Self::from_storage(SharedStorage::from_static(data))
    }

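    /// Creates a `Buffer` that takes ownership of `data`.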
    pub fn from_vec(data: Vec<T>) -> Self {
        Self::from_storage(SharedStorage::from_vec(data))
    }

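    /// Creates a `Buffer` viewing the contents of `owner`, keeping `owner`
    /// alive for as long as the buffer (or any clone of it) exists.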
    pub fn from_owner<O: Send + AsRef<[T]> + 'static>(owner: O) -> Self {
        Self::from_storage(SharedStorage::from_owner(owner))
    }

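    /// Calls `f` with a `Buffer` that borrows `slice` without copying.
    ///
    /// The buffer is only valid within the closure; `SharedStorage::with_slice`
    /// is responsible for ensuring it does not outlive `slice`.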
    pub fn with_slice<R, F: FnOnce(Buffer<T>) -> R>(slice: &[T], f: F) -> R {
        SharedStorage::with_slice(slice, |ss| f(Self::from_storage(ss)))
    }

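    /// Calls `f` with a `Buffer` that borrows the contents of `vec` without
    /// copying, analogous to [`Self::with_slice`].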
    pub fn with_vec<R, F: FnOnce(Buffer<T>) -> R>(vec: &mut Vec<T>, f: F) -> R {
        SharedStorage::with_vec(vec, |ss| f(Self::from_storage(ss)))
    }

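    /// Consumes the buffer, returning the underlying storage (ignoring any slicing).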
    pub fn into_storage(self) -> SharedStorage<T> {
        self.storage
    }

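    /// Returns the number of elements in the buffer.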
    #[inline]
    pub fn len(&self) -> usize {
        self.length
    }

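    /// Returns `true` if the buffer contains no elements.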
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.length == 0
    }

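    /// Returns `true` if this buffer views only part of its underlying storage.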
    pub fn is_sliced(&self) -> bool {
        self.storage.len() != self.length
    }

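    /// Extends this buffer to the end of the underlying storage, keeping the
    /// same starting offset.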
    pub fn expand_end_to_storage(self) -> Self {
        unsafe {
            let offset = self.ptr.offset_from(self.storage.as_ptr()) as usize;
            Self {
                ptr: self.ptr,
                length: self.storage.len() - offset,
                storage: self.storage,
            }
        }
    }

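    /// Returns the buffer's contents as a slice.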
    #[inline]
    pub fn as_slice(&self) -> &[T] {
        debug_assert!(self.offset() + self.length <= self.storage.len());
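        // SAFETY: `ptr` points into the storage, and the `length` elements
        // starting there are in bounds, as checked by the assertion above.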
        unsafe { std::slice::from_raw_parts(self.ptr, self.length) }
    }

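    /// Returns a buffer restricted to `range`, sharing the same storage.
    ///
    /// Bounds are validated by `crate::check_range`, which panics on an
    /// out-of-range slice. An illustrative sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let buf = Buffer::from_vec(vec![1, 2, 3, 4]);
    /// assert_eq!(buf.sliced(1..3).as_slice(), &[2, 3]);
    /// ```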
    #[inline]
    #[must_use]
    pub fn sliced<R: RangeBounds<usize>>(mut self, range: R) -> Self {
        self.slice_in_place(range);
        self
    }

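    /// Like [`Self::sliced`], but without bounds checking.
    ///
    /// # Safety
    /// `range` must be in bounds for this buffer.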
    #[inline]
    #[must_use]
    pub unsafe fn sliced_unchecked<R: RangeBounds<usize>>(mut self, range: R) -> Self {
        unsafe {
            self.slice_in_place_unchecked(range);
        }
        self
    }

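    /// Restricts this buffer to `range` in place, sharing the same storage.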
    #[inline]
    pub fn slice_in_place<R: RangeBounds<usize>>(&mut self, range: R) {
        unsafe {
            let Range { start, end } = crate::check_range(range, ..self.len());
            self.ptr = self.ptr.add(start);
            self.length = end - start;
        }
    }

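    /// Like [`Self::slice_in_place`], but without bounds checking.
    ///
    /// # Safety
    /// `range` must be in bounds for this buffer.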
    #[inline]
    pub unsafe fn slice_in_place_unchecked<R: RangeBounds<usize>>(&mut self, range: R) {
        unsafe {
            let Range { start, end } = crate::decode_range_unchecked(range, ..self.len());
            self.ptr = self.ptr.add(start);
            self.length = end - start;
        }
    }

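    /// Returns a pointer to the start of the underlying storage, which may
    /// precede the start of this buffer's view.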
    #[inline]
    pub fn storage_ptr(&self) -> *const T {
        self.storage.as_ptr()
    }

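    /// Returns the offset, in elements, of this buffer's view within the
    /// underlying storage.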
    #[inline]
    pub fn offset(&self) -> usize {
        unsafe {
            let ret = self.ptr.offset_from(self.storage.as_ptr()) as usize;
            debug_assert!(ret <= self.storage.len());
            ret
        }
    }

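    /// Sets the length of this buffer.
    ///
    /// # Safety
    /// `len` elements starting at `self.ptr` must be in bounds of the
    /// underlying storage.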
    #[inline]
    pub unsafe fn set_len(&mut self, len: usize) {
        self.length = len;
    }

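    /// Tries to convert this buffer into the unique underlying `Vec<T>`.
    ///
    /// Returns `Either::Right(vec)` on success, or `Either::Left(self)` if the
    /// buffer is sliced or the storage is not uniquely owned.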
    #[inline]
    pub fn into_mut(mut self) -> Either<Self, Vec<T>> {
        if self.is_sliced() {
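            // A sliced buffer only covers part of the allocation, so we can
            // never hand back the full Vec.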
            return Either::Left(self);
        }
        match self.storage.try_into_vec() {
            Ok(v) => Either::Right(v),
            Err(slf) => {
                self.storage = slf;
                Either::Left(self)
            },
        }
    }

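    /// Returns a mutable slice of this buffer's elements, if the underlying
    /// storage grants mutable access (e.g. it is not shared).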
    #[inline]
    pub fn get_mut_slice(&mut self) -> Option<&mut [T]> {
        let offset = self.offset();
        let slice = self.storage.try_as_mut_slice()?;
        Some(unsafe { slice.get_unchecked_mut(offset..offset + self.length) })
    }

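    /// Returns the current reference count of the underlying storage. Other
    /// threads may change it at any time, so treat the value as advisory.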
    pub fn storage_refcount(&self) -> u64 {
        self.storage.refcount()
    }

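    /// Returns `true` if both buffers view the exact same region of memory
    /// (same pointer and same length).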
    pub fn is_same_buffer(&self, other: &Self) -> bool {
        self.ptr == other.ptr && self.length == other.length
    }
}

impl<T: Pod> Buffer<T> {
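    /// Tries to reinterpret this buffer as a buffer of `U` without copying.
    ///
    /// Returns the original buffer as `Err` if the underlying storage refuses
    /// the transmute. Panics if `U` is zero-sized, or if the byte length of
    /// the buffer overflows `usize`.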
    pub fn try_transmute<U: Pod>(mut self) -> Result<Buffer<U>, Self> {
        assert_ne!(size_of::<U>(), 0);
        let ptr = self.ptr as *const U;
        let length = self.length;
        match self.storage.try_transmute() {
            Err(v) => {
                self.storage = v;
                Err(self)
            },
            Ok(storage) => Ok(Buffer {
                storage,
                ptr,
                length: length.checked_mul(size_of::<T>()).expect("overflow") / size_of::<U>(),
            }),
        }
    }
}

impl<T: Clone> Buffer<T> {
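    /// Converts the buffer into a `Vec<T>`, avoiding a copy when the buffer
    /// uniquely owns its entire storage.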
    pub fn to_vec(self) -> Vec<T> {
        match self.into_mut() {
            Either::Right(v) => v,
            Either::Left(same) => same.as_slice().to_vec(),
        }
    }
}

#[repr(C, align(4096))]
#[derive(Copy, Clone)]
struct Aligned([u8; 4096]);

const GLOBAL_ZERO_SIZE: usize = 8 * 1024 * 1024;
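// A leaked, page-aligned pool of zeroes that small zeroed buffers can borrow
// from without allocating.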
static GLOBAL_ZEROES: LazyLock<SharedStorage<Aligned>> = LazyLock::new(|| {
    assert!(GLOBAL_ZERO_SIZE.is_multiple_of(size_of::<Aligned>()));
    let chunks = GLOBAL_ZERO_SIZE / size_of::<Aligned>();
    let v = vec![Aligned([0; 4096]); chunks];
    let mut ss = SharedStorage::from_vec(v);
    ss.leak();
    ss
});

impl<T: Zeroable> Buffer<T> {
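    /// Creates a buffer of `length` zeroed elements.
    ///
    /// Small requests are served from a shared, pre-zeroed global pool and do
    /// not allocate; larger ones fall back to `bytemuck::zeroed_vec`.
    /// Illustrative sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let zeros: Buffer<u32> = Buffer::zeroed(1024);
    /// assert!(zeros.iter().all(|&x| x == 0));
    /// ```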
    pub fn zeroed(length: usize) -> Self {
        let bytes_needed = length.checked_mul(size_of::<T>()).expect("overflow");
        if align_of::<T>() <= align_of::<Aligned>() && bytes_needed <= GLOBAL_ZERO_SIZE {
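            // SAFETY: `T: Zeroable` and its size and alignment fit within the
            // zeroed pool, so reinterpreting the pool's bytes as `T` is valid.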
            unsafe {
                let storage = GLOBAL_ZEROES.clone().transmute_unchecked::<T>();
                let ptr = storage.as_ptr();
                Buffer {
                    storage,
                    ptr,
                    length,
                }
            }
        } else {
            bytemuck::zeroed_vec(length).into()
        }
    }
}

impl<T> From<Vec<T>> for Buffer<T> {
    #[inline]
    fn from(v: Vec<T>) -> Self {
        Self::from_vec(v)
    }
}

impl<T> Deref for Buffer<T> {
    type Target = [T];

    #[inline(always)]
    fn deref(&self) -> &[T] {
        self.as_slice()
    }
}

impl<T> AsRef<[T]> for Buffer<T> {
    #[inline(always)]
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

impl<T> FromIterator<T> for Buffer<T> {
    #[inline]
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        Vec::from_iter(iter).into()
    }
}

398#[cfg(feature = "serde")]
399mod _serde_impl {
400 use serde::{Deserialize, Serialize};
401
402 use super::Buffer;
403
404 impl<T> Serialize for Buffer<T>
405 where
406 T: Serialize,
407 {
408 fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
409 where
410 S: serde::Serializer,
411 {
412 <[T] as Serialize>::serialize(self.as_slice(), serializer)
413 }
414 }
415
416 impl<'de, T> Deserialize<'de> for Buffer<T>
417 where
418 T: Deserialize<'de>,
419 {
420 fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
421 where
422 D: serde::Deserializer<'de>,
423 {
424 <Vec<T> as Deserialize>::deserialize(deserializer).map(Buffer::from)
425 }
426 }
427}
428
impl<T: Copy> IntoIterator for Buffer<T> {
    type Item = T;

    type IntoIter = IntoIter<T>;

    fn into_iter(self) -> Self::IntoIter {
        IntoIter::new(self)
    }
}

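/// An iterator yielding the elements of a `Buffer<T>` by value.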
#[derive(Debug, Clone)]
pub struct IntoIter<T: Copy> {
    values: Buffer<T>,
    index: usize,
    end: usize,
}

impl<T: Copy> IntoIter<T> {
    #[inline]
    fn new(values: Buffer<T>) -> Self {
        let end = values.len();
        Self {
            values,
            index: 0,
            end,
        }
    }
}

impl<T: Copy> Iterator for IntoIter<T> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.index == self.end {
            return None;
        }
        let old = self.index;
        self.index += 1;
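        // SAFETY: `old < self.end <= self.values.len()`, so the access is in bounds.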
        Some(*unsafe { self.values.get_unchecked(old) })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.end - self.index, Some(self.end - self.index))
    }

    #[inline]
    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        let new_index = self.index + n;
        if new_index > self.end {
            self.index = self.end;
            None
        } else {
            self.index = new_index;
            self.next()
        }
    }
}

impl<T: Copy> DoubleEndedIterator for IntoIter<T> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.index == self.end {
            None
        } else {
            self.end -= 1;
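            // SAFETY: after the decrement, `self.index <= self.end < self.values.len()`.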
            Some(*unsafe { self.values.get_unchecked(self.end) })
        }
    }
}

impl<T: Copy> ExactSizeIterator for IntoIter<T> {}