use crate::num::NonMaxUsize;

use super::{single_or_vec, AllocPtr, AllocSlice, AllocationError, IAlloc};
use core::fmt::Debug;
use core::ptr::NonNull;

mod seal {
    use super::*;
    /// The inner representation of [`Vec`]: a start pointer, an `end` pointer
    /// (one past the last initialized element) and a `capacity` pointer
    /// (one past the end of the allocation).
    #[crate::stabby]
    pub struct VecInner<T, Alloc: IAlloc> {
        pub(crate) start: AllocPtr<T, Alloc>,
        pub(crate) end: NonNull<T>,
        pub(crate) capacity: NonNull<T>,
        pub(crate) alloc: Alloc,
    }
    unsafe impl<T: Send, Alloc: IAlloc + Send> Send for VecInner<T, Alloc> where
        crate::alloc::boxed::BoxedSlice<T, Alloc>: Send
    {
    }
    unsafe impl<T: Sync, Alloc: IAlloc + Sync> Sync for VecInner<T, Alloc> where
        crate::alloc::boxed::BoxedSlice<T, Alloc>: Sync
    {
    }
}
pub(crate) use seal::*;

/// An ABI-stable vector, generic over its allocator.
#[crate::stabby]
pub struct Vec<T, Alloc: IAlloc = super::DefaultAllocator> {
    pub(crate) inner: VecInner<T, Alloc>,
}

/// Returns the distance from `rhs` to `lhs` in units of `T`.
pub(crate) const fn ptr_diff<T>(lhs: NonNull<T>, rhs: NonNull<T>) -> usize {
    let diff = if core::mem::size_of::<T>() == 0 {
        // For ZSTs, lengths are tracked as byte offsets on the dangling pointer itself.
        unsafe { lhs.as_ptr().cast::<u8>().offset_from(rhs.as_ptr().cast()) }
    } else {
        unsafe { lhs.as_ptr().offset_from(rhs.as_ptr()) }
    };
    debug_assert!(diff >= 0);
    diff as usize
}
/// Returns `lhs` advanced by `rhs` elements of `T`.
pub(crate) const fn ptr_add<T>(lhs: NonNull<T>, rhs: usize) -> NonNull<T> {
    if core::mem::size_of::<T>() == 0 {
        unsafe { NonNull::new_unchecked(lhs.as_ptr().cast::<u8>().add(rhs)).cast() }
    } else {
        unsafe { NonNull::new_unchecked(lhs.as_ptr().add(rhs)) }
    }
}
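
// A small sanity check for the pointer helpers above: for a non-zero-sized `T`,
// `ptr_add` followed by `ptr_diff` round-trips the element count.
#[test]
fn ptr_helpers_roundtrip() {
    let mut buffer = [0u32; 8];
    let start = NonNull::new(buffer.as_mut_ptr()).unwrap();
    let advanced = ptr_add(start, 5);
    assert_eq!(ptr_diff(advanced, start), 5);
    assert_eq!(ptr_diff(start, start), 0);
}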

#[cfg(not(stabby_default_alloc = "disabled"))]
impl<T> Vec<T> {
    /// Constructs a new, empty vector using the default allocator.
    pub const fn new() -> Self {
        Self::new_in(super::DefaultAllocator::new())
    }
}
impl<T, Alloc: IAlloc> Vec<T, Alloc> {
    /// Constructs a new, empty vector using the provided allocator.
    pub const fn new_in(alloc: Alloc) -> Self {
        let start = AllocPtr::dangling();
        Self {
            inner: VecInner {
                start,
                end: start.ptr,
                // Zero-sized types never need an allocation, so their capacity is "infinite".
                capacity: if Self::zst_mode() {
                    unsafe { core::mem::transmute::<usize, NonNull<T>>(usize::MAX) }
                } else {
                    start.ptr
                },
                alloc,
            },
        }
    }
    /// Constructs a new vector with capacity for at least `capacity` elements, using the
    /// provided allocator. Panics if the allocation fails.
    pub fn with_capacity_in(capacity: usize, alloc: Alloc) -> Self {
        let mut this = Self::new_in(alloc);
        this.reserve(capacity);
        this
    }
    /// Constructs a new vector with capacity for at least `capacity` elements, using the
    /// allocator's default value. Panics if the allocation fails.
    pub fn with_capacity(capacity: usize) -> Self
    where
        Alloc: Default,
    {
        Self::with_capacity_in(capacity, Alloc::default())
    }
    /// Fallible version of [`Self::with_capacity_in`]: returns the allocator if the
    /// allocation fails.
    pub fn try_with_capacity_in(capacity: usize, alloc: Alloc) -> Result<Self, Alloc> {
        let mut this = Self::new_in(alloc);
        match this.try_reserve(capacity) {
            Ok(_) => Ok(this),
            Err(_) => Err(this.into_raw_components().2),
        }
    }
    /// Fallible version of [`Self::with_capacity`]: returns the allocator if the
    /// allocation fails.
    pub fn try_with_capacity(capacity: usize) -> Result<Self, Alloc>
    where
        Alloc: Default,
    {
        Self::try_with_capacity_in(capacity, Alloc::default())
    }
    #[inline(always)]
    const fn zst_mode() -> bool {
        core::mem::size_of::<T>() == 0
    }
    /// Returns the number of elements in the vector.
    pub const fn len(&self) -> usize {
        ptr_diff(self.inner.end, self.inner.start.ptr)
    }
    /// Returns `true` if the vector contains no elements.
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Sets the length of the vector without any checks.
    ///
    /// # Safety
    /// The first `len` elements of the vector must be initialized, and `len` must not
    /// exceed the vector's capacity.
    #[rustversion::attr(since(1.86), const)]
    pub unsafe fn set_len(&mut self, len: usize) {
        self.inner.end = ptr_add(self.inner.start.ptr, len);
    }
    /// Appends `value` to the end of the vector.
    ///
    /// Panics if allocating additional capacity fails.
    pub fn push(&mut self, value: T) {
        if self.inner.end == self.inner.capacity {
            self.grow();
        }
        unsafe { self.inner.end.as_ptr().write(value) }
        self.inner.end = ptr_add(self.inner.end, 1)
    }
    /// Appends `value` to the end of the vector, returning it if allocating additional
    /// capacity fails.
    pub fn try_push(&mut self, value: T) -> Result<(), T> {
        if self.inner.end == self.inner.capacity && self.try_grow().is_err() {
            return Err(value);
        }
        unsafe { self.inner.end.as_ptr().write(value) }
        self.inner.end = ptr_add(self.inner.end, 1);
        Ok(())
    }
    /// Returns the total number of elements the vector can hold without reallocating.
    pub const fn capacity(&self) -> usize {
        ptr_diff(self.inner.capacity, self.inner.start.ptr)
    }
    /// Returns the number of elements that can still be pushed without reallocating.
    pub const fn remaining_capacity(&self) -> usize {
        ptr_diff(self.inner.capacity, self.inner.end)
    }
    /// The capacity used for the first allocation: roughly 1KiB worth of elements,
    /// clamped to the `1..=8` range.
    const FIRST_CAPACITY: usize = match 1024 / core::mem::size_of::<T>() {
        0 => 1,
        v @ 1..=8 => v,
        _ => 8,
    };
    fn grow(&mut self) {
        self.try_grow().unwrap();
    }
    fn try_grow(&mut self) -> Result<NonMaxUsize, AllocationError> {
        if self.capacity() == 0 {
            let first_capacity = Self::FIRST_CAPACITY;
            self.try_reserve(first_capacity)
        } else {
            // Grow by 50% of the current capacity, but always by at least one element.
            self.try_reserve((self.capacity() >> 1).max(1))
        }
    }
    /// Ensures that at least `additional` more elements can be pushed without reallocating.
    ///
    /// Panics if the allocation fails.
    pub fn reserve(&mut self, additional: usize) {
        self.try_reserve(additional).unwrap();
    }
    /// Ensures that at least `additional` more elements can be pushed without reallocating,
    /// returning the new capacity on success.
    pub fn try_reserve(&mut self, additional: usize) -> Result<NonMaxUsize, AllocationError> {
        if self.remaining_capacity() < additional {
            let len = self.len();
            let new_capacity = len + additional;
            let old_capacity = self.capacity();
            let start = if old_capacity != 0 {
                unsafe {
                    self.inner
                        .start
                        .realloc(&mut self.inner.alloc, old_capacity, new_capacity)
                }
            } else {
                AllocPtr::alloc_array(&mut self.inner.alloc, new_capacity)
            };
            let Some(start) = start else {
                return Err(AllocationError());
            };
            let end = ptr_add(*start, len);
            let capacity = ptr_add(*start, new_capacity);
            self.inner.start = start;
            self.inner.end = end;
            self.inner.capacity = capacity;
            Ok(unsafe { NonMaxUsize::new_unchecked(new_capacity) })
        } else {
            let mut capacity = self.capacity();
            if capacity == usize::MAX {
                // ZST vectors report `usize::MAX` capacity; clamp so it fits in a `NonMaxUsize`.
                capacity -= 1;
            }
            Ok(unsafe { NonMaxUsize::new_unchecked(capacity) })
        }
    }
    /// Shortens the vector to at most `len` elements, dropping the excess.
    pub fn truncate(&mut self, len: usize) {
        if self.len() <= len {
            return;
        }
        unsafe {
            core::ptr::drop_in_place(&mut self[len..]);
            self.set_len(len)
        };
    }
    /// Returns the vector's contents as a slice.
    #[rustversion::attr(since(1.86), const)]
    pub fn as_slice(&self) -> &[T] {
        let start = self.inner.start;
        let end = self.inner.end;
        unsafe { core::slice::from_raw_parts(start.ptr.as_ptr(), ptr_diff(end, start.ptr)) }
    }
    /// Returns the vector's contents as a mutable slice.
    #[rustversion::attr(since(1.86), const)]
    pub fn as_slice_mut(&mut self) -> &mut [T] {
        let start = self.inner.start;
        let end = self.inner.end;
        unsafe { core::slice::from_raw_parts_mut(start.ptr.as_ptr(), ptr_diff(end, start.ptr)) }
    }
    pub(crate) fn into_raw_components(self) -> (AllocSlice<T, Alloc>, usize, Alloc) {
        let VecInner {
            start,
            end,
            capacity: _,
            alloc,
        } = unsafe { core::ptr::read(&self.inner) };
        let capacity = if core::mem::size_of::<T>() == 0 {
            0
        } else {
            self.capacity()
        };
        core::mem::forget(self);
        (AllocSlice { start, end }, capacity, alloc)
    }
    /// Extends the vector by copying the elements of `slice` into it.
    ///
    /// Panics if the allocation fails.
    pub fn copy_extend(&mut self, slice: &[T])
    where
        T: Copy,
    {
        self.try_copy_extend(slice).unwrap();
    }
    /// Extends the vector by copying the elements of `slice` into it, erroring if the
    /// allocation fails.
    pub fn try_copy_extend(&mut self, slice: &[T]) -> Result<(), AllocationError>
    where
        T: Copy,
    {
        if slice.is_empty() {
            return Ok(());
        }
        self.try_reserve(slice.len())?;
        unsafe {
            core::ptr::copy_nonoverlapping(slice.as_ptr(), self.inner.end.as_ptr(), slice.len());
            self.set_len(self.len() + slice.len());
        }
        Ok(())
    }
    /// Returns an iterator over references to the vector's elements.
    pub fn iter(&self) -> core::slice::Iter<'_, T> {
        self.into_iter()
    }
    /// Returns an iterator over mutable references to the vector's elements.
    pub fn iter_mut(&mut self) -> core::slice::IterMut<'_, T> {
        self.into_iter()
    }
    /// Removes the elements of `range` from the vector, returning an iterator over the
    /// removed elements.
    ///
    /// Panics if the range is out of bounds. Dropping the iterator finishes removing the
    /// range, while [`Drain::stop`] keeps the not-yet-yielded elements in the vector.
    pub fn drain<R: core::ops::RangeBounds<usize>>(&mut self, range: R) -> Drain<'_, T, Alloc> {
        let original_len = self.len();
        let from = match range.start_bound() {
            core::ops::Bound::Included(i) => *i,
            core::ops::Bound::Excluded(i) => *i + 1,
            core::ops::Bound::Unbounded => 0,
        };
        let to = match range.end_bound() {
            core::ops::Bound::Included(i) => *i + 1,
            core::ops::Bound::Excluded(i) => *i,
            core::ops::Bound::Unbounded => original_len,
        };
        assert!(to >= from);
        assert!(to <= original_len);
        unsafe { self.set_len(from) };
        Drain {
            vec: self,
            from,
            to,
            index: from,
            original_len,
        }
    }
    /// Non-panicking version of [`Self::drain`]: returns `None` if the range is malformed
    /// or out of bounds.
    pub fn try_drain<R: core::ops::RangeBounds<usize>>(
        &mut self,
        range: R,
    ) -> Option<Drain<'_, T, Alloc>> {
        let original_len = self.len();
        let from = match range.start_bound() {
            core::ops::Bound::Included(i) => *i,
            core::ops::Bound::Excluded(i) => *i + 1,
            core::ops::Bound::Unbounded => 0,
        };
        let to = match range.end_bound() {
            core::ops::Bound::Included(i) => *i + 1,
            core::ops::Bound::Excluded(i) => *i,
            core::ops::Bound::Unbounded => original_len,
        };
        // The range must be well-formed and lie within the vector's bounds.
        if to < from || to > original_len {
            return None;
        }
        unsafe { self.set_len(from) };
        Some(Drain {
            vec: self,
            from,
            to,
            index: from,
            original_len,
        })
    }
    /// Removes the element at `index` and returns it, shifting the elements after it to
    /// the left. Returns `None` if `index` is out of bounds.
    #[rustversion::attr(since(1.86), const)]
    pub fn remove(&mut self, index: usize) -> Option<T> {
        if index < self.len() {
            unsafe {
                let value = self.inner.start.ptr.as_ptr().add(index).read();
                core::ptr::copy(
                    self.inner.start.ptr.as_ptr().add(index + 1),
                    self.inner.start.ptr.as_ptr().add(index),
                    self.len() - (index + 1),
                );
                self.set_len(self.len() - 1);
                Some(value)
            }
        } else {
            None
        }
    }
    /// Swaps the elements at indices `a` and `b`.
    ///
    /// Panics if either index is out of bounds.
    pub fn swap(&mut self, a: usize, b: usize) {
        assert!(a < self.len());
        assert!(b < self.len());
        unsafe {
            core::ptr::swap(
                self.inner.start.as_ptr().add(a),
                self.inner.start.as_ptr().add(b),
            )
        };
    }
    /// Removes the last element of the vector and returns it, or `None` if the vector is empty.
    #[rustversion::attr(since(1.86), const)]
    pub fn pop(&mut self) -> Option<T> {
        if self.is_empty() {
            None
        } else {
            unsafe {
                let value = self.inner.end.as_ptr().sub(1).read();
                self.set_len(self.len() - 1);
                Some(value)
            }
        }
    }
    /// Removes the element at `index` and returns it, moving the last element into its
    /// place. Returns `None` if `index` is out of bounds.
    pub fn swap_remove(&mut self, index: usize) -> Option<T> {
        if index >= self.len() {
            return None;
        }
        self.swap(index, self.len() - 1);
        self.pop()
    }
    /// Returns a reference to the vector's allocator.
    pub const fn allocator(&self) -> &Alloc {
        &self.inner.alloc
    }
    /// Returns a mutable reference to the vector's allocator.
    #[rustversion::attr(since(1.86), const)]
    pub fn allocator_mut(&mut self) -> &mut Alloc {
        &mut self.inner.alloc
    }
}
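
// A small illustrative test of how `copy_extend`, `remove`, `swap_remove` and `pop`
// interact, assuming the default allocator is enabled.
#[cfg(not(stabby_default_alloc = "disabled"))]
#[test]
fn remove_and_pop_example() {
    let mut v: Vec<u32> = Vec::new();
    v.copy_extend(&[1, 2, 3, 4]);
    assert_eq!(v.remove(1), Some(2)); // shifts the tail left: [1, 3, 4]
    assert_eq!(v.swap_remove(0), Some(1)); // moves the last element into slot 0: [4, 3]
    assert_eq!(v.as_slice(), &[4, 3]);
    assert_eq!(v.pop(), Some(3));
    assert_eq!(v.len(), 1);
}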

impl<T: Clone, Alloc: IAlloc + Clone> Clone for Vec<T, Alloc> {
    fn clone(&self) -> Self {
        let mut ret = Self::with_capacity_in(self.len(), self.inner.alloc.clone());
        for (i, item) in self.iter().enumerate() {
            unsafe { ret.inner.start.ptr.as_ptr().add(i).write(item.clone()) }
        }
        unsafe { ret.set_len(self.len()) };
        ret
    }
}
impl<T: PartialEq, Alloc: IAlloc, Rhs: AsRef<[T]>> PartialEq<Rhs> for Vec<T, Alloc> {
    fn eq(&self, other: &Rhs) -> bool {
        self.as_slice() == other.as_ref()
    }
}
impl<T: Eq, Alloc: IAlloc> Eq for Vec<T, Alloc> {}
impl<T: PartialOrd, Alloc: IAlloc, Rhs: AsRef<[T]>> PartialOrd<Rhs> for Vec<T, Alloc> {
    fn partial_cmp(&self, other: &Rhs) -> Option<core::cmp::Ordering> {
        self.as_slice().partial_cmp(other.as_ref())
    }
}
impl<T: Ord, Alloc: IAlloc> Ord for Vec<T, Alloc> {
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        self.as_slice().cmp(other.as_slice())
    }
}

use crate::{IDeterminantProvider, IStable};
use single_or_vec::Single;

macro_rules! impl_index {
    ($index: ty) => {
        impl<T, Alloc: IAlloc> core::ops::Index<$index> for Vec<T, Alloc> {
            type Output = <[T] as core::ops::Index<$index>>::Output;
            fn index(&self, index: $index) -> &Self::Output {
                &self.as_slice()[index]
            }
        }
        impl<T, Alloc: IAlloc> core::ops::IndexMut<$index> for Vec<T, Alloc> {
            fn index_mut(&mut self, index: $index) -> &mut Self::Output {
                &mut self.as_slice_mut()[index]
            }
        }
        impl<T, Alloc: IAlloc> core::ops::Index<$index> for SingleOrVec<T, Alloc>
        where
            T: IStable,
            Alloc: IStable,
            Single<T, Alloc>: IDeterminantProvider<Vec<T, Alloc>>,
            Vec<T, Alloc>: IStable,
            crate::Result<Single<T, Alloc>, Vec<T, Alloc>>: IStable,
        {
            type Output = <[T] as core::ops::Index<$index>>::Output;
            fn index(&self, index: $index) -> &Self::Output {
                &self.as_slice()[index]
            }
        }
    };
}

impl<T, Alloc: IAlloc> core::ops::Deref for Vec<T, Alloc> {
    type Target = [T];
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}
impl<T, Alloc: IAlloc> core::convert::AsRef<[T]> for Vec<T, Alloc> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}
impl<T, Alloc: IAlloc> core::ops::DerefMut for Vec<T, Alloc> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_slice_mut()
    }
}
impl<T, Alloc: IAlloc> core::convert::AsMut<[T]> for Vec<T, Alloc> {
    fn as_mut(&mut self) -> &mut [T] {
        self.as_slice_mut()
    }
}
impl<T, Alloc: IAlloc + Default> Default for Vec<T, Alloc> {
    fn default() -> Self {
        Self::new_in(Alloc::default())
    }
}
impl<T, Alloc: IAlloc> Drop for Vec<T, Alloc> {
    fn drop(&mut self) {
        unsafe { core::ptr::drop_in_place(self.as_slice_mut()) }
        if core::mem::size_of::<T>() != 0 && self.capacity() != 0 {
            unsafe { self.inner.start.free(&mut self.inner.alloc) }
        }
    }
}
impl<T: Copy, Alloc: IAlloc + Default> From<&[T]> for Vec<T, Alloc> {
    fn from(value: &[T]) -> Self {
        let mut this = Self::with_capacity(value.len());
        this.copy_extend(value);
        this
    }
}
impl<T, Alloc: IAlloc> core::iter::Extend<T> for Vec<T, Alloc> {
    fn extend<Iter: IntoIterator<Item = T>>(&mut self, iter: Iter) {
        let iter = iter.into_iter();
        let (min, max) = iter.size_hint();
        match max {
            Some(max) => {
                // The upper bound is known: reserve it all up front and write in place.
                self.reserve(max);
                iter.for_each(|item| {
                    unsafe { self.inner.end.as_ptr().write(item) };
                    self.inner.end = ptr_add(self.inner.end, 1);
                })
            }
            _ => {
                self.reserve(min);
                iter.for_each(|item| self.push(item))
            }
        }
    }
}

impl<T, Alloc: IAlloc + Default> core::iter::FromIterator<T> for Vec<T, Alloc> {
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        let mut ret = Self::default();
        ret.extend(iter);
        ret
    }
}
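
// A small illustrative test of filling the vector through the `Extend` and `FromIterator`
// impls above, assuming the default allocator is enabled and implements `Default`.
#[cfg(not(stabby_default_alloc = "disabled"))]
#[test]
fn extend_example() {
    let mut v: Vec<u32> = Vec::new();
    v.extend((0..4).map(|i| i * 2));
    assert_eq!(v.as_slice(), &[0, 2, 4, 6]);
    let collected: Vec<u32> = v.iter().copied().collect();
    assert_eq!(collected, [0, 2, 4, 6]);
}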

impl_index!(usize);
impl_index!(core::ops::Range<usize>);
impl_index!(core::ops::RangeInclusive<usize>);
impl_index!(core::ops::RangeTo<usize>);
impl_index!(core::ops::RangeToInclusive<usize>);
impl_index!(core::ops::RangeFrom<usize>);
impl_index!(core::ops::RangeFull);

impl<T: Debug, Alloc: IAlloc> Debug for Vec<T, Alloc> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        self.as_slice().fmt(f)
    }
}
impl<T: core::fmt::LowerHex, Alloc: IAlloc> core::fmt::LowerHex for Vec<T, Alloc> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut first = true;
        for item in self {
            if !first {
                f.write_str(":")?;
            }
            first = false;
            core::fmt::LowerHex::fmt(item, f)?;
        }
        Ok(())
    }
}
impl<T: core::fmt::UpperHex, Alloc: IAlloc> core::fmt::UpperHex for Vec<T, Alloc> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut first = true;
        for item in self {
            if !first {
                f.write_str(":")?;
            }
            first = false;
            core::fmt::UpperHex::fmt(item, f)?;
        }
        Ok(())
    }
}
impl<'a, T, Alloc: IAlloc> IntoIterator for &'a Vec<T, Alloc> {
    type Item = &'a T;
    type IntoIter = core::slice::Iter<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.as_slice().iter()
    }
}
impl<'a, T, Alloc: IAlloc> IntoIterator for &'a mut Vec<T, Alloc> {
    type Item = &'a mut T;
    type IntoIter = core::slice::IterMut<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.as_slice_mut().iter_mut()
    }
}
impl<T, Alloc: IAlloc> IntoIterator for Vec<T, Alloc> {
    type Item = T;
    type IntoIter = IntoIter<T, Alloc>;
    fn into_iter(self) -> Self::IntoIter {
        IntoIter {
            vec: self,
            index: 0,
        }
    }
}
/// The by-value iterator returned by [`Vec::into_iter`].
#[crate::stabby]
pub struct IntoIter<T, Alloc: IAlloc> {
    vec: Vec<T, Alloc>,
    index: usize,
}
impl<T, Alloc: IAlloc> Iterator for IntoIter<T, Alloc> {
    type Item = T;
    fn next(&mut self) -> Option<Self::Item> {
        (self.index < self.vec.len()).then(|| unsafe {
            let ret = self.vec.inner.start.as_ptr().add(self.index).read();
            self.index += 1;
            ret
        })
    }
}
impl<T, Alloc: IAlloc> Drop for IntoIter<T, Alloc> {
    fn drop(&mut self) {
        unsafe {
            // Drop the elements that were not yielded, then mark the vector as empty so
            // its own `Drop` only frees the allocation.
            core::ptr::drop_in_place(&mut self.vec.as_slice_mut()[self.index..]);
            self.vec.set_len(0);
        }
    }
}
/// The iterator returned by [`Vec::drain`].
///
/// Dropping the iterator removes the drained range from the vector; [`Drain::stop`]
/// cancels the removal of the elements that have not been yielded yet.
#[crate::stabby]
pub struct Drain<'a, T: 'a, Alloc: IAlloc + 'a> {
    vec: &'a mut Vec<T, Alloc>,
    from: usize,
    to: usize,
    index: usize,
    original_len: usize,
}
impl<'a, T: 'a, Alloc: IAlloc + 'a> Drain<'a, T, Alloc> {
    /// Stops the drain: the elements that have not been yielded yet stay in the vector.
    pub fn stop(mut self) {
        self.to = self.index
    }
    /// Turns this drain into a double-ended one.
    #[rustversion::attr(since(1.86), const)]
    pub fn double_ended(self) -> DoubleEndedDrain<'a, T, Alloc> {
        let ret = DoubleEndedDrain {
            vec: unsafe { core::ptr::read(&self.vec) },
            from: self.from,
            to: self.to,
            original_len: self.original_len,
            lindex: self.index,
            rindex: self.to,
        };
        core::mem::forget(self);
        ret
    }
}
impl<'a, T: 'a, Alloc: IAlloc + 'a> Iterator for Drain<'a, T, Alloc> {
    type Item = T;
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.to - self.index;
        (remaining, Some(remaining))
    }
    fn next(&mut self) -> Option<Self::Item> {
        (self.index < self.to).then(|| unsafe {
            let ret = self.vec.inner.start.as_ptr().add(self.index).read();
            self.index += 1;
            ret
        })
    }
}
impl<'a, T: 'a, Alloc: IAlloc + 'a> ExactSizeIterator for Drain<'a, T, Alloc> {
    fn len(&self) -> usize {
        self.to - self.index
    }
}
impl<'a, T: 'a, Alloc: IAlloc + 'a> Drop for Drain<'a, T, Alloc> {
    fn drop(&mut self) {
        let tail_length = self.original_len - self.to;
        unsafe {
            // Drop the elements of the drained range that were not yielded, then move the
            // tail of the vector down to close the gap.
            core::ptr::drop_in_place(core::slice::from_raw_parts_mut(
                self.vec.inner.start.as_ptr().add(self.index),
                self.to - self.index,
            ));
            core::ptr::copy(
                self.vec.inner.start.as_ptr().add(self.to),
                self.vec.inner.start.as_ptr().add(self.from),
                tail_length,
            );
            self.vec.set_len(tail_length + self.from);
        }
    }
}
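
// A small illustrative test of `drain` and `Drain::stop`: stopping keeps the
// not-yet-yielded elements in the vector, assuming the default allocator is enabled.
#[cfg(not(stabby_default_alloc = "disabled"))]
#[test]
fn drain_stop_example() {
    let mut v: Vec<u8> = Vec::new();
    v.copy_extend(&[0, 1, 2, 3, 4, 5]);
    let mut removed = v.drain(1..5);
    assert_eq!(removed.next(), Some(1));
    assert_eq!(removed.next(), Some(2));
    removed.stop(); // 3 and 4 were not yielded, so they stay in the vector
    assert_eq!(v.as_slice(), &[0, 3, 4, 5]);
}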
/// The double-ended version of [`Drain`], returned by [`Drain::double_ended`].
#[crate::stabby]
pub struct DoubleEndedDrain<'a, T: 'a, Alloc: IAlloc + 'a> {
    vec: &'a mut Vec<T, Alloc>,
    from: usize,
    to: usize,
    original_len: usize,
    lindex: usize,
    rindex: usize,
}
impl<'a, T: 'a, Alloc: IAlloc + 'a> Iterator for DoubleEndedDrain<'a, T, Alloc> {
    type Item = T;
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.rindex - self.lindex;
        (remaining, Some(remaining))
    }
    fn next(&mut self) -> Option<Self::Item> {
        (self.lindex < self.rindex).then(|| unsafe {
            let ret = self.vec.inner.start.as_ptr().add(self.lindex).read();
            self.lindex += 1;
            ret
        })
    }
}
impl<'a, T: 'a, Alloc: IAlloc + 'a> DoubleEndedIterator for DoubleEndedDrain<'a, T, Alloc> {
    fn next_back(&mut self) -> Option<Self::Item> {
        (self.lindex < self.rindex).then(|| unsafe {
            // `rindex` is one past the last unconsumed element: step back before reading.
            self.rindex -= 1;
            self.vec.inner.start.as_ptr().add(self.rindex).read()
        })
    }
}
impl<'a, T: 'a, Alloc: IAlloc + 'a> ExactSizeIterator for DoubleEndedDrain<'a, T, Alloc> {
    fn len(&self) -> usize {
        self.rindex - self.lindex
    }
}
impl<'a, T: 'a, Alloc: IAlloc + 'a> Drop for DoubleEndedDrain<'a, T, Alloc> {
    fn drop(&mut self) {
        let tail_length = self.original_len - self.to;
        unsafe {
            // Drop the unconsumed middle of the drained range, then move the tail of the
            // vector down to close the gap.
            core::ptr::drop_in_place(core::slice::from_raw_parts_mut(
                self.vec.inner.start.as_ptr().add(self.lindex),
                self.rindex - self.lindex,
            ));
            core::ptr::copy(
                self.vec.inner.start.as_ptr().add(self.to),
                self.vec.inner.start.as_ptr().add(self.from),
                tail_length,
            );
            self.vec.set_len(tail_length + self.from);
        }
    }
}
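
// A small illustrative test of `Drain::double_ended`: elements can be taken from both
// ends, and dropping the iterator removes whatever is left of the range, assuming the
// default allocator is enabled.
#[cfg(not(stabby_default_alloc = "disabled"))]
#[test]
fn double_ended_drain_example() {
    let mut v: Vec<u8> = Vec::new();
    v.copy_extend(&[0, 1, 2, 3, 4]);
    let mut removed = v.drain(1..4).double_ended();
    assert_eq!(removed.next(), Some(1));
    assert_eq!(removed.next_back(), Some(3));
    drop(removed); // the unconsumed element 2 is dropped and the tail is shifted down
    assert_eq!(v.as_slice(), &[0, 4]);
}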
#[cfg(feature = "std")]
impl<Alloc: IAlloc> std::io::Write for Vec<u8, Alloc> {
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        match self.try_copy_extend(buf) {
            Ok(()) => Ok(buf.len()),
            Err(e) => Err(std::io::Error::new(std::io::ErrorKind::OutOfMemory, e)),
        }
    }

    fn flush(&mut self) -> std::io::Result<()> {
        Ok(())
    }
}
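
// A small illustrative test of the `std::io::Write` impl above: writes append to the
// byte vector. Requires the `std` feature and the default allocator.
#[cfg(all(feature = "std", not(stabby_default_alloc = "disabled")))]
#[test]
fn write_example() {
    use std::io::Write;
    let mut v: Vec<u8> = Vec::new();
    v.write_all(b"hello ").unwrap();
    v.write_all(b"world").unwrap();
    assert_eq!(v.as_slice(), b"hello world");
}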

#[cfg(feature = "std")]
#[test]
fn test() {
    use rand::Rng;
    const LEN: usize = 2000;
    let mut std = std::vec::Vec::with_capacity(LEN);
    let mut new: Vec<u8> = Vec::new();
    let mut capacity: Vec<u8> = Vec::with_capacity(LEN);
    let mut rng = rand::thread_rng();
    for _ in 0..LEN {
        let n: u8 = rng.gen();
        new.push(n);
        capacity.push(n);
        std.push(n);
    }
    assert_eq!(new.as_slice(), std.as_slice());
    assert_eq!(new.as_slice(), capacity.as_slice());
    new.drain(55..100);
    capacity.drain(55..100);
    std.drain(55..100);
    new.swap(5, 92);
    std.swap(5, 92);
    capacity.swap(5, 92);
    assert_eq!(new.as_slice(), std.as_slice());
    assert_eq!(new.as_slice(), capacity.as_slice());
}

pub use super::single_or_vec::SingleOrVec;

#[cfg(feature = "serde")]
mod serde_impl {
    use super::*;
    use crate::alloc::IAlloc;
    use serde::{de::Visitor, Deserialize, Serialize};
    impl<T: Serialize, Alloc: IAlloc> Serialize for Vec<T, Alloc> {
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            let slice: &[T] = self;
            slice.serialize(serializer)
        }
    }
    impl<'a, T: Deserialize<'a>, Alloc: IAlloc + Default> Deserialize<'a> for Vec<T, Alloc> {
        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: serde::Deserializer<'a>,
        {
            deserializer.deserialize_seq(VecVisitor(core::marker::PhantomData))
        }
    }
    pub struct VecVisitor<T, Alloc>(core::marker::PhantomData<(T, Alloc)>);
    impl<'a, T: Deserialize<'a>, Alloc: IAlloc + Default> Visitor<'a> for VecVisitor<T, Alloc> {
        type Value = Vec<T, Alloc>;
        fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
            formatter.write_str("A sequence")
        }
        fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
        where
            A: serde::de::SeqAccess<'a>,
        {
            let mut this = Vec::with_capacity_in(seq.size_hint().unwrap_or(0), Alloc::default());
            while let Some(v) = seq.next_element()? {
                this.push(v);
            }
            Ok(this)
        }
    }
}