use self::TryReserveErrorKind::{AllocError, CapacityOverflow};
use super::{align_up, page_size, Allocation, Error};
use crate::addr;
use core::{
    borrow::{Borrow, BorrowMut},
    cmp, fmt,
    hash::{Hash, Hasher},
    hint,
    iter::FusedIterator,
    marker::PhantomData,
    mem::{self, ManuallyDrop},
    ops::{Deref, DerefMut, Index, IndexMut},
    ptr,
    slice::{self, SliceIndex},
    sync::atomic::{
        AtomicUsize,
        Ordering::{Acquire, Relaxed, Release},
    },
};

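/// A vector backed by a fixed reservation of virtual memory.
///
/// The whole `max_capacity` is reserved up front and pages are committed
/// lazily as the vector grows. Because the backing memory never moves,
/// elements can be appended through a shared reference (`push`) from
/// multiple threads concurrently.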
pub struct Vec<T> {
    /// The virtual-memory reservation backing the elements.
    allocation: Allocation,
    /// The maximum number of elements, fixed at construction.
    max_capacity: usize,
    /// How many elements the currently committed pages can hold.
    capacity: AtomicUsize,
    /// The number of initialized elements.
    len: AtomicUsize,
    /// The number of claimed slots: `len` plus any in-flight `push`es.
    reserved_len: AtomicUsize,
    marker: PhantomData<(T, fn(T))>,
}

// SAFETY: `Vec<T>` owns its `T`s, so sending the vector across threads sends
// the elements with it.
unsafe impl<T: Send> Send for Vec<T> {}

// SAFETY: a shared `Vec<T>` hands out `&T`s (requiring `T: Sync`) and also
// accepts new elements through `push(&self)` (requiring `T: Send`).
unsafe impl<T: Send + Sync> Sync for Vec<T> {}

impl<T> Vec<T> {
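    /// Creates a new `Vec` that can hold at most `max_capacity` elements.
    ///
    /// Like `try_new`, but panics if the computed size of the reservation
    /// overflows or the reservation itself fails.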
    #[must_use]
    pub fn new(max_capacity: usize) -> Self {
        handle_reserve(Self::try_new(max_capacity))
    }

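    /// Creates a new `Vec` that can hold at most `max_capacity` elements,
    /// returning an error if the size of the reservation would overflow or
    /// if the operating system refuses to reserve the memory.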
    pub fn try_new(max_capacity: usize) -> Result<Self, TryReserveError> {
        assert!(mem::align_of::<T>() <= page_size());

        // The reservation must cover `max_capacity` elements, rounded up to
        // whole pages.
        let size = align_up(
            max_capacity
                .checked_mul(mem::size_of::<T>())
                .ok_or(CapacityOverflow)?,
            page_size(),
        );

        #[allow(clippy::cast_possible_wrap)]
        if size > isize::MAX as usize {
            return Err(CapacityOverflow.into());
        }

        // Zero-sized reservations (`T` is zero-sized or `max_capacity` is
        // zero) need no memory at all.
        if size == 0 {
            return Ok(Self::dangling(max_capacity));
        }

        let allocation = Allocation::new(size).map_err(AllocError)?;

        Ok(Vec {
            allocation,
            max_capacity,
            capacity: AtomicUsize::new(0),
            len: AtomicUsize::new(0),
            reserved_len: AtomicUsize::new(0),
            marker: PhantomData,
        })
    }

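    /// Creates a new `Vec` without reserving any memory.
    ///
    /// Useful as a `const`-constructible placeholder: the backing
    /// reservation is empty, so pushing any non-zero-sized element will
    /// fail to grow the vector.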
    #[inline]
    #[must_use]
    pub const fn dangling(max_capacity: usize) -> Self {
        let allocation = Allocation::dangling(mem::align_of::<T>());

        Vec {
            allocation,
            max_capacity,
            capacity: AtomicUsize::new(0),
            len: AtomicUsize::new(0),
            reserved_len: AtomicUsize::new(0),
            marker: PhantomData,
        }
    }

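    /// Returns a slice of the initialized elements.
    ///
    /// The `Acquire` load of `len` synchronizes with the `Release` store in
    /// `push`, making the elements written by other threads visible here.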
    #[inline(always)]
    #[must_use]
    pub fn as_slice(&self) -> &[T] {
        let len = self.len.load(Acquire);

        // SAFETY: the first `len` elements are initialized, and the Acquire
        // load above synchronizes with the Release store that published them.
        unsafe { slice::from_raw_parts(self.as_ptr(), len) }
    }

    /// Returns a mutable slice of the initialized elements.
    #[inline(always)]
    #[must_use]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        let len = self.len_mut();

        // SAFETY: `&mut self` guarantees exclusive access, so no
        // synchronization is needed and no other slice can alias this one.
        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), len) }
    }

    #[inline(always)]
    #[must_use]
    pub fn as_ptr(&self) -> *const T {
        self.allocation.ptr().cast()
    }

    #[inline(always)]
    #[must_use]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        self.allocation.ptr().cast()
    }

    /// Returns the number of elements the committed memory can hold.
    ///
    /// Zero-sized types occupy no memory, so for them the capacity is
    /// unlimited.
    #[inline(always)]
    #[must_use]
    pub fn capacity(&self) -> usize {
        if T::IS_ZST {
            usize::MAX
        } else {
            self.capacity.load(Relaxed)
        }
    }

    #[inline(always)]
    fn capacity_mut(&mut self) -> usize {
        if T::IS_ZST {
            usize::MAX
        } else {
            *self.capacity.get_mut()
        }
    }

    /// Returns the number of initialized elements.
    #[inline(always)]
    #[must_use]
    pub fn len(&self) -> usize {
        self.len.load(Relaxed)
    }

    #[inline(always)]
    fn len_mut(&mut self) -> usize {
        *self.len.get_mut()
    }

    #[inline]
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

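    /// Appends an element through a shared reference, returning the index
    /// at which it was inserted.
    ///
    /// A slot is claimed by bumping `reserved_len`, the element is written
    /// into it, and the write is then published by advancing `len`. Because
    /// `len` must advance in claim order, a push spin-waits for pushes that
    /// claimed earlier slots to finish publishing first.
    ///
    /// A minimal concurrency sketch (not from the original docs; assumes a
    /// `std` environment and that this module is exported as `vec`):
    ///
    /// ```ignore
    /// let vec = vec::Vec::<usize>::new(1024);
    ///
    /// std::thread::scope(|s| {
    ///     for t in 0..4 {
    ///         let vec = &vec;
    ///         s.spawn(move || {
    ///             for i in 0..256 {
    ///                 vec.push(t * 256 + i);
    ///             }
    ///         });
    ///     }
    /// });
    ///
    /// assert_eq!(vec.len(), 1024);
    /// ```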
    #[inline]
    pub fn push(&self, value: T) -> usize {
        if T::IS_ZST {
            // Zero-sized elements need no storage; just bump `len`.
            let mut len = self.len();

            loop {
                if len == usize::MAX {
                    capacity_overflow();
                }

                match self.len.compare_exchange(len, len + 1, Relaxed, Relaxed) {
                    Ok(_) => return len,
                    Err(new_len) => len = new_len,
                }
            }
        }

        // Claim the next free slot.
        let reserved_len = self.reserved_len.fetch_add(1, Relaxed);

        if reserved_len >= self.capacity() {
            self.reserve_for_push(reserved_len);
        }

        // SAFETY: the claimed slot is in committed memory, and no other
        // thread can write to it because `fetch_add` hands out each index
        // only once.
        unsafe { self.as_ptr().cast_mut().add(reserved_len).write(value) };

        let mut backoff = Backoff::new();

        // Publish the element. Only we can advance `len` from `reserved_len`
        // to `reserved_len + 1`, but we must wait until all pushes into
        // earlier slots have been published. The Release ordering makes the
        // write above visible to any Acquire load of `len`.
        loop {
            match self
                .len
                .compare_exchange_weak(reserved_len, reserved_len + 1, Release, Relaxed)
            {
                Ok(_) => break,
                Err(_) => backoff.spin(),
            }
        }

        reserved_len
    }

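    /// Appends an element through a mutable reference, returning the index
    /// at which it was inserted.
    ///
    /// With exclusive access there is nothing to synchronize with, so this
    /// is cheaper than `push`.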
    #[inline]
    pub fn push_mut(&mut self, value: T) -> usize {
        let len = self.len_mut();

        if len >= self.capacity_mut() {
            self.reserve_for_push(len);
        }

        // SAFETY: the slot at `len` is in committed memory and unoccupied.
        unsafe { self.as_mut_ptr().add(len).write(value) };

        *self.len.get_mut() += 1;

        // Keep the claimed-slot counter in sync for later shared pushes.
        *self.reserved_len.get_mut() = self.len_mut();

        len
    }

    #[inline(never)]
    fn reserve_for_push(&self, len: usize) {
        handle_reserve(self.grow_amortized(len, 1));
    }

    fn grow_amortized(&self, len: usize, additional: usize) -> Result<(), TryReserveError> {
        debug_assert!(additional > 0);

        if T::IS_ZST {
            // A ZST vector has unlimited capacity; asking to grow it means
            // the length overflowed.
            return Err(CapacityOverflow.into());
        }

        let required_capacity = len.checked_add(additional).ok_or(CapacityOverflow)?;

        if required_capacity > self.max_capacity {
            // Undo the over-claim so that `reserved_len` stays within bounds
            // for future pushes.
            if self.reserved_len.load(Relaxed) > self.max_capacity {
                self.reserved_len.store(self.max_capacity, Relaxed);
            }

            return Err(CapacityOverflow.into());
        }

        // Grow geometrically, by at least one page worth of elements, but
        // never past the reservation.
        let new_capacity = cmp::max(self.capacity() * 2, required_capacity);
        let new_capacity = cmp::max(new_capacity, page_size() / mem::size_of::<T>());
        let new_capacity = cmp::min(new_capacity, self.max_capacity);

        grow(
            &self.allocation,
            &self.capacity,
            new_capacity,
            mem::size_of::<T>(),
        )
    }
}

#[inline(never)]
fn grow(
    allocation: &Allocation,
    capacity: &AtomicUsize,
    new_capacity: usize,
    element_size: usize,
) -> Result<(), TryReserveError> {
    let old_capacity = capacity.load(Relaxed);

    if old_capacity == new_capacity {
        return Ok(());
    }

    let page_size = page_size();

    let old_size = old_capacity * element_size;
    let new_size = new_capacity
        .checked_mul(element_size)
        .ok_or(CapacityOverflow)?;

    if new_size > allocation.size() {
        return Err(CapacityOverflow.into());
    }

    // Commit only the pages between the old and the new end of the vector.
    let old_size = align_up(old_size, page_size);
    let new_size = align_up(new_size, page_size);
    let ptr = allocation.ptr().wrapping_add(old_size);
    let size = new_size - old_size;

    allocation.commit(ptr, size).map_err(AllocError)?;

    // Prefaulting is only an optimization; ignore any failure.
    let _ = allocation.prefault(ptr, size);

    if let Err(capacity) = capacity.compare_exchange(old_capacity, new_capacity, Relaxed, Relaxed) {
        // Another thread grew the vector in the meantime; it must have
        // committed at least as much as we wanted.
        assert!(capacity >= new_capacity);
    }

    Ok(())
}

#[inline]
fn handle_reserve<T>(res: Result<T, TryReserveError>) -> T {
    match res.map_err(|e| e.kind) {
        Ok(x) => x,
        Err(CapacityOverflow) => capacity_overflow(),
        Err(AllocError(err)) => handle_alloc_error(err),
    }
}

#[inline(never)]
fn capacity_overflow() -> ! {
    panic!("capacity overflow");
}

#[allow(clippy::needless_pass_by_value)]
#[cold]
fn handle_alloc_error(err: Error) -> ! {
    panic!("allocation failed: {err}");
}

impl<T> AsRef<Vec<T>> for Vec<T> {
    #[inline]
    fn as_ref(&self) -> &Vec<T> {
        self
    }
}

impl<T> AsMut<Vec<T>> for Vec<T> {
    #[inline]
    fn as_mut(&mut self) -> &mut Vec<T> {
        self
    }
}

impl<T> AsRef<[T]> for Vec<T> {
    #[inline]
    fn as_ref(&self) -> &[T] {
        self
    }
}

impl<T> AsMut<[T]> for Vec<T> {
    #[inline]
    fn as_mut(&mut self) -> &mut [T] {
        self
    }
}

impl<T> Borrow<[T]> for Vec<T> {
    #[inline]
    fn borrow(&self) -> &[T] {
        self
    }
}

impl<T> BorrowMut<[T]> for Vec<T> {
    #[inline]
    fn borrow_mut(&mut self) -> &mut [T] {
        self
    }
}

impl<T: Clone> Clone for Vec<T> {
    #[inline]
    fn clone(&self) -> Self {
        let mut vec = Vec::new(self.max_capacity);

        for elem in self {
            vec.push_mut(elem.clone());
        }

        vec
    }
}

impl<T: fmt::Debug> fmt::Debug for Vec<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}

impl<T> Deref for Vec<T> {
    type Target = [T];

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl<T> DerefMut for Vec<T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_mut_slice()
    }
}

impl<T> Drop for Vec<T> {
    fn drop(&mut self) {
        let elements = ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), self.len_mut());

        // SAFETY: `&mut self` guarantees exclusive access to the initialized
        // elements; the memory itself is freed by `Allocation`'s own `Drop`
        // afterwards.
        unsafe { elements.drop_in_place() };
    }
}

impl<T: PartialEq<U>, U> PartialEq<Vec<U>> for Vec<T> {
    #[inline]
    fn eq(&self, other: &Vec<U>) -> bool {
        **self == **other
    }
}

impl<T: PartialEq<U>, U> PartialEq<&[U]> for Vec<T> {
    #[inline]
    fn eq(&self, other: &&[U]) -> bool {
        **self == **other
    }
}

impl<T: PartialEq<U>, U> PartialEq<&mut [U]> for Vec<T> {
    #[inline]
    fn eq(&self, other: &&mut [U]) -> bool {
        **self == **other
    }
}

impl<T: PartialEq<U>, U> PartialEq<Vec<U>> for &[T] {
    #[inline]
    fn eq(&self, other: &Vec<U>) -> bool {
        **self == **other
    }
}

impl<T: PartialEq<U>, U> PartialEq<Vec<U>> for &mut [T] {
    #[inline]
    fn eq(&self, other: &Vec<U>) -> bool {
        **self == **other
    }
}

impl<T: PartialEq<U>, U> PartialEq<[U]> for Vec<T> {
    #[inline]
    fn eq(&self, other: &[U]) -> bool {
        **self == *other
    }
}

impl<T: PartialEq<U>, U> PartialEq<Vec<U>> for [T] {
    #[inline]
    fn eq(&self, other: &Vec<U>) -> bool {
        *self == **other
    }
}

#[cfg(feature = "std")]
impl<T: PartialEq<U> + Clone, U> PartialEq<Vec<U>> for std::borrow::Cow<'_, [T]> {
    #[inline]
    fn eq(&self, other: &Vec<U>) -> bool {
        **self == **other
    }
}

impl<T: PartialEq<U>, U, const N: usize> PartialEq<[U; N]> for Vec<T> {
    #[inline]
    fn eq(&self, other: &[U; N]) -> bool {
        **self == *other
    }
}

impl<T: PartialEq<U>, U, const N: usize> PartialEq<&[U; N]> for Vec<T> {
    #[inline]
    fn eq(&self, other: &&[U; N]) -> bool {
        **self == **other
    }
}

impl<T: Eq> Eq for Vec<T> {}

impl<T> Extend<T> for Vec<T> {
    #[inline]
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        for elem in iter {
            self.push_mut(elem);
        }
    }
}

impl<'a, T: Copy> Extend<&'a T> for Vec<T> {
    #[inline]
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        for &elem in iter {
            self.push_mut(elem);
        }
    }
}

impl<T: Hash> Hash for Vec<T> {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        Hash::hash(&**self, state);
    }
}

impl<T, I: SliceIndex<[T]>> Index<I> for Vec<T> {
    type Output = I::Output;

    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        Index::index(&**self, index)
    }
}

impl<T, I: SliceIndex<[T]>> IndexMut<I> for Vec<T> {
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        IndexMut::index_mut(&mut **self, index)
    }
}

impl<'a, T> IntoIterator for &'a Vec<T> {
    type Item = &'a T;

    type IntoIter = slice::Iter<'a, T>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a, T> IntoIterator for &'a mut Vec<T> {
    type Item = &'a mut T;

    type IntoIter = slice::IterMut<'a, T>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

impl<T> IntoIterator for Vec<T> {
    type Item = T;

    type IntoIter = IntoIter<T>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        let mut this = ManuallyDrop::new(self);

        // SAFETY: `this` is wrapped in `ManuallyDrop`, so the allocation is
        // moved out exactly once and never dropped twice.
        let allocation = unsafe { ptr::read(&this.allocation) };

        let start = this.as_mut_ptr();

        let end = if T::IS_ZST {
            // For ZSTs, encode the length in the pointer value itself.
            start.cast::<u8>().wrapping_add(this.len_mut()).cast::<T>()
        } else {
            // SAFETY: the first `len` elements are initialized, so
            // one-past-the-end of them is still in bounds of the allocation.
            unsafe { start.add(this.len_mut()) }
        };

        IntoIter {
            _allocation: allocation,
            start,
            end,
            marker: PhantomData,
        }
    }
}

impl<T: PartialOrd> PartialOrd for Vec<T> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        PartialOrd::partial_cmp(&**self, &**other)
    }
}

impl<T: Ord> Ord for Vec<T> {
    #[inline]
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        Ord::cmp(&**self, &**other)
    }
}

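/// An owning iterator over the elements of a `Vec`.
///
/// Holds the `Allocation` alive for as long as it may still yield elements.
/// For zero-sized `T`, the remaining length is tracked as the byte distance
/// between `start` and `end`.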
pub struct IntoIter<T> {
    _allocation: Allocation,
    start: *const T,
    end: *const T,
    marker: PhantomData<T>,
}

// SAFETY: the iterator owns its remaining elements, so sending it sends them.
unsafe impl<T: Send> Send for IntoIter<T> {}

// SAFETY: a shared `IntoIter` only hands out `&T`s.
unsafe impl<T: Sync> Sync for IntoIter<T> {}

impl<T> IntoIter<T> {
    #[inline]
    #[must_use]
    pub fn as_slice(&self) -> &[T] {
        // SAFETY: `start..end` are the remaining initialized elements.
        unsafe { slice::from_raw_parts(self.start, self.len()) }
    }

    #[inline]
    #[must_use]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        // SAFETY: as above, plus `&mut self` guarantees exclusive access.
        unsafe { slice::from_raw_parts_mut(self.start.cast_mut(), self.len()) }
    }
}

impl<T> AsRef<[T]> for IntoIter<T> {
    #[inline]
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

impl<T> AsMut<[T]> for IntoIter<T> {
    #[inline]
    fn as_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}

impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

impl<T> Drop for IntoIter<T> {
    fn drop(&mut self) {
        let elements = ptr::slice_from_raw_parts_mut(self.start.cast_mut(), self.len());

        // SAFETY: the remaining elements have not been yielded, so they are
        // still initialized and uniquely owned by the iterator.
        unsafe { elements.drop_in_place() };
    }
}

impl<T> Iterator for IntoIter<T> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.start == self.end {
            return None;
        }

        let ptr = if T::IS_ZST {
            // ZSTs: shrink the encoded length; any aligned pointer is valid
            // for the read.
            self.end = self.end.cast::<u8>().wrapping_sub(1).cast::<T>();

            self.start
        } else {
            let old = self.start;

            // SAFETY: `start != end`, so `start + 1` is still in bounds.
            self.start = unsafe { old.add(1) };

            old
        };

        // SAFETY: the element at `ptr` is initialized and, having advanced
        // past it, we never read it again.
        Some(unsafe { ptr.read() })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.len();

        (len, Some(len))
    }

    #[inline]
    fn count(self) -> usize {
        self.len()
    }
}

impl<T> DoubleEndedIterator for IntoIter<T> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.start == self.end {
            return None;
        }

        let ptr = if T::IS_ZST {
            self.end = self.end.cast::<u8>().wrapping_sub(1).cast::<T>();

            self.start
        } else {
            // SAFETY: `start != end`, so `end - 1` is still in bounds.
            self.end = unsafe { self.end.sub(1) };

            self.end
        };

        // SAFETY: the element at `ptr` is initialized and will not be
        // yielded again.
        Some(unsafe { ptr.read() })
    }
}

impl<T> ExactSizeIterator for IntoIter<T> {
    #[inline]
    fn len(&self) -> usize {
        if T::IS_ZST {
            // For ZSTs the length is encoded as the distance between the
            // pointers' addresses.
            addr(self.end.cast()).wrapping_sub(addr(self.start.cast()))
        } else {
            #[allow(clippy::cast_sign_loss)]
            // SAFETY: `start` and `end` always point into the same
            // allocation, with `start <= end`.
            unsafe {
                self.end.offset_from(self.start) as usize
            }
        }
    }
}

impl<T> FusedIterator for IntoIter<T> {}

/// The error type returned when capacity computation or allocation fails.
#[derive(Debug)]
pub struct TryReserveError {
    kind: TryReserveErrorKind,
}

impl From<TryReserveErrorKind> for TryReserveError {
    #[inline]
    fn from(kind: TryReserveErrorKind) -> Self {
        TryReserveError { kind }
    }
}

#[derive(Debug)]
enum TryReserveErrorKind {
    CapacityOverflow,
    AllocError(Error),
}

impl fmt::Display for TryReserveError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.kind {
            CapacityOverflow => f.write_str(
                "memory allocation failed because the computed capacity exceeded the collection's \
                 maximum",
            ),
            AllocError(_) => f.write_str(
                "memory allocation failed because the operating system returned an error",
            ),
        }
    }
}

#[cfg(feature = "std")]
impl std::error::Error for TryReserveError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            TryReserveErrorKind::CapacityOverflow => None,
            TryReserveErrorKind::AllocError(err) => Some(err),
        }
    }
}

trait SizedTypeProperties: Sized {
    /// Whether this type is a zero-sized type (ZST).
    const IS_ZST: bool = mem::size_of::<Self>() == 0;
}

impl<T> SizedTypeProperties for T {}

/// The number of doublings after which the backoff stops growing.
const SPIN_LIMIT: u32 = 6;

/// Exponential backoff for spin loops, akin to `crossbeam_utils::Backoff`.
struct Backoff {
    step: u32,
}

impl Backoff {
    fn new() -> Self {
        Backoff { step: 0 }
    }

    fn spin(&mut self) {
        // Spin for 2^step iterations, doubling on each call up to the limit.
        for _ in 0..1 << self.step {
            hint::spin_loop();
        }

        if self.step <= SPIN_LIMIT {
            self.step += 1;
        }
    }
}
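
// A few smoke tests sketching intended usage. These are additions, not part
// of the original source; they rely only on the APIs defined above and on
// the parent module's `Allocation` primitives behaving as used there.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn push_through_shared_reference() {
        let vec = Vec::new(16);

        // `push` takes `&self` and returns the index of the new element.
        assert_eq!(vec.push(1), 0);
        assert_eq!(vec.push(2), 1);
        assert_eq!(vec.as_slice(), &[1, 2]);
    }

    #[test]
    fn push_mut_and_into_iter() {
        let mut vec = Vec::new(4);
        vec.extend([10, 20, 30]);

        let mut iter = vec.into_iter();
        assert_eq!(iter.next(), Some(10));
        assert_eq!(iter.next_back(), Some(30));
        assert_eq!(iter.len(), 1);
        assert_eq!(iter.next(), Some(20));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn zero_sized_elements() {
        // ZSTs need no backing memory, so the capacity is unlimited.
        let vec = Vec::new(0);
        vec.push(());
        vec.push(());
        assert_eq!(vec.len(), 2);
        assert_eq!(vec.capacity(), usize::MAX);
    }
}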