#[cfg(not(no_global_oom_handling))]
use core::cmp;
use core::cmp::Ordering;
use core::convert::TryFrom;
use core::fmt;
use core::hash::{Hash, Hasher};
#[cfg(not(no_global_oom_handling))]
use core::iter;
#[cfg(not(no_global_oom_handling))]
use core::iter::FromIterator;
use core::marker::PhantomData;
use core::mem::{self, size_of, ManuallyDrop, MaybeUninit};
use core::ops::{self, Bound, Index, IndexMut, RangeBounds};
use core::ptr::{self, NonNull};
use core::slice::{self, SliceIndex};
#[cfg(feature = "std")]
use std::io;
use super::{
alloc::{Allocator, Global},
assume,
boxed::Box,
raw_vec::{RawVec, TryReserveError},
};
#[cfg(not(no_global_oom_handling))]
pub use self::splice::Splice;
#[cfg(not(no_global_oom_handling))]
mod splice;
pub use self::drain::Drain;
mod drain;
pub use self::into_iter::IntoIter;
mod into_iter;
mod partial_eq;
#[cfg(not(no_global_oom_handling))]
mod set_len_on_drop;
#[cfg(not(no_global_oom_handling))]
use self::set_len_on_drop::SetLenOnDrop;
/// A contiguous growable array type with a parameterized allocator,
/// mirroring the standard library's `Vec<T>`.
pub struct Vec<T, A: Allocator = Global> {
    /// Owned allocation: pointer, capacity, and allocator handle.
    buf: RawVec<T, A>,
    /// Number of initialized elements; invariant: `len <= buf.capacity()`.
    len: usize,
}
impl<T> Vec<T> {
    /// Creates a new, empty `Vec<T>` in the global allocator.
    /// Does not allocate until elements are added.
    #[inline(always)]
    #[must_use]
    pub const fn new() -> Self {
        Vec {
            buf: RawVec::new(),
            len: 0,
        }
    }
    /// Creates an empty `Vec<T>` with room for at least `capacity` elements.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    #[must_use]
    pub fn with_capacity(capacity: usize) -> Self {
        Self::with_capacity_in(capacity, Global)
    }
    /// Rebuilds a `Vec<T>` from raw parts.
    ///
    /// # Safety
    /// `ptr` must come from a compatible allocation in the global allocator
    /// with exactly `capacity` capacity, and the first `length` elements
    /// must be initialized (`length <= capacity`).
    #[inline(always)]
    pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> Self {
        unsafe { Self::from_raw_parts_in(ptr, length, capacity, Global) }
    }
}
impl<T, A: Allocator> Vec<T, A> {
    /// Creates an empty `Vec` that will allocate from `alloc`.
    #[inline(always)]
    pub const fn new_in(alloc: A) -> Self {
        Vec {
            buf: RawVec::new_in(alloc),
            len: 0,
        }
    }
    /// Creates an empty `Vec` with room for at least `capacity` elements,
    /// allocated from `alloc`.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
        Vec {
            buf: RawVec::with_capacity_in(capacity, alloc),
            len: 0,
        }
    }
    /// Rebuilds a `Vec` from raw parts plus the allocator they came from.
    ///
    /// # Safety
    /// `ptr`/`capacity` must describe an allocation made by `alloc` valid
    /// for `capacity` elements of `T`, with the first `length` elements
    /// initialized (`length <= capacity`).
    #[inline(always)]
    pub unsafe fn from_raw_parts_in(ptr: *mut T, length: usize, capacity: usize, alloc: A) -> Self {
        unsafe {
            Vec {
                buf: RawVec::from_raw_parts_in(ptr, capacity, alloc),
                len: length,
            }
        }
    }
    /// Decomposes the vector into `(pointer, length, capacity)` without
    /// dropping anything; the caller takes over the allocation.
    pub fn into_raw_parts(self) -> (*mut T, usize, usize) {
        // ManuallyDrop suppresses `Vec`'s destructor so the buffer survives.
        let mut me = ManuallyDrop::new(self);
        (me.as_mut_ptr(), me.len(), me.capacity())
    }
    /// Like `into_raw_parts`, but also moves the allocator out.
    pub fn into_raw_parts_with_alloc(self) -> (*mut T, usize, usize, A) {
        let mut me = ManuallyDrop::new(self);
        let len = me.len();
        let capacity = me.capacity();
        let ptr = me.as_mut_ptr();
        // SAFETY: `me` is ManuallyDrop, so nothing else will drop or use the
        // allocator; reading it out transfers ownership to the caller.
        let alloc = unsafe { ptr::read(me.allocator()) };
        (ptr, len, capacity, alloc)
    }
    /// Number of elements the vector can hold without reallocating.
    #[inline(always)]
    pub fn capacity(&self) -> usize {
        self.buf.capacity()
    }
    /// Reserves capacity for at least `additional` more elements
    /// (may over-allocate per the growth strategy).
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    pub fn reserve(&mut self, additional: usize) {
        self.buf.reserve(self.len, additional);
    }
    /// Reserves capacity for exactly `additional` more elements.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    pub fn reserve_exact(&mut self, additional: usize) {
        self.buf.reserve_exact(self.len, additional);
    }
    /// Fallible `reserve`: returns an error instead of aborting on
    /// allocation failure.
    #[inline(always)]
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
        self.buf.try_reserve(self.len, additional)
    }
    /// Fallible `reserve_exact`.
    #[inline(always)]
    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
        self.buf.try_reserve_exact(self.len, additional)
    }
    /// Shrinks the capacity down to `len` (no-op when already tight).
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    pub fn shrink_to_fit(&mut self) {
        if self.capacity() > self.len {
            self.buf.shrink_to_fit(self.len);
        }
    }
    /// Shrinks the capacity to at least `min_capacity` — never below the
    /// current length.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    pub fn shrink_to(&mut self, min_capacity: usize) {
        if self.capacity() > min_capacity {
            self.buf.shrink_to_fit(cmp::max(self.len, min_capacity));
        }
    }
    /// Converts into `Box<[T], A>`, first shrinking so capacity == length.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    pub fn into_boxed_slice(mut self) -> Box<[T], A> {
        unsafe {
            self.shrink_to_fit();
            // Take ownership of the buffer without running Vec's Drop.
            let me = ManuallyDrop::new(self);
            let buf = ptr::read(&me.buf);
            let len = me.len();
            // SAFETY: the first `len` elements are initialized.
            Box::<[mem::MaybeUninit<T>], A>::assume_init(buf.into_box(len))
        }
    }
    /// Keeps the first `len` elements and drops the rest in place.
    /// Has no effect when `len >= self.len`; capacity is unchanged.
    #[inline(always)]
    pub fn truncate(&mut self, len: usize) {
        unsafe {
            if len > self.len {
                return;
            }
            let remaining_len = self.len - len;
            let s = ptr::slice_from_raw_parts_mut(self.as_mut_ptr().add(len), remaining_len);
            // Shorten `len` before dropping so a panicking destructor
            // cannot lead to a double drop of the tail.
            self.len = len;
            ptr::drop_in_place(s);
        }
    }
    /// Borrows the initialized elements as a slice.
    #[inline(always)]
    pub fn as_slice(&self) -> &[T] {
        self
    }
    /// Borrows the initialized elements as a mutable slice.
    #[inline(always)]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        self
    }
    /// Raw const pointer to the buffer. Never null — the `assume` lets the
    /// optimizer rely on that.
    #[inline(always)]
    pub fn as_ptr(&self) -> *const T {
        let ptr = self.buf.ptr();
        unsafe {
            assume(!ptr.is_null());
        }
        ptr
    }
    /// Raw mutable pointer to the buffer; see `as_ptr`.
    #[inline(always)]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        let ptr = self.buf.ptr();
        unsafe {
            assume(!ptr.is_null());
        }
        ptr
    }
    /// Reference to the underlying allocator.
    #[inline(always)]
    pub fn allocator(&self) -> &A {
        self.buf.allocator()
    }
    /// Sets the length without touching any element.
    ///
    /// # Safety
    /// `new_len <= capacity()` and the first `new_len` elements must be
    /// initialized.
    #[inline(always)]
    pub unsafe fn set_len(&mut self, new_len: usize) {
        debug_assert!(new_len <= self.capacity());
        self.len = new_len;
    }
    /// Removes and returns the element at `index`, replacing it with the
    /// last element — O(1) but does not preserve order. Panics if
    /// `index >= len`.
    #[inline(always)]
    pub fn swap_remove(&mut self, index: usize) -> T {
        // Out-of-line panic keeps the hot path small.
        #[cold]
        #[inline(never)]
        fn assert_failed(index: usize, len: usize) -> ! {
            panic!(
                "swap_remove index (is {}) should be < len (is {})",
                index, len
            );
        }
        let len = self.len();
        if index >= len {
            assert_failed(index, len);
        }
        unsafe {
            // Read the value out, then fill the hole with the last element;
            // the length decrement "forgets" the duplicate of the last slot.
            let value = ptr::read(self.as_ptr().add(index));
            let base_ptr = self.as_mut_ptr();
            ptr::copy(base_ptr.add(len - 1), base_ptr.add(index), 1);
            self.set_len(len - 1);
            value
        }
    }
#[cfg(not(no_global_oom_handling))]
/// Inserts `element` at position `index`, shifting all later elements
/// right by one (O(n)). Panics if `index > len`.
pub fn insert(&mut self, index: usize, element: T) {
    // Out-of-line panic keeps the hot path small.
    #[cold]
    #[inline(never)]
    fn assert_failed(index: usize, len: usize) -> ! {
        panic!(
            "insertion index (is {}) should be <= len (is {})",
            index, len
        );
    }
    let len = self.len();
    // Validate `index` BEFORE any pointer arithmetic: computing
    // `as_mut_ptr().add(index)` with `index > len` violates the
    // `pointer::add` contract (offset must stay within the allocation)
    // even though the panic below would fire before the write.
    if index > len {
        assert_failed(index, len);
    }
    // Make room for the new element if the buffer is full.
    if len == self.buf.capacity() {
        self.reserve(1);
    }
    unsafe {
        {
            let p = self.as_mut_ptr().add(index);
            if index < len {
                // Shift `[index, len)` one slot right; `ptr::copy`
                // handles the overlapping ranges.
                ptr::copy(p, p.add(1), len - index);
            }
            // Slot at `p` is now free (or was the one-past-end slot when
            // `index == len`); write the new element into it.
            ptr::write(p, element);
        }
        self.set_len(len + 1);
    }
}
#[track_caller]
#[inline(always)]
/// Removes and returns the element at `index`, shifting everything after
/// it left by one (O(n)). Panics if `index >= len`.
pub fn remove(&mut self, index: usize) -> T {
    #[cold]
    #[inline(never)]
    #[track_caller]
    fn assert_failed(index: usize, len: usize) -> ! {
        panic!("removal index (is {}) should be < len (is {})", index, len);
    }
    let len = self.len();
    if index >= len {
        assert_failed(index, len);
    }
    unsafe {
        let ret;
        {
            let ptr = self.as_mut_ptr().add(index);
            // Read the value out, then close the gap with an overlapping
            // copy of the tail.
            ret = ptr::read(ptr);
            ptr::copy(ptr.add(1), ptr, len - index - 1);
        }
        self.set_len(len - 1);
        ret
    }
}
/// Keeps only the elements for which `f` returns `true`, preserving order.
#[inline(always)]
pub fn retain<F>(&mut self, mut f: F)
where
    F: FnMut(&T) -> bool,
{
    self.retain_mut(|elem| f(elem));
}
/// Like `retain`, but the predicate receives `&mut T`.
#[inline]
pub fn retain_mut<F>(&mut self, mut f: F)
where
    F: FnMut(&mut T) -> bool,
{
    let original_len = self.len();
    // Zero the length up front so that a panic in `f` cannot expose or
    // double-drop elements; the guard below restores a consistent length.
    unsafe { self.set_len(0) };
    // Drop guard: on unwind (or normal exit), back-shifts the unprocessed
    // tail over the holes left by deleted elements and fixes up `len`.
    struct BackshiftOnDrop<'a, T, A: Allocator> {
        v: &'a mut Vec<T, A>,
        processed_len: usize,
        deleted_cnt: usize,
        original_len: usize,
    }
    impl<T, A: Allocator> Drop for BackshiftOnDrop<'_, T, A> {
        fn drop(&mut self) {
            if self.deleted_cnt > 0 {
                unsafe {
                    // Move the not-yet-processed tail down next to the
                    // compacted prefix of kept elements.
                    ptr::copy(
                        self.v.as_ptr().add(self.processed_len),
                        self.v
                            .as_mut_ptr()
                            .add(self.processed_len - self.deleted_cnt),
                        self.original_len - self.processed_len,
                    );
                }
            }
            unsafe {
                self.v.set_len(self.original_len - self.deleted_cnt);
            }
        }
    }
    let mut g = BackshiftOnDrop {
        v: self,
        processed_len: 0,
        deleted_cnt: 0,
        original_len,
    };
    // Two monomorphized passes: with DELETED == false the loop runs until
    // the first deletion (no moves needed while the prefix is intact);
    // with DELETED == true each kept element is shifted into its hole.
    fn process_loop<F, T, A: Allocator, const DELETED: bool>(
        original_len: usize,
        f: &mut F,
        g: &mut BackshiftOnDrop<'_, T, A>,
    ) where
        F: FnMut(&mut T) -> bool,
    {
        while g.processed_len != original_len {
            let cur = unsafe { &mut *g.v.as_mut_ptr().add(g.processed_len) };
            if !f(cur) {
                // Advance the counters *before* dropping so the guard's
                // bookkeeping stays correct even if the destructor panics.
                g.processed_len += 1;
                g.deleted_cnt += 1;
                unsafe { ptr::drop_in_place(cur) };
                if DELETED {
                    continue;
                } else {
                    break;
                }
            }
            if DELETED {
                unsafe {
                    // Kept element: move it down over the nearest hole.
                    let hole_slot = g.v.as_mut_ptr().add(g.processed_len - g.deleted_cnt);
                    ptr::copy_nonoverlapping(cur, hole_slot, 1);
                }
            }
            g.processed_len += 1;
        }
    }
    process_loop::<F, T, A, false>(original_len, &mut f, &mut g);
    process_loop::<F, T, A, true>(original_len, &mut f, &mut g);
    // Normal exit: the guard's Drop still performs the final fix-up.
    drop(g);
}
#[inline(always)]
/// Removes consecutive elements that map to equal keys.
pub fn dedup_by_key<F, K>(&mut self, mut key: F)
where
    F: FnMut(&mut T) -> K,
    K: PartialEq,
{
    self.dedup_by(|a, b| key(a) == key(b))
}
/// Removes consecutive duplicates as judged by `same_bucket`, which is
/// called with the current element first and its predecessor second.
#[inline]
pub fn dedup_by<F>(&mut self, mut same_bucket: F)
where
    F: FnMut(&mut T, &mut T) -> bool,
{
    let len = self.len();
    if len <= 1 {
        return;
    }
    // Drop guard: if `same_bucket` panics mid-scan, shift the not-yet-
    // examined tail down over the gap and restore a consistent length.
    struct FillGapOnDrop<'a, T, A: Allocator> {
        // Index of the first element not yet checked.
        read: usize,
        // One past the last element kept so far (`write <= read`).
        write: usize,
        vec: &'a mut Vec<T, A>,
    }
    impl<'a, T, A: Allocator> Drop for FillGapOnDrop<'a, T, A> {
        fn drop(&mut self) {
            unsafe {
                let ptr = self.vec.as_mut_ptr();
                let len = self.vec.len();
                // `wrapping_sub` keeps the arithmetic panic-free even in
                // transient states.
                let items_left = len.wrapping_sub(self.read);
                let dropped_ptr = ptr.add(self.write);
                let valid_ptr = ptr.add(self.read);
                ptr::copy(valid_ptr, dropped_ptr, items_left);
                let dropped = self.read.wrapping_sub(self.write);
                self.vec.set_len(len - dropped);
            }
        }
    }
    let mut gap = FillGapOnDrop {
        read: 1,
        write: 1,
        vec: self,
    };
    let ptr = gap.vec.as_mut_ptr();
    unsafe {
        while gap.read < len {
            let read_ptr = ptr.add(gap.read);
            let prev_ptr = ptr.add(gap.write.wrapping_sub(1));
            if same_bucket(&mut *read_ptr, &mut *prev_ptr) {
                // Duplicate: advance `read` first so the guard treats this
                // slot as already handled if the drop panics.
                gap.read += 1;
                ptr::drop_in_place(read_ptr);
            } else {
                // Keep: compact it down next to the previous keeper.
                let write_ptr = ptr.add(gap.write);
                ptr::copy(read_ptr, write_ptr, 1);
                gap.write += 1;
                gap.read += 1;
            }
        }
        gap.vec.set_len(gap.write);
        // Success: disarm the guard.
        mem::forget(gap);
    }
}
#[cfg(not(no_global_oom_handling))]
#[inline(always)]
/// Appends `value`, growing the buffer if it is full.
pub fn push(&mut self, value: T) {
    if self.len == self.buf.capacity() {
        self.buf.reserve_for_push(self.len);
    }
    unsafe {
        let end = self.as_mut_ptr().add(self.len);
        ptr::write(end, value);
        self.len += 1;
    }
}
/// Appends `value` only when it fits in the existing capacity; otherwise
/// returns it back as `Err`. Never allocates.
#[inline(always)]
pub fn push_within_capacity(&mut self, value: T) -> Result<(), T> {
    if self.len == self.buf.capacity() {
        return Err(value);
    }
    unsafe {
        let end = self.as_mut_ptr().add(self.len);
        ptr::write(end, value);
        self.len += 1;
    }
    Ok(())
}
/// Removes and returns the last element, or `None` when empty.
#[inline(always)]
pub fn pop(&mut self) -> Option<T> {
    if self.len == 0 {
        None
    } else {
        unsafe {
            // Decrement first; the slot at the new `len` is read out.
            self.len -= 1;
            Some(ptr::read(self.as_ptr().add(self.len())))
        }
    }
}
/// Moves every element of `other` onto the end of `self`, leaving `other`
/// empty (its capacity is retained).
#[cfg(not(no_global_oom_handling))]
#[inline(always)]
pub fn append(&mut self, other: &mut Self) {
    unsafe {
        self.append_elements(other.as_slice() as _);
        // The elements now live in `self`; zero `other`'s length so they
        // are not dropped twice.
        other.set_len(0);
    }
}
/// Copies the pointed-to elements to the end of `self` without dropping
/// or clearing the source.
///
/// # Safety
/// The caller must ensure the source elements are not used or dropped
/// again afterwards.
#[cfg(not(no_global_oom_handling))]
#[inline(always)]
unsafe fn append_elements(&mut self, other: *const [T]) {
    let count = unsafe { (&(*other)).len() };
    self.reserve(count);
    let len = self.len();
    unsafe { ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count) };
    self.len += count;
}
#[inline(always)]
/// Removes the given range from the vector, returning an iterator over
/// the removed elements. Panics when the range is out of bounds or
/// reversed.
pub fn drain<R>(&mut self, range: R) -> Drain<'_, T, A>
where
    R: RangeBounds<usize>,
{
    let len = self.len();
    // Bounds/ordering check: slice indexing with the same bounds panics
    // on an invalid range before any state is touched.
    let _ = &self.as_slice()[(range.start_bound().cloned(), range.end_bound().cloned())];
    let start = match range.start_bound() {
        Bound::Included(&n) => n,
        Bound::Excluded(&n) => n + 1,
        Bound::Unbounded => 0,
    };
    let end = match range.end_bound() {
        Bound::Included(&n) => n + 1,
        Bound::Excluded(&n) => n,
        Bound::Unbounded => len,
    };
    unsafe {
        // Shorten `self` to the head so that a leaked `Drain` cannot
        // expose the drained or tail elements through the vector.
        self.set_len(start);
        let range_slice = slice::from_raw_parts(self.as_ptr().add(start), end - start);
        Drain {
            tail_start: end,
            tail_len: len - end,
            iter: range_slice.iter(),
            vec: NonNull::from(self),
        }
    }
}
/// Drops all elements; keeps the allocation.
#[inline(always)]
pub fn clear(&mut self) {
    let elems: *mut [T] = self.as_mut_slice();
    unsafe {
        // Zero the length first so a panicking destructor cannot cause
        // elements to be dropped twice.
        self.len = 0;
        ptr::drop_in_place(elems);
    }
}
/// Number of initialized elements.
#[inline(always)]
pub fn len(&self) -> usize {
    self.len
}
/// `true` when the vector holds no elements.
#[inline(always)]
pub fn is_empty(&self) -> bool {
    self.len() == 0
}
#[cfg(not(no_global_oom_handling))]
#[inline(always)]
#[must_use = "use `.truncate()` if you don't need the other half"]
/// Splits the vector at `at`: `self` keeps `[0, at)` and a newly
/// allocated vector with the tail `[at, len)` is returned.
/// Panics if `at > len`.
pub fn split_off(&mut self, at: usize) -> Self
where
    A: Clone,
{
    #[cold]
    #[inline(never)]
    fn assert_failed(at: usize, len: usize) -> ! {
        panic!("`at` split index (is {}) should be <= len (is {})", at, len);
    }
    if at > self.len() {
        assert_failed(at, self.len());
    }
    if at == 0 {
        // The whole vector moves out; leave `self` empty but with the
        // same capacity.
        return mem::replace(
            self,
            Vec::with_capacity_in(self.capacity(), self.allocator().clone()),
        );
    }
    let other_len = self.len - at;
    let mut other = Vec::with_capacity_in(other_len, self.allocator().clone());
    unsafe {
        // Shrink `self` first, then copy the tail into `other`; the two
        // buffers are distinct so `copy_nonoverlapping` is fine.
        self.set_len(at);
        other.set_len(other_len);
        ptr::copy_nonoverlapping(self.as_ptr().add(at), other.as_mut_ptr(), other.len());
    }
    other
}
/// Resizes to `new_len`, filling any new slots with values produced by
/// `f`; truncates when shrinking.
#[cfg(not(no_global_oom_handling))]
#[inline(always)]
pub fn resize_with<F>(&mut self, new_len: usize, f: F)
where
    F: FnMut() -> T,
{
    let len = self.len();
    if new_len > len {
        self.extend(iter::repeat_with(f).take(new_len - len));
    } else {
        self.truncate(new_len);
    }
}
/// Leaks the vector, returning a mutable slice to its contents; the
/// buffer is never freed.
#[inline(always)]
pub fn leak<'a>(self) -> &'a mut [T]
where
    A: 'a,
{
    // ManuallyDrop suppresses the destructor, leaking the allocation.
    let mut me = ManuallyDrop::new(self);
    unsafe { slice::from_raw_parts_mut(me.as_mut_ptr(), me.len) }
}
/// The uninitialized tail of the buffer, as `MaybeUninit` slots.
#[inline(always)]
pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit<T>] {
    unsafe {
        slice::from_raw_parts_mut(
            self.as_mut_ptr().add(self.len) as *mut MaybeUninit<T>,
            self.buf.capacity() - self.len,
        )
    }
}
/// Simultaneous views of the initialized prefix and the spare tail.
#[inline(always)]
pub fn split_at_spare_mut(&mut self) -> (&mut [T], &mut [MaybeUninit<T>]) {
    let (init, spare, _) = unsafe { self.split_at_spare_mut_with_len() };
    (init, spare)
}
/// As `split_at_spare_mut`, plus a `&mut` to the length field so callers
/// can commit spare slots they have initialized.
///
/// # Safety
/// The caller must only grow the length by the number of spare slots it
/// has actually initialized.
unsafe fn split_at_spare_mut_with_len(
    &mut self,
) -> (&mut [T], &mut [MaybeUninit<T>], &mut usize) {
    let ptr = self.as_mut_ptr();
    let spare_ptr = unsafe { ptr.add(self.len) };
    let spare_ptr = spare_ptr.cast::<MaybeUninit<T>>();
    let spare_len = self.buf.capacity() - self.len;
    unsafe {
        // The two slices cover disjoint halves of the buffer, so the
        // three returned borrows never alias.
        let initialized = slice::from_raw_parts_mut(ptr, self.len);
        let spare = slice::from_raw_parts_mut(spare_ptr, spare_len);
        (initialized, spare, &mut self.len)
    }
}
}
impl<T: Clone, A: Allocator> Vec<T, A> {
    /// Resizes to `new_len`, filling new slots with clones of `value`;
    /// truncates when shrinking.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    pub fn resize(&mut self, new_len: usize, value: T) {
        let len = self.len();
        if new_len > len {
            self.extend_with(new_len - len, ExtendElement(value))
        } else {
            self.truncate(new_len);
        }
    }
    /// Appends clones of every element of `other`.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    pub fn extend_from_slice(&mut self, other: &[T]) {
        self.extend(other.iter().cloned())
    }
    /// Appends clones of the elements in `src`, a range within `self`.
    /// Panics when the range is invalid.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    pub fn extend_from_within<R>(&mut self, src: R)
    where
        R: RangeBounds<usize>,
    {
        // Validate the range up front via slice indexing (panics if bad).
        let _ = &self.as_slice()[(src.start_bound().cloned(), src.end_bound().cloned())];
        let len = self.len();
        let start: ops::Bound<&usize> = src.start_bound();
        let start = match start {
            ops::Bound::Included(&start) => start,
            ops::Bound::Excluded(start) => start + 1,
            ops::Bound::Unbounded => 0,
        };
        let end: ops::Bound<&usize> = src.end_bound();
        let end = match end {
            ops::Bound::Included(end) => end + 1,
            ops::Bound::Excluded(&end) => end,
            ops::Bound::Unbounded => len,
        };
        let range = start..end;
        self.reserve(range.len());
        let (this, spare, len) = unsafe { self.split_at_spare_mut_with_len() };
        // SAFETY: `range` was validated against the slice above.
        let to_clone = unsafe { this.get_unchecked(range) };
        // Clone into the spare capacity, bumping `len` after each write so
        // a panicking `Clone` leaves the vector in a consistent state.
        iter::zip(to_clone, spare)
            .map(|(src, dst)| dst.write(src.clone()))
            .for_each(|_| *len += 1);
    }
}
impl<T, A: Allocator, const N: usize> Vec<[T; N], A> {
    /// Flattens a `Vec<[T; N]>` into a `Vec<T>` in place, without moving
    /// or copying any element.
    #[inline(always)]
    pub fn into_flattened(self) -> Vec<T, A> {
        let (ptr, len, cap, alloc) = self.into_raw_parts_with_alloc();
        let (new_len, new_cap) = if size_of::<T>() == 0 {
            // ZST: the length product can overflow in principle, so check;
            // capacity for ZSTs is effectively unbounded.
            (len.checked_mul(N).expect("vec len overflow"), usize::MAX)
        } else {
            (len * N, cap * N)
        };
        // SAFETY: same allocation, identical layout — `[T; N]` is exactly
        // N consecutive `T`s.
        unsafe { Vec::<T, A>::from_raw_parts_in(ptr.cast(), new_len, new_cap, alloc) }
    }
}
/// Strategy for filling slots in `extend_with`: `next` produces each
/// intermediate value, `last` the final one (which may consume state and
/// avoid a clone).
trait ExtendWith<T> {
    fn next(&mut self) -> T;
    fn last(self) -> T;
}
/// Fills slots with clones of a single value; the final slot receives the
/// value itself, saving one clone.
struct ExtendElement<T>(T);
impl<T: Clone> ExtendWith<T> for ExtendElement<T> {
    #[inline(always)]
    fn next(&mut self) -> T {
        self.0.clone()
    }
    #[inline(always)]
    fn last(self) -> T {
        self.0
    }
}
impl<T, A: Allocator> Vec<T, A> {
    /// Appends `n` values produced by the `ExtendWith` strategy `value`.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    fn extend_with<E: ExtendWith<T>>(&mut self, n: usize, mut value: E) {
        self.reserve(n);
        unsafe {
            let mut ptr = self.as_mut_ptr().add(self.len());
            // Guard commits the length increments even if `next()` panics,
            // so already-written elements are not leaked or double-dropped.
            let mut local_len = SetLenOnDrop::new(&mut self.len);
            // Write n-1 values via `next`...
            for _ in 1..n {
                ptr::write(ptr, value.next());
                ptr = ptr.add(1);
                local_len.increment_len(1);
            }
            // ...and the final one via `last`, which may avoid a clone.
            if n > 0 {
                ptr::write(ptr, value.last());
                local_len.increment_len(1);
            }
        }
    }
}
impl<T: PartialEq, A: Allocator> Vec<T, A> {
    /// Removes consecutive repeated elements (judged by `==`).
    #[inline(always)]
    pub fn dedup(&mut self) {
        self.dedup_by(|a, b| a == b)
    }
}
// All slice methods on `Vec` come through these two derefs.
impl<T, A: Allocator> ops::Deref for Vec<T, A> {
    type Target = [T];
    #[inline(always)]
    fn deref(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.as_ptr(), self.len) }
    }
}
impl<T, A: Allocator> ops::DerefMut for Vec<T, A> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut [T] {
        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) }
    }
}
#[cfg(not(no_global_oom_handling))]
impl<T: Clone, A: Allocator + Clone> Clone for Vec<T, A> {
    #[inline(always)]
    fn clone(&self) -> Self {
        let alloc = self.allocator().clone();
        let mut vec = Vec::with_capacity_in(self.len(), alloc);
        vec.extend_from_slice(self);
        vec
    }
    /// Clones `other` into `self`, reusing `self`'s existing elements and
    /// allocation where possible.
    #[inline(always)]
    fn clone_from(&mut self, other: &Self) {
        // Drop any excess elements, clone over the shared prefix, then
        // append whatever remains of `other`.
        self.truncate(other.len());
        let (init, tail) = other.split_at(self.len());
        self.clone_from_slice(init);
        self.extend_from_slice(tail);
    }
}
// Hashing and indexing defer to the corresponding slice implementations.
impl<T: Hash, A: Allocator> Hash for Vec<T, A> {
    #[inline(always)]
    fn hash<H: Hasher>(&self, state: &mut H) {
        Hash::hash(&**self, state)
    }
}
impl<T, I: SliceIndex<[T]>, A: Allocator> Index<I> for Vec<T, A> {
    type Output = I::Output;
    #[inline(always)]
    fn index(&self, index: I) -> &Self::Output {
        Index::index(&**self, index)
    }
}
impl<T, I: SliceIndex<[T]>, A: Allocator> IndexMut<I> for Vec<T, A> {
    #[inline(always)]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        IndexMut::index_mut(&mut **self, index)
    }
}
#[cfg(not(no_global_oom_handling))]
impl<T> FromIterator<T> for Vec<T> {
    // Collects via `extend`, which grows using the iterator's size hints.
    #[inline(always)]
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Vec<T> {
        let mut vec = Vec::new();
        vec.extend(iter);
        vec
    }
}
impl<T, A: Allocator> IntoIterator for Vec<T, A> {
    type Item = T;
    type IntoIter = IntoIter<T, A>;
    /// Consumes the vector into an owning iterator.
    #[inline(always)]
    fn into_iter(self) -> Self::IntoIter {
        unsafe {
            // Suppress Vec's Drop; ownership of buffer, elements and
            // allocator transfers to the returned IntoIter.
            let mut me = ManuallyDrop::new(self);
            let alloc = ManuallyDrop::new(ptr::read(me.allocator()));
            let begin = me.as_mut_ptr();
            let end = if size_of::<T>() == 0 {
                // For ZSTs the "end" pointer encodes the element count in
                // its address; `wrapping_add` sidesteps offset UB since no
                // real allocation backs these addresses.
                begin.cast::<u8>().wrapping_add(me.len()).cast()
            } else {
                begin.add(me.len()) as *const T
            };
            let cap = me.buf.capacity();
            IntoIter {
                buf: NonNull::new_unchecked(begin),
                phantom: PhantomData,
                cap,
                alloc,
                ptr: begin,
                end,
            }
        }
    }
}
impl<'a, T, A: Allocator> IntoIterator for &'a Vec<T, A> {
    type Item = &'a T;
    type IntoIter = slice::Iter<'a, T>;
    // Borrowing iteration defers to the slice iterator (via Deref).
    #[inline(always)]
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
impl<'a, T, A: Allocator> IntoIterator for &'a mut Vec<T, A> {
    type Item = &'a mut T;
    type IntoIter = slice::IterMut<'a, T>;
    // Mutable borrowing iteration defers to the slice iterator (via
    // DerefMut). `#[inline(always)]` added for consistency with the
    // by-value and by-shared-ref `IntoIterator` impls above, which both
    // carry the same hint on this one-line forwarder.
    #[inline(always)]
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}
#[cfg(not(no_global_oom_handling))]
impl<T, A: Allocator> Extend<T> for Vec<T, A> {
    #[inline(always)]
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        let mut iter = iter.into_iter();
        while let Some(element) = iter.next() {
            let len = self.len();
            if len == self.capacity() {
                // Grow by at least the iterator's remaining lower bound
                // (plus the element in hand) to amortize reallocations.
                let (lower, _) = iter.size_hint();
                self.reserve(lower.saturating_add(1));
            }
            unsafe {
                ptr::write(self.as_mut_ptr().add(len), element);
                // Commit the length after each write so a panicking
                // iterator leaves the vector in a consistent state.
                self.set_len(len + 1);
            }
        }
    }
}
impl<T, A: Allocator> Vec<T, A> {
    /// Replaces the elements in `range` with the contents of
    /// `replace_with`, yielding the removed elements through the returned
    /// `Splice`. Panics (via `drain`) on an invalid range.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    pub fn splice<R, I>(&mut self, range: R, replace_with: I) -> Splice<'_, I::IntoIter, A>
    where
        R: RangeBounds<usize>,
        I: IntoIterator<Item = T>,
    {
        Splice {
            drain: self.drain(range),
            replace_with: replace_with.into_iter(),
        }
    }
}
// Extend from borrowed elements; `T: Copy` means each is copied by deref.
#[cfg(not(no_global_oom_handling))]
impl<'a, T: Copy + 'a, A: Allocator + 'a> Extend<&'a T> for Vec<T, A> {
    #[inline(always)]
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        let mut iter = iter.into_iter();
        while let Some(element) = iter.next() {
            let len = self.len();
            if len == self.capacity() {
                // Same amortized growth policy as `Extend<T>`.
                let (lower, _) = iter.size_hint();
                self.reserve(lower.saturating_add(1));
            }
            unsafe {
                ptr::write(self.as_mut_ptr().add(len), *element);
                self.set_len(len + 1);
            }
        }
    }
}
// Comparisons defer to the slice implementations (lexicographic order).
impl<T: PartialOrd, A: Allocator> PartialOrd for Vec<T, A> {
    #[inline(always)]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        PartialOrd::partial_cmp(&**self, &**other)
    }
}
impl<T: Eq, A: Allocator> Eq for Vec<T, A> {}
impl<T: Ord, A: Allocator> Ord for Vec<T, A> {
    #[inline(always)]
    fn cmp(&self, other: &Self) -> Ordering {
        Ord::cmp(&**self, &**other)
    }
}
impl<T, A: Allocator> Drop for Vec<T, A> {
    #[inline(always)]
    fn drop(&mut self) {
        unsafe {
            // Drop the initialized elements only; the buffer itself is
            // freed afterwards by `RawVec`'s own destructor.
            ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), self.len))
        }
    }
}
impl<T> Default for Vec<T> {
    /// An empty vector; allocates nothing.
    #[inline(always)]
    fn default() -> Vec<T> {
        Vec::new()
    }
}
impl<T: fmt::Debug, A: Allocator> fmt::Debug for Vec<T, A> {
    // Formats like the underlying slice.
    #[inline(always)]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}
// Reflexive AsRef/AsMut plus slice views — all borrow `self` directly
// (the slice forms go through Deref/DerefMut).
impl<T, A: Allocator> AsRef<Vec<T, A>> for Vec<T, A> {
    #[inline(always)]
    fn as_ref(&self) -> &Vec<T, A> {
        self
    }
}
impl<T, A: Allocator> AsMut<Vec<T, A>> for Vec<T, A> {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut Vec<T, A> {
        self
    }
}
impl<T, A: Allocator> AsRef<[T]> for Vec<T, A> {
    #[inline(always)]
    fn as_ref(&self) -> &[T] {
        self
    }
}
impl<T, A: Allocator> AsMut<[T]> for Vec<T, A> {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut [T] {
        self
    }
}
// Conversions into and out of `Vec`.
#[cfg(not(no_global_oom_handling))]
impl<T: Clone> From<&[T]> for Vec<T> {
    /// Clones the slice's elements into a new vector.
    #[inline(always)]
    fn from(s: &[T]) -> Vec<T> {
        let mut vec = Vec::with_capacity(s.len());
        vec.extend_from_slice(s);
        vec
    }
}
#[cfg(not(no_global_oom_handling))]
impl<T: Clone> From<&mut [T]> for Vec<T> {
    /// Clones the slice's elements into a new vector.
    #[inline(always)]
    fn from(s: &mut [T]) -> Vec<T> {
        let mut vec = Vec::with_capacity(s.len());
        vec.extend_from_slice(s);
        vec
    }
}
#[cfg(not(no_global_oom_handling))]
impl<T, const N: usize> From<[T; N]> for Vec<T> {
    /// Boxes the array, then reuses that allocation via `into_vec`.
    #[inline(always)]
    fn from(s: [T; N]) -> Vec<T> {
        Box::slice(Box::new(s)).into_vec()
    }
}
impl<T, A: Allocator> From<Box<[T], A>> for Vec<T, A> {
    /// Reuses the boxed slice's allocation via `into_vec`.
    #[inline(always)]
    fn from(s: Box<[T], A>) -> Self {
        s.into_vec()
    }
}
impl<T, A: Allocator, const N: usize> From<Box<[T; N], A>> for Vec<T, A> {
    /// Reuses the boxed array's allocation via `into_vec`.
    #[inline(always)]
    fn from(s: Box<[T; N], A>) -> Self {
        s.into_vec()
    }
}
#[cfg(not(no_global_oom_handling))]
impl<T, A: Allocator> From<Vec<T, A>> for Box<[T], A> {
    /// May reallocate (shrink) so that capacity equals length.
    #[inline(always)]
    fn from(v: Vec<T, A>) -> Self {
        v.into_boxed_slice()
    }
}
#[cfg(not(no_global_oom_handling))]
impl From<&str> for Vec<u8> {
    /// Copies the string's UTF-8 bytes.
    #[inline(always)]
    fn from(s: &str) -> Vec<u8> {
        From::from(s.as_bytes())
    }
}
impl<T, A: Allocator, const N: usize> TryFrom<Vec<T, A>> for [T; N] {
    type Error = Vec<T, A>;
    /// Succeeds only when the length is exactly `N`; otherwise hands the
    /// vector back unchanged as the error.
    #[inline(always)]
    fn try_from(mut vec: Vec<T, A>) -> Result<[T; N], Vec<T, A>> {
        if vec.len() != N {
            return Err(vec);
        }
        // Disown the elements first so the Vec's Drop (which still frees
        // the buffer) does not also drop them.
        unsafe { vec.set_len(0) };
        // SAFETY: the length was exactly N, so reading N elements is valid.
        let array = unsafe { ptr::read(vec.as_ptr() as *const [T; N]) };
        Ok(array)
    }
}
#[inline(always)]
#[cfg(not(no_global_oom_handling))]
#[doc(hidden)]
/// Builds a vector of `n` clones of `elem` in `alloc` (the final slot
/// takes `elem` by move — see `ExtendElement`).
pub fn from_elem_in<T: Clone, A: Allocator>(elem: T, n: usize, alloc: A) -> Vec<T, A> {
    let mut v = Vec::with_capacity_in(n, alloc);
    v.extend_with(n, ExtendElement(elem));
    v
}
#[inline(always)]
#[cfg(not(no_global_oom_handling))]
#[doc(hidden)]
/// Global-allocator variant of `from_elem_in`.
pub fn from_elem<T: Clone>(elem: T, n: usize) -> Vec<T> {
    let mut v = Vec::with_capacity(n);
    v.extend_with(n, ExtendElement(elem));
    v
}
// A byte vector is an infallible, growable sink.
#[cfg(feature = "std")]
impl<A: Allocator> io::Write for Vec<u8, A> {
    #[inline]
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.extend_from_slice(buf);
        Ok(buf.len())
    }
    #[inline]
    fn write_vectored(&mut self, bufs: &[io::IoSlice<'_>]) -> io::Result<usize> {
        // Reserve once for the total, then append each buffer.
        let len = bufs.iter().map(|b| b.len()).sum();
        self.reserve(len);
        for buf in bufs {
            self.extend_from_slice(buf);
        }
        Ok(len)
    }
    #[inline]
    fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
        self.extend_from_slice(buf);
        Ok(())
    }
    #[inline]
    fn flush(&mut self) -> io::Result<()> {
        // Nothing is buffered beyond the vector itself.
        Ok(())
    }
}
#[cfg(feature = "serde")]
impl<T, A> serde_core::Serialize for Vec<T, A>
where
    T: serde_core::Serialize,
    A: Allocator,
{
    // Serializes as a sequence of the contained elements.
    #[inline(always)]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde_core::ser::Serializer,
    {
        serializer.collect_seq(self)
    }
}
#[cfg(feature = "serde")]
impl<'de, T, A> serde_core::de::Deserialize<'de> for Vec<T, A>
where
    T: serde_core::de::Deserialize<'de>,
    A: Allocator + Default,
{
    #[inline(always)]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde_core::de::Deserializer<'de>,
    {
        // Visitor that builds a fresh Vec from a serialized sequence.
        struct VecVisitor<T, A> {
            marker: PhantomData<(T, A)>,
        }
        impl<'de, T, A> serde_core::de::Visitor<'de> for VecVisitor<T, A>
        where
            T: serde_core::de::Deserialize<'de>,
            A: Allocator + Default,
        {
            type Value = Vec<T, A>;
            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("a sequence")
            }
            fn visit_seq<S>(self, mut seq: S) -> Result<Self::Value, S::Error>
            where
                S: serde_core::de::SeqAccess<'de>,
            {
                // `cautious` caps the preallocation so a hostile size hint
                // cannot force a huge reserve.
                let mut values = Vec::with_capacity_in(cautious(seq.size_hint()), A::default());
                while let Some(value) = seq.next_element()? {
                    values.push(value);
                }
                Ok(values)
            }
        }
        let visitor = VecVisitor {
            marker: PhantomData,
        };
        deserializer.deserialize_seq(visitor)
    }
    #[inline(always)]
    fn deserialize_in_place<D>(deserializer: D, place: &mut Self) -> Result<(), D::Error>
    where
        D: serde_core::de::Deserializer<'de>,
    {
        // Visitor that reuses `place`'s buffer and existing elements.
        struct VecInPlaceVisitor<'a, T: 'a, A: Allocator + 'a>(&'a mut Vec<T, A>);
        impl<'a, 'de, T, A> serde_core::de::Visitor<'de> for VecInPlaceVisitor<'a, T, A>
        where
            T: serde_core::de::Deserialize<'de>,
            A: Allocator + Default,
        {
            type Value = ();
            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("a sequence")
            }
            fn visit_seq<S>(self, mut seq: S) -> Result<Self::Value, S::Error>
            where
                S: serde_core::de::SeqAccess<'de>,
            {
                let hint = cautious(seq.size_hint());
                if let Some(additional) = hint.checked_sub(self.0.len()) {
                    self.0.reserve(additional);
                }
                // First deserialize over the elements that already exist...
                for i in 0..self.0.len() {
                    let next = {
                        let next_place = InPlaceSeed(&mut self.0[i]);
                        seq.next_element_seed(next_place)?
                    };
                    if next.is_none() {
                        // Input was shorter than `place`: drop the excess.
                        self.0.truncate(i);
                        return Ok(());
                    }
                }
                // ...then push any remaining input elements.
                while let Some(value) = seq.next_element()? {
                    self.0.push(value);
                }
                Ok(())
            }
        }
        deserializer.deserialize_seq(VecInPlaceVisitor(place))
    }
}
#[cfg(feature = "serde")]
/// Clamps an untrusted deserializer size hint so a malicious length
/// cannot trigger a huge up-front allocation; an absent hint means
/// "preallocate nothing", and present hints are capped at 4096 slots.
pub fn cautious(hint: Option<usize>) -> usize {
    match hint {
        Some(n) => cmp::min(n, 4096),
        None => 0,
    }
}
#[cfg(feature = "serde")]
/// Seed that deserializes into an existing `&mut T` in place rather than
/// producing a new value.
pub struct InPlaceSeed<'a, T: 'a>(pub &'a mut T);
#[cfg(feature = "serde")]
impl<'a, 'de, T> serde_core::de::DeserializeSeed<'de> for InPlaceSeed<'a, T>
where
    T: serde_core::de::Deserialize<'de>,
{
    type Value = ();
    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
    where
        D: serde_core::de::Deserializer<'de>,
    {
        T::deserialize_in_place(deserializer, self.0)
    }
}