pub use self::drain::Drain;
mod drain;
pub use self::into_iter::IntoIter;
mod into_iter;
mod partial_eq;
use self::spec_from_elem::SpecFromElem;
mod spec_from_elem;
use self::spec_extend::SpecExtend;
mod spec_extend;
use self::set_len_on_drop::SetLenOnDrop;
mod set_len_on_drop;
mod splice;
#[cfg(rune_nightly)]
use self::is_zero::IsZero;
#[cfg(rune_nightly)]
mod is_zero;
#[cfg(feature = "alloc")]
use core::alloc::Layout;
use core::borrow::Borrow;
use core::cmp;
use core::cmp::Ordering;
use core::fmt;
use core::hash::{Hash, Hasher};
use core::iter;
use core::marker::PhantomData;
use core::mem::{self, ManuallyDrop, MaybeUninit};
use core::ops::{self, Index, IndexMut, Range, RangeBounds};
use core::slice::{self, SliceIndex};
use crate::alloc::{Allocator, Global, SizedTypeProperties};
use crate::clone::TryClone;
use crate::error::Error;
use crate::iter::{TryExtend, TryFromIteratorIn};
use crate::ptr::{self, NonNull};
use crate::raw_vec::RawVec;
use crate::slice::range as slice_range;
use crate::slice::{RawIter, RawIterMut};
#[cfg(test)]
use crate::testing::*;
use crate::Box;
#[doc(hidden)]
/// Builds a `Vec<T>` containing `n` fallible clones of `elem`, dispatching
/// through the `SpecFromElem` specialization for `T`.
pub fn try_from_elem<T: TryClone>(elem: T, n: usize) -> Result<Vec<T>, Error> {
    <T as SpecFromElem>::from_elem(elem, n, Global)
}
/// A contiguous, growable array type whose allocating operations are
/// fallible (the `try_*` methods), parameterized over an [`Allocator`].
pub struct Vec<T, A: Allocator = Global> {
    /// Owned (possibly empty) heap buffer. It knows its capacity but not
    /// how many elements are initialized.
    buf: RawVec<T, A>,
    /// Number of initialized elements at the front of `buf`.
    /// Invariant: `len <= buf.capacity()`.
    len: usize,
}
impl<T> Vec<T> {
    /// Constructs a new, empty `Vec<T>` using the global allocator.
    /// Does not allocate until elements are pushed.
    #[inline]
    #[must_use]
    pub const fn new() -> Self {
        Vec {
            buf: RawVec::NEW,
            len: 0,
        }
    }

    /// Constructs an empty vector with at least `capacity` pre-allocated
    /// slots in the global allocator, or returns an allocation error.
    #[inline]
    pub fn try_with_capacity(capacity: usize) -> Result<Self, Error> {
        Self::try_with_capacity_in(capacity, Global)
    }

    /// Converts this vector into a `std`-style `Vec`, transferring
    /// ownership of the allocation (no elements are copied).
    #[cfg(feature = "alloc")]
    pub fn into_std(self) -> ::rust_alloc::vec::Vec<T> {
        let (ptr, len, cap, alloc) = self.into_raw_parts_with_alloc();
        // NOTE(review): `release` presumably updates this crate's `Global`
        // allocation accounting, since the buffer will now be freed by the
        // std vector instead — verify against `Global::release`.
        if let Ok(layout) = Layout::array::<T>(cap) {
            alloc.release(layout);
        }
        // SAFETY: `ptr`/`len`/`cap` describe the live buffer we just took
        // ownership of via `into_raw_parts_with_alloc`.
        unsafe { ::rust_alloc::vec::Vec::from_raw_parts(ptr, len, cap) }
    }
}
impl<T, A: Allocator> Vec<T, A> {
    /// Constructs a new, empty vector backed by `alloc`.
    /// Does not allocate until elements are pushed.
    #[inline]
    pub const fn new_in(alloc: A) -> Self {
        Vec {
            buf: RawVec::new_in(alloc),
            len: 0,
        }
    }

    /// Constructs an empty vector with at least `capacity` pre-allocated
    /// slots in `alloc`, or returns an allocation error.
    #[inline]
    pub fn try_with_capacity_in(capacity: usize, alloc: A) -> Result<Self, Error> {
        Ok(Vec {
            buf: RawVec::try_with_capacity_in(capacity, alloc)?,
            len: 0,
        })
    }

    /// Rebuilds a vector from raw parts.
    ///
    /// # Safety
    ///
    /// `ptr` must point to an allocation made through `alloc` with room for
    /// `capacity` elements of `T`, of which the first `length` must be
    /// initialized. Ownership of the allocation transfers to the result.
    #[inline]
    pub unsafe fn from_raw_parts_in(ptr: *mut T, length: usize, capacity: usize, alloc: A) -> Self {
        unsafe {
            Vec {
                buf: RawVec::from_raw_parts_in(ptr, capacity, alloc),
                len: length,
            }
        }
    }

    /// Returns a reference to the underlying allocator.
    #[inline]
    pub fn allocator(&self) -> &A {
        self.buf.allocator()
    }

    /// Decomposes the vector into its raw buffer and length without
    /// running the destructor.
    pub(crate) fn into_raw_vec(self) -> (RawVec<T, A>, usize) {
        // ManuallyDrop stops `self`'s destructor from freeing the buffer
        // we are about to move out.
        let me = ManuallyDrop::new(self);
        let buf = unsafe { ptr::read(&me.buf) };
        (buf, me.len)
    }

    /// Decomposes the vector into `(pointer, length, capacity, allocator)`.
    /// The caller becomes responsible for the allocation; typically paired
    /// with [`Vec::from_raw_parts_in`].
    pub fn into_raw_parts_with_alloc(self) -> (*mut T, usize, usize, A) {
        let mut me = ManuallyDrop::new(self);
        let len = me.len();
        let capacity = me.capacity();
        let ptr = me.as_mut_ptr();
        // SAFETY: `me` is never dropped, so moving the allocator out by a
        // raw read cannot lead to a double-drop.
        let alloc = unsafe { ptr::read(me.allocator()) };
        (ptr, len, capacity, alloc)
    }

    /// Returns the number of elements the vector can hold without
    /// reallocating.
    #[inline]
    pub fn capacity(&self) -> usize {
        self.buf.capacity()
    }

    /// Reserves capacity for at least `additional` more elements,
    /// returning an error on allocation failure or capacity overflow.
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), Error> {
        self.buf.try_reserve(self.len, additional)
    }

    /// Like [`Vec::try_reserve`], but asks the buffer not to over-allocate
    /// beyond the exact requested amount.
    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), Error> {
        self.buf.try_reserve_exact(self.len, additional)
    }

    /// Shrinks the capacity of the vector to its length as much as the
    /// allocator permits.
    pub fn try_shrink_to_fit(&mut self) -> Result<(), Error> {
        if self.capacity() > self.len {
            self.buf.try_shrink_to_fit(self.len)?;
        }
        Ok(())
    }

    /// Shrinks the capacity with a lower bound of `min_capacity` (but
    /// never below the current length).
    pub fn try_shrink_to(&mut self, min_capacity: usize) -> Result<(), Error> {
        if self.capacity() > min_capacity {
            self.buf
                .try_shrink_to_fit(cmp::max(self.len, min_capacity))?;
        }
        Ok(())
    }

    /// Converts the vector into a boxed slice, shrinking the allocation to
    /// exactly `len` first.
    pub fn try_into_boxed_slice(mut self) -> Result<Box<[T], A>, Error> {
        unsafe {
            self.try_shrink_to_fit()?;
            // Prevent the destructor from running; the box takes over.
            let me = ManuallyDrop::new(self);
            let buf = ptr::read(&me.buf);
            let len = me.len();
            // SAFETY: after the shrink above, exactly `len` initialized
            // elements occupy the whole buffer.
            Ok(buf.into_box(len).assume_init())
        }
    }

    /// Shortens the vector to `len` elements, dropping the tail.
    /// A no-op when `len >= self.len`.
    pub fn truncate(&mut self, len: usize) {
        unsafe {
            if len > self.len {
                return;
            }
            let remaining_len = self.len - len;
            let s = ptr::slice_from_raw_parts_mut(self.as_mut_ptr().add(len), remaining_len);
            // Update `len` *before* dropping: if an element's destructor
            // panics, the vector no longer claims the partially dropped
            // tail as initialized.
            self.len = len;
            ptr::drop_in_place(s);
        }
    }

    /// Returns the initialized elements as a shared slice.
    #[inline]
    pub fn as_slice(&self) -> &[T] {
        self
    }

    /// Returns the initialized elements as a mutable slice.
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        self
    }

    /// Returns a raw pointer to the buffer (valid for `capacity` slots).
    #[inline]
    pub fn as_ptr(&self) -> *const T {
        self.buf.ptr()
    }

    /// Returns a raw mutable pointer to the buffer.
    #[inline]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        self.buf.ptr()
    }

    /// Forces the length of the vector to `new_len`.
    ///
    /// # Safety
    ///
    /// `new_len` must not exceed the capacity, and the first `new_len`
    /// elements must be initialized.
    #[inline]
    pub unsafe fn set_len(&mut self, new_len: usize) {
        debug_assert!(new_len <= self.capacity());
        self.len = new_len;
    }

    /// Removes and returns the element at `index`, replacing it with the
    /// last element. O(1), but does not preserve ordering.
    ///
    /// # Panics
    ///
    /// Panics if `index >= len`.
    #[inline]
    pub fn swap_remove(&mut self, index: usize) -> T {
        #[cold]
        #[inline(never)]
        fn assert_failed(index: usize, len: usize) -> ! {
            panic!("swap_remove index (is {index}) should be < len (is {len})");
        }
        let len = self.len();
        if index >= len {
            assert_failed(index, len);
        }
        unsafe {
            // Read the victim, then overwrite its slot with the last
            // element; `copy` is fine even when `index == len - 1`
            // (source and destination coincide).
            let value = ptr::read(self.as_ptr().add(index));
            let base_ptr = self.as_mut_ptr();
            ptr::copy(base_ptr.add(len - 1), base_ptr.add(index), 1);
            self.set_len(len - 1);
            value
        }
    }

    /// Inserts `element` at position `index`, shifting everything after it
    /// to the right. Errors on allocation failure.
    ///
    /// # Panics
    ///
    /// Panics if `index > len`.
    pub fn try_insert(&mut self, index: usize, element: T) -> Result<(), Error> {
        #[cold]
        #[inline(never)]
        fn assert_failed(index: usize, len: usize) -> ! {
            panic!("insertion index (is {index}) should be <= len (is {len})");
        }
        let len = self.len();
        // Validate the index *before* doing any pointer arithmetic with
        // it: the original code computed `as_mut_ptr().add(index)` first,
        // which is undefined behavior when `index > len` (the offset may
        // leave the allocation) even though the panic fired afterwards.
        // Checking up front matches current std `Vec::insert`.
        if index > len {
            assert_failed(index, len);
        }
        if len == self.buf.capacity() {
            self.try_reserve(1)?;
        }
        unsafe {
            let p = self.as_mut_ptr().add(index);
            if index < len {
                // Shift `[index, len)` one slot to the right to open a gap.
                ptr::copy(p, p.add(1), len - index);
            }
            ptr::write(p, element);
            self.set_len(len + 1);
        }
        Ok(())
    }

    /// Removes and returns the element at `index`, shifting everything
    /// after it to the left. O(n).
    ///
    /// # Panics
    ///
    /// Panics if `index >= len`.
    #[track_caller]
    pub fn remove(&mut self, index: usize) -> T {
        #[cold]
        #[inline(never)]
        #[track_caller]
        fn assert_failed(index: usize, len: usize) -> ! {
            panic!("removal index (is {index}) should be < len (is {len})");
        }
        let len = self.len();
        if index >= len {
            assert_failed(index, len);
        }
        unsafe {
            let ret;
            {
                let ptr = self.as_mut_ptr().add(index);
                // Move the element out, then close the gap.
                ret = ptr::read(ptr);
                ptr::copy(ptr.add(1), ptr, len - index - 1);
            }
            self.set_len(len - 1);
            ret
        }
    }

    /// Retains only the elements for which `f` returns `true`, in order.
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        self.retain_mut(|elem| f(elem));
    }

    /// Retains only the elements for which `f` returns `true`, giving the
    /// predicate mutable access. Runs in O(n) with each element visited
    /// exactly once.
    pub fn retain_mut<F>(&mut self, mut f: F)
    where
        F: FnMut(&mut T) -> bool,
    {
        let original_len = self.len();
        // Zero the length for the duration: if `f` panics, the guard
        // below restores a consistent state instead of exposing a hole.
        unsafe { self.set_len(0) };
        // Panic guard: on unwind, shift the unprocessed tail down over
        // the hole left by deleted elements and restore the length.
        struct BackshiftOnDrop<'a, T, A: Allocator> {
            v: &'a mut Vec<T, A>,
            processed_len: usize,
            deleted_cnt: usize,
            original_len: usize,
        }
        impl<T, A: Allocator> Drop for BackshiftOnDrop<'_, T, A> {
            fn drop(&mut self) {
                if self.deleted_cnt > 0 {
                    unsafe {
                        ptr::copy(
                            self.v.as_ptr().add(self.processed_len),
                            self.v
                                .as_mut_ptr()
                                .add(self.processed_len - self.deleted_cnt),
                            self.original_len - self.processed_len,
                        );
                    }
                }
                unsafe {
                    self.v.set_len(self.original_len - self.deleted_cnt);
                }
            }
        }
        let mut g = BackshiftOnDrop {
            v: self,
            processed_len: 0,
            deleted_cnt: 0,
            original_len,
        };
        // Core loop, monomorphized twice: with `DELETED == false` it only
        // scans (no element has been removed yet, so nothing needs to be
        // shifted); once the first deletion happens it breaks out and the
        // `DELETED == true` copy takes over, back-shifting each kept
        // element into the hole.
        fn process_loop<F, T, A: Allocator, const DELETED: bool>(
            original_len: usize,
            f: &mut F,
            g: &mut BackshiftOnDrop<'_, T, A>,
        ) where
            F: FnMut(&mut T) -> bool,
        {
            while g.processed_len != original_len {
                let cur = unsafe { &mut *g.v.as_mut_ptr().add(g.processed_len) };
                if !f(cur) {
                    // Advance the counters *before* dropping so a panicking
                    // destructor doesn't cause `cur` to be dropped again by
                    // the guard.
                    g.processed_len += 1;
                    g.deleted_cnt += 1;
                    unsafe { ptr::drop_in_place(cur) };
                    if DELETED {
                        continue;
                    } else {
                        break;
                    }
                }
                if DELETED {
                    unsafe {
                        let hole_slot = g.v.as_mut_ptr().add(g.processed_len - g.deleted_cnt);
                        ptr::copy_nonoverlapping(cur, hole_slot, 1);
                    }
                }
                g.processed_len += 1;
            }
        }
        process_loop::<F, T, A, false>(original_len, &mut f, &mut g);
        process_loop::<F, T, A, true>(original_len, &mut f, &mut g);
        // Normal exit: the guard's Drop performs the final shift/set_len.
        drop(g);
    }

    /// Removes consecutive elements that map to the same key.
    #[inline]
    pub fn dedup_by_key<F, K>(&mut self, mut key: F)
    where
        F: FnMut(&mut T) -> K,
        K: PartialEq,
    {
        self.dedup_by(|a, b| key(a) == key(b))
    }

    /// Removes consecutive duplicates according to `same_bucket`, keeping
    /// the first of each run. `same_bucket(a, b)` receives the candidate
    /// `a` and the previously kept element `b`.
    pub fn dedup_by<F>(&mut self, mut same_bucket: F)
    where
        F: FnMut(&mut T, &mut T) -> bool,
    {
        let len = self.len();
        if len <= 1 {
            return;
        }
        // Panic guard: if `same_bucket` or a destructor panics, close the
        // gap between the kept prefix and the unread suffix so the vector
        // stays valid (with the in-flight element lost, matching std).
        struct FillGapOnDrop<'a, T, A: Allocator> {
            /// Index of the next element to read.
            read: usize,
            /// Index of the next slot to keep an element in.
            write: usize,
            vec: &'a mut Vec<T, A>,
        }
        impl<T, A: Allocator> Drop for FillGapOnDrop<'_, T, A> {
            fn drop(&mut self) {
                unsafe {
                    let ptr = self.vec.as_mut_ptr();
                    let len = self.vec.len();
                    // Shift the not-yet-examined suffix down over the gap.
                    let items_left = len.wrapping_sub(self.read);
                    let dropped_ptr = ptr.add(self.write);
                    let valid_ptr = ptr.add(self.read);
                    ptr::copy(valid_ptr, dropped_ptr, items_left);
                    let dropped = self.read.wrapping_sub(self.write);
                    self.vec.set_len(len - dropped);
                }
            }
        }
        // Element 0 is always kept, so both cursors start at 1.
        let mut gap = FillGapOnDrop {
            read: 1,
            write: 1,
            vec: self,
        };
        let ptr = gap.vec.as_mut_ptr();
        unsafe {
            while gap.read < len {
                let read_ptr = ptr.add(gap.read);
                let prev_ptr = ptr.add(gap.write.wrapping_sub(1));
                if same_bucket(&mut *read_ptr, &mut *prev_ptr) {
                    // Duplicate: advance `read` before dropping so the
                    // guard never touches the half-dropped slot.
                    gap.read += 1;
                    ptr::drop_in_place(read_ptr);
                } else {
                    let write_ptr = ptr.add(gap.write);
                    ptr::copy(read_ptr, write_ptr, 1);
                    gap.write += 1;
                    gap.read += 1;
                }
            }
            // Success: commit the new length and disarm the guard.
            gap.vec.set_len(gap.write);
            mem::forget(gap);
        }
    }

    /// Appends `value` to the back of the vector, reallocating if needed.
    #[inline]
    pub fn try_push(&mut self, value: T) -> Result<(), Error> {
        if self.len == self.buf.capacity() {
            self.buf.try_reserve_for_push(self.len)?;
        }
        unsafe {
            let end = self.as_mut_ptr().add(self.len);
            ptr::write(end, value);
            self.len += 1;
        }
        Ok(())
    }

    /// Appends `value` only if it fits in the existing capacity; returns
    /// the value back on failure instead of allocating.
    #[inline]
    pub fn push_within_capacity(&mut self, value: T) -> Result<(), T> {
        if self.len == self.buf.capacity() {
            return Err(value);
        }
        unsafe {
            let end = self.as_mut_ptr().add(self.len);
            ptr::write(end, value);
            self.len += 1;
        }
        Ok(())
    }

    /// Removes and returns the last element, or `None` when empty.
    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        if self.len == 0 {
            None
        } else {
            unsafe {
                // Shrink first so the slot is no longer "owned" by the
                // vector when we read it out.
                self.len -= 1;
                Some(ptr::read(self.as_ptr().add(self.len())))
            }
        }
    }

    /// Moves all elements of `other` into `self`, leaving `other` empty.
    #[inline]
    pub fn try_append(&mut self, other: &mut Self) -> Result<(), Error> {
        unsafe {
            self.try_append_elements(other.as_slice() as _)?;
            // The elements were bitwise-moved; forget them in `other`.
            other.set_len(0);
        }
        Ok(())
    }

    /// Appends the elements of `other` by bitwise copy.
    ///
    /// # Safety
    ///
    /// Caller must ensure the copied elements are not also dropped by
    /// their original owner (see `try_append`).
    #[inline]
    unsafe fn try_append_elements(&mut self, other: *const [T]) -> Result<(), Error> {
        let count = other.len();
        self.try_reserve(count)?;
        let len = self.len();
        unsafe { ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count) };
        self.len += count;
        Ok(())
    }

    /// Returns a raw iterator over the elements.
    ///
    /// # Safety
    ///
    /// The caller must not outlive or mutate the vector while the raw
    /// iterator is in use.
    pub unsafe fn raw_iter(&self) -> RawIter<T> {
        RawIter::new(self)
    }

    /// Mutable counterpart of [`Vec::raw_iter`]; same caller obligations.
    pub unsafe fn raw_iter_mut(&mut self) -> RawIterMut<T> {
        RawIterMut::new(self)
    }

    /// Removes the given range from the vector in bulk, returning an
    /// iterator over the removed elements.
    ///
    /// # Panics
    ///
    /// Panics if the range is out of bounds or decreasing.
    pub fn drain<R>(&mut self, range: R) -> Drain<'_, T, A>
    where
        R: RangeBounds<usize>,
    {
        let len = self.len();
        let Range { start, end } = slice_range(range, ..len);
        unsafe {
            // Truncate to the head up front: if the Drain leaks (e.g.
            // `mem::forget`), the drained range and tail are leaked rather
            // than double-dropped.
            self.set_len(start);
            let range_slice = slice::from_raw_parts(self.as_ptr().add(start), end - start);
            Drain {
                tail_start: end,
                tail_len: len - end,
                iter: range_slice.iter(),
                vec: NonNull::from(self),
            }
        }
    }

    /// Removes all elements, keeping the allocated capacity.
    #[inline]
    pub fn clear(&mut self) {
        let elems: *mut [T] = self.as_mut_slice();
        unsafe {
            // Zero the length before dropping so a panicking destructor
            // cannot cause a double-drop of the remaining elements.
            self.len = 0;
            ptr::drop_in_place(elems);
        }
    }

    /// Returns the number of initialized elements.
    #[inline]
    pub const fn len(&self) -> usize {
        self.len
    }

    /// Returns `true` if the vector contains no elements.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Splits the vector at `at`, returning a newly allocated vector with
    /// the elements `[at, len)`.
    ///
    /// # Panics
    ///
    /// Panics if `at > len`.
    #[inline]
    #[must_use = "use `.truncate()` if you don't need the other half"]
    pub fn try_split_off(&mut self, at: usize) -> Result<Self, Error>
    where
        A: Clone,
    {
        #[cold]
        #[inline(never)]
        fn assert_failed(at: usize, len: usize) -> ! {
            panic!("`at` split index (is {at}) should be <= len (is {len})");
        }
        if at > self.len() {
            assert_failed(at, self.len());
        }
        if at == 0 {
            // Fast path: hand the whole buffer to the result and leave an
            // empty vector (with equal capacity) behind.
            let new = Vec::try_with_capacity_in(self.capacity(), self.allocator().clone())?;
            return Ok(mem::replace(self, new));
        }
        let other_len = self.len - at;
        let mut other = Vec::try_with_capacity_in(other_len, self.allocator().clone())?;
        unsafe {
            self.set_len(at);
            other.set_len(other_len);
            ptr::copy_nonoverlapping(self.as_ptr().add(at), other.as_mut_ptr(), other.len());
        }
        Ok(other)
    }

    /// Resizes to `new_len`, filling new slots with values produced by `f`
    /// when growing and dropping the tail when shrinking.
    pub fn try_resize_with<F>(&mut self, new_len: usize, f: F) -> Result<(), Error>
    where
        F: FnMut() -> T,
    {
        let len = self.len();
        if new_len > len {
            // `repeat_with(..).take(n)` has an exact size hint, which
            // `try_extend_trusted` relies on.
            self.try_extend_trusted(iter::repeat_with(f).take(new_len - len))?;
        } else {
            self.truncate(new_len);
        }
        Ok(())
    }

    /// Leaks the vector, returning a mutable slice with unbounded lifetime.
    /// The allocation is never freed.
    #[inline]
    pub fn leak<'a>(self) -> &'a mut [T]
    where
        A: 'a,
    {
        let mut me = ManuallyDrop::new(self);
        unsafe { slice::from_raw_parts_mut(me.as_mut_ptr(), me.len) }
    }

    /// Returns the uninitialized spare capacity as a slice of
    /// `MaybeUninit<T>`.
    #[inline]
    pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit<T>] {
        unsafe {
            slice::from_raw_parts_mut(
                self.as_mut_ptr().add(self.len) as *mut MaybeUninit<T>,
                self.buf.capacity() - self.len,
            )
        }
    }

    /// Returns the initialized elements and the spare capacity as two
    /// disjoint mutable slices.
    #[inline]
    pub fn split_at_spare_mut(&mut self) -> (&mut [T], &mut [MaybeUninit<T>]) {
        let (init, spare, _) = unsafe { self.split_at_spare_mut_with_len() };
        (init, spare)
    }

    /// Like `split_at_spare_mut`, but additionally hands out a mutable
    /// reference to `len` so callers can commit writes into the spare
    /// region incrementally.
    ///
    /// # Safety
    ///
    /// The caller must only increase `len` to cover slots it has actually
    /// initialized in the spare slice.
    unsafe fn split_at_spare_mut_with_len(
        &mut self,
    ) -> (&mut [T], &mut [MaybeUninit<T>], &mut usize) {
        let ptr = self.as_mut_ptr();
        let spare_ptr = unsafe { ptr.add(self.len) };
        let spare_ptr = spare_ptr.cast::<MaybeUninit<T>>();
        let spare_len = self.buf.capacity() - self.len;
        unsafe {
            // SAFETY: the two slices are disjoint ([0, len) vs
            // [len, capacity)), so the aliasing rules are upheld.
            let initialized = slice::from_raw_parts_mut(ptr, self.len);
            let spare = slice::from_raw_parts_mut(spare_ptr, spare_len);
            (initialized, spare, &mut self.len)
        }
    }

    /// Replaces the elements in `range` with the contents of
    /// `replace_with`, splicing the remainder back together.
    #[inline]
    pub(crate) fn try_splice_in_place<R, I>(
        &mut self,
        range: R,
        replace_with: I,
    ) -> Result<(), Error>
    where
        R: RangeBounds<usize>,
        I: IntoIterator<Item = T>,
    {
        let mut drain = self.drain(range);
        let mut iter = replace_with.into_iter();
        self::splice::splice(&mut drain, &mut iter)
    }

    /// Extends the vector from an iterator whose size hint is exact
    /// (TrustedLen-style): reserve once, then write straight into the
    /// buffer without per-element capacity checks.
    fn try_extend_trusted(&mut self, iterator: impl iter::Iterator<Item = T>) -> Result<(), Error> {
        let (low, high) = iterator.size_hint();
        if let Some(additional) = high {
            debug_assert_eq!(
                low,
                additional,
                "TrustedLen iterator's size hint is not exact: {:?}",
                (low, high)
            );
            self.try_reserve(additional)?;
            unsafe {
                let ptr = self.as_mut_ptr();
                // SetLenOnDrop commits the length even if the iterator
                // panics part-way, so already-written elements are kept.
                let mut local_len = SetLenOnDrop::new(&mut self.len);
                for element in iterator {
                    ptr::write(ptr.add(local_len.current_len()), element);
                    local_len.increment_len(1);
                }
            }
            Ok(())
        } else {
            // No upper bound means more than `usize::MAX` elements.
            Err(Error::CapacityOverflow)
        }
    }
}
impl<T, A: Allocator> Vec<T, A>
where
    T: TryClone,
{
    /// Resizes to `new_len`, filling with fallible clones of `value` when
    /// growing and dropping the tail when shrinking.
    pub fn try_resize(&mut self, new_len: usize, value: T) -> Result<(), Error> {
        let len = self.len();
        if new_len > len {
            self.try_extend_with(new_len - len, value)?;
        } else {
            self.truncate(new_len);
        }
        Ok(())
    }

    /// Appends fallible clones of every element of `other`.
    pub fn try_extend_from_slice(&mut self, other: &[T]) -> Result<(), Error> {
        try_extend_desugared(self, other.iter())
    }

    /// Clones the elements in `src` (a range within `self`) and appends
    /// them to the end of the vector.
    pub fn try_extend_from_within<R>(&mut self, src: R) -> Result<(), Error>
    where
        R: RangeBounds<usize>,
    {
        let range = slice_range(src, ..self.len());
        self.try_reserve(range.len())?;
        unsafe {
            let (this, spare, len) = self.split_at_spare_mut_with_len();
            // SAFETY: `range` was validated against `self.len()` by
            // `slice_range` above.
            let to_clone = this.get_unchecked(range);
            // `len` is bumped after each successful write, so a clone
            // failure mid-loop leaves the vector valid with the elements
            // cloned so far retained.
            for (src, dst) in iter::zip(to_clone, spare) {
                dst.write(src.try_clone()?);
                *len += 1
            }
        }
        Ok(())
    }
}
impl<T, A: Allocator, const N: usize> Vec<[T; N], A> {
    /// Flattens a `Vec<[T; N]>` into a `Vec<T>` in place, reusing the
    /// existing allocation without moving any elements.
    pub fn into_flattened(self) -> Vec<T, A> {
        let (ptr, len, cap, alloc) = self.into_raw_parts_with_alloc();
        let (new_len, new_cap) = if T::IS_ZST {
            // For ZSTs only the length matters; it must not overflow.
            (len.checked_mul(N).expect("vec len overflow"), usize::MAX)
        } else {
            // `len * N` and `cap * N` cannot overflow here: the existing
            // allocation already holds `cap` arrays of `N` elements, so
            // the total element count fits in `usize`.
            (len.wrapping_mul(N), cap.wrapping_mul(N))
        };
        // SAFETY: `[T; N]` has the same layout as `N` consecutive `T`s.
        unsafe { Vec::<T, A>::from_raw_parts_in(ptr.cast(), new_len, new_cap, alloc) }
    }
}
impl<T, A: Allocator> Vec<T, A>
where
    T: TryClone,
{
    /// Appends `n` values: `n - 1` fallible clones of `value` followed by
    /// `value` itself, saving one clone.
    fn try_extend_with(&mut self, n: usize, value: T) -> Result<(), Error> {
        self.try_reserve(n)?;
        unsafe {
            let mut ptr = self.as_mut_ptr().add(self.len());
            // SetLenOnDrop commits the length even if `try_clone` fails or
            // panics part-way, keeping the already-written elements.
            let mut local_len = SetLenOnDrop::new(&mut self.len);
            // Fill the first `n - 1` slots with clones.
            for _ in 1..n {
                ptr::write(ptr, value.try_clone()?);
                ptr = ptr.add(1);
                local_len.increment_len(1);
            }
            // Move `value` itself into the final slot (skipped for n == 0).
            if n > 0 {
                ptr::write(ptr, value);
                local_len.increment_len(1);
            }
        }
        Ok(())
    }
}
impl<T, A: Allocator> Vec<T, A>
where
    T: PartialEq,
{
    /// Removes consecutive repeated elements, keeping the first of each
    /// run of equal values.
    #[inline]
    pub fn dedup(&mut self) {
        // Two neighbours are duplicates exactly when they compare equal.
        self.dedup_by(|current, previous| current == previous)
    }
}
impl<T, A: Allocator> ops::Deref for Vec<T, A> {
    type Target = [T];

    /// Derefs to the initialized prefix of the buffer as a slice.
    #[inline]
    fn deref(&self) -> &[T] {
        // SAFETY: the first `self.len` elements are always initialized.
        unsafe { slice::from_raw_parts(self.as_ptr(), self.len) }
    }
}
impl<T, A: Allocator> ops::DerefMut for Vec<T, A> {
    /// Mutable counterpart of `Deref`: the initialized prefix as a slice.
    #[inline]
    fn deref_mut(&mut self) -> &mut [T] {
        // SAFETY: the first `self.len` elements are always initialized.
        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) }
    }
}
impl<T, A: Allocator + Clone> TryClone for Vec<T, A>
where
    T: TryClone,
{
    /// Produces a deep copy in a clone of this vector's allocator,
    /// propagating any clone or allocation failure.
    fn try_clone(&self) -> Result<Self, Error> {
        crate::slice::to_vec(self, self.allocator().clone())
    }
}
#[cfg(test)]
impl<T, A: Allocator + Clone> Clone for Vec<T, A>
where
    T: TryClone,
{
    /// Test-only infallible clone: aborts on allocation/clone failure so
    /// tests can use the standard `Clone` API.
    fn clone(&self) -> Self {
        self.try_clone().abort()
    }
}
impl<T: Hash, A: Allocator> Hash for Vec<T, A> {
    /// Hashes exactly like the slice of the vector's elements, so a `Vec`
    /// and a `&[T]` with equal contents hash identically.
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.as_slice().hash(state)
    }
}
impl<T, I: SliceIndex<[T]>, A: Allocator> Index<I> for Vec<T, A> {
    type Output = I::Output;

    /// Delegates indexing to the underlying slice of elements.
    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        self.as_slice().index(index)
    }
}
impl<T, I: SliceIndex<[T]>, A: Allocator> IndexMut<I> for Vec<T, A> {
    /// Delegates mutable indexing to the underlying slice of elements.
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        self.as_mut_slice().index_mut(index)
    }
}
impl<T, A: Allocator> IntoIterator for Vec<T, A> {
    type Item = T;
    type IntoIter = IntoIter<T, A>;

    /// Consumes the vector, producing an iterator over its elements by
    /// value.
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        // Offsets a pointer by a raw byte count without provenance checks;
        // used for ZSTs, where the element "address" only encodes a count.
        const fn wrapping_byte_add<T>(this: *mut T, count: usize) -> *mut T {
            this.cast::<u8>().wrapping_add(count) as *mut T
        }
        unsafe {
            // Prevent `self`'s destructor from running: ownership of both
            // the buffer and the allocator moves into the iterator.
            let mut me = ManuallyDrop::new(self);
            let alloc = ManuallyDrop::new(ptr::read(me.allocator()));
            let begin = me.as_mut_ptr();
            let end = if T::IS_ZST {
                // For ZSTs `end - begin` encodes the remaining length.
                wrapping_byte_add(begin, me.len())
            } else {
                begin.add(me.len()) as *const T
            };
            let cap = me.buf.capacity();
            IntoIter {
                buf: NonNull::new_unchecked(begin),
                phantom: PhantomData,
                cap,
                alloc,
                ptr: begin,
                end,
            }
        }
    }
}
impl<'a, T, A: Allocator> IntoIterator for &'a Vec<T, A> {
    type Item = &'a T;
    type IntoIter = slice::Iter<'a, T>;

    /// Iterates over shared references to the elements.
    fn into_iter(self) -> Self::IntoIter {
        self.as_slice().iter()
    }
}
impl<'a, T, A: Allocator> IntoIterator for &'a mut Vec<T, A> {
    type Item = &'a mut T;
    type IntoIter = slice::IterMut<'a, T>;

    /// Iterates over mutable references to the elements.
    fn into_iter(self) -> Self::IntoIter {
        self.as_mut_slice().iter_mut()
    }
}
/// Shared push-loop behind the clone-based extend operations.
///
/// Reserves based on the iterator's lower size hint whenever the vector
/// runs out of capacity, then writes each cloned element directly into the
/// spare capacity.
fn try_extend_desugared<'a, T, A: Allocator>(
    this: &mut Vec<T, A>,
    mut iterator: impl Iterator<Item = &'a T>,
) -> Result<(), Error>
where
    T: 'a + TryClone,
{
    while let Some(element) = iterator.next() {
        let len = this.len();
        if len == this.capacity() {
            // `+ 1` accounts for `element` itself on top of whatever the
            // iterator still promises; saturating to avoid overflow.
            let (lower, _) = iterator.size_hint();
            this.try_reserve(lower.saturating_add(1))?;
        }
        unsafe {
            ptr::write(this.as_mut_ptr().add(len), element.try_clone()?);
            // Commit after each successful write so a failed clone leaves
            // the vector valid with everything written so far retained.
            this.set_len(len + 1);
        }
    }
    Ok(())
}
impl<T, A1, A2> PartialOrd<Vec<T, A2>> for Vec<T, A1>
where
    T: PartialOrd,
    A1: Allocator,
    A2: Allocator,
{
    /// Lexicographic comparison over the element slices; the allocators
    /// play no part in the ordering.
    #[inline]
    fn partial_cmp(&self, other: &Vec<T, A2>) -> Option<Ordering> {
        self.as_slice().partial_cmp(other.as_slice())
    }
}
// Equality is element-wise via the slice `PartialEq`, so it is a total
// equivalence relation whenever `T: Eq`.
impl<T: Eq, A: Allocator> Eq for Vec<T, A> {}
impl<T: Ord, A: Allocator> Ord for Vec<T, A> {
    /// Total lexicographic ordering over the element slices.
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        self.as_slice().cmp(other.as_slice())
    }
}
#[cfg(rune_nightly)]
// `#[may_dangle]` (nightly) lets a `Vec<&'a T>` outlive `'a` as long as
// dropping it does not access the references.
unsafe impl<#[may_dangle] T, A: Allocator> Drop for Vec<T, A> {
    fn drop(&mut self) {
        unsafe {
            // Drop only the initialized elements here; the buffer itself
            // is released by `RawVec` — presumably in its own `Drop`.
            ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), self.len))
        }
    }
}
#[cfg(not(rune_nightly))]
impl<T, A: Allocator> Drop for Vec<T, A> {
    fn drop(&mut self) {
        unsafe {
            // Drop only the initialized elements here; the buffer itself
            // is released by `RawVec` — presumably in its own `Drop`.
            ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), self.len))
        }
    }
}
impl<T> Default for Vec<T> {
    /// Creates an empty vector; no allocation is performed.
    fn default() -> Vec<T> {
        Self::new()
    }
}
impl<T: fmt::Debug, A: Allocator> fmt::Debug for Vec<T, A> {
    /// Formats like a slice (`[a, b, c]`), hiding the allocator.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.as_slice().fmt(f)
    }
}
impl<T, A: Allocator> Borrow<[T]> for Vec<T, A> {
    /// Borrows the vector as its element slice, consistent with its
    /// `Hash`/`Eq`/`Ord` behaviour.
    #[inline]
    fn borrow(&self) -> &[T] {
        self.as_slice()
    }
}
impl<T, A: Allocator> AsRef<Vec<T, A>> for Vec<T, A> {
    /// Identity conversion, letting generic code accept
    /// `impl AsRef<Vec<T, A>>`.
    fn as_ref(&self) -> &Vec<T, A> {
        self
    }
}
impl<T, A: Allocator> AsMut<Vec<T, A>> for Vec<T, A> {
    /// Identity conversion, letting generic code accept
    /// `impl AsMut<Vec<T, A>>`.
    fn as_mut(&mut self) -> &mut Vec<T, A> {
        self
    }
}
impl<T, A: Allocator> AsRef<[T]> for Vec<T, A> {
    /// Views the vector as its element slice (via `Deref` coercion).
    fn as_ref(&self) -> &[T] {
        self
    }
}
impl<T, A: Allocator> AsMut<[T]> for Vec<T, A> {
    /// Views the vector as its mutable element slice (via `DerefMut`
    /// coercion).
    fn as_mut(&mut self) -> &mut [T] {
        self
    }
}
impl<T> TryFrom<&[T]> for Vec<T>
where
    T: TryClone,
{
    type Error = Error;

    /// Fallibly clones a slice into a freshly allocated vector.
    fn try_from(values: &[T]) -> Result<Self, Error> {
        // Reserve the exact length up front so the pushes below can never
        // trigger a further reallocation.
        let mut out = Vec::try_with_capacity(values.len())?;
        for item in values.iter() {
            out.try_push(item.try_clone()?)?;
        }
        Ok(out)
    }
}
impl<T, const N: usize> TryFrom<[T; N]> for Vec<T> {
    type Error = Error;

    /// Moves an array into a vector; fails only if allocating `N` slots
    /// fails.
    fn try_from(arr: [T; N]) -> Result<Self, Error> {
        let mut out = Vec::try_with_capacity(arr.len())?;
        // The elements are bitwise-moved out below, so the array's own
        // destructor must not run.
        let arr = ManuallyDrop::new(arr);
        if !<T>::IS_ZST {
            // SAFETY: `out` has capacity for `N` elements and the two
            // buffers cannot overlap.
            unsafe {
                ptr::copy_nonoverlapping(arr.as_ptr(), out.as_mut_ptr(), N);
            }
        }
        // SAFETY: `N` elements were just initialized (ZSTs need no copy).
        unsafe {
            out.set_len(N);
        }
        Ok(out)
    }
}
#[cfg(feature = "alloc")]
impl<T> TryFrom<::rust_alloc::vec::Vec<T>> for Vec<T, Global> {
    type Error = Error;

    /// Adopts a std `Vec`'s buffer without copying any elements.
    fn try_from(vec: ::rust_alloc::vec::Vec<T>) -> Result<Self, Error> {
        // Keep the std Vec from freeing the buffer we are adopting.
        let mut vec = ManuallyDrop::new(vec);
        let ptr = vec.as_mut_ptr();
        let length = vec.len();
        let capacity = vec.capacity();
        // Register the adopted allocation with this crate's `Global`
        // accounting. NOTE(review): assumes `take` only performs
        // accounting and may fail with a limit error — verify against
        // `Global::take`.
        if let Ok(layout) = Layout::array::<T>(capacity) {
            if let Err(error) = Global.take(layout) {
                // Bug fix: the original propagated the error with `?`
                // while `vec` was still wrapped in `ManuallyDrop`, leaking
                // the buffer and every element on this path. Hand the
                // buffer back to the std Vec so it is dropped normally.
                drop(ManuallyDrop::into_inner(vec));
                return Err(error);
            }
        }
        // SAFETY: the raw parts describe a live allocation we now own.
        unsafe { Ok(Self::from_raw_parts_in(ptr, length, capacity, Global)) }
    }
}
impl<T, A: Allocator, const N: usize> TryFrom<Vec<T, A>> for [T; N] {
    type Error = Vec<T, A>;

    /// Converts the vector into an array of exactly `N` elements,
    /// returning the vector unchanged when the length does not match.
    fn try_from(mut vec: Vec<T, A>) -> Result<[T; N], Vec<T, A>> {
        if vec.len() != N {
            return Err(vec);
        }
        // SAFETY: relinquish ownership of the elements first, so the
        // vector's destructor (which still frees the buffer) cannot
        // double-drop what we read out below.
        unsafe { vec.set_len(0) };
        // SAFETY: exactly `N` initialized elements sit at the start of
        // the buffer, matching the layout of `[T; N]`.
        let array = unsafe { ptr::read(vec.as_ptr() as *const [T; N]) };
        Ok(array)
    }
}
impl<T, A: Allocator> From<Box<[T], A>> for Vec<T, A> {
    /// Converts a boxed slice into a vector, reusing its allocation.
    fn from(s: Box<[T], A>) -> Self {
        crate::slice::into_vec(s)
    }
}
impl<T, A: Allocator> TryFromIteratorIn<T, A> for Vec<T, A> {
    /// Collects `iter` into a vector backed by `alloc`, propagating the
    /// first allocation failure.
    fn try_from_iter_in<I>(iter: I, alloc: A) -> Result<Self, Error>
    where
        I: IntoIterator<Item = T>,
    {
        let mut out = Vec::new_in(alloc);
        for item in iter {
            out.try_push(item)?;
        }
        Ok(out)
    }
}
#[cfg(test)]
impl<T> FromIterator<T> for Vec<T> {
    /// Test-only infallible `collect`: aborts on allocation failure so
    /// tests can use the standard iterator API.
    fn from_iter<I>(iter: I) -> Self
    where
        I: IntoIterator<Item = T>,
    {
        Self::try_from_iter_in(iter, Global).abort()
    }
}
impl<T, A: Allocator> TryExtend<T> for Vec<T, A> {
    /// Extends the vector from `iter`, dispatching through `SpecExtend`
    /// so specialized iterator types can take a faster path.
    #[inline]
    fn try_extend<I: IntoIterator<Item = T>>(&mut self, iter: I) -> Result<(), Error> {
        <Self as SpecExtend<T, I::IntoIter>>::spec_extend(self, iter.into_iter())
    }
}
#[cfg(feature = "std")]
/// Adapts this crate's allocation `Error` into a `std::io::Error` for the
/// `io::Write` implementation below.
fn io_err(error: Error) -> std::io::Error {
    std::io::Error::other(error)
}
#[cfg(feature = "std")]
impl std::io::Write for Vec<u8> {
    /// Appends `buf` in full, mapping allocation failure to an I/O error.
    /// Never performs a short write.
    #[inline]
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        self.try_extend_from_slice(buf).map_err(io_err)?;
        Ok(buf.len())
    }

    /// Appends every buffer in `bufs`, reserving the total length once up
    /// front so at most one reallocation occurs.
    #[inline]
    fn write_vectored(&mut self, bufs: &[std::io::IoSlice<'_>]) -> std::io::Result<usize> {
        let len = bufs.iter().map(|b| b.len()).sum();
        self.try_reserve(len).map_err(io_err)?;
        for buf in bufs {
            self.try_extend_from_slice(buf).map_err(io_err)?;
        }
        Ok(len)
    }

    /// Same as `write`, discarding the (always complete) written length.
    #[inline]
    fn write_all(&mut self, buf: &[u8]) -> std::io::Result<()> {
        self.try_extend_from_slice(buf).map_err(io_err)?;
        Ok(())
    }

    /// Writing into memory requires no flushing.
    #[inline]
    fn flush(&mut self) -> std::io::Result<()> {
        Ok(())
    }
}