shared_vector/shared.rs
use core::fmt::Debug;
use core::ops::{Deref, DerefMut, Index, IndexMut};
use core::ptr::NonNull;
use core::{mem, ptr};
use core::sync::atomic::Ordering;

use crate::raw;
use crate::alloc::{AllocError, Allocator, Global};
use crate::raw::{BufferSize, HeaderBuffer};
use crate::vector::{Vector, RawVector};
use crate::{grow_amortized, AtomicRefCount, DefaultRefCount, RefCount};

/// A heap allocated, atomically reference counted, immutable contiguous buffer containing elements of type `T`.
///
/// <svg width="280" height="120" viewBox="0 0 74.08 31.75" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns="http://www.w3.org/2000/svg"><defs><linearGradient id="a"><stop offset="0" stop-color="#491c9c"/><stop offset="1" stop-color="#d54b27"/></linearGradient><linearGradient xlink:href="#a" id="b" gradientUnits="userSpaceOnUse" x1="6.27" y1="34.86" x2="87.72" y2="13.24" gradientTransform="translate(-2.64 -18.48)"/></defs><rect width="10.57" height="10.66" x="2.66" y="18.48" ry="1.37" fill="#3dbdaa"/><rect width="10.57" height="10.66" x="15.88" y="18.52" ry="1.37" fill="#3dbdaa"/><rect width="10.57" height="10.66" x="29.11" y="18.52" ry="1.37" fill="#3dbdaa"/><circle cx="33.87" cy="18.56" r=".79" fill="#666"/><circle cx="7.41" cy="18.56" r=".79" fill="#666"/><circle cx="20.64" cy="18.56" r=".79" fill="#666"/><path d="M7.38 18.54c.03-2.63-3.41-2.66-3.41-5.31" fill="none" stroke="#999" stroke-width=".86" stroke-linecap="round"/><path d="M20.64 18.56c0-2.91-15.35-1.36-15.35-5.33" fill="none" stroke="#999" stroke-width=".86" stroke-linecap="round"/><path d="M33.87 18.56c0-3.97-27.26-2.68-27.26-5.33" fill="none" stroke="#999" stroke-width=".86" stroke-linecap="round"/><rect width="68.79" height="10.58" x="2.65" y="2.68" ry="1.37" fill="url(#b)"/><rect width="15.35" height="9.51" x="3.18" y="3.21" ry=".9" fill="#78a2d4"/><rect width="9.26" height="9.51" x="19.85" y="3.2" ry=".9" fill="#eaa577"/><rect width="9.26" height="9.51" x="29.64" y="3.22" ry=".9" fill="#eaa577"/><rect width="9.26" height="9.51" x="39.43" y="3.22" ry=".9" fill="#eaa577"/><rect width="9.26" height="9.51" x="49.22" y="3.21" ry=".9" fill="#eaa577"/><circle cx="62.84" cy="7.97" r=".66" fill="#eaa577"/><circle cx="64.7" cy="7.97" r=".66" fill="#eaa577"/><circle cx="66.55" cy="7.97" r=".66" fill="#eaa577"/></svg>
///
/// See [RefCountedVector].
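///
/// A minimal sketch of sharing a buffer across threads (assuming this crate is
/// compiled under the name `shared_vector`, with `std` available for the doc test):
///
/// ```
/// use shared_vector::AtomicSharedVector;
///
/// let v: AtomicSharedVector<u32> = AtomicSharedVector::from_slice(&[1, 2, 3]);
/// let v2 = v.new_ref(); // Cheap: only bumps the atomic reference count.
/// std::thread::spawn(move || {
///     assert_eq!(v2.as_slice(), &[1, 2, 3]);
/// })
/// .join()
/// .unwrap();
/// ```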
pub type AtomicSharedVector<T, A = Global> = RefCountedVector<T, AtomicRefCount, A>;

/// A heap allocated, reference counted, immutable contiguous buffer containing elements of type `T`.
///
/// <svg width="280" height="120" viewBox="0 0 74.08 31.75" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns="http://www.w3.org/2000/svg"><defs><linearGradient id="a"><stop offset="0" stop-color="#491c9c"/><stop offset="1" stop-color="#d54b27"/></linearGradient><linearGradient xlink:href="#a" id="b" gradientUnits="userSpaceOnUse" x1="6.27" y1="34.86" x2="87.72" y2="13.24" gradientTransform="translate(-2.64 -18.48)"/></defs><rect width="10.57" height="10.66" x="2.66" y="18.48" ry="1.37" fill="#3dbdaa"/><rect width="10.57" height="10.66" x="15.88" y="18.52" ry="1.37" fill="#3dbdaa"/><rect width="10.57" height="10.66" x="29.11" y="18.52" ry="1.37" fill="#3dbdaa"/><circle cx="33.87" cy="18.56" r=".79" fill="#666"/><circle cx="7.41" cy="18.56" r=".79" fill="#666"/><circle cx="20.64" cy="18.56" r=".79" fill="#666"/><path d="M7.38 18.54c.03-2.63-3.41-2.66-3.41-5.31" fill="none" stroke="#999" stroke-width=".86" stroke-linecap="round"/><path d="M20.64 18.56c0-2.91-15.35-1.36-15.35-5.33" fill="none" stroke="#999" stroke-width=".86" stroke-linecap="round"/><path d="M33.87 18.56c0-3.97-27.26-2.68-27.26-5.33" fill="none" stroke="#999" stroke-width=".86" stroke-linecap="round"/><rect width="68.79" height="10.58" x="2.65" y="2.68" ry="1.37" fill="url(#b)"/><rect width="15.35" height="9.51" x="3.18" y="3.21" ry=".9" fill="#78a2d4"/><rect width="9.26" height="9.51" x="19.85" y="3.2" ry=".9" fill="#eaa577"/><rect width="9.26" height="9.51" x="29.64" y="3.22" ry=".9" fill="#eaa577"/><rect width="9.26" height="9.51" x="39.43" y="3.22" ry=".9" fill="#eaa577"/><rect width="9.26" height="9.51" x="49.22" y="3.21" ry=".9" fill="#eaa577"/><circle cx="62.84" cy="7.97" r=".66" fill="#eaa577"/><circle cx="64.7" cy="7.97" r=".66" fill="#eaa577"/><circle cx="66.55" cy="7.97" r=".66" fill="#eaa577"/></svg>
///
/// See [RefCountedVector].
pub type SharedVector<T, A = Global> = RefCountedVector<T, DefaultRefCount, A>;

/// A heap allocated, reference counted, immutable contiguous buffer containing elements of type `T`.
///
/// <svg width="280" height="120" viewBox="0 0 74.08 31.75" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns="http://www.w3.org/2000/svg"><defs><linearGradient id="a"><stop offset="0" stop-color="#491c9c"/><stop offset="1" stop-color="#d54b27"/></linearGradient><linearGradient xlink:href="#a" id="b" gradientUnits="userSpaceOnUse" x1="6.27" y1="34.86" x2="87.72" y2="13.24" gradientTransform="translate(-2.64 -18.48)"/></defs><rect width="10.57" height="10.66" x="2.66" y="18.48" ry="1.37" fill="#3dbdaa"/><rect width="10.57" height="10.66" x="15.88" y="18.52" ry="1.37" fill="#3dbdaa"/><rect width="10.57" height="10.66" x="29.11" y="18.52" ry="1.37" fill="#3dbdaa"/><circle cx="33.87" cy="18.56" r=".79" fill="#666"/><circle cx="7.41" cy="18.56" r=".79" fill="#666"/><circle cx="20.64" cy="18.56" r=".79" fill="#666"/><path d="M7.38 18.54c.03-2.63-3.41-2.66-3.41-5.31" fill="none" stroke="#999" stroke-width=".86" stroke-linecap="round"/><path d="M20.64 18.56c0-2.91-15.35-1.36-15.35-5.33" fill="none" stroke="#999" stroke-width=".86" stroke-linecap="round"/><path d="M33.87 18.56c0-3.97-27.26-2.68-27.26-5.33" fill="none" stroke="#999" stroke-width=".86" stroke-linecap="round"/><rect width="68.79" height="10.58" x="2.65" y="2.68" ry="1.37" fill="url(#b)"/><rect width="15.35" height="9.51" x="3.18" y="3.21" ry=".9" fill="#78a2d4"/><rect width="9.26" height="9.51" x="19.85" y="3.2" ry=".9" fill="#eaa577"/><rect width="9.26" height="9.51" x="29.64" y="3.22" ry=".9" fill="#eaa577"/><rect width="9.26" height="9.51" x="39.43" y="3.22" ry=".9" fill="#eaa577"/><rect width="9.26" height="9.51" x="49.22" y="3.21" ry=".9" fill="#eaa577"/><circle cx="62.84" cy="7.97" r=".66" fill="#eaa577"/><circle cx="64.7" cy="7.97" r=".66" fill="#eaa577"/><circle cx="66.55" cy="7.97" r=".66" fill="#eaa577"/></svg>
///
/// Similar in principle to `Arc<[T]>`. It can be converted into a `Vector<T>` for
/// free if there is only a single reference to the `RefCountedVector` alive.
///
/// # Copy-on-write "Immutable" vectors
///
/// This type provides mutable methods like `push` and `pop`. These internally allocate a new buffer
/// if the buffer is not unique (there is more than one reference to it). When there is a single
/// reference, these mutable operations simply update the existing buffer.
///
/// In other words, this type behaves like an [immutable (or persistent) data structure](https://en.wikipedia.org/wiki/Persistent_data_structure).
/// Actual mutability only happens under the hood, as an optimization, when a single reference exists.
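///
/// A minimal sketch of the copy-on-write behavior, via the `SharedVector` alias
/// (assuming this crate is compiled under the name `shared_vector`):
///
/// ```
/// use shared_vector::SharedVector;
///
/// let mut a: SharedVector<u32> = SharedVector::from_slice(&[1, 2, 3]);
/// let b = a.new_ref(); // No allocation, just a new reference.
/// assert!(a.ptr_eq(&b));
///
/// // `a` is no longer unique, so this push copies the storage first.
/// a.push(4);
/// assert!(!a.ptr_eq(&b));
/// assert_eq!(a.as_slice(), &[1, 2, 3, 4]);
/// assert_eq!(b.as_slice(), &[1, 2, 3]);
/// ```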
#[repr(transparent)]
pub struct RefCountedVector<T, R: RefCount, A: Allocator = Global> {
    pub(crate) inner: HeaderBuffer<T, R, A>,
}

impl<T, R: RefCount> RefCountedVector<T, R, Global> {
    /// Creates an empty shared buffer without allocating memory.
    #[inline]
    pub fn new() -> RefCountedVector<T, R, Global> {
        Self::try_with_capacity_in(0, Global).unwrap()
    }

    /// Constructs a new, empty vector with at least the specified capacity.
    #[inline]
    pub fn with_capacity(cap: usize) -> RefCountedVector<T, R, Global> {
        Self::try_with_capacity_in(cap, Global).unwrap()
    }

    /// Clones the contents of a slice into a new vector.
    #[inline]
    pub fn from_slice(slice: &[T]) -> RefCountedVector<T, R, Global>
    where
        T: Clone,
    {
        Self::try_from_slice_in(slice, Global).unwrap()
    }
}

impl<T, R: RefCount, A: Allocator> RefCountedVector<T, R, A> {
    /// Creates an empty vector without allocating memory.
    pub fn new_in(allocator: A) -> Self {
        Self::try_with_capacity_in(0, allocator).unwrap()
    }

    /// Creates an empty pre-allocated vector with a given storage capacity.
    pub fn with_capacity_in(cap: usize, allocator: A) -> Self {
        Self::try_with_capacity_in(cap, allocator).unwrap()
    }

    /// Tries to construct a new, empty vector with at least the specified capacity.
    #[inline]
    pub fn try_with_capacity_in(cap: usize, allocator: A) -> Result<Self, AllocError> {
        raw::assert_ref_count_layout::<R>();
        unsafe {
            let (ptr, cap) = raw::allocate_header_buffer::<T, A>(cap, &allocator)?;

            ptr::write(
                ptr.cast().as_ptr(),
                raw::Header {
                    vec: raw::VecHeader {
                        cap: cap as BufferSize,
                        len: 0,
                    },
                    ref_count: R::new(1),
                    allocator,
                },
            );

            Ok(RefCountedVector {
                inner: HeaderBuffer::from_raw(ptr.cast()),
            })
        }
    }

    /// Tries to clone the contents of a slice into a new vector.
    pub fn try_from_slice_in(slice: &[T], allocator: A) -> Result<Self, AllocError>
    where
        T: Clone,
    {
        let mut v = Self::try_with_capacity_in(slice.len(), allocator)?;

        unsafe {
            raw::extend_from_slice_assuming_capacity(v.data_ptr(), v.vec_header_mut(), slice);
        }

        Ok(v)
    }

115
116 /// Returns `true` if the vector contains no elements.
117 #[inline]
118 pub fn is_empty(&self) -> bool {
119 self.vec_header().len == 0
120 }
121
122 /// Returns the number of elements in the vector, also referred to as its ‘length’.
123 #[inline]
124 pub fn len(&self) -> usize {
125 self.vec_header().len as usize
126 }
127
128 /// Returns the total number of elements the vector can hold without reallocating.
129 #[inline]
130 pub fn capacity(&self) -> usize {
131 self.vec_header().cap as usize
132 }
133
    /// Returns the number of elements that can be added without reallocating.
    #[inline]
    pub fn remaining_capacity(&self) -> usize {
        let h = self.vec_header();
        (h.cap - h.len) as usize
    }

    /// Returns a reference to the underlying allocator.
    pub fn allocator(&self) -> &A {
        self.inner.allocator()
    }

    /// Creates a new reference without allocating.
    ///
    /// Equivalent to `Clone::clone`.
    #[inline]
    pub fn new_ref(&self) -> Self {
        unsafe {
            self.inner.as_ref().ref_count.add_ref();
            RefCountedVector {
                inner: HeaderBuffer::from_raw(self.inner.header),
            }
        }
    }

    /// Extracts a slice containing the entire vector.
    #[inline]
    pub fn as_slice(&self) -> &[T] {
        unsafe {
            core::slice::from_raw_parts(self.data_ptr(), self.len())
        }
    }

    /// Returns true if this is the only existing handle to the buffer.
    ///
    /// When this function returns true, mutable methods and conversion to a `Vector`
    /// are very fast (they do not involve additional memory allocations or copies).
    #[inline]
    pub fn is_unique(&self) -> bool {
        unsafe { self.inner.as_ref().ref_count.get() == 1 }
    }

    /// Clears the vector, removing all values.
    pub fn clear(&mut self)
    where
        A: Clone,
    {
        if self.is_unique() {
            unsafe {
                raw::clear(self.data_ptr(), self.vec_header_mut());
            }
            return;
        }

        *self =
            Self::try_with_capacity_in(self.capacity(), self.inner.allocator().clone()).unwrap();
    }

    /// Returns true if the two vectors share the same underlying storage.
    pub fn ptr_eq(&self, other: &Self) -> bool {
        self.inner.header == other.inner.header
    }

    /// Allocates a duplicate of this buffer (infallible).
    pub fn copy_buffer(&self) -> Self
    where
        T: Copy,
        A: Clone,
    {
        self.try_copy_buffer().unwrap()
    }

    /// Tries to allocate a duplicate of this buffer.
    pub fn try_copy_buffer(&self) -> Result<Self, AllocError>
    where
        T: Copy,
        A: Clone,
    {
        unsafe {
            let header = self.inner.as_ref();
            let len = header.vec.len;
            let cap = header.vec.cap;

            if len > cap {
                return Err(AllocError);
            }

            let allocator = header.allocator.clone();
            let mut clone = Self::try_with_capacity_in(cap as usize, allocator)?;

            if len > 0 {
                core::ptr::copy_nonoverlapping(self.data_ptr(), clone.data_ptr(), len as usize);
                clone.vec_header_mut().len = len;
            }

            Ok(clone)
        }
    }

    #[inline]
    pub fn data_ptr(&self) -> *mut T {
        unsafe {
            (self.inner.as_ptr() as *mut u8).add(raw::header_size::<raw::Header<R, A>, T>()) as *mut T
        }
    }

    // SAFETY: call this only if the vector is unique.
    pub(crate) unsafe fn vec_header_mut(&mut self) -> &mut raw::VecHeader {
        &mut self.inner.as_mut().vec
    }

    pub(crate) fn vec_header(&self) -> &raw::VecHeader {
        unsafe { &self.inner.as_ref().vec }
    }
}

/// Mutable methods that can cause the vector to be cloned, and therefore require both the items
/// and the allocator to be cloneable.
impl<T: Clone, R: RefCount, A: Allocator + Clone> RefCountedVector<T, R, A> {
    /// Converts this `RefCountedVector` into a unique `Vector`, allocating a new copy if there
    /// are other references.
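    ///
    /// A minimal sketch (assuming this crate is compiled under the name `shared_vector`,
    /// exporting both `SharedVector` and `Vector`):
    ///
    /// ```
    /// use shared_vector::{SharedVector, Vector};
    ///
    /// let shared: SharedVector<u32> = SharedVector::from_slice(&[1, 2, 3]);
    /// // `shared` is unique here, so no copy is made.
    /// let unique: Vector<u32> = shared.into_unique();
    /// assert_eq!(unique.as_slice(), &[1, 2, 3]);
    /// ```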
    #[inline]
    pub fn into_unique(mut self) -> Vector<T, A> {
        self.ensure_unique();

        unsafe {
            let data = NonNull::new_unchecked(self.data_ptr());
            let header = self.vec_header().clone();
            let allocator = self.inner.as_ref().allocator.clone();

            mem::forget(self);

            Vector {
                raw: RawVector {
                    data,
                    header,
                },
                allocator,
            }
        }
    }

    /// Appends an element to the back of a collection.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity exceeds `u32::MAX` bytes.
    pub fn push(&mut self, val: T) {
        self.reserve(1);
        unsafe {
            raw::push_assuming_capacity(self.data_ptr(), self.vec_header_mut(), val);
        }
    }

    /// Removes the last element from the vector and returns it, or `None` if it is empty.
    pub fn pop(&mut self) -> Option<T> {
        self.ensure_unique();

        unsafe {
            raw::pop(self.data_ptr(), self.vec_header_mut())
        }
    }

    /// Removes an element from the vector and returns it.
    ///
    /// The removed element is replaced by the last element of the vector.
    ///
    /// # Panics
    ///
    /// Panics if index is out of bounds.
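    ///
    /// A minimal sketch (assuming this crate is compiled under the name `shared_vector`):
    ///
    /// ```
    /// use shared_vector::SharedVector;
    ///
    /// let mut v: SharedVector<u32> = SharedVector::from_slice(&[1, 2, 3, 4]);
    /// assert_eq!(v.swap_remove(0), 1);
    /// // The last element took the removed element's place.
    /// assert_eq!(v.as_slice(), &[4, 2, 3]);
    /// ```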
    #[inline]
    pub fn swap_remove(&mut self, idx: usize) -> T {
        self.ensure_unique();

        let len = self.len();
        assert!(idx < len);

        unsafe {
            let data_ptr = self.data_ptr();
            let ptr = data_ptr.add(idx);
            let item = ptr::read(ptr);

            let last_idx = len - 1;
            if idx != last_idx {
                let last_ptr = data_ptr.add(last_idx);
                ptr::write(ptr, ptr::read(last_ptr));
            }

            self.vec_header_mut().len = last_idx as BufferSize;

            item
        }
    }

    /// Appends an element if there is sufficient spare capacity, otherwise returns an error
    /// containing the element.
    ///
    /// Like other mutable operations, this method may reallocate if the vector is not unique.
    /// However, it will not reallocate when there is insufficient capacity; use `reserve` or
    /// `try_reserve` to ensure that there is enough capacity beforehand.
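    ///
    /// A minimal sketch (assuming this crate is compiled under the name `shared_vector`):
    ///
    /// ```
    /// use shared_vector::SharedVector;
    ///
    /// let mut v: SharedVector<u32> = SharedVector::with_capacity(4);
    /// // Fill the spare capacity (`with_capacity` may round up, so don't assume it is 4).
    /// while v.remaining_capacity() > 0 {
    ///     v.push_within_capacity(0).unwrap();
    /// }
    /// // With no spare capacity left, the element is handed back in the error.
    /// assert_eq!(v.push_within_capacity(1), Err(1));
    /// ```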
    pub fn push_within_capacity(&mut self, val: T) -> Result<(), T> {
        if self.remaining_capacity() == 0 {
            return Err(val);
        }

        self.ensure_unique();
        unsafe {
            raw::push_assuming_capacity(self.data_ptr(), self.vec_header_mut(), val);
        }

        Ok(())
    }

    /// Clones and appends the contents of the slice to the back of a collection.
    pub fn extend_from_slice(&mut self, slice: &[T]) {
        self.reserve(slice.len());
        unsafe {
            raw::extend_from_slice_assuming_capacity(self.data_ptr(), self.vec_header_mut(), slice);
        }
    }

    /// Appends the contents of an iterator to the back of a collection.
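    ///
    /// A minimal sketch (assuming this crate is compiled under the name `shared_vector`):
    ///
    /// ```
    /// use shared_vector::SharedVector;
    ///
    /// let mut v: SharedVector<u32> = SharedVector::new();
    /// v.extend(0..3);
    /// assert_eq!(v.as_slice(), &[0, 1, 2]);
    /// ```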
    pub fn extend(&mut self, data: impl IntoIterator<Item = T>) {
        let mut iter = data.into_iter();

        let (min, max) = iter.size_hint();
        self.reserve(max.unwrap_or(min));

        unsafe {
            if raw::extend_within_capacity(self.data_ptr(), self.vec_header_mut(), &mut iter) {
                return;
            }
        }

        for item in iter {
            self.push(item);
        }
    }

    /// Ensures this shared vector uniquely owns its storage, allocating a new copy
    /// if there are other references to it.
    ///
    /// In principle this is mostly useful internally to provide safe mutable methods,
    /// as it does not observably affect most of the shared vector's behavior. However,
    /// it has a few niche use cases, for example provoking copies earlier for more
    /// predictable performance, or in some unsafe endeavors.
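    ///
    /// A minimal sketch (assuming this crate is compiled under the name `shared_vector`):
    ///
    /// ```
    /// use shared_vector::SharedVector;
    ///
    /// let mut a: SharedVector<u32> = SharedVector::from_slice(&[1, 2]);
    /// let b = a.new_ref();
    /// assert!(!a.is_unique());
    ///
    /// // Pay for the copy now, so later mutations won't have to.
    /// a.ensure_unique();
    /// assert!(a.is_unique());
    /// assert_eq!(a.as_slice(), b.as_slice());
    /// ```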
    #[inline]
    pub fn ensure_unique(&mut self) {
        if !self.is_unique() {
            *self = self.try_clone_buffer(None).unwrap();
        }
    }

    /// Extracts a mutable slice containing the entire vector.
    ///
    /// Like other mutable methods, this will clone the vector's storage
    /// if it is not unique to ensure safe mutations.
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [T]
    where
        T: Clone,
        A: Clone,
    {
        self.ensure_unique();
        unsafe {
            core::slice::from_raw_parts_mut(self.data_ptr(), self.len())
        }
    }

    /// Allocates a duplicate of this buffer (infallible).
    pub fn clone_buffer(&self) -> Self
    where
        T: Clone,
        A: Clone,
    {
        self.try_clone_buffer(None).unwrap()
    }

    fn try_clone_buffer(&self, new_cap: Option<BufferSize>) -> Result<Self, AllocError>
    where
        T: Clone,
        A: Clone,
    {
        unsafe {
            let header = self.inner.as_ref();
            let len = header.vec.len;
            let cap = if let Some(cap) = new_cap {
                cap
            } else {
                header.vec.cap
            };
            let allocator = header.allocator.clone();

            if len > cap {
                return Err(AllocError);
            }

            let mut clone = Self::try_with_capacity_in(cap as usize, allocator)?;

            raw::extend_from_slice_assuming_capacity(
                clone.data_ptr(),
                clone.vec_header_mut(),
                self.as_slice(),
            );

            Ok(clone)
        }
    }

    /// Ensures the vector can be safely mutated and has enough extra capacity to
    /// add `additional` more items.
    ///
    /// This will allocate new storage for the vector if the vector is not unique or if
    /// the capacity is not sufficient to accommodate `self.len() + additional` items.
    /// The vector may reserve more space to speculatively avoid frequent reallocations.
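    ///
    /// A minimal sketch (assuming this crate is compiled under the name `shared_vector`):
    ///
    /// ```
    /// use shared_vector::SharedVector;
    ///
    /// let mut v: SharedVector<u32> = SharedVector::new();
    /// v.reserve(8);
    /// assert!(v.remaining_capacity() >= 8);
    /// ```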
    #[inline]
    pub fn reserve(&mut self, additional: usize) {
        let is_unique = self.is_unique();
        let enough_capacity = self.remaining_capacity() >= additional;

        if !is_unique || !enough_capacity {
            // Hopefully the least common case.
            self.try_realloc_additional(is_unique, enough_capacity, additional)
                .unwrap();
        }
    }

    /// Tries to reserve at least `additional` extra elements to be inserted in the given vector.
    ///
    /// The vector may reserve more space to speculatively avoid frequent reallocations.
    /// After calling `try_reserve`, capacity will be greater than or equal to `self.len() + additional`
    /// if it returns `Ok(())`.
    /// Does nothing if capacity is already sufficient. This method preserves the contents even if an
    /// error occurs.
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), AllocError> {
        let is_unique = self.is_unique();
        let enough_capacity = self.remaining_capacity() >= additional;

        if !is_unique || !enough_capacity {
            // Hopefully the least common case.
            self.try_realloc_additional(is_unique, enough_capacity, additional)?;
        }

        Ok(())
    }

    /// Reserves the minimum capacity for at least `additional` elements to be inserted in the given vector.
    ///
    /// Unlike `reserve`, this will not deliberately over-allocate to speculatively avoid frequent allocations.
    /// After calling `reserve_exact`, capacity will be greater than or equal to `self.len() + additional`.
    /// This will also allocate if the vector is not unique.
    /// Does nothing if the capacity is already sufficient and the vector is unique.
    ///
    /// Note that the allocator may give the collection more space than it requests. Therefore, capacity can not
    /// be relied upon to be precisely minimal. Prefer `reserve` if future insertions are expected.
    pub fn reserve_exact(&mut self, additional: usize) {
        self.try_reserve_exact(additional).unwrap();
    }

    /// Tries to reserve the minimum capacity for at least `additional` elements to be inserted in the given vector.
    ///
    /// Unlike `try_reserve`, this will not deliberately over-allocate to speculatively avoid frequent allocations.
    /// After calling `try_reserve_exact`, capacity will be greater than or equal to `self.len() + additional`
    /// if it returns `Ok(())`.
    /// This will also allocate if the vector is not unique.
    /// Does nothing if the capacity is already sufficient and the vector is unique.
    ///
    /// Note that the allocator may give the collection more space than it requests. Therefore, capacity can not
    /// be relied upon to be precisely minimal. Prefer `try_reserve` if future insertions are expected.
    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), AllocError> {
        let is_unique = self.is_unique();
        let enough_capacity = self.remaining_capacity() >= additional;

        if !is_unique || !enough_capacity {
            // Hopefully the least common case. Note that `try_realloc_with_capacity`
            // takes the new total capacity, not an increment.
            self.try_realloc_with_capacity(is_unique, self.len() + additional)?;
        }

        Ok(())
    }

511
512 /// Shrinks the capacity of the vector with a lower bound.
513 ///
514 /// The capacity will remain at least as large as both the length and the supplied value.
515 /// If the current capacity is less than the lower limit, this is a no-op.
516 pub fn shrink_to(&mut self, min_capacity: usize) {
517 let min_capacity = min_capacity.max(self.len());
518 if self.capacity() <= min_capacity {
519 return;
520 }
521
522 let is_unique = self.is_unique();
523 self.try_realloc_with_capacity(is_unique, min_capacity)
524 .unwrap();
525 }
526
527 /// Shrinks the capacity of the vector as much as possible.
528 pub fn shrink_to_fit(&mut self) {
529 self.shrink_to(self.len())
530 }
531
    /// Moves all the elements of `other` into `self`, leaving `other` empty.
    ///
    /// If `other` is not unique, the elements are cloned instead of moved.
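    ///
    /// A minimal sketch (assuming this crate is compiled under the name `shared_vector`):
    ///
    /// ```
    /// use shared_vector::SharedVector;
    ///
    /// let mut a: SharedVector<u32> = SharedVector::from_slice(&[1, 2]);
    /// let mut b: SharedVector<u32> = SharedVector::from_slice(&[3, 4]);
    /// a.append(&mut b);
    /// assert_eq!(a.as_slice(), &[1, 2, 3, 4]);
    /// assert!(b.is_empty());
    /// ```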
    pub fn append(&mut self, other: &mut Self) {
        self.reserve(other.len());

        unsafe {
            if other.is_unique() {
                // Fast path: memcpy.
                raw::move_data(
                    other.data_ptr(), &mut other.inner.as_mut().vec,
                    self.data_ptr(), &mut self.inner.as_mut().vec,
                )
            } else {
                // Slow path: clone each item.
                raw::extend_from_slice_assuming_capacity(self.data_ptr(), self.vec_header_mut(), other.as_slice());

                *other =
                    Self::try_with_capacity_in(other.capacity(), self.inner.allocator().clone())
                        .unwrap();
            }
        }
    }

    #[cold]
    fn try_realloc_additional(
        &mut self,
        is_unique: bool,
        enough_capacity: bool,
        additional: usize,
    ) -> Result<(), AllocError> {
        let new_cap = if enough_capacity {
            self.capacity()
        } else {
            grow_amortized(self.len(), additional)
        };

        self.try_realloc_with_capacity(is_unique, new_cap)
    }

    #[cold]
    fn try_realloc_with_capacity(
        &mut self,
        is_unique: bool,
        new_cap: usize,
    ) -> Result<(), AllocError> {
        let allocator = self.inner.allocator().clone();
        if is_unique && self.capacity() > 0 {
            // The buffer is not large enough, we'll have to create a new one, however we
            // know that we have the only reference to it so we'll move the data with
            // a simple memcpy instead of cloning it.

            unsafe {
                use crate::raw::{buffer_layout, Header};
                let old_cap = self.capacity();
                let old_header = self.inner.header;
                let old_layout = buffer_layout::<Header<R, A>, T>(old_cap).unwrap();
                let new_layout = buffer_layout::<Header<R, A>, T>(new_cap).unwrap();

                let new_alloc = if new_layout.size() >= old_layout.size() {
                    allocator.grow(old_header.cast(), old_layout, new_layout)
                } else {
                    allocator.shrink(old_header.cast(), old_layout, new_layout)
                }?;

                self.inner.header = new_alloc.cast();
                self.inner.as_mut().vec.cap = new_cap as BufferSize;

                return Ok(());
            }
        }

        // The slowest path, we pay for both the new allocation and the need to clone
        // each item one by one.
        let mut new_vec = Self::try_with_capacity_in(new_cap, allocator)?;
        new_vec.extend_from_slice(self.as_slice());

        mem::swap(self, &mut new_vec);

        Ok(())
    }

    // TODO: remove this one?
    /// Returns the concatenation of two vectors.
    pub fn concatenate(mut self, mut other: Self) -> Self
    where
        T: Clone,
        A: Clone,
    {
        self.append(&mut other);

        self
    }
}

impl<T, R: RefCount, A: Allocator> Drop for RefCountedVector<T, R, A> {
    fn drop(&mut self) {
        unsafe {
            if self.inner.as_ref().ref_count.release_ref() {
                let header = self.vec_header().clone();
                // See the implementation of std Arc for the need to use this fence. Note that
                // we only need it for the atomic reference counted version but I don't expect
                // this to make a measurable difference.
                core::sync::atomic::fence(Ordering::Acquire);

                raw::drop_items(self.data_ptr(), header.len);
                raw::dealloc::<T, R, A>(self.inner.header, header.cap);
            }
        }
    }
}

unsafe impl<T: Sync, A: Allocator + Send> Send for AtomicSharedVector<T, A> {}

unsafe impl<T: Send + Sync, A: Allocator + Sync> Sync for AtomicSharedVector<T, A> {}

impl<T, R: RefCount, A: Allocator> Clone for RefCountedVector<T, R, A> {
    fn clone(&self) -> Self {
        self.new_ref()
    }
}

impl<T: PartialEq<T>, R: RefCount, A: Allocator> PartialEq<RefCountedVector<T, R, A>>
    for RefCountedVector<T, R, A>
{
    fn eq(&self, other: &Self) -> bool {
        self.ptr_eq(other) || self.as_slice() == other.as_slice()
    }
}

impl<T: PartialEq<T>, R: RefCount, A: Allocator> PartialEq<&[T]> for RefCountedVector<T, R, A> {
    fn eq(&self, other: &&[T]) -> bool {
        self.as_slice() == *other
    }
}

impl<T, R: RefCount, A: Allocator> AsRef<[T]> for RefCountedVector<T, R, A> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

impl<T, R: RefCount> Default for RefCountedVector<T, R, Global> {
    fn default() -> Self {
        Self::new()
    }
}

impl<'a, T, R: RefCount, A: Allocator> IntoIterator for &'a RefCountedVector<T, R, A> {
    type Item = &'a T;
    type IntoIter = core::slice::Iter<'a, T>;
    fn into_iter(self) -> core::slice::Iter<'a, T> {
        self.as_slice().iter()
    }
}

impl<'a, T: Clone, R: RefCount, A: Allocator + Clone> IntoIterator
    for &'a mut RefCountedVector<T, R, A>
{
    type Item = &'a mut T;
    type IntoIter = core::slice::IterMut<'a, T>;
    fn into_iter(self) -> core::slice::IterMut<'a, T> {
        self.as_mut_slice().iter_mut()
    }
}

impl<T, R, A, I> Index<I> for RefCountedVector<T, R, A>
where
    R: RefCount,
    A: Allocator,
    I: core::slice::SliceIndex<[T]>,
{
    type Output = <I as core::slice::SliceIndex<[T]>>::Output;
    fn index(&self, index: I) -> &Self::Output {
        self.as_slice().index(index)
    }
}

impl<T, R, A, I> IndexMut<I> for RefCountedVector<T, R, A>
where
    T: Clone,
    R: RefCount,
    A: Allocator + Clone,
    I: core::slice::SliceIndex<[T]>,
{
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        self.as_mut_slice().index_mut(index)
    }
}

impl<T, R: RefCount, A: Allocator> Deref for RefCountedVector<T, R, A> {
    type Target = [T];
    fn deref(&self) -> &[T] {
        self.as_slice()
    }
}

impl<T: Clone, R: RefCount, A: Allocator + Clone> DerefMut for RefCountedVector<T, R, A> {
    fn deref_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}

impl<T: Debug, R: RefCount, A: Allocator> Debug for RefCountedVector<T, R, A> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
        self.as_slice().fmt(f)
    }
}

impl<T: Clone, A: Allocator + Clone> From<Vector<T, A>> for SharedVector<T, A> {
    fn from(vector: Vector<T, A>) -> Self {
        vector.into_shared()
    }
}

impl<T: Clone, A: Allocator + Clone> From<Vector<T, A>> for AtomicSharedVector<T, A> {
    fn from(vector: Vector<T, A>) -> Self {
        vector.into_shared_atomic()
    }
}

// In order to give us a chance to catch leaks and double-frees, test with values that implement drop.
#[cfg(test)]
fn num(val: u32) -> Box<u32> {
    Box::new(val)
}

#[test]
fn basic_shared() {
    basic_shared_impl::<DefaultRefCount>();
    basic_shared_impl::<AtomicRefCount>();

    fn basic_shared_impl<R: RefCount>() {
        let mut a: RefCountedVector<Box<u32>, R> = RefCountedVector::with_capacity(64);
        a.push(num(1));
        a.push(num(2));

        let mut b = a.new_ref();
        b.push(num(4));

        a.push(num(3));

        assert_eq!(a.as_slice(), &[num(1), num(2), num(3)]);
        assert_eq!(b.as_slice(), &[num(1), num(2), num(4)]);

        let popped = a.pop();
        assert_eq!(a.as_slice(), &[num(1), num(2)]);
        assert_eq!(popped, Some(num(3)));

        let mut b2 = b.new_ref();
        let popped = b2.pop();
        assert_eq!(b2.as_slice(), &[num(1), num(2)]);
        assert_eq!(popped, Some(num(4)));

        println!("concatenate");
        let c = a.concatenate(b2);
        assert_eq!(c.as_slice(), &[num(1), num(2), num(1), num(2)]);
    }
}

#[test]
fn empty_buffer() {
    let _: AtomicSharedVector<u32> = AtomicSharedVector::new();
    let _: AtomicSharedVector<u32> = AtomicSharedVector::new();

    let _: SharedVector<()> = SharedVector::new();
    let _: SharedVector<()> = SharedVector::new();

    let _: AtomicSharedVector<()> = AtomicSharedVector::new();
    let _: AtomicSharedVector<()> = AtomicSharedVector::new();

    let _: Vector<()> = Vector::new();
}

#[test]
#[rustfmt::skip]
fn grow() {
    let mut a = Vector::with_capacity(0);

    a.push(num(1));
    a.push(num(2));
    a.push(num(3));

    a.extend_from_slice(&[num(4), num(5), num(6), num(7), num(8), num(9), num(10), num(12), num(12), num(13), num(14), num(15), num(16), num(17), num(18)]);

    assert_eq!(
        a.as_slice(),
        &[num(1), num(2), num(3), num(4), num(5), num(6), num(7), num(8), num(9), num(10), num(12), num(12), num(13), num(14), num(15), num(16), num(17), num(18)]
    );

    let mut b = SharedVector::new();
    b.push(num(1));
    b.push(num(2));
    b.push(num(3));

    assert_eq!(b.as_slice(), &[num(1), num(2), num(3)]);

    let mut b = AtomicSharedVector::new();
    b.push(num(1));
    b.push(num(2));
    b.push(num(3));

    assert_eq!(b.as_slice(), &[num(1), num(2), num(3)]);
}

#[test]
fn ensure_unique_empty() {
    let mut v: SharedVector<u32> = SharedVector::new();
    v.ensure_unique();
}

#[test]
fn shrink_to_zero() {
    let mut v: SharedVector<u32> = SharedVector::new();
    v.shrink_to(0);
}