aligned_buffer/
unique.rs

1use crate::{
2	alloc::{BufferAllocator, Global},
3	cap::Cap,
4	raw::{RawAlignedBuffer, RawBufferError},
5	SharedAlignedBuffer, DEFAULT_BUFFER_ALIGNMENT,
6};
7use core::fmt;
8use std::{
9	cmp, ops,
10	ptr::{self, NonNull},
11	slice::SliceIndex,
12};
13
/// The error returned by the fallible reservation methods
/// ([`UniqueAlignedBuffer::try_reserve`] and
/// [`UniqueAlignedBuffer::try_reserve_exact`]) when growing the buffer fails.
///
/// The underlying [`RawBufferError`] is boxed so that `Result<(), TryReserveError>`
/// stays small on the happy path.
#[derive(Debug, thiserror::Error)]
#[error("failed to reserve capacity")]
pub struct TryReserveError {
	#[source]
	source: Box<RawBufferError>,
}
20
21impl From<RawBufferError> for TryReserveError {
22	#[inline]
23	fn from(source: RawBufferError) -> Self {
24		Self {
25			source: Box::new(source),
26		}
27	}
28}
29
/// A unique (owned) aligned buffer. This can be used to write data to the buffer,
/// before converting it to a [`SharedAlignedBuffer`] to get cheap clones and sharing
/// of the buffer data. This type is effectively a `Vec<u8>` with a custom alignment.
///
/// [`SharedAlignedBuffer`]: crate::SharedAlignedBuffer
pub struct UniqueAlignedBuffer<const ALIGNMENT: usize = DEFAULT_BUFFER_ALIGNMENT, A = Global>
where
	A: BufferAllocator<ALIGNMENT>,
{
	// The raw allocation (pointer, capacity, allocator). Crate-visible so that
	// conversions to/from `SharedAlignedBuffer` can reuse it.
	pub(crate) buf: RawAlignedBuffer<ALIGNMENT, A>,
	// Number of initialized bytes. Invariant: `len <= capacity` of `buf`.
	pub(crate) len: usize,
}
42
impl<const ALIGNMENT: usize> UniqueAlignedBuffer<ALIGNMENT> {
	/// Constructs a new, empty `UniqueAlignedBuffer`.
	///
	/// The buffer will not allocate until elements are pushed onto it.
	///
	/// # Examples
	///
	/// ```
	/// # #![allow(unused_mut)]
	/// # use aligned_buffer::UniqueAlignedBuffer;
	/// let mut buf = UniqueAlignedBuffer::<32>::new();
	/// ```
	#[inline]
	#[must_use]
	pub const fn new() -> Self {
		Self::new_in(Global)
	}

	/// Constructs a new, empty `UniqueAlignedBuffer` with at least the specified capacity.
	///
	/// The buffer will be able to hold at least `capacity` elements without
	/// reallocating. This method is allowed to allocate for more elements than
	/// `capacity`. If `capacity` is 0, the vector will not allocate.
	///
	/// It is important to note that although the returned vector has the
	/// minimum *capacity* specified, the vector will have a zero *length*. For
	/// an explanation of the difference between length and capacity, see
	/// *[Capacity and reallocation]*.
	///
	/// If it is important to know the exact allocated capacity of a `UniqueAlignedBuffer`,
	/// always use the [`capacity`] method after construction.
	///
	/// [Capacity and reallocation]: #capacity-and-reallocation
	/// [`capacity`]: UniqueAlignedBuffer::capacity
	///
	/// # Panics
	///
	/// Panics if the new capacity is too large.
	///
	/// # Examples
	///
	/// ```
	/// # use aligned_buffer::UniqueAlignedBuffer;
	/// let mut buf = UniqueAlignedBuffer::<32>::with_capacity(10);
	///
	/// // The vector contains no items, even though it has capacity for more
	/// assert_eq!(buf.len(), 0);
	/// assert!(buf.capacity() >= 10);
	///
	/// // These are all done without reallocating...
	/// for i in 0..10 {
	///     buf.push(i);
	/// }
	/// assert_eq!(buf.len(), 10);
	/// assert!(buf.capacity() >= 10);
	///
	/// // ...but this may make the vector reallocate
	/// buf.push(11);
	/// assert_eq!(buf.len(), 11);
	/// assert!(buf.capacity() >= 11);
	/// ```
	#[inline]
	#[must_use]
	pub fn with_capacity(capacity: usize) -> Self {
		Self::with_capacity_in(capacity, Global)
	}

	/// Decomposes a `UniqueAlignedBuffer` into its raw components.
	///
	/// Returns the raw pointer to the underlying data, the length of
	/// the buffer, and the allocated capacity of the buffer.
	/// These are the same arguments in the same
	/// order as the arguments to [`from_raw_parts`].
	///
	/// After calling this function, the caller is responsible for the
	/// memory previously managed by the `UniqueAlignedBuffer`. The only
	/// ways to do this are to convert the raw pointer, length, and capacity
	/// back into a `UniqueAlignedBuffer` with the [`from_raw_parts`] function.
	///
	/// Note that it is valid to shrink the length of the buffer (even set it
	/// to zero) and call `from_raw_parts` with the reduced length. This is
	/// effectively the same as calling [`truncate`] or [`set_len`].
	///
	/// [`from_raw_parts`]: UniqueAlignedBuffer::from_raw_parts
	/// [`truncate`]: UniqueAlignedBuffer::truncate
	/// [`set_len`]: UniqueAlignedBuffer::set_len
	///
	/// # Examples
	///
	/// ```
	/// # use aligned_buffer::UniqueAlignedBuffer;
	/// let mut buf = UniqueAlignedBuffer::<32>::with_capacity(10);
	/// buf.extend([1, 2, 3]);
	///
	/// assert_eq!(&*buf, &[1, 2, 3]);
	/// let (ptr, len, cap) = buf.into_raw_parts();
	///
	/// let rebuilt = unsafe {
	///     UniqueAlignedBuffer::<32>::from_raw_parts(ptr, 2, cap)
	/// };
	/// assert_eq!(&*rebuilt, &[1, 2]);
	/// ```
	pub fn into_raw_parts(self) -> (NonNull<u8>, usize, Cap) {
		let (ptr, cap) = self.buf.into_raw_parts();
		(ptr, self.len, cap)
	}

	/// Creates a `UniqueAlignedBuffer` directly from a pointer, a length, and a capacity.
	///
	/// # Safety
	///
	/// This is highly unsafe, due to the number of invariants that aren't
	/// checked:
	///
	/// * `ptr` must have been allocated using the global allocator, such as via
	///   the [`alloc::alloc`] function.
	/// * `ptr` needs to be correctly offset into the allocation based on `ALIGNMENT`.
	/// * `ptr` needs to point to an allocation with the correct size.
	/// * In front of `ptr` there is a valid `RawAlignedBuffer` header.
	/// * `length` needs to be less than or equal to `capacity`.
	/// * The first `length` bytes must be properly initialized.
	/// * `capacity` needs to be the capacity that the pointer was allocated with.
	/// * The allocated size in bytes must be no larger than `isize::MAX`.
	///   See the safety documentation of `pointer::offset`.
	///
	/// These requirements are always upheld by any `ptr` that has been allocated
	/// via `UniqueAlignedBuffer`. Other allocation sources are allowed if the invariants are
	/// upheld.
	///
	/// Violating these may cause problems like corrupting the allocator's
	/// internal data structures. For example it is normally **not** safe
	/// to build a `UniqueAlignedBuffer` from a pointer to a C `char` array with length
	/// `size_t`, doing so is only safe if the array was initially allocated by
	/// a `UniqueAlignedBuffer`.
	///
	/// The ownership of `ptr` is effectively transferred to the
	/// `UniqueAlignedBuffer` which may then deallocate, reallocate or change the
	/// contents of memory pointed to by the pointer at will. Ensure
	/// that nothing else uses the pointer after calling this
	/// function.
	///
	/// [`alloc::alloc`]: std::alloc::alloc
	///
	/// # Examples
	///
	/// ```
	/// # use aligned_buffer::UniqueAlignedBuffer;
	/// let mut buf = UniqueAlignedBuffer::<32>::with_capacity(10);
	/// buf.extend([1, 2, 3]);
	///
	/// assert_eq!(&*buf, &[1, 2, 3]);
	/// let (ptr, len, cap) = buf.into_raw_parts();
	///
	/// let rebuilt = unsafe {
	///     UniqueAlignedBuffer::<32>::from_raw_parts(ptr, 2, cap)
	/// };
	/// assert_eq!(&*rebuilt, &[1, 2]);
	/// ```
	#[inline]
	pub unsafe fn from_raw_parts(ptr: NonNull<u8>, len: usize, capacity: Cap) -> Self {
		let buf = RawAlignedBuffer::from_raw_parts(ptr, capacity);
		Self { buf, len }
	}
}
208
209impl<const ALIGNMENT: usize, A> UniqueAlignedBuffer<ALIGNMENT, A>
210where
211	A: BufferAllocator<ALIGNMENT>,
212{
213	/// Constructs a new, empty `UniqueAlignedBuffer`.
214	///
215	/// The buffer will not allocate until elements are pushed onto it.
216	///
217	/// # Examples
218	///
219	/// ```
220	/// # #![allow(unused_mut)]
221	/// # use aligned_buffer::{UniqueAlignedBuffer, alloc::Global};
222	/// let mut buf = UniqueAlignedBuffer::<32>::new_in(Global);
223	/// ```
224	#[inline]
225	#[must_use]
226	pub const fn new_in(alloc: A) -> Self {
227		let buf = RawAlignedBuffer::new_in(alloc);
228		Self { buf, len: 0 }
229	}
230
231	/// Constructs a new, empty `UniqueAlignedBuffer` with at least the specified capacity.
232	///
233	/// The buffer will be able to hold at least `capacity` elements without
234	/// reallocating. This method is allowed to allocate for more elements than
235	/// `capacity`. If `capacity` is 0, the vector will not allocate.
236	///
237	/// It is important to note that although the returned vector has the
238	/// minimum *capacity* specified, the vector will have a zero *length*. For
239	/// an explanation of the difference between length and capacity, see
240	/// *[Capacity and reallocation]*.
241	///
242	/// If it is important to know the exact allocated capacity of a `UniqueAlignedBuffer`,
243	/// always use the [`capacity`] method after construction.
244	///
245	/// [Capacity and reallocation]: #capacity-and-reallocation
246	/// [`capacity`]: UniqueAlignedBuffer::capacity
247	///
248	/// # Panics
249	///
250	/// Panics if the new capacity is too large.
251	///
252	/// # Examples
253	///
254	/// ```
255	/// # use aligned_buffer::{UniqueAlignedBuffer, alloc::Global};
256	/// let mut buf = UniqueAlignedBuffer::<32>::with_capacity_in(10, Global);
257	///
258	/// // The vector contains no items, even though it has capacity for more
259	/// assert_eq!(buf.len(), 0);
260	/// assert!(buf.capacity() >= 10);
261	///
262	/// // These are all done without reallocating...
263	/// for i in 0..10 {
264	///     buf.push(i);
265	/// }
266	/// assert_eq!(buf.len(), 10);
267	/// assert!(buf.capacity() >= 10);
268	///
269	/// // ...but this may make the vector reallocate
270	/// buf.push(11);
271	/// assert_eq!(buf.len(), 11);
272	/// assert!(buf.capacity() >= 11);
273	/// ```
274	#[inline]
275	#[must_use]
276	pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
277		let buf = RawAlignedBuffer::with_capacity_in(capacity, alloc);
278		Self { buf, len: 0 }
279	}
280
	/// Decomposes a `UniqueAlignedBuffer` into its raw components.
	///
	/// Returns the raw pointer to the underlying data, the length of
	/// the buffer, the allocated capacity of the buffer, and the allocator.
	/// These are the same arguments in the same
	/// order as the arguments to [`from_raw_parts_in`].
	///
	/// After calling this function, the caller is responsible for the
	/// memory previously managed by the `UniqueAlignedBuffer`. The only
	/// ways to do this are to convert the raw pointer, length, capacity,
	/// and allocator back into a `UniqueAlignedBuffer` with the
	/// [`from_raw_parts_in`] function.
	///
	/// Note that it is valid to shrink the length of the buffer (even set it
	/// to zero) and call `from_raw_parts_in` with the reduced length. This is
	/// effectively the same as calling [`truncate`] or [`set_len`].
	///
	/// [`from_raw_parts_in`]: UniqueAlignedBuffer::from_raw_parts_in
	/// [`from_raw_parts`]: UniqueAlignedBuffer::from_raw_parts
	/// [`truncate`]: UniqueAlignedBuffer::truncate
	/// [`set_len`]: UniqueAlignedBuffer::set_len
	///
	/// # Examples
	///
	/// ```
	/// # use aligned_buffer::UniqueAlignedBuffer;
	/// let mut buf = UniqueAlignedBuffer::<32>::with_capacity(10);
	/// buf.extend([1, 2, 3]);
	///
	/// assert_eq!(&*buf, &[1, 2, 3]);
	/// let (ptr, len, cap, alloc) = buf.into_raw_parts_with_alloc();
	///
	/// let rebuilt = unsafe {
	///     UniqueAlignedBuffer::<32>::from_raw_parts_in(ptr, 2, cap, alloc)
	/// };
	/// assert_eq!(&*rebuilt, &[1, 2]);
	/// ```
	pub fn into_raw_parts_with_alloc(self) -> (NonNull<u8>, usize, Cap, A) {
		let (ptr, cap, alloc) = self.buf.into_raw_parts_with_alloc();
		(ptr, self.len, cap, alloc)
	}
321
	/// Creates a `UniqueAlignedBuffer` directly from a pointer, a length, a
	/// capacity, and an allocator.
	///
	/// # Safety
	///
	/// This is highly unsafe, due to the number of invariants that aren't
	/// checked:
	///
	/// * `ptr` must have been allocated using the given allocator `alloc`
	///   (not an arbitrary or the global allocator).
	/// * `ptr` needs to be correctly offset into the allocation based on `ALIGNMENT`.
	/// * `ptr` needs to point to an allocation with the correct size.
	/// * In front of `ptr` there is a valid `RawAlignedBuffer` header.
	/// * `length` needs to be less than or equal to `capacity`.
	/// * The first `length` bytes must be properly initialized.
	/// * `capacity` needs to be the capacity that the pointer was allocated with.
	/// * The allocated size in bytes must be no larger than `isize::MAX`.
	///   See the safety documentation of `pointer::offset`.
	///
	/// These requirements are always upheld by any `ptr` that has been allocated
	/// via `UniqueAlignedBuffer`. Other allocation sources are allowed if the invariants are
	/// upheld.
	///
	/// Violating these may cause problems like corrupting the allocator's
	/// internal data structures. For example it is normally **not** safe
	/// to build a `UniqueAlignedBuffer` from a pointer to a C `char` array with length
	/// `size_t`, doing so is only safe if the array was initially allocated by
	/// a `UniqueAlignedBuffer`.
	///
	/// The ownership of `ptr` is effectively transferred to the
	/// `UniqueAlignedBuffer` which may then deallocate, reallocate or change the
	/// contents of memory pointed to by the pointer at will. Ensure
	/// that nothing else uses the pointer after calling this
	/// function.
	///
	/// # Examples
	///
	/// ```
	/// # use aligned_buffer::UniqueAlignedBuffer;
	/// let mut buf = UniqueAlignedBuffer::<32>::with_capacity(10);
	/// buf.extend([1, 2, 3]);
	///
	/// assert_eq!(&*buf, &[1, 2, 3]);
	/// let (ptr, len, cap, alloc) = buf.into_raw_parts_with_alloc();
	///
	/// let rebuilt = unsafe {
	///     UniqueAlignedBuffer::<32>::from_raw_parts_in(ptr, 2, cap, alloc)
	/// };
	/// assert_eq!(&*rebuilt, &[1, 2]);
	/// ```
	#[inline]
	pub unsafe fn from_raw_parts_in(ptr: NonNull<u8>, len: usize, capacity: Cap, alloc: A) -> Self {
		let buf = RawAlignedBuffer::from_raw_parts_in(ptr, capacity, alloc);
		Self { buf, len }
	}
378
	/// Returns the total number of elements (bytes) the buffer can hold without
	/// reallocating.
	///
	/// # Examples
	///
	/// ```
	/// # use aligned_buffer::UniqueAlignedBuffer;
	/// let mut buf = UniqueAlignedBuffer::<32>::with_capacity(10);
	/// buf.push(42);
	/// assert!(buf.capacity() >= 10);
	/// ```
	#[inline]
	pub fn capacity(&self) -> usize {
		// When the buffer is owned (unique), `cap_or_len` holds the capacity.
		// (The same field holds the length once converted to a shared buffer.)
		self.buf.cap_or_len()
	}
395
	/// Reserves capacity for at least `additional` more elements to be inserted
	/// in the given `UniqueAlignedBuffer`. The collection may reserve more space to
	/// speculatively avoid frequent reallocations. After calling `reserve`,
	/// capacity will be greater than or equal to `self.len() + additional`.
	/// Does nothing if capacity is already sufficient.
	///
	/// # Panics
	///
	/// Panics if the new capacity is too large.
	///
	/// # Examples
	///
	/// ```
	/// # use aligned_buffer::UniqueAlignedBuffer;
	/// let mut buf = UniqueAlignedBuffer::<32>::with_capacity(10);
	/// buf.reserve(20);
	/// assert!(buf.capacity() >= 20);
	/// ```
	pub fn reserve(&mut self, additional: usize) {
		// SAFETY: We're the unique owner of the buffer.
		unsafe {
			self.buf.reserve(self.len, additional);
		}
	}
420
	/// Reserves the minimum capacity for at least `additional` more elements to
	/// be inserted in the given `UniqueAlignedBuffer`. Unlike [`reserve`], this will not
	/// deliberately over-allocate to speculatively avoid frequent allocations.
	/// After calling `reserve_exact`, capacity will be greater than or equal to
	/// `self.len() + additional`. Does nothing if the capacity is already
	/// sufficient.
	///
	/// Note that the allocator may give the collection more space than it
	/// requests. Therefore, capacity can not be relied upon to be precisely
	/// minimal. Prefer [`reserve`] if future insertions are expected.
	///
	/// [`reserve`]: UniqueAlignedBuffer::reserve
	///
	/// # Panics
	///
	/// Panics if the new capacity is too large.
	///
	/// # Examples
	///
	/// ```
	/// # use aligned_buffer::UniqueAlignedBuffer;
	/// let mut buf = UniqueAlignedBuffer::<32>::with_capacity(10);
	/// buf.reserve_exact(20);
	/// assert!(buf.capacity() >= 20);
	/// ```
	pub fn reserve_exact(&mut self, additional: usize) {
		// SAFETY: We're the unique owner of the buffer.
		unsafe {
			self.buf.reserve_exact(self.len, additional);
		}
	}
452
453	/// Tries to reserve capacity for at least `additional` more elements to be inserted
454	/// in the given `UniqueAlignedBuffer`. The collection may reserve more space to speculatively avoid
455	/// frequent reallocations. After calling `try_reserve`, capacity will be
456	/// greater than or equal to `self.len() + additional` if it returns
457	/// `Ok(())`. Does nothing if capacity is already sufficient. This method
458	/// preserves the contents even if an error occurs.
459	///
460	/// # Errors
461	///
462	/// If the capacity overflows, or the allocator reports a failure, then an error
463	/// is returned.
464	///
465	/// # Examples
466	///
467	/// ```
468	/// # use aligned_buffer::UniqueAlignedBuffer;
469	/// # use aligned_buffer::TryReserveError;
470	/// fn process_data(data: &[u32]) -> Result<UniqueAlignedBuffer<64>, TryReserveError> {
471	///     let mut output = UniqueAlignedBuffer::<64>::new();
472	///
473	///     // Pre-reserve the memory, exiting if we can't
474	///     output.try_reserve(data.len() * std::mem::size_of::<u32>())?;
475	///
476	///     // Now we know this can't OOM in the middle of our complex work
477	///     output.extend(data.iter().flat_map(|&val| {
478	///         u32::to_le_bytes(val * 2 + 5) // very complicated
479	///     }));
480	///
481	///     Ok(output)
482	/// }
483	/// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
484	/// ```
485	pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
486		// SAFETY: We're the unieue owner of the buffer.
487		unsafe {
488			self
489				.buf
490				.try_reserve(self.len, additional)
491				.map_err(TryReserveError::from)
492		}
493	}
494
495	/// Tries to reserve the minimum capacity for at least `additional`
496	/// elements to be inserted in the given `UniqueAlignedBuffer`. Unlike [`try_reserve`],
497	/// this will not deliberately over-allocate to speculatively avoid frequent
498	/// allocations. After calling `try_reserve_exact`, capacity will be greater
499	/// than or equal to `self.len() + additional` if it returns `Ok(())`.
500	/// Does nothing if the capacity is already sufficient.
501	///
502	/// Note that the allocator may give the collection more space than it
503	/// requests. Therefore, capacity can not be relied upon to be precisely
504	/// minimal. Prefer [`try_reserve`] if future insertions are expected.
505	///
506	/// [`try_reserve`]: Vec::try_reserve
507	///
508	/// # Errors
509	///
510	/// If the capacity overflows, or the allocator reports a failure, then an error
511	/// is returned.
512	///
513	/// # Examples
514	///
515	/// ```
516	/// # use aligned_buffer::UniqueAlignedBuffer;
517	/// # use aligned_buffer::TryReserveError;
518	///
519	/// fn process_data(data: &[u32]) -> Result<UniqueAlignedBuffer<64>, TryReserveError> {
520	///     let mut output = UniqueAlignedBuffer::<64>::new();
521	///
522	///     // Pre-reserve the memory, exiting if we can't
523	///     output.try_reserve_exact(data.len() * std::mem::size_of::<u32>())?;
524	///
525	///     // Now we know this can't OOM in the middle of our complex work
526	///     output.extend(data.iter().flat_map(|&val| {
527	///         u32::to_le_bytes(val * 2 + 5) // very complicated
528	///     }));
529	///
530	///     Ok(output)
531	/// }
532	/// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
533	/// ```
534	pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
535		// SAFETY: We're the unieue owner of the buffer.
536		unsafe {
537			self
538				.buf
539				.try_reserve_exact(self.len, additional)
540				.map_err(TryReserveError::from)
541		}
542	}
543
	/// Shrinks the capacity of the buffer as much as possible.
	///
	/// It will drop down as close as possible to the length but the allocator
	/// may still inform the buffer that there is space for a few more elements.
	///
	/// # Examples
	///
	/// ```
	/// # use aligned_buffer::UniqueAlignedBuffer;
	/// let mut buf = UniqueAlignedBuffer::<16>::with_capacity(10);
	/// buf.extend([1, 2, 3]);
	/// assert!(buf.capacity() >= 10);
	/// buf.shrink_to_fit();
	/// assert!(buf.capacity() >= 3);
	/// ```
	pub fn shrink_to_fit(&mut self) {
		// The capacity is never less than the length, and there's nothing to do when
		// they are equal, so we can avoid the panic case in the underlying
		// `shrink_to_fit` by only calling it with a strictly greater capacity.
		if self.capacity() > self.len {
			// SAFETY: We're the unique owner of the buffer.
			unsafe {
				self.buf.shrink_to_fit(self.len);
			}
		}
	}
570
	/// Shrinks the capacity of the buffer with a lower bound.
	///
	/// The capacity will remain at least as large as both the length
	/// and the supplied value.
	///
	/// If the current capacity is less than the lower limit, this is a no-op.
	///
	/// # Examples
	///
	/// ```
	/// # use aligned_buffer::UniqueAlignedBuffer;
	/// let mut buf = UniqueAlignedBuffer::<16>::with_capacity(10);
	/// buf.extend([1, 2, 3]);
	/// assert!(buf.capacity() >= 10);
	/// buf.shrink_to(4);
	/// assert!(buf.capacity() >= 4);
	/// buf.shrink_to(0);
	/// assert!(buf.capacity() >= 3);
	/// ```
	pub fn shrink_to(&mut self, min_capacity: usize) {
		if self.capacity() > min_capacity {
			// SAFETY: We're the unique owner of the buffer.
			// The target capacity never goes below `len`, so no data is lost.
			unsafe {
				self.buf.shrink_to_fit(cmp::max(self.len, min_capacity));
			}
		}
	}
598
599	/// Shortens the buffer, keeping the first `len` elements and dropping
600	/// the rest.
601	///
602	/// If `len` is greater or equal to the vector's current length, this has
603	/// no effect.
604	///
605	/// Note that this method has no effect on the allocated capacity
606	/// of the buffer.
607	///
608	/// # Examples
609	///
610	/// Truncating a five element buffer to two elements:
611	///
612	/// ```
613	/// # use aligned_buffer::UniqueAlignedBuffer;
614	/// let mut buf = UniqueAlignedBuffer::<16>::with_capacity(10);
615	/// buf.extend([1, 2, 3, 4, 5]);
616	/// buf.truncate(2);
617	/// assert_eq!(&*buf, &[1, 2]);
618	/// ```
619	///
620	/// No truncation occurs when `len` is greater than the vector's current
621	/// length:
622	///
623	/// ```
624	/// # use aligned_buffer::UniqueAlignedBuffer;
625	/// let mut buf = UniqueAlignedBuffer::<16>::with_capacity(10);
626	/// buf.extend([1, 2, 3, 4, 5]);
627	/// buf.truncate(8);
628	/// assert_eq!(&*buf, &[1, 2, 3, 4, 5]);
629	/// ```
630	///
631	/// Truncating when `len == 0` is equivalent to calling the [`clear`]
632	/// method.
633	///
634	/// ```
635	/// # use aligned_buffer::UniqueAlignedBuffer;
636	/// let mut buf = UniqueAlignedBuffer::<16>::with_capacity(10);
637	/// buf.extend([1, 2, 3, 4, 5]);
638	/// buf.truncate(0);
639	/// assert_eq!(&*buf, &[]);
640	/// assert!(buf.is_empty());
641	/// ```
642	///
643	/// [`clear`]: UniqueAlignedBuffer::clear
644	pub fn truncate(&mut self, len: usize) {
645		// Since we're dealing with plain old data, we can just change the len
646		// without having to drop anything.
647		self.len = cmp::min(len, self.len);
648	}
649
	/// Extracts a slice containing the entire buffer.
	///
	/// Equivalent to `&s[..]`.
	///
	/// # Examples
	///
	/// ```
	/// # use aligned_buffer::UniqueAlignedBuffer;
	/// use std::io::{self, Write};
	/// let mut buf = UniqueAlignedBuffer::<16>::with_capacity(10);
	/// buf.extend([1, 2, 3, 5, 8]);
	/// io::sink().write(buf.as_slice()).unwrap();
	/// ```
	#[inline]
	pub fn as_slice(&self) -> &[u8] {
		// Deref-coerces `&Self` to `&[u8]`.
		self
	}
667
	/// Extracts a mutable slice of the entire buffer.
	///
	/// Equivalent to `&mut s[..]`.
	///
	/// # Examples
	///
	/// ```
	/// # use aligned_buffer::UniqueAlignedBuffer;
	/// use std::io::{self, Read};
	/// let mut buf = UniqueAlignedBuffer::<16>::with_capacity(10);
	/// buf.extend([0; 3]);
	/// io::repeat(0b101).read_exact(buf.as_mut_slice()).unwrap();
	/// ```
	#[inline]
	pub fn as_mut_slice(&mut self) -> &mut [u8] {
		// Deref-coerces `&mut Self` to `&mut [u8]`.
		self
	}
685
	/// Returns a raw pointer to the buffer's data, or a dangling raw pointer
	/// valid for zero sized reads if the vector didn't allocate.
	///
	/// The caller must ensure that the buffer outlives the pointer this
	/// function returns, or else it will end up pointing to garbage.
	/// Modifying the buffer may cause its buffer to be reallocated,
	/// which would also make any pointers to it invalid.
	///
	/// The caller must also ensure that the memory the pointer (non-transitively) points to
	/// is never written to using this pointer or any pointer derived from it. If you need to
	/// mutate the contents of the slice, use [`as_mut_ptr`].
	///
	/// This method guarantees that for the purpose of the aliasing model, this method
	/// does not materialize a reference to the underlying slice, and thus the returned pointer
	/// will remain valid when mixed with other calls to [`as_ptr`] and [`as_mut_ptr`].
	/// Note that calling other methods that materialize mutable references to the slice,
	/// or mutable references to specific elements you are planning on accessing through this pointer,
	/// as well as writing to those elements, may still invalidate this pointer.
	/// See the second example below for how this guarantee can be used.
	///
	/// # Examples
	///
	/// ```
	/// # use aligned_buffer::UniqueAlignedBuffer;
	/// let mut buf = UniqueAlignedBuffer::<16>::with_capacity(10);
	/// buf.extend([1, 2, 4]);
	/// let buf_ptr = buf.as_ptr();
	///
	/// unsafe {
	///     for i in 0..buf.len() {
	///         assert_eq!(*buf_ptr.add(i), 1 << i);
	///     }
	/// }
	/// ```
	///
	/// Due to the aliasing guarantee, the following code is legal:
	///
	/// ```rust
	/// # use aligned_buffer::UniqueAlignedBuffer;
	/// unsafe {
	///     let mut buf = UniqueAlignedBuffer::<16>::with_capacity(10);
	///     buf.extend([1, 2, 4]);
	///     let ptr1 = buf.as_ptr();
	///     let _ = ptr1.read();
	///     let ptr2 = buf.as_mut_ptr().offset(2);
	///     ptr2.write(2);
	///     // Notably, the write to `ptr2` did *not* invalidate `ptr1`
	///     // because it mutated a different element:
	///     let _ = ptr1.read();
	/// }
	/// ```
	///
	/// [`as_mut_ptr`]: UniqueAlignedBuffer::as_mut_ptr
	/// [`as_ptr`]: UniqueAlignedBuffer::as_ptr
	#[inline]
	pub fn as_ptr(&self) -> *const u8 {
		// We shadow the slice method of the same name to avoid going through
		// `deref`, which creates an intermediate reference.
		self.buf.ptr()
	}
747
	/// Returns an unsafe mutable pointer to the buffer's data, or a dangling
	/// raw pointer valid for zero sized reads if the buffer didn't allocate.
	///
	/// The caller must ensure that the buffer outlives the pointer this
	/// function returns, or else it will end up pointing to garbage.
	/// Modifying the vector may cause its buffer to be reallocated,
	/// which would also make any pointers to it invalid.
	///
	/// This method guarantees that for the purpose of the aliasing model, this method
	/// does not materialize a reference to the underlying slice, and thus the returned pointer
	/// will remain valid when mixed with other calls to [`as_ptr`] and [`as_mut_ptr`].
	/// Note that calling other methods that materialize references to the slice,
	/// or references to specific elements you are planning on accessing through this pointer,
	/// may still invalidate this pointer.
	/// See the second example below for how this guarantee can be used.
	///
	/// # Examples
	///
	/// ```
	/// # use aligned_buffer::UniqueAlignedBuffer;
	/// // Allocate buffer big enough for 4 elements.
	/// let size = 4;
	/// let mut buf = UniqueAlignedBuffer::<16>::with_capacity(size);
	/// let buf_ptr = buf.as_mut_ptr();
	///
	/// // Initialize elements via raw pointer writes, then set length.
	/// unsafe {
	///     for i in 0..size {
	///         *buf_ptr.add(i) = i as u8;
	///     }
	///     buf.set_len(size);
	/// }
	/// assert_eq!(&*buf, &[0, 1, 2, 3]);
	/// ```
	///
	/// Due to the aliasing guarantee, the following code is legal:
	///
	/// ```rust
	/// # use aligned_buffer::UniqueAlignedBuffer;
	/// unsafe {
	///     let mut buf = UniqueAlignedBuffer::<16>::with_capacity(10);
	///     buf.extend([0]);
	///     let ptr1 = buf.as_mut_ptr();
	///     ptr1.write(1);
	///     let ptr2 = buf.as_mut_ptr();
	///     ptr2.write(2);
	///     // Notably, the write to `ptr2` did *not* invalidate `ptr1`:
	///     ptr1.write(3);
	/// }
	/// ```
	///
	/// [`as_mut_ptr`]: UniqueAlignedBuffer::as_mut_ptr
	/// [`as_ptr`]: UniqueAlignedBuffer::as_ptr
	#[inline]
	pub fn as_mut_ptr(&mut self) -> *mut u8 {
		// We shadow the slice method of the same name to avoid going through
		// `deref_mut`, which creates an intermediate reference.
		self.buf.ptr()
	}
808
	/// Forces the length of the buffer to `new_len`.
	///
	/// This is a low-level operation that maintains none of the normal
	/// invariants of the type. Normally changing the length of a buffer
	/// is done using one of the safe operations instead, such as
	/// [`truncate`], [`resize`], [`extend`], or [`clear`].
	///
	/// [`truncate`]: UniqueAlignedBuffer::truncate
	/// [`resize`]: UniqueAlignedBuffer::resize
	/// [`extend`]: Extend::extend
	/// [`clear`]: UniqueAlignedBuffer::clear
	///
	/// # Safety
	///
	/// - `new_len` must be less than or equal to [`capacity()`].
	/// - The elements at `old_len..new_len` must be initialized.
	///
	/// [`capacity()`]: UniqueAlignedBuffer::capacity
	///
	/// # Examples
	///
	/// This method can be useful for situations in which the buffer
	/// is serving as a buffer for other code, particularly over FFI:
	///
	/// ```no_run
	/// # #![allow(dead_code)]
	/// # // This is just a minimal skeleton for the doc example;
	/// # // don't use this as a starting point for a real library.
	/// # use aligned_buffer::UniqueAlignedBuffer;
	/// # pub struct StreamWrapper { strm: *mut std::ffi::c_void }
	/// # const Z_OK: i32 = 0;
	/// # extern "C" {
	/// #     fn deflateGetDictionary(
	/// #         strm: *mut std::ffi::c_void,
	/// #         dictionary: *mut u8,
	/// #         dictLength: *mut usize,
	/// #     ) -> i32;
	/// # }
	/// # impl StreamWrapper {
	/// pub fn get_dictionary(&self) -> Option<UniqueAlignedBuffer<16>> {
	///     // Per the FFI method's docs, "32768 bytes is always enough".
	///     let mut dict = UniqueAlignedBuffer::<16>::with_capacity(32_768);
	///     let mut dict_length = 0;
	///     // SAFETY: When `deflateGetDictionary` returns `Z_OK`, it holds that:
	///     // 1. `dict_length` elements were initialized.
	///     // 2. `dict_length` <= the capacity (32_768)
	///     // which makes `set_len` safe to call.
	///     unsafe {
	///         // Make the FFI call...
	///         let r = deflateGetDictionary(self.strm, dict.as_mut_ptr(), &mut dict_length);
	///         if r == Z_OK {
	///             // ...and update the length to what was initialized.
	///             dict.set_len(dict_length);
	///             Some(dict)
	///         } else {
	///             None
	///         }
	///     }
	/// }
	/// # }
	/// ```
	///
	/// Since the buffer holds plain bytes (no destructors), reducing the length
	/// with `set_len` cannot leak resources; still, prefer the safe
	/// [`truncate`] or [`clear`] when not working through raw pointers.
	#[inline]
	pub unsafe fn set_len(&mut self, new_len: usize) {
		// Debug-only sanity check; the caller guarantees this in release builds.
		debug_assert!(new_len <= self.capacity());

		self.len = new_len;
	}
879
	/// Appends an element to the back of a buffer.
	///
	/// # Panics
	///
	/// Panics if the new capacity is too large.
	///
	/// # Examples
	///
	/// ```
	/// # use aligned_buffer::UniqueAlignedBuffer;
	/// let mut buf = UniqueAlignedBuffer::<16>::with_capacity(10);
	/// buf.extend([1, 2]);
	/// buf.push(3);
	/// assert_eq!(&*buf, &[1, 2, 3]);
	/// ```
	#[inline]
	pub fn push(&mut self, value: u8) {
		// This will panic or abort if we would allocate too much.
		if self.len == self.capacity() {
			// SAFETY: We're the unique owner of the buffer.
			unsafe {
				self.buf.reserve(self.len, 1);
			}
		}

		// SAFETY: after the (possible) reserve above, `len < capacity`, so
		// `end` points one-past-the-last initialized byte, inside the
		// allocation, and is valid for a single-byte write.
		unsafe {
			let end = self.as_mut_ptr().add(self.len);
			ptr::write(end, value);
			self.len += 1;
		}
	}
911
	/// Copies all the bytes of `other` onto the end of `self`.
	///
	/// The source is read through `AsRef<[u8]>` and is left unchanged
	/// (see the example: `vec` still holds its elements afterwards).
	///
	/// # Panics
	///
	/// Panics if the new capacity exceeds `isize::MAX` bytes.
	///
	/// # Examples
	///
	/// ```
	/// # use aligned_buffer::UniqueAlignedBuffer;
	/// let mut buf = UniqueAlignedBuffer::<128>::with_capacity(10);
	/// buf.extend([1, 2, 3]);
	/// let mut vec = vec![4u8, 5, 6];
	/// buf.append(&vec);
	/// assert_eq!(&*buf, &[1, 2, 3, 4, 5, 6]);
	/// assert_eq!(vec, [4, 5, 6]);
	/// ```
	#[inline]
	pub fn append(&mut self, other: &(impl AsRef<[u8]> + ?Sized)) {
		// Safety: `other` cannot overlap with `self.as_mut_slice()`,
		// because `self` is a unique reference.
		unsafe {
			self.append_elements(other.as_ref() as *const [u8]);
		}
	}
937
	/// Appends elements to `self` from other buffer.
	///
	/// # Safety
	/// This function requires that `other` does not overlap with `self.as_mut_slice()`.
	#[inline]
	unsafe fn append_elements(&mut self, other: *const [u8]) {
		// SAFETY: the caller guarantees `other` points to a valid slice.
		let count = unsafe { (*other).len() };
		// Make room for `count` extra bytes; this may reallocate.
		self.reserve(count);
		let len = self.len();
		// SAFETY: the reserve above guarantees `count` bytes of spare
		// capacity, and the caller guarantees the regions don't overlap.
		unsafe { ptr::copy_nonoverlapping(other as *const u8, self.as_mut_ptr().add(len), count) };
		// `copy_nonoverlapping` cannot panic, so updating `len` directly
		// (without a drop guard) is fine here.
		self.len += count;
	}
950
	/// Clears the buffer, removing all values.
	///
	/// Note that this method has no effect on the allocated capacity
	/// of the buffer.
	///
	/// # Examples
	///
	/// ```
	/// # use aligned_buffer::UniqueAlignedBuffer;
	/// let mut buf = UniqueAlignedBuffer::<16>::with_capacity(10);
	/// buf.extend([1, 2, 3]);
	///
	/// buf.clear();
	///
	/// assert!(buf.is_empty());
	/// ```
	#[inline]
	pub fn clear(&mut self) {
		// The contents are plain bytes with no destructors, so resetting
		// the logical length is all that's needed.
		self.len = 0;
	}
971
	/// Returns the number of elements in the buffer, also referred to
	/// as its 'length'.
	///
	/// # Examples
	///
	/// ```
	/// # use aligned_buffer::UniqueAlignedBuffer;
	/// let mut buf = UniqueAlignedBuffer::<16>::with_capacity(10);
	/// buf.extend([1, 2, 3]);
	/// assert_eq!(buf.len(), 3);
	/// ```
	#[inline]
	pub fn len(&self) -> usize {
		// Tracked in this wrapper, separate from the raw buffer's capacity.
		self.len
	}
987
988	/// Returns `true` if the buffer contains no data.
989	///
990	/// # Examples
991	///
992	/// ```
993	/// # use aligned_buffer::UniqueAlignedBuffer;
994	/// let mut buf = UniqueAlignedBuffer::<16>::with_capacity(10);
995	/// assert!(buf.is_empty());
996	///
997	/// buf.push(1);
998	/// assert!(!buf.is_empty());
999	/// ```
1000	pub fn is_empty(&self) -> bool {
1001		self.len() == 0
1002	}
1003
1004	/// Resizes the `UniqueAlignedBuffer` in-place so that `len` is equal to `new_len`.
1005	///
1006	/// If `new_len` is greater than `len`, the `UniqueAlignedBuffer` is extended by the
1007	/// difference, with each additional slot filled with `value`.
1008	/// If `new_len` is less than `len`, the `UniqueAlignedBuffer` is simply truncated.
1009	///
1010	/// If you only need to resize to a smaller size, use [`UniqueAlignedBuffer::truncate`].
1011	///
1012	/// # Examples
1013	///
1014	/// ```
1015	/// # use aligned_buffer::UniqueAlignedBuffer;
1016	/// let mut buf = UniqueAlignedBuffer::<16>::with_capacity(10);
1017	/// buf.resize(3, 42);
1018	/// assert_eq!(&*buf, &[42, 42, 42]);
1019	///
1020	/// let mut buf = UniqueAlignedBuffer::<16>::with_capacity(10);
1021	/// buf.extend([1, 2, 3, 4]);
1022	/// buf.resize(2, 0);
1023	/// assert_eq!(&*buf, &[1, 2]);
1024	/// ```
1025	pub fn resize(&mut self, new_len: usize, value: u8) {
1026		let len = self.len();
1027
1028		if new_len > len {
1029			self.extend_with(new_len - len, value)
1030		} else {
1031			self.truncate(new_len);
1032		}
1033	}
1034
1035	/// Copies and appends all elements in a slice to the `UniqueAlignedBuffer`.
1036	///
1037	/// Iterates over the slice `other`, copies each element, and then appends
1038	/// it to this `UniqueAlignedBuffer`. The `other` slice is traversed in-order.
1039	///
1040	/// Note that this function is same as [`extend`] except that it is
1041	/// specialized to work with slices instead. If and when Rust gets
1042	/// specialization this function will likely be deprecated (but still
1043	/// available).
1044	///
1045	/// # Examples
1046	///
1047	/// ```
1048	/// # use aligned_buffer::UniqueAlignedBuffer;
1049	/// let mut buf = UniqueAlignedBuffer::<16>::with_capacity(10);
1050	/// buf.extend([1]);
1051	/// buf.extend_from_slice(&[2, 3, 4]);
1052	/// assert_eq!(&*buf, &[1, 2, 3, 4]);
1053	/// ```
1054	///
1055	/// [`extend`]: UniqueAlignedBuffer::extend
1056	pub fn extend_from_slice(&mut self, other: &[u8]) {
1057		self.append(other);
1058	}
1059
	/// Extend the vector by `n` clones of value.
	fn extend_with(&mut self, n: usize, value: u8) {
		// Reserve all `n` bytes up front so the write loop never reallocates.
		self.reserve(n);

		unsafe {
			let mut ptr = self.as_mut_ptr().add(self.len());
			// Use SetLenOnDrop to keep the length update out of the loop:
			// the guard writes the final length back on scope exit, and it
			// helps the compiler see that the stores through `ptr` and the
			// update of `self.len` don't alias.
			let mut local_len = SetLenOnDrop::new(&mut self.len);

			// Write all elements
			for _ in 0..n {
				ptr::write(ptr, value);
				ptr = ptr.add(1);
			}

			local_len.increment_len(n);
			// len set by scope guard
		}
	}
1081
	/// Converts a `UniqueAlignedBuffer` into a `SharedAlignedBuffer`
	/// that can be safely cloned and shared between threads.
	pub fn into_shared(mut self) -> SharedAlignedBuffer<ALIGNMENT, A> {
		// Record the initialized length in the raw buffer itself, since the
		// shared form has no separate `len` field. After this call,
		// `cap_or_len()` reports that length (checked below in debug builds).
		self.buf.reset_len(self.len());
		debug_assert_eq!(self.buf.cap_or_len(), self.len());
		SharedAlignedBuffer { buf: self.buf }
	}
1089}
1090
1091impl<const ALIGNMENT: usize, A> ops::Deref for UniqueAlignedBuffer<ALIGNMENT, A>
1092where
1093	A: BufferAllocator<ALIGNMENT>,
1094{
1095	type Target = [u8];
1096
1097	#[inline]
1098	fn deref(&self) -> &Self::Target {
1099		unsafe { std::slice::from_raw_parts(self.as_ptr(), self.len()) }
1100	}
1101}
1102
1103impl<const ALIGNMENT: usize, A> ops::DerefMut for UniqueAlignedBuffer<ALIGNMENT, A>
1104where
1105	A: BufferAllocator<ALIGNMENT>,
1106{
1107	#[inline]
1108	fn deref_mut(&mut self) -> &mut [u8] {
1109		unsafe { std::slice::from_raw_parts_mut(self.as_mut_ptr(), self.len()) }
1110	}
1111}
1112
1113impl<I: SliceIndex<[u8]>, const ALIGNMENT: usize, A> ops::Index<I>
1114	for UniqueAlignedBuffer<ALIGNMENT, A>
1115where
1116	A: BufferAllocator<ALIGNMENT>,
1117{
1118	type Output = I::Output;
1119
1120	#[inline]
1121	fn index(&self, index: I) -> &Self::Output {
1122		ops::Index::index(&**self, index)
1123	}
1124}
1125
1126impl<I: SliceIndex<[u8]>, const ALIGNMENT: usize, A> ops::IndexMut<I>
1127	for UniqueAlignedBuffer<ALIGNMENT, A>
1128where
1129	A: BufferAllocator<ALIGNMENT>,
1130{
1131	#[inline]
1132	fn index_mut(&mut self, index: I) -> &mut Self::Output {
1133		ops::IndexMut::index_mut(&mut **self, index)
1134	}
1135}
1136
1137impl<const ALIGNMENT: usize> FromIterator<u8> for UniqueAlignedBuffer<ALIGNMENT, Global> {
1138	#[inline]
1139	fn from_iter<T: IntoIterator<Item = u8>>(iter: T) -> Self {
1140		let iter = iter.into_iter();
1141		let (lower, _) = iter.size_hint();
1142		let mut buf = Self::with_capacity(lower);
1143		buf.extend(iter);
1144		buf
1145	}
1146}
1147
impl<const ALIGNMENT: usize, A> Extend<u8> for UniqueAlignedBuffer<ALIGNMENT, A>
where
	A: BufferAllocator<ALIGNMENT>,
{
	/// Appends every byte produced by `iter` to the buffer.
	///
	/// Reserves based on the iterator's lower size bound, writes directly
	/// into the spare capacity, and falls back to `push` for any items
	/// beyond that bound.
	#[inline]
	fn extend<T: IntoIterator<Item = u8>>(&mut self, iter: T) {
		let mut iter = iter.into_iter();
		let (lower, _) = iter.size_hint();
		// The lower bound may be inexact in either direction; the two-phase
		// loop below handles both too-few and too-many items.
		self.reserve(lower);
		let free = self.capacity() - self.len();

		unsafe {
			let mut ptr = self.as_mut_ptr().add(self.len());
			// Use SetLenOnDrop so the length is written back even if
			// `iter.next()` panics; it also helps the compiler see that the
			// stores through `ptr` and the update of `self.len` don't alias.
			let mut local_len = SetLenOnDrop::new(&mut self.len);

			// Write elements until we run out of space or the iterator ends
			// (whichever comes first). We don't use `for-each` because we need to
			// keep the iterator alive in case not all elements fit in the
			// allocated capacity. This can happen if the iterator is not an
			// exact size iterator, or simply gives out a lower bound that is
			// not exact.
			// Note: if we could specialize on the iterator type, we could use
			// ExactSizeIterator to avoid the free check.
			for _ in 0..free {
				let Some(byte) = iter.next() else {
					// We're done, so we can just return
					return;
				};

				ptr::write(ptr, byte);
				ptr = ptr.add(1);
				// Increment the length in every step in case next() panics
				local_len.increment_len(1);
			}

			// len set by scope guard
		}

		// write the remainder of the iter using push
		for byte in iter {
			self.push(byte);
		}
	}
}
1195
1196impl<const ALIGNMENT: usize, A> Default for UniqueAlignedBuffer<ALIGNMENT, A>
1197where
1198	A: BufferAllocator<ALIGNMENT> + Default,
1199{
1200	fn default() -> Self {
1201		Self::new_in(A::default())
1202	}
1203}
1204
1205impl<const ALIGNMENT: usize, A> fmt::Debug for UniqueAlignedBuffer<ALIGNMENT, A>
1206where
1207	A: BufferAllocator<ALIGNMENT>,
1208{
1209	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1210		fmt::Debug::fmt(&**self, f)
1211	}
1212}
1213
1214impl<const ALIGNMENT: usize, A> AsRef<[u8]> for UniqueAlignedBuffer<ALIGNMENT, A>
1215where
1216	A: BufferAllocator<ALIGNMENT>,
1217{
1218	#[inline]
1219	fn as_ref(&self) -> &[u8] {
1220		self
1221	}
1222}
1223
1224impl<const ALIGNMENT: usize, A> AsMut<[u8]> for UniqueAlignedBuffer<ALIGNMENT, A>
1225where
1226	A: BufferAllocator<ALIGNMENT>,
1227{
1228	#[inline]
1229	fn as_mut(&mut self) -> &mut [u8] {
1230		self
1231	}
1232}
1233
impl<const ALIGNMENT: usize, A> TryFrom<SharedAlignedBuffer<ALIGNMENT, A>>
	for UniqueAlignedBuffer<ALIGNMENT, A>
where
	A: BufferAllocator<ALIGNMENT>,
{
	// On failure the error carries a `SharedAlignedBuffer` — presumably the
	// original value handed back when unique ownership can't be established;
	// confirm against `SharedAlignedBuffer::try_unique`.
	type Error = SharedAlignedBuffer<ALIGNMENT, A>;

	/// Attempts to reclaim unique ownership of a shared buffer.
	#[inline]
	fn try_from(value: SharedAlignedBuffer<ALIGNMENT, A>) -> Result<Self, Self::Error> {
		SharedAlignedBuffer::try_unique(value)
	}
}
1246
/// Scope guard that writes a locally tracked length back through `len`
/// when dropped.
///
/// Used by `extend`/`extend_with`: the length is accumulated in a local
/// copy while bytes are written, and published on scope exit — including
/// during unwinding, so `len` stays consistent with the number of bytes
/// actually initialized if the byte-producing iterator panics.
struct SetLenOnDrop<'a> {
	// Destination that receives the final length on drop.
	len: &'a mut usize,
	// Working copy, incremented as elements are written.
	local_len: usize,
}

impl<'a> SetLenOnDrop<'a> {
	/// Creates a guard seeded with the current value of `*len`.
	#[inline]
	pub(super) fn new(len: &'a mut usize) -> Self {
		SetLenOnDrop {
			local_len: *len,
			len,
		}
	}

	/// Records `increment` additional initialized elements.
	#[inline]
	pub(super) fn increment_len(&mut self, increment: usize) {
		self.local_len += increment;
	}
}

impl Drop for SetLenOnDrop<'_> {
	#[inline]
	fn drop(&mut self) {
		// Publish the tracked length on both normal exit and unwind.
		*self.len = self.local_len;
	}
}