// wgpu_types/write_only.rs
1#![deny(
2 elided_lifetimes_in_paths,
3 reason = "make all lifetime relationships around our unsafe code explicit, \
4 because they are important to soundness"
5)]
6
7//! The [`WriteOnly`] type.
8//!
9//! This type gets its own module in order to provide an encapsulation boundary around the
10//! substantial `unsafe` code required to implement [`WriteOnly`].
11//!
12//! Portions of this code and documentation have been copied from the Rust standard library.
13
14use core::{
15 any::TypeId,
16 fmt,
17 marker::PhantomData,
18 mem,
19 ops::{Bound, RangeBounds},
20 ptr::NonNull,
21};
22
23use crate::link_to_wgpu_item;
24
25/// Like `&'a mut T`, but allows only write operations.
26///
27/// This pointer type is obtained from [`BufferViewMut`] and
28/// [`QueueWriteBufferView`].
29/// It is an unfortunate necessity due to the fact that mapped GPU memory may be [write combining],
30/// which means it cannot work normally with all of the things that Rust `&mut` access allows you to
31/// do.
32///
33/// ([`WriteOnly`] can also be used as an interface to write to *uninitialized* memory, but this is
34/// not a feature which `wgpu` currently offers for GPU buffers.)
35///
36/// The methods of `WriteOnly<[T]>` are similar to those available for
37/// [slice references, `&mut [T]`][primitive@slice],
38/// with some changes to ownership intended to minimize the pain of explicit reborrowing.
39///
40// FIXME: Add an introduction to the necessity of explicit reborrowing.
41///
42/// [write combining]: https://en.wikipedia.org/wiki/Write_combining
43#[doc = link_to_wgpu_item!(struct BufferViewMut)]
44#[doc = link_to_wgpu_item!(struct QueueWriteBufferView)]
pub struct WriteOnly<'a, T: ?Sized> {
    /// The data which this write-only reference allows **writing** to.
    ///
    /// This field is not `&mut T`, because if it were, it would assert to the compiler
    /// that spurious reads may be inserted, and it is unclear whether those spurious reads
    /// are acceptable.
    ptr: NonNull<T>,

    /// Enforces that this type
    ///
    /// * is only valid for `'a`
    /// * is invariant in `T`
    /// * implements auto traits as a reference to `T`
    ///
    /// In theory, [`WriteOnly`] should be *contravariant* in `T`, but this would be tricky
    /// to implement (`ptr` would need to be type-erased) and is very unlikely to be useful.
    _phantom: PhantomData<&'a mut T>,
}
63
64// SAFETY:
65// `WriteOnly<T>` is like `&mut T` in that
66// * It provides only exclusive access to the memory it points to, so `T: Sync` is not required.
67// * Sending it creates the opportunity to send a `T`, so `T: Send` is required.
68unsafe impl<T: Send> Send for WriteOnly<'_, T> {}
69
// SAFETY:
// `WriteOnly<T>` does not ever expose any `&T` — its `&self` methods only report the length
// or format a `Debug` string — and therefore may unconditionally implement `Sync`.
unsafe impl<T: ?Sized> Sync for WriteOnly<'_, T> {}
73
impl<'a, T: ?Sized> WriteOnly<'a, T> {
    // Note: Every method is marked `#[inline]` because the premise of this API design is that
    // `WriteOnly` should be, when compiled, as cheap as manipulating `&mut` rather than
    // having any additional function call cost.

    /// Constructs a [`WriteOnly`] pointer from a raw pointer.
    ///
    /// # Safety
    ///
    /// By calling [`WriteOnly::new()`], you are giving safe code the opportunity to write to
    /// this memory if it is given the resulting [`WriteOnly`]. Therefore:
    ///
    /// * `ptr` must be valid for ordinary, non-`volatile`, writes.
    ///   (It need not be valid for reads, including reads that occur as part of atomic operations
    ///   — that’s the whole point.)
    /// * `ptr` must be aligned to at least the alignment of the type `T`.
    /// * No other accesses to the memory pointed to by `ptr` may be performed until the
    ///   lifetime `'a` ends. (Similar to
    ///   [the conditions to construct `&'a mut T`][std::ptr#pointer-to-reference-conversion].)
    ///
    /// The memory pointed to need not contain a valid `T`, but if it does, it still will after
    /// the `WriteOnly` pointer is used; that is, safe (or sound unsafe) use of `WriteOnly` will not
    /// “de-initialize” the memory.
    #[inline]
    #[must_use]
    pub unsafe fn new(ptr: NonNull<T>) -> Self {
        // No run-time checks are possible here; all safety conditions are the caller’s
        // responsibility.
        Self {
            ptr,
            _phantom: PhantomData,
        }
    }

    /// Constructs a [`WriteOnly`] pointer from an ordinary read-write `&mut` reference.
    ///
    /// This may be used to write code which can write either to a mapped GPU buffer or
    /// normal memory.
    ///
    /// # Example
    ///
    /// ```
    /// # use wgpu_types as wgpu;
    /// fn write_numbers(slice: wgpu::WriteOnly<[u32]>) {
    ///     for (i, mut elem) in slice.into_iter().enumerate() {
    ///         elem.write(i as u32);
    ///     }
    /// }
    ///
    /// let mut buf: [u32; 4] = [0; 4];
    /// write_numbers(wgpu::WriteOnly::from_mut(&mut buf));
    /// assert_eq!(buf, [0, 1, 2, 3]);
    /// ```
    #[inline]
    #[must_use]
    pub fn from_mut(reference: &mut T) -> Self {
        // SAFETY: `&mut`’s safety conditions imply ours.
        // (A `&mut` is always non-null, aligned, writable, and exclusive for its lifetime.)
        // FIXME: Use `NonNull::from_mut()` when MSRV ≥ 1.89.0
        unsafe { Self::new(NonNull::new_unchecked(&raw mut *reference)) }
    }

    /// Writes `value` into the memory pointed to by `self`.
    ///
    /// This can only be used when `T` is a [`Sized`] type.
    /// For slices, use [`copy_from_slice()`][Self::copy_from_slice] or
    /// [`write_iter()`][Self::write_iter] instead.
    #[inline]
    pub fn write(self, value: T)
    where
        // Ideally, we want "does not have a destructor" to avoid any need for dropping (which
        // would imply reading) or forgetting the values that write operations overwrite.
        // However, there is no such trait bound and `T: Copy` is the closest approximation.
        T: Copy,
    {
        // SAFETY:
        // `self.ptr` is valid for writes, and `self`’s lifetime ensures the write cannot alias.
        //
        // Not forgetting values:
        // `T` is `Copy`, so overwriting the old value of `*self.ptr` is trivial and does not
        // forget anything.
        unsafe { self.ptr.write(value) }
    }

    /// Returns a raw pointer to the memory this [`WriteOnly`] refers to.
    ///
    /// This operation may be used to manually perform writes in situations where the safe API of
    /// [`WriteOnly`] is not sufficient, e.g. for random access from multiple threads.
    ///
    /// You must take care when using this pointer:
    ///
    /// * The `WriteOnly` type makes no guarantee that the memory pointed to by this pointer is
    ///   readable or initialized. Therefore, it must not be converted to `&mut T`, nor read any
    ///   other way.
    /// * You may not write an invalid value unless you also overwrite it with a valid value
    ///   later. That is, you may not make the memory less initialized than it already was.
    ///
    /// See also [`as_raw_element_ptr()`][WriteOnly::as_raw_element_ptr], which returns a pointer
    /// to the first element of a slice.
    #[inline]
    pub fn as_raw_ptr(&mut self) -> NonNull<T> {
        self.ptr
    }
}
177
178/// Methods for write-only references to slices.
179impl<'a, T> WriteOnly<'a, [T]> {
    /// Returns the length of the referenced slice; the number of elements that may be written.
    ///
    /// # Example
    ///
    /// ```
    /// # use wgpu_types as wgpu;
    /// let example_slice: &mut [u8] = &mut [0; 10];
    /// assert_eq!(wgpu::WriteOnly::from_mut(example_slice).len(), example_slice.len());
    /// ```
    #[inline]
    #[must_use]
    pub const fn len(&self) -> usize {
        // `NonNull::<[T]>::len()` reads only the fat-pointer metadata; it never dereferences.
        self.ptr.len()
    }

    /// Returns `true` if the referenced slice has a length of 0.
    #[inline]
    #[must_use]
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }
201
    /// Returns another slice reference borrowing from this one,
    /// covering a sub-range and with a shorter lifetime.
    ///
    /// You can also use `.slice(..)` to perform an explicit reborrow without shrinking.
    ///
    /// See also [`into_slice()`][Self::into_slice] when the same lifetime is needed.
    ///
    /// # Panics
    ///
    /// Panics if `bounds` is out of range for this slice.
    ///
    /// # Example
    ///
    /// ```
    /// # use wgpu_types as wgpu;
    /// // Ordinarily you would get a `WriteOnly` from `wgpu::Buffer` instead.
    /// let mut data: [u8; 9] = [0; 9];
    /// let mut wo = wgpu::WriteOnly::from_mut(data.as_mut_slice());
    ///
    /// wo.slice(..3).copy_from_slice(&[1, 2, 3]);
    /// wo.slice(3..6).copy_from_slice(&[4, 5, 6]);
    /// wo.slice(6..).copy_from_slice(&[7, 8, 9]);
    ///
    /// assert_eq!(data, [1, 2, 3, 4, 5, 6, 7, 8, 9]);
    /// ```
    #[inline]
    #[must_use]
    pub fn slice<'b, S: RangeBounds<usize>>(&'b mut self, bounds: S) -> WriteOnly<'b, [T]> {
        // SAFETY: We are duplicating `self.ptr`, but the lifetime annotations on this function
        // ensure exclusive access: `self` remains mutably borrowed for as long as the
        // reborrowed `WriteOnly` exists.
        let reborrow = unsafe { WriteOnly::<'b, [T]>::new(self.ptr) };

        // Bounds checking (and any panic) happens inside `into_slice()`.
        reborrow.into_slice(bounds)
    }
232
    /// Shrinks this slice reference in the same way as [`slice()`](Self::slice), but
    /// consumes `self` and returns a slice reference with the same lifetime,
    /// instead of a shorter lifetime.
    ///
    /// # Panics
    ///
    /// Panics if `bounds` is out of range for this slice.
    #[inline]
    #[must_use]
    pub fn into_slice<S: RangeBounds<usize>>(mut self, bounds: S) -> Self {
        // Panics (via `checked_range_to_start_len`) on an inverted or out-of-bounds range.
        let (checked_start, checked_new_len) =
            checked_range_to_start_len(self.len(), bounds.start_bound(), bounds.end_bound());

        WriteOnly {
            // FIXME: When `feature(slice_ptr_get)` <https://github.com/rust-lang/rust/issues/74265>
            // is stable, replace this with `NonNull::get_unchecked_mut()`.
            // Unfortunately, we’ll still need to do explicit destructuring of `bounds`
            // for bounds checking.
            ptr: NonNull::slice_from_raw_parts(
                // SAFETY of add(): we already did a bounds check.
                unsafe { self.as_raw_element_ptr().add(checked_start) },
                checked_new_len,
            ),
            _phantom: PhantomData,
        }
    }
255
    /// Writes the items of `iter` into `self`.
    ///
    /// The iterator must produce exactly `self.len()` items.
    ///
    /// If the items are in a slice, use [`copy_from_slice()`][Self::copy_from_slice] instead.
    ///
    /// # Panics
    ///
    /// Panics if `iter` produces more or fewer items than `self.len()`.
    ///
    /// # Example
    ///
    /// ```
    /// # use wgpu_types as wgpu;
    /// // Ordinarily you would get a `WriteOnly` from `wgpu::Buffer` instead.
    /// let mut buf: [u8; 10] = [0; 10];
    /// let wo = wgpu::WriteOnly::from_mut(buf.as_mut_slice());
    ///
    /// wo.write_iter((1..).take(10));
    ///
    /// assert_eq!(buf, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
    /// ```
    #[inline]
    #[track_caller]
    pub fn write_iter<I>(self, iter: I)
    where
        T: Copy, // required by write()
        I: IntoIterator<Item = T>,
    {
        let self_len = self.len();
        // Consuming `self` yields one `WriteOnly<T>` "slot" per element.
        let mut slot_iter = self.into_iter();

        // Call `for_each()` to take advantage of the iterator’s custom implementation, if it has
        // one. This may be superior to a `for` loop for `chain()`ed iterators and other cases where
        // the implementation of `Iterator::next()` would need to branch, and is typically
        // equivalent to a `for` loop for other iterators.
        iter.into_iter().for_each(|item| {
            let Some(slot) = slot_iter.next() else {
                panic!("iterator given to write_iter() produced more than {self_len} elements");
            };

            slot.write(item);
        });

        // Any slots left over mean the input iterator was too short.
        let remaining_len = slot_iter.len();
        if remaining_len != 0 {
            panic!(
                "iterator given to write_iter() produced {iter_len} elements \
                but must produce {self_len} elements",
                // infer how many elements the iterator produced by how many of ours were consumed
                iter_len = self_len - remaining_len,
            );
        };
    }
310
    /// Writes copies of `value` to every element of `self`.
    ///
    /// # Example
    ///
    /// ```
    /// # use wgpu_types as wgpu;
    /// // Ordinarily you would get a `WriteOnly` from `wgpu::Buffer` instead.
    /// let mut buf = vec![0; 10];
    /// let mut wo = wgpu::WriteOnly::from_mut(buf.as_mut_slice());
    ///
    /// wo.fill(1);
    ///
    /// assert_eq!(buf, [1; 10]);
    /// ```
    #[inline]
    pub fn fill(&mut self, value: T)
    where
        // Ideally, we want "does not have a destructor" to avoid any need for dropping (which
        // would imply reading) or forgetting the values that write operations overwrite.
        // However, there is no such trait bound and `T: Copy` is the closest approximation.
        // (`'static` exists solely so `TypeId::of::<T>()` may be used for the memset
        // specialization below.)
        T: Copy + 'static,
    {
        let ty = TypeId::of::<T>();
        if ty == TypeId::of::<u8>() || ty == TypeId::of::<i8>() || ty == TypeId::of::<bool>() {
            // The type consists of a single _initialized_ byte, so we can call out to
            // `write_bytes()` (a.k.a. `memset` in C).
            //
            // Note that we cannot just check that the size is 1, because some types may allow
            // uninitialized bytes (trivially, `MaybeUninit<u8>`)

            // SAFETY:
            // * We just checked that `T` can soundly be transmuted to `u8`.
            // * `T` is `Copy` so we don’t need to worry about duplicating it with `transmute_copy`.
            // * `write_bytes()` is given a pointer which is guaranteed by our own invariants
            //   to be valid to write to.
            unsafe {
                let value_as_byte = mem::transmute_copy::<T, u8>(&value);
                self.as_raw_element_ptr()
                    .cast::<u8>()
                    .write_bytes(value_as_byte, self.len());
            }
        } else {
            // Generic loop for all other types.
            self.slice(..)
                .into_iter()
                .for_each(|elem| elem.write(value));
        }
    }
359
    /// Copies all elements from `src` into `self`.
    ///
    /// # Panics
    ///
    /// Panics if the length of `src` is not the same as `self`.
    ///
    /// # Example
    ///
    /// ```
    /// # use wgpu_types as wgpu;
    /// // Ordinarily you would get a `WriteOnly` from `wgpu::Buffer` instead.
    /// let mut buf = vec![0; 5];
    /// let mut wo = wgpu::WriteOnly::from_mut(buf.as_mut_slice());
    ///
    /// wo.copy_from_slice(&[2, 3, 5, 7, 11]);
    ///
    /// assert_eq!(*buf, [2, 3, 5, 7, 11]);
    /// ```
    #[inline]
    #[track_caller]
    pub fn copy_from_slice(&mut self, src: &[T])
    where
        // Ideally, we want "does not have a destructor" to avoid any need for dropping (which
        // would imply reading) or forgetting the values that write operations overwrite.
        // However, there is no such trait bound and `T: Copy` is the closest approximation.
        T: Copy,
    {
        let src_len = src.len();
        let dst_len = self.len();
        if src_len != dst_len {
            // wording chosen to match <[_]>::copy_from_slice()'s message
            panic!(
                "source slice length ({src_len}) does not match \
                destination slice length ({dst_len})"
            );
        }

        let src_ptr: *const T = src.as_ptr();
        let dst_ptr: *mut T = self.as_raw_element_ptr().as_ptr();

        // SAFETY:
        // * `src_ptr` is readable because it was constructed from a reference.
        // * `dst_ptr` is writable because that is an invariant of `WriteOnly`.
        // * `dst_ptr` cannot alias `src_ptr` because `self` is exclusive *and*
        //   because `src_ptr` is immutable.
        // * We checked that the byte lengths match.
        // * Lack of data races will be enforced by the type
        unsafe { dst_ptr.copy_from_nonoverlapping(src_ptr, src.len()) }
    }
408
    /// Splits this slice reference into `N`-element arrays, starting at the beginning of the slice,
    /// and a reference to the remainder with length strictly less than `N`.
    ///
    /// This method is analogous to [`<[T]>::as_chunks_mut()`][slice::as_chunks_mut]
    /// but for `WriteOnly<[T]>` access.
    /// (It takes ownership instead of `&mut self` in order to avoid reborrowing issues.
    /// Use [`.slice(..)`][Self::slice] first if reborrowing is needed.)
    ///
    /// # Panics
    ///
    /// Panics if `N` is zero.
    ///
    /// # Example
    ///
    /// `into_chunks()` is useful for writing a sequence of elements from CPU memory to GPU memory
    /// when a transformation is required.
    /// (If a transformation is not required, use [`WriteOnly::copy_from_slice()`].)
    ///
    /// ```
    /// # use wgpu_types as wgpu;
    /// fn write_text_as_chars(text: &str, output: wgpu::WriteOnly<[u8]>) {
    ///     let (mut output, _remainder) = output.into_chunks::<{ size_of::<u32>() }>();
    ///     output.write_iter(text.chars().map(|ch| (ch as u32).to_ne_bytes()));
    /// }
    /// #
    /// # let mut buf = [255; 8];
    /// # write_text_as_chars("hi", wgpu::WriteOnly::from_mut(buf.as_mut_slice()));
    /// # assert_eq!(
    /// #     buf,
    /// #     [
    /// #         u32::from(b'h').to_ne_bytes(),
    /// #         u32::from(b'i').to_ne_bytes(),
    /// #     ].as_flattened(),
    /// # );
    /// ```
    #[inline]
    #[must_use]
    pub fn into_chunks<const N: usize>(self) -> (WriteOnly<'a, [[T; N]]>, WriteOnly<'a, [T]>) {
        // This implementation is identical to the Rust standard library implementation as of
        // Rust 1.93.0, except for being broken down into fewer pieces and less uncheckedness.

        assert!(N != 0, "chunk size must be non-zero");
        let len_in_chunks = self.len() / N;
        let len_in_elements_rounded_down = len_in_chunks * N;
        let (multiple_of_n, remainder) = self.split_at(len_in_elements_rounded_down);
        // SAFETY: We already panicked for zero, and ensured by construction
        // that the length of the subslice is a multiple of N.
        let array_slice = unsafe {
            WriteOnly::new(NonNull::slice_from_raw_parts(
                multiple_of_n.ptr.cast::<[T; N]>(),
                len_in_chunks,
            ))
        };
        (array_slice, remainder)
    }
464
    /// Divides one write-only slice reference into two at an index.
    ///
    /// The first will contain all indices from `[0, mid)` (excluding
    /// the index `mid` itself) and the second will contain all
    /// indices from `[mid, len)` (excluding the index `len` itself).
    ///
    /// # Panics
    ///
    /// Panics if `mid > len`.
    #[inline]
    #[must_use]
    #[track_caller]
    pub fn split_at(self, mid: usize) -> (WriteOnly<'a, [T]>, WriteOnly<'a, [T]>) {
        // Delegate the bounds check; the `Err` case only occurs when `mid > len`.
        match self.split_at_checked(mid) {
            Ok(slices) => slices,
            Err(_) => panic!("mid > len"),
        }
    }
483
    /// Divides one write-only slice reference into two at an index, returning [`Err`] if the
    /// slice is too short.
    ///
    /// If `mid ≤ len`, returns a pair of slices where the first will contain all
    /// indices from `[0, mid)` (excluding the index `mid` itself) and the
    /// second will contain all indices from `[mid, len)` (excluding the index
    /// `len` itself).
    ///
    /// Otherwise, if `mid > len`, returns [`Err`] with the original slice.
    #[inline]
    pub fn split_at_checked(self, mid: usize) -> Result<(Self, Self), Self> {
        if mid <= self.len() {
            let Self { ptr, _phantom: _ } = self;
            let element_ptr = ptr.cast::<T>();
            // SAFETY: `mid <= len`, so `element_ptr.add(mid)` is in bounds (or one past the
            // end), both halves lie within the original allocation, and the halves are
            // disjoint, so the exclusivity invariant is preserved.
            Ok(unsafe {
                (
                    Self::new(NonNull::slice_from_raw_parts(element_ptr, mid)),
                    Self::new(NonNull::slice_from_raw_parts(
                        element_ptr.add(mid),
                        ptr.len() - mid,
                    )),
                )
            })
        } else {
            Err(self)
        }
    }
511
512 /// Removes the subslice corresponding to the given range and returns a mutable reference to it.
513 ///
514 /// Returns [`None`] and does not modify the slice if the given range is out of bounds.
515 ///
516 /// # Panics
517 ///
518 /// Panics if `R` is not a one-sided range such as `..n` or `n..`.
519 // (The `OneSidedRange` trait `std` uses to statically enforce this is unstable.)
520 pub fn split_off<R>(&mut self, range: R) -> Option<Self>
521 where
522 R: RangeBounds<usize>,
523 {
524 match (range.start_bound(), range.end_bound()) {
525 (Bound::Included(&mid), Bound::Unbounded) => {
526 match mem::take(self).split_at_checked(mid) {
527 Ok((front, back)) => {
528 *self = front;
529 Some(back)
530 }
531 Err(short) => {
532 *self = short;
533 None
534 }
535 }
536 }
537 (Bound::Excluded(&before_mid), Bound::Unbounded) => {
538 let mid = before_mid.checked_add(1)?;
539 match mem::take(self).split_at_checked(mid) {
540 Ok((front, back)) => {
541 *self = front;
542 Some(back)
543 }
544 Err(short) => {
545 *self = short;
546 None
547 }
548 }
549 }
550 (Bound::Unbounded, Bound::Included(&before_mid)) => {
551 let mid = before_mid.checked_add(1)?;
552 match mem::take(self).split_at_checked(mid) {
553 Ok((front, back)) => {
554 *self = back;
555 Some(front)
556 }
557 Err(short) => {
558 *self = short;
559 None
560 }
561 }
562 }
563 (Bound::Unbounded, Bound::Excluded(&mid)) => {
564 match mem::take(self).split_at_checked(mid) {
565 Ok((front, back)) => {
566 *self = back;
567 Some(front)
568 }
569 Err(short) => {
570 *self = short;
571 None
572 }
573 }
574 }
575 _ => {
576 panic!("split_off() requires a one-sided range")
577 }
578 }
579 }
580
    /// Shrinks `self` to no longer refer to its first element, and returns a reference to that
    /// element.
    ///
    /// Returns `None` if `self` is empty.
    #[inline]
    #[must_use]
    pub fn split_off_first(&mut self) -> Option<WriteOnly<'a, T>> {
        let len = self.len();
        if let Some(new_len) = len.checked_sub(1) {
            let ptr: NonNull<T> = self.as_raw_element_ptr();

            // SAFETY: covers exactly everything but the first element
            *self = unsafe { WriteOnly::new(NonNull::slice_from_raw_parts(ptr.add(1), new_len)) };

            // SAFETY: self was not empty so ptr is not dangling, and we will avoid aliasing
            // because `self` now excludes the first element
            Some(unsafe { WriteOnly::new(ptr) })
        } else {
            // Empty slice: nothing to split off.
            None
        }
    }
601
    /// Shrinks `self` to no longer refer to its last element, and returns a reference to that
    /// element.
    ///
    /// Returns `None` if `self` is empty.
    #[inline]
    #[must_use]
    pub fn split_off_last(&mut self) -> Option<WriteOnly<'a, T>> {
        let len = self.len();
        if let Some(new_len) = len.checked_sub(1) {
            let ptr: NonNull<T> = self.as_raw_element_ptr();

            // SAFETY: covers exactly everything but the last element
            *self = unsafe { WriteOnly::new(NonNull::slice_from_raw_parts(ptr, new_len)) };

            // SAFETY: self was not empty, so `ptr.add(new_len)` points to the in-bounds last
            // element, and we will avoid aliasing because `self` now excludes it
            Some(unsafe { WriteOnly::new(ptr.add(new_len)) })
        } else {
            // Empty slice: nothing to split off.
            None
        }
    }
622
    /// Reinterprets a reference to `[T]` as a reference to `[U]`.
    ///
    /// This may be used, for example, to copy a slice of `struct`s into a `[u8]` buffer.
    ///
    /// This method is `unsafe`, can easily be used incorrectly, and its use is often not necessary;
    /// consider converting your data to bytes explicitly instead.
    /// Consider using [`.into_chunks()`][Self::into_chunks] instead if possible.
    /// When this method is used, consider wrapping it in a function that provides a narrower
    /// type signature that can be safe.
    ///
    /// # Safety
    ///
    /// All values of type `U` must also be valid values of type `T`.
    ///
    /// Note that this is a requirement which is significant even if `T = [u8; N]`.
    /// For example, if `T` contains any padding (uninitialized) bytes, then it is not valid to
    /// interpret those bytes as `u8`s, and such a cast is unsound.
    ///
    /// A way to ensure soundness of this operation is to ensure that `T` and `U` satisfy traits
    /// from a helper library, such as `T: bytemuck::AnyBitPattern, U: bytemuck::NoUninit`.
    ///
    /// # Panics
    ///
    /// Panics if the size of type `U` does not equal the size of type `T`,
    /// or if the alignment of type `U` is greater than the alignment of type `T`.
    ///
    /// This panic occurs regardless of the run-time length or alignment of the slice;
    /// any call to `cast_elements()` with a particular type `T` and type `U` will
    /// either always succeed or always fail.
    #[inline]
    #[track_caller]
    pub unsafe fn cast_elements<U>(self) -> WriteOnly<'a, [U]> {
        // Equal element sizes mean the element count — and thus the total byte length —
        // is unchanged by the cast, so the same `len` may be reused below.
        assert_eq!(
            size_of::<T>(),
            size_of::<U>(),
            "sizes of the two element types must be equal"
        );
        assert!(
            align_of::<U>() <= align_of::<T>(),
            "alignment of the new element type must be \
            less than or equal to the alignment of the old element type"
        );
        // SAFETY: The assertions above keep the pointer in bounds and sufficiently aligned;
        // the caller promises that writing `U`s cannot invalidate `T`s.
        unsafe {
            WriteOnly::new(NonNull::slice_from_raw_parts(
                self.ptr.cast::<U>(),
                self.len(),
            ))
        }
    }
672
    /// Returns a raw pointer to the first element of this [`WriteOnly`] slice reference.
    ///
    /// See [`WriteOnly::as_raw_ptr()`] for information on how this pointer is, or is not,
    /// sound to use.
    #[inline]
    pub fn as_raw_element_ptr(&mut self) -> NonNull<T> {
        // Casting `NonNull<[T]>` to `NonNull<T>` discards the length metadata,
        // leaving a thin pointer to the first element.
        self.ptr.cast::<T>()
    }
681}
682
// This impl does not have `T: ?Sized` so we can have a separate impl for slices
impl<T> fmt::Debug for WriteOnly<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Only the type name is printed; reading the pointed-to value would contradict
        // the write-only contract.
        write!(f, "WriteOnly({ty})", ty = core::any::type_name::<T>())
    }
}
impl<T> fmt::Debug for WriteOnly<'_, [T]> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // We don't format this as `[{ty}; {len}]` in order to not mislead readers into
        // thinking the type is an array type.
        write!(
            f,
            "WriteOnly([{ty}], len = {len})",
            ty = core::any::type_name::<T>(),
            len = self.len(),
        )
    }
}
701
impl<'a, T> Default for WriteOnly<'a, [T]> {
    /// Returns an empty slice reference, just like `<&mut [T]>::default()` would.
    ///
    /// This may be used as a placeholder value for operations like
    /// [`mem::take()`][core::mem::take].
    /// It is equivalent to `WriteOnly::from_mut(&mut [])`.
    fn default() -> Self {
        Self::from_mut(&mut [])
    }
}

impl<'a, T> Default for WriteOnly<'a, [T; 0]> {
    /// Returns a reference to an empty array, equivalent to `WriteOnly::from_mut(&mut [])`.
    fn default() -> Self {
        Self::from_mut(&mut [])
    }
}
718
impl<'a, 'b: 'a, T: ?Sized> From<&'b mut T> for WriteOnly<'a, T> {
    /// Equivalent to [`WriteOnly::from_mut()`].
    fn from(reference: &'a mut T) -> WriteOnly<'a, T> {
        Self::from_mut(reference)
    }
}

// Ideally we'd also implement CoerceUnsized for this same conversion, but that’s unstable.
// <https://doc.rust-lang.org/std/ops/trait.CoerceUnsized.html>
impl<'a, 'b: 'a, T, const N: usize> From<WriteOnly<'b, [T; N]>> for WriteOnly<'a, [T]> {
    /// Converts a write-only array reference into a write-only slice reference
    /// (an explicit unsizing conversion).
    fn from(array_wo: WriteOnly<'b, [T; N]>) -> WriteOnly<'a, [T]> {
        WriteOnly {
            _phantom: PhantomData,
            ptr: array_wo.ptr, // implicit unsizing coercion of the pointer value
        }
    }
}
736
impl<'a, T> IntoIterator for WriteOnly<'a, [T]> {
    type Item = WriteOnly<'a, T>;
    type IntoIter = WriteOnlyIter<'a, T>;

    /// Produces an iterator over [`WriteOnly<T>`][WriteOnly] for each element of
    /// this `WriteOnly<[T]>`.
    ///
    /// See also [`WriteOnly::write_iter()`] for the case where you already have an iterator
    /// of data to write.
    fn into_iter(self) -> Self::IntoIter {
        WriteOnlyIter { slice: self }
    }
}
impl<'a, T, const N: usize> IntoIterator for WriteOnly<'a, [T; N]> {
    type Item = WriteOnly<'a, T>;
    type IntoIter = WriteOnlyIter<'a, T>;

    fn into_iter(self) -> Self::IntoIter {
        // Unsize the array reference to a slice reference, then reuse the slice iterator.
        WriteOnlyIter { slice: self.into() }
    }
}
758
/// Iterator over the elements of [`WriteOnly<[T]>`][WriteOnly].
///
/// It can be created by calling [`IntoIterator::into_iter()`] on a [`WriteOnly<[T]>`][WriteOnly].
///
/// See also [`WriteOnly::write_iter()`].
pub struct WriteOnlyIter<'a, T> {
    // Note: This is not the same as a [`slice::IterMut`], and may be less efficient.
    // We’re being less ambitious in exchange for less unsafe code.
    //
    // The remaining, not-yet-yielded elements; each `next()`/`next_back()` call
    // shrinks this slice by one element from the corresponding end.
    slice: WriteOnly<'a, [T]>,
}
769
impl<'a, T> Iterator for WriteOnlyIter<'a, T> {
    type Item = WriteOnly<'a, T>;

    fn next(&mut self) -> Option<Self::Item> {
        // Shrink the remaining slice from the front; `None` once it is empty.
        self.slice.split_off_first()
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: exactly one item is yielded per remaining element.
        let len = self.slice.len();
        (len, Some(len))
    }
}
// Correct because `size_hint()` above is exact.
impl<'a, T> ExactSizeIterator for WriteOnlyIter<'a, T> {}

impl<'a, T> DoubleEndedIterator for WriteOnlyIter<'a, T> {
    fn next_back(&mut self) -> Option<Self::Item> {
        // Shrink the remaining slice from the back.
        self.slice.split_off_last()
    }
}
789
/// Resolves a pair of [`RangeBounds`] endpoints against a slice of length `len`,
/// returning `(start, length)` of the selected sub-range.
///
/// # Panics
///
/// Panics if an endpoint overflows `usize`, if the range is inverted (`start > end`),
/// or if the range extends past `len`.
#[track_caller]
#[inline]
fn checked_range_to_start_len(
    len: usize,
    slice_start: Bound<&usize>,
    slice_end: Bound<&usize>,
) -> (usize, usize) {
    // FIXME: cleaner panic messages
    let start: usize = match slice_start {
        Bound::Included(&i) => i,
        Bound::Excluded(&i) => i
            .checked_add(1)
            .expect("range bounds must be in numeric range"),
        Bound::Unbounded => 0,
    };
    let end: usize = match slice_end {
        Bound::Included(&i) => i
            .checked_add(1)
            .expect("range bounds must be in numeric range"),
        Bound::Excluded(&i) => i,
        Bound::Unbounded => len,
    };
    // `checked_sub` fails exactly when the range is inverted (start > end).
    let new_len: usize = end
        .checked_sub(start)
        .expect("range must not have start > end");
    assert!(end <= len, "provided range was outside slice");
    // We checked start <= end and end <= len, so we also know that start <= len here.

    (start, new_len)
}
820
821/// Note: These tests are most useful if run under Miri to detect undefined behavior.
822#[cfg(test)]
823mod tests {
824 use alloc::format;
825 use alloc::string::String;
826 use core::panic::{AssertUnwindSafe, UnwindSafe};
827
828 use super::*;
829
    /// Helper for tests explicitly checking panics rather than using `#[should_panic]`
    ///
    /// Returns the panic message so callers can assert on its contents.
    fn expect_panic(f: impl FnOnce()) -> String {
        let payload = std::panic::catch_unwind(AssertUnwindSafe(f))
            .expect_err("function should have panicked");

        // Panic payloads are `String` for formatted `panic!`s and `&'static str`
        // for literal-message `panic!`s; handle both.
        match payload.downcast::<String>() {
            Ok(string) => *string,
            Err(payload) => {
                if let Some(&string) = payload.downcast_ref::<&'static str>() {
                    String::from(string)
                } else {
                    panic!("non-string panic payload with type {:?}", payload.type_id());
                }
            }
        }
    }
846
    // Exercises all three `Debug` impls: array (sized), slice, and scalar (sized).
    #[test]
    fn debug() {
        let mut arr = [1u8, 2, 3];
        assert_eq!(
            format!("{:#?}", WriteOnly::from_mut(&mut arr)),
            "WriteOnly([u8; 3])"
        );
        assert_eq!(
            format!("{:#?}", WriteOnly::from_mut(arr.as_mut_slice())),
            "WriteOnly([u8], len = 3)"
        );
        assert_eq!(
            format!("{:#?}", WriteOnly::from_mut(&mut arr[0])),
            "WriteOnly(u8)"
        );
    }

    // Exercises both `Default` impls: `[T]` and `[T; 0]`.
    #[test]
    fn default() {
        let empty = WriteOnly::<[u8]>::default();
        assert_eq!(empty.len(), 0);

        WriteOnly::<[char; 0]>::default().write([]);
    }

    #[test]
    fn array_to_slice() {
        let mut array = [0u8; 3];
        let array_wo = WriteOnly::from_mut(&mut array);

        // Ideally this could be an implicit unsizing coercion too, but that's not stable.
        let mut slice_wo: WriteOnly<'_, [u8]> = array_wo.into();
        slice_wo.copy_from_slice(&[1, 2, 3]);

        assert_eq!(array, [1, 2, 3]);
    }

    /// The rest of the tests and examples use `from_mut()` on `[T]` or arrays only,
    /// so let’s have at least one test of a type that hasn’t got any `[` or `]` in it.
    #[test]
    fn from_mut_for_non_slice() {
        let mut val = 1u32;
        let wo = WriteOnly::from_mut(&mut val);
        wo.write(2);
        assert_eq!(val, 2);
    }

    #[test]
    #[should_panic = "iterator given to write_iter() produced 3 elements but must produce 4 elements"]
    fn write_iter_too_short() {
        let mut buf = [0u8; 4];
        let wo = WriteOnly::from_mut(buf.as_mut_slice());

        wo.write_iter(1..=3);
    }

    #[test]
    #[should_panic = "iterator given to write_iter() produced more than 4 elements"]
    fn write_iter_too_long() {
        let mut buf = [0u8; 4];
        let wo = WriteOnly::from_mut(buf.as_mut_slice());

        wo.write_iter(1..=5);
    }

    #[test]
    fn write_iter_to_empty_slice_success() {
        let mut buf: [u8; 0] = [];
        let wo = WriteOnly::from_mut(buf.as_mut_slice());

        // does nothing, but shouldn’t panic
        wo.write_iter(core::iter::empty());
    }

    #[test]
    #[should_panic = "iterator given to write_iter() produced more than 0 elements"]
    fn write_iter_to_empty_slice_too_long() {
        let mut buf: [u8; 0] = [];
        let wo = WriteOnly::from_mut(buf.as_mut_slice());
        wo.write_iter(core::iter::once(1));
    }
928
929 /// Tests that the slice length from `into_chunks()` is correct and that iteration works.
930 #[test]
931 fn into_chunks_has_correct_length_and_iterator_iterates() {
932 let mut buf = [0u32; 8];
933
934 let wo = WriteOnly::from_mut(buf.as_mut_slice());
935 assert_eq!(wo.len(), 8);
936
937 let (chunks, remainder): (WriteOnly<'_, [[u32; 4]]>, WriteOnly<'_, [u32]>) =
938 wo.into_chunks::<4>();
939 assert_eq!((chunks.len(), remainder.len()), (2, 0));
940
941 for elem in chunks {
942 elem.write([1, 2, 3, 4]);
943 }
944 assert_eq!(buf, [1, 2, 3, 4, 1, 2, 3, 4]);
945 }
946
947 #[test]
948 fn into_chunks_with_remainder() {
949 let mut buf = [0u8; 5];
950 let wo = WriteOnly::from_mut(buf.as_mut_slice());
951
952 let (mut chunks, mut remainder) = wo.into_chunks::<2>();
953 chunks.fill([1, 2]);
954 remainder.fill(100);
955
956 assert_eq!(buf, [1, 2, 1, 2, 100]);
957 }
958
959 #[test]
960 fn double_ended_iterator() {
961 let mut buf = [0u8; 3];
962 let mut iter = WriteOnly::from_mut(buf.as_mut_slice()).into_iter();
963
964 iter.next_back().unwrap().write(3);
965 iter.next().unwrap().write(1);
966 iter.next_back().unwrap().write(2);
967
968 assert!(iter.next().is_none());
969 assert!(iter.next_back().is_none());
970 assert_eq!(buf, [1, 2, 3]);
971 }
972
    /// Test that slicing correctly panics on an out-of-bounds range.
    ///
    /// Both `slice()` (reborrowing) and `into_slice()` (consuming) are checked
    /// against the same set of ranges on a 4-element slice.
    #[test]
    #[expect(clippy::reversed_empty_ranges)]
    fn slice_bounds_check_failures() {
        // RangeBounds isn’t dyn compatible, so we can’t make a list of test cases and have to
        // use a generic function.
        fn assert_oob(range: impl RangeBounds<usize> + UnwindSafe + fmt::Debug + Clone) {
            // Case 1: the reborrowing `slice()` method.
            let panic_message_1 = expect_panic({
                // Clone so the original `range` is still available for case 2
                // and for the failure messages below.
                let range = range.clone();
                let target: WriteOnly<'_, [char]> =
                    WriteOnly::from_mut(['a', 'b', 'c', 'd'].as_mut_slice());
                || {
                    // `{ target }` moves `target` into the closure body so the
                    // closure is a move closure without saying `move`.
                    _ = { target }.slice(range);
                }
            });
            // TODO: have more consistent errors so this assertion can be stronger
            assert!(
                panic_message_1.contains("range"),
                "expected .slice({range:?}) to panic with an out-of-bounds report,
                but got {panic_message_1:?}"
            );

            // Case 2: the consuming `into_slice()` method.
            let panic_message_2 = expect_panic({
                let range = range.clone();
                let target: WriteOnly<'_, [char]> =
                    WriteOnly::from_mut(['a', 'b', 'c', 'd'].as_mut_slice());
                || {
                    _ = target.into_slice(range);
                }
            });
            assert!(
                panic_message_2.contains("range"),
                "expected .into_slice({range:?}) to panic with an out-of-bounds report,
                but got {panic_message_2:?}"
            );
        }

        // Upper bound past the end (exclusive and inclusive forms).
        assert_oob(..5);
        assert_oob(..=4);
        // Extreme upper bounds, including the `usize::MAX` inclusive case whose
        // exclusive end would overflow.
        assert_oob(..usize::MAX);
        assert_oob(..=usize::MAX);
        // Same again, but with a nonzero start.
        assert_oob(2..5);
        assert_oob(2..=4);
        assert_oob(2..usize::MAX);
        assert_oob(2..=usize::MAX);
        // Reversed (start > end) ranges.
        assert_oob(5..4);
        assert_oob(5..=3);
    }
1021
1022 #[test]
1023 fn slice_full_range() {
1024 let mut buf = [0u8; 4];
1025 let mut wo = WriteOnly::from_mut(buf.as_mut_slice());
1026 let mut wo2 = wo.slice(..);
1027 wo2.fill(7);
1028 assert_eq!(buf, [7, 7, 7, 7]);
1029 }
1030
1031 #[test]
1032 fn split_off_out_of_bounds() {
1033 let mut buf = ['X'; 2];
1034 let mut wo = WriteOnly::from_mut(buf.as_mut_slice());
1035
1036 assert!(wo.split_off(3..).is_none());
1037 assert!(wo.split_off(..3).is_none());
1038
1039 // wo is unchanged by the attempts
1040 assert_eq!(wo.len(), 2);
1041 }
1042
1043 /// Tests [`WriteOnly::split_off()`] with every kind of range it supports.
1044 #[test]
1045 fn split_off_success() {
1046 let mut buf = ['X'; 5];
1047 let mut wo = WriteOnly::from_mut(buf.as_mut_slice());
1048
1049 // this particular combination of `Bound`s has no corresponding `Range*` type
1050 wo.split_off((Bound::Excluded(3), Bound::Unbounded))
1051 .unwrap()
1052 .copy_from_slice(&['e']);
1053 assert_eq!(wo.len(), 4);
1054
1055 wo.split_off((Bound::Included(3), Bound::Unbounded))
1056 .unwrap()
1057 .copy_from_slice(&['d']);
1058 assert_eq!(wo.len(), 3);
1059
1060 wo.split_off(..=0).unwrap().copy_from_slice(&['a']);
1061 assert_eq!(wo.len(), 2);
1062
1063 wo.split_off(..1).unwrap().copy_from_slice(&['b']);
1064 assert_eq!(wo.len(), 1);
1065
1066 wo.copy_from_slice(&['c']);
1067
1068 assert_eq!(buf, ['a', 'b', 'c', 'd', 'e']);
1069 }
1070
1071 #[test]
1072 #[should_panic = "split_off() requires a one-sided range"]
1073 fn split_off_interior_range() {
1074 _ = WriteOnly::from_mut([1, 2, 3].as_mut_slice()).split_off(1..2);
1075 }
1076
1077 /// Tests both [`WriteOnly::split_off_first()`] and [`WriteOnly::split_off_last()`],
1078 /// with the same sequence of operations as [`split_off_success()`].
1079 #[test]
1080 fn split_off_first_and_last_success() {
1081 let mut buf = ['X'; 5];
1082 let mut wo = WriteOnly::from_mut(buf.as_mut_slice());
1083
1084 wo.split_off_last().unwrap().write('e');
1085 wo.split_off_last().unwrap().write('d');
1086 wo.split_off_first().unwrap().write('a');
1087 wo.split_off_first().unwrap().write('b');
1088 wo.copy_from_slice(&['c']);
1089
1090 assert_eq!(buf, ['a', 'b', 'c', 'd', 'e']);
1091 }
1092
1093 #[test]
1094 fn split_off_first_and_last_empty() {
1095 let mut buf: [i32; 0] = [];
1096 let mut wo = WriteOnly::from_mut(buf.as_mut_slice());
1097
1098 assert!(wo.split_off_first().is_none());
1099 assert!(wo.split_off_last().is_none());
1100 }
1101
    /// `cast_elements()` must panic when the new element type’s size differs
    /// from the old one’s (here `u16` vs. `u8`).
    #[test]
    #[should_panic(expected = "sizes of the two element types must be equal")]
    fn cast_elements_size_mismatch() {
        let mut buf = [0u8; 4];
        let wo = WriteOnly::from_mut(buf.as_mut_slice());
        // SAFETY: deliberately not upheld — the call is expected to panic on
        // its size precondition check, which is exactly what this test asserts.
        unsafe { wo.cast_elements::<u16>() };
    }
1109
    /// `cast_elements()` must panic when the new element type requires stricter
    /// alignment than the old one can guarantee.
    #[test]
    #[should_panic(expected = "alignment of the new element type must be \
        less than or equal to the alignment of the old element type")]
    fn cast_elements_alignment_mismatch() {
        // An 8-byte type with 8-byte alignment, to cast to from a 1-byte-aligned source.
        #[repr(align(8))]
        struct BigAlign {
            _unused: u64,
        }

        // arrays are only as aligned as their elements
        let mut buf = [[0u8; 8]; 1];
        let wo = WriteOnly::from_mut(buf.as_mut_slice());

        // SAFETY: deliberately not upheld — the call is expected to panic on
        // its alignment precondition check, which is what this test asserts.
        unsafe { wo.cast_elements::<BigAlign>() };
    }
1125
1126 // fill() has specialized implementation for byte-like types, so test all those, and
1127 // also a type that isn’t byte sized, and a type that is byte sized but not initialized.
1128 #[test]
1129 fn fill_byte_u8() {
1130 let mut buf = [0u8; 5];
1131 WriteOnly::from_mut(buf.as_mut_slice()).fill(42);
1132 assert_eq!(buf, [42; 5]);
1133 }
1134 #[test]
1135 fn fill_byte_i8() {
1136 let mut buf = [0i8; 5];
1137 WriteOnly::from_mut(buf.as_mut_slice()).fill(-42);
1138 assert_eq!(buf, [-42; 5]);
1139 }
1140 #[test]
1141 fn fill_byte_bool() {
1142 let mut buf = [false; 5];
1143 WriteOnly::from_mut(buf.as_mut_slice()).fill(true);
1144 assert_eq!(buf, [true; 5]);
1145 }
1146 #[test]
1147 fn fill_nonbyte_u16() {
1148 let mut buf = [0u16; 5];
1149 WriteOnly::from_mut(buf.as_mut_slice()).fill(12345);
1150 assert_eq!(buf, [12345; 5]);
1151 }
1152 #[test]
1153 fn fill_nonbyte_uninit() {
1154 let mut buf = [mem::MaybeUninit::<u8>::uninit(); 5];
1155 WriteOnly::from_mut(buf.as_mut_slice()).fill(mem::MaybeUninit::uninit());
1156 // Can't do a comparison, but we can at least let Miri notice if we just did UB.
1157 }
1158}