write_only/slice/non_volatile.rs

// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at https://mozilla.org/MPL/2.0/.

use core::{marker::PhantomData, mem};

use crate::{PutAt, PutFromSliceAt, WriteAt, WriteFromSliceAt};

/// A write-only **slice** with **dropping non-volatile** write access.
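///
/// Overwriting an element via [`PutAt::put_at`] drops the value previously stored at
/// that index, while [`WriteAt::write_at`] overwrites the slot without running the old
/// value's destructor.
///
/// A minimal sketch of that difference, with the `PutAt`/`WriteAt` traits assumed to be
/// in scope (marked `ignore` because the exact import paths depend on how the crate
/// re-exports these items):
///
/// ```ignore
/// let mut data = vec![String::from("a"), String::from("b")];
/// let mut slice = WriteOnlySlice::from(&mut data[..]);
///
/// // `put_at` drops the old `String` at index 0 before storing the new one.
/// slice.put_at(0, String::from("x"));
///
/// // `write_at` stores at index 1 without dropping the old `String` (it is leaked).
/// slice.write_at(1, String::from("y"));
/// ```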
pub struct WriteOnlySlice<'a, T: 'a> {
    data: *mut T,
    len: usize,
    _phantom: PhantomData<&'a T>,
}

impl<'a, T: 'a> WriteOnlySlice<'a, T> {
    /// Forms a write-only slice from a pointer and a length.
    ///
    /// The `len` argument is the number of **elements**, not the number of bytes.
    ///
    /// # Safety
    ///
    /// Behavior is undefined if any of the following conditions are violated:
    ///
    /// * `data` must be [valid](https://doc.rust-lang.org/core/ptr/index.html#safety) for both reads and writes
    ///   for `len * mem::size_of::<T>()` many bytes, and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single allocated object!
    ///       Slices can never span across multiple allocated objects.
    ///     * `data` must be non-null and aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`::core::ptr::NonNull::dangling()`].
    ///
    /// * `data` must point to `len` consecutive properly initialized items of type `T`.
    ///
    /// * The memory referenced by the returned slice must not be accessed through any other
    ///   pointer (not derived from the return value) for the duration of lifetime `'a`.
    ///   Both read and write accesses through other pointers are forbidden.
    ///
    /// * The total size `len * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of
    ///   [`pointer::offset`](https://doc.rust-lang.org/std/primitive.pointer.html#method.offset).
    ///
    /// # Caveat
    ///
    /// The lifetime for the returned slice is inferred from its usage. To
    /// prevent accidental misuse, it's suggested to tie the lifetime to whichever
    /// source lifetime is safe in the context, such as by providing a helper
    /// function taking the lifetime of a host value for the slice, or by explicit
    /// annotation.
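    ///
    /// # Examples
    ///
    /// A sketch of the helper-function pattern described above: the output lifetime is tied
    /// to the borrow of a host buffer, so the write-only view cannot outlive the memory it
    /// points into (marked `ignore` because the exact import paths depend on how the crate
    /// re-exports these items):
    ///
    /// ```ignore
    /// fn write_only_view<'a>(buffer: &'a mut [u8]) -> WriteOnlySlice<'a, u8> {
    ///     // SAFETY: `buffer` is a live, exclusively borrowed slice, so its pointer is
    ///     // non-null, aligned, initialized, and valid for `buffer.len()` elements for `'a`.
    ///     unsafe { WriteOnlySlice::from_raw_parts(buffer.as_mut_ptr(), buffer.len()) }
    /// }
    /// ```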
    #[inline]
    pub unsafe fn from_raw_parts(data: *mut T, len: usize) -> Self {
        debug_assert!(
            !data.is_null() && (data.align_offset(mem::align_of::<T>()) == 0),
            "attempt to create unaligned or null slice"
        );
        debug_assert!(
            mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
            "attempt to create slice covering at least half the address space"
        );
        // SAFETY: the caller must uphold the safety contract for `from_raw_parts`.
        Self {
            data,
            len,
            _phantom: PhantomData,
        }
    }

    #[inline]
    pub fn len(&self) -> usize {
        self.len
    }

    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }
}

impl<'a, T: 'a> PutAt<T> for WriteOnlySlice<'a, T> {
    #[inline]
    fn put_at(&mut self, index: usize, value: T) {
        assert!(index < self.len);

        // SAFETY: `index` is within bounds, checked above.
        unsafe {
            self.put_at_unchecked(index, value);
        }
    }

    #[inline]
    unsafe fn put_at_unchecked(&mut self, index: usize, value: T) {
        // Drops the value previously stored at `index` before storing `value`.
        *self.data.add(index) = value;
    }
}

impl<'a, T: 'a> WriteAt<T> for WriteOnlySlice<'a, T> {
    #[inline]
    fn write_at(&mut self, index: usize, value: T) {
        assert!(index < self.len);

        // SAFETY: `index` is within bounds, checked above.
        unsafe {
            self.write_at_unchecked(index, value);
        }
    }

    #[inline]
    unsafe fn write_at_unchecked(&mut self, index: usize, value: T) {
        // Overwrites the slot without dropping the value previously stored at `index`.
        self.data.add(index).write(value);
    }
}

impl<'a, T: 'a> PutFromSliceAt<T> for WriteOnlySlice<'a, T> {
    #[inline]
    fn put_cloning_from_slice_at(&mut self, src: &[T], offset: usize)
    where
        T: Clone,
    {
        assert!(offset <= self.len && src.len() <= self.len - offset);

        // SAFETY: `self` is valid for `self.len()` elements by definition, and `src`
        // was checked above to fit within `self.len() - offset` elements starting at
        // `offset`. The slices cannot overlap because mutable references are exclusive.
        for (index, item) in src.iter().enumerate() {
            unsafe {
                // Drops the value previously stored at `offset + index`.
                *self.data.add(offset + index) = item.clone();
            }
        }
    }
}

impl<'a, T: 'a> WriteFromSliceAt<T> for WriteOnlySlice<'a, T> {
    #[inline]
    fn write_cloning_from_slice_at(&mut self, src: &[T], offset: usize)
    where
        T: Clone,
    {
        assert!(offset <= self.len && src.len() <= self.len - offset);

        // SAFETY: `self` is valid for `self.len()` elements by definition, and `src`
        // was checked above to fit within `self.len() - offset` elements starting at
        // `offset`. The slices cannot overlap because mutable references are exclusive.
        for (index, item) in src.iter().enumerate() {
            unsafe {
                // Writes without dropping the value previously stored at `offset + index`.
                self.data.add(offset + index).write(item.clone());
            }
        }
    }

    #[inline]
    fn write_copying_from_slice_at(&mut self, src: &[T], offset: usize)
    where
        T: Copy,
    {
        assert!(offset <= self.len && src.len() <= self.len - offset);

        // SAFETY: `self` is valid for `self.len()` elements by definition, and `src`
        // was checked above to fit within `self.len() - offset` elements starting at
        // `offset`. The slices cannot overlap because mutable references are exclusive.
        unsafe {
            self.data
                .add(offset)
                .copy_from_nonoverlapping(src.as_ptr(), src.len());
        }
    }
}

impl<'a, T: 'a> From<&'a mut [T]> for WriteOnlySlice<'a, T> {
    #[inline]
    fn from(slice: &'a mut [T]) -> Self {
        // SAFETY: a live `&mut [T]` is non-null, aligned, initialized, and exclusively
        // borrowed for `'a`, so it satisfies the contract of `from_raw_parts`.
        unsafe { Self::from_raw_parts(slice.as_mut_ptr(), slice.len()) }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    use droptest::prelude::*;

    #[test]
    fn from_raw_parts() {
        let registry = DropRegistry::default();
        let mut guards: Vec<_> = (0..3).map(|i| registry.new_guard_for(i)).collect();

        let reference = unsafe { WriteOnlySlice::from_raw_parts(guards.as_mut_ptr(), guards.len()) };

        std::mem::drop(reference);

        assert_drop_stats!(registry, { created: 3, dropped: 0 });

        std::mem::drop(guards);

        assert_drop_stats!(registry, { created: 3, dropped: 3 });
    }

    #[test]
    fn from() {
        let registry = DropRegistry::default();
        let mut guards: Vec<_> = (0..3).map(|i| registry.new_guard_for(i)).collect();

        let reference = WriteOnlySlice::from(&mut guards[..]);

        std::mem::drop(reference);

        assert_drop_stats!(registry, { created: 3, dropped: 0 });

        std::mem::drop(guards);

        assert_drop_stats!(registry, { created: 3, dropped: 3 });
    }

    #[test]
    fn put_at() {
        let registry = DropRegistry::default();
        let (old_ids, mut guards): (Vec<_>, Vec<_>) =
            (0..3).map(|i| registry.new_guard_for(i).by_id()).unzip();
        let (new_id, new_guard) = registry.new_guard_for(3).by_id();

        let mut slice = WriteOnlySlice::from(&mut guards[..]);
        slice.put_at(1, new_guard);

        assert_eq!(guards[1].id(), new_id);
        assert_eq!(guards[1].value(), &3);

        assert_drop!(registry, old_ids[1]);
        assert_drop_stats!(registry, { created: 4, dropped: 1 });
    }

    #[test]
    #[should_panic]
    fn put_at_out_of_bounds() {
        let registry = DropRegistry::default();
        let mut guards: Vec<_> = (0..3).map(|i| registry.new_guard_for(i)).collect();
        let new_guard = registry.new_guard_for(3);

        let mut slice = WriteOnlySlice::from(&mut guards[..]);
        slice.put_at(10, new_guard);
    }

    #[test]
    fn write_at() {
        let registry = DropRegistry::default();
        let (old_ids, mut guards): (Vec<_>, Vec<_>) =
            (0..3).map(|i| registry.new_guard_for(i).by_id()).unzip();
        let (new_id, new_guard) = registry.new_guard_for(3).by_id();

        let mut slice = WriteOnlySlice::from(&mut guards[..]);
        slice.write_at(1, new_guard);

        assert_eq!(guards[1].id(), new_id);
        assert_eq!(guards[1].value(), &3);

        assert_no_drop!(registry, old_ids[1]);
        assert_drop_stats!(registry, { created: 4, dropped: 0 });
    }

    #[test]
    #[should_panic]
    fn write_at_out_of_bounds() {
        let registry = DropRegistry::default();
        let mut guards: Vec<_> = (0..3).map(|i| registry.new_guard_for(i)).collect();
        let new_guard = registry.new_guard_for(3);

        let mut slice = WriteOnlySlice::from(&mut guards[..]);
        slice.write_at(10, new_guard);
    }

    #[test]
    fn put_cloning_from_slice_at() {
        let registry = DropRegistry::default();
        let (old_ids, mut guards): (Vec<_>, Vec<_>) =
            (0..5).map(|i| registry.new_guard_for(i).by_id()).unzip();
        let new_guards: Vec<_> = (5..8).map(|i| registry.new_guard_for(i)).collect();

        let mut slice = WriteOnlySlice::from(&mut guards[..]);
        slice.put_cloning_from_slice_at(&new_guards[..], 1);

        assert_ne!(guards[1].id(), old_ids[1]);
        assert_eq!(guards[1].value(), &5);
        assert_ne!(guards[2].id(), old_ids[2]);
        assert_eq!(guards[2].value(), &6);
        assert_ne!(guards[3].id(), old_ids[3]);
        assert_eq!(guards[3].value(), &7);

        assert_drop!(registry, old_ids[1]);
        assert_drop!(registry, old_ids[2]);
        assert_drop!(registry, old_ids[3]);
        assert_drop_stats!(registry, { created: 11, dropped: 3 });
    }

    #[test]
    fn write_cloning_from_slice_at() {
        let registry = DropRegistry::default();
        let (old_ids, mut guards): (Vec<_>, Vec<_>) =
            (0..5).map(|i| registry.new_guard_for(i).by_id()).unzip();
        let new_guards: Vec<_> = (5..8).map(|i| registry.new_guard_for(i)).collect();

        let mut slice = WriteOnlySlice::from(&mut guards[..]);
        slice.write_cloning_from_slice_at(&new_guards[..], 1);

        assert_ne!(guards[1].id(), old_ids[1]);
        assert_eq!(guards[1].value(), &5);
        assert_ne!(guards[2].id(), old_ids[2]);
        assert_eq!(guards[2].value(), &6);
        assert_ne!(guards[3].id(), old_ids[3]);
        assert_eq!(guards[3].value(), &7);

        assert_no_drop!(registry, old_ids[1]);
        assert_no_drop!(registry, old_ids[2]);
        assert_no_drop!(registry, old_ids[3]);
        assert_drop_stats!(registry, { created: 11, dropped: 0 });
    }

    #[test]
    fn write_copying_from_slice_at() {
        let mut values: Vec<_> = (0..5).collect();
        let new_values: Vec<_> = (5..8).collect();

        let mut slice = WriteOnlySlice::from(&mut values[..]);
        slice.write_copying_from_slice_at(&new_values[..], 1);

        assert_eq!(values, &[0, 5, 6, 7, 4]);
    }
}