write_only/slice/volatile.rs

// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at https://mozilla.org/MPL/2.0/.

use core::{marker::PhantomData, mem, ptr};

use crate::{WriteAt, WriteFromSliceAt};

/// A write-only **slice** with **non-dropping volatile** write access.
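///
/// The simplest way to obtain one is through the `From<&mut [T]>` impl. Below is a
/// minimal usage sketch; the module paths in the `use` line are illustrative and may
/// differ from how this crate actually exports these items:
///
/// ```ignore
/// use write_only::{slice::VolatileWriteOnlySlice, WriteAt};
///
/// let mut buffer = [0u8; 4];
/// let mut slice = VolatileWriteOnlySlice::from(&mut buffer[..]);
/// // Bounds-checked volatile write of a single element; the old value is not dropped.
/// slice.write_at(0, 0xFF);
/// ```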
pub struct VolatileWriteOnlySlice<'a, T: 'a> {
    data: *mut T,
    len: usize,
    // Models exclusive write access to the pointee for `'a` (keeps `T` invariant).
    _phantom: PhantomData<&'a mut T>,
}

impl<'a, T: 'a> VolatileWriteOnlySlice<'a, T> {
    /// Forms a write-only slice from a pointer and a length.
    ///
    /// The `len` argument is the number of **elements**, not the number of bytes.
    ///
    /// # Safety
    ///
    /// Behavior is undefined if any of the following conditions are violated:
    ///
    /// * `data` must be [valid](http://doc.rust-lang.org/core/ptr/index.html#safety) for writes for `len * mem::size_of::<T>()` many bytes,
    ///   and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single allocated object!
    ///       Slices can never span across multiple allocated objects.
    ///     * `data` must be non-null and aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`::core::ptr::NonNull::dangling()`].
    ///
    /// * `data` must point to `len` consecutive properly initialized values of type `T`.
    ///
    /// * The memory referenced by the returned slice must not be accessed through any other
    ///   pointer (not derived from the return value) for the duration of lifetime `'a`.
    ///   Both read and write accesses are forbidden.
    ///
    /// * The total size `len * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of
    ///   [`pointer::offset`](https://doc.rust-lang.org/std/primitive.pointer.html#method.offset).
    ///
    /// # Caveat
    ///
    /// The lifetime for the returned slice is inferred from its usage. To
    /// prevent accidental misuse, it's suggested to tie the lifetime to whichever
    /// source lifetime is safe in the context, such as by providing a helper
    /// function taking the lifetime of a host value for the slice, or by explicit
    /// annotation.
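    ///
    /// # Example
    ///
    /// A minimal sketch of tying the lifetime to a host value; the helper function
    /// below is illustrative and not part of this crate:
    ///
    /// ```ignore
    /// // Hypothetical helper: the returned slice mutably borrows `buffer`,
    /// // so it cannot outlive the host value it was created from.
    /// fn write_only_view<T>(buffer: &mut [T]) -> VolatileWriteOnlySlice<'_, T> {
    ///     // SAFETY: `buffer` is an initialized, exclusively borrowed slice, so the
    ///     // pointer is valid, aligned, and unaliased for the borrow's lifetime.
    ///     unsafe { VolatileWriteOnlySlice::from_raw_parts(buffer.as_mut_ptr(), buffer.len()) }
    /// }
    /// ```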
    #[inline]
    pub unsafe fn from_raw_parts(data: *mut T, len: usize) -> Self {
        debug_assert!(
            !data.is_null() && (data.align_offset(mem::align_of::<T>()) == 0),
            "attempt to create unaligned or null slice"
        );
        debug_assert!(
            mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
            "attempt to create slice covering at least half the address space"
        );
        // SAFETY: the caller must uphold the safety contract for `from_raw_parts`.
        Self {
            data,
            len,
            _phantom: PhantomData,
        }
    }

    /// Returns the number of elements in the slice.
    #[inline]
    pub fn len(&self) -> usize {
        self.len
    }

    /// Returns `true` if the slice has a length of 0.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }
}

impl<'a, T: 'a> WriteAt<T> for VolatileWriteOnlySlice<'a, T> {
    #[inline]
    fn write_at(&mut self, index: usize, value: T) {
        assert!(index < self.len);

        unsafe {
            self.write_at_unchecked(index, value);
        }
    }

    #[inline]
    unsafe fn write_at_unchecked(&mut self, index: usize, value: T) {
        self.data.add(index).write_volatile(value);
    }
}

impl<'a, T: 'a> WriteFromSliceAt<T> for VolatileWriteOnlySlice<'a, T> {
    #[inline]
    fn write_cloning_from_slice_at(&mut self, src: &[T], offset: usize)
    where
        T: Clone,
    {
        assert!(offset <= self.len && src.len() <= self.len - offset);

        // SAFETY: `self` is valid for `self.len()` elements by definition,
        // and `src` was checked to have a length of at most `self.len() - offset`.
        // The slices cannot overlap because mutable references are exclusive.

        for (index, item) in src.iter().enumerate() {
            unsafe {
                self.data.add(offset + index).write_volatile(item.clone());
            }
        }
    }

    #[inline]
    fn write_copying_from_slice_at(&mut self, src: &[T], offset: usize)
    where
        T: Copy,
    {
        assert!(offset <= self.len && src.len() <= self.len - offset);

        // SAFETY: `self` is valid for `self.len()` elements by definition,
        // and `src` was checked to have a length of at most `self.len - offset`.
        // The slices cannot overlap because mutable references are exclusive.
        unsafe {
            // FIXME(regexident): Replace with efficient `core::ptr::volatile_copy_nonoverlapping`,
            // if ever stabilized: https://github.com/rust-lang/rust/issues/58041

            #[cfg(feature = "core_intrinsics")]
            core::intrinsics::volatile_copy_nonoverlapping_memory(
                self.data.add(offset),
                src.as_ptr(),
                src.len(),
            );

            #[cfg(not(feature = "core_intrinsics"))]
            {
                let dst_ptr = self.data.add(offset);
                for (index, item) in src.iter().enumerate() {
                    ptr::write_volatile(dst_ptr.add(index), *item);
                }
            }
        }
    }
}

impl<'a, T: 'a> From<&'a mut [T]> for VolatileWriteOnlySlice<'a, T> {
    #[inline]
    fn from(slice: &'a mut [T]) -> Self {
        unsafe { Self::from_raw_parts(slice.as_mut_ptr(), slice.len()) }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    use droptest::prelude::*;

    #[test]
    fn from_raw_parts() {
        let registry = DropRegistry::default();
        let mut guards: Vec<_> = (0..3).map(|i| registry.new_guard_for(i)).collect();

        let reference =
            unsafe { VolatileWriteOnlySlice::from_raw_parts(guards.as_mut_ptr(), guards.len()) };

        std::mem::drop(reference);

        assert_drop_stats!(registry, { created: 3, dropped: 0 });

        std::mem::drop(guards);

        assert_drop_stats!(registry, { created: 3, dropped: 3 });
    }

    #[test]
    fn from() {
        let registry = DropRegistry::default();
        let mut guards: Vec<_> = (0..3).map(|i| registry.new_guard_for(i)).collect();

        let reference = VolatileWriteOnlySlice::from(&mut guards[..]);

        std::mem::drop(reference);

        assert_drop_stats!(registry, { created: 3, dropped: 0 });

        std::mem::drop(guards);

        assert_drop_stats!(registry, { created: 3, dropped: 3 });
    }

    #[test]
    fn write_at() {
        let registry = DropRegistry::default();
        let (old_ids, mut guards): (Vec<_>, Vec<_>) =
            (0..3).map(|i| registry.new_guard_for(i).by_id()).unzip();
        let (new_id, new_guard) = registry.new_guard_for(3).by_id();

        let mut slice = VolatileWriteOnlySlice::from(&mut guards[..]);
        slice.write_at(1, new_guard);

        assert_eq!(guards[1].id(), new_id);
        assert_eq!(guards[1].value(), &3);

        assert_no_drop!(registry, old_ids[1]);
        assert_drop_stats!(registry, { created: 4, dropped: 0 });
    }

    #[test]
    #[should_panic]
    fn write_at_out_of_bounds() {
        let registry = DropRegistry::default();
        let mut guards: Vec<_> = (0..3).map(|i| registry.new_guard_for(i)).collect();
        let new_guard = registry.new_guard_for(3);

        let mut slice = VolatileWriteOnlySlice::from(&mut guards[..]);
        slice.write_at(10, new_guard);
    }

    #[test]
    fn write_cloning_from_slice_at() {
        let registry = DropRegistry::default();
        let (old_ids, mut guards): (Vec<_>, Vec<_>) =
            (0..5).map(|i| registry.new_guard_for(i).by_id()).unzip();
        let new_guards: Vec<_> = (5..8).map(|i| registry.new_guard_for(i)).collect();

        let mut slice = VolatileWriteOnlySlice::from(&mut guards[..]);
        slice.write_cloning_from_slice_at(&new_guards[..], 1);

        assert_ne!(guards[1].id(), old_ids[1]);
        assert_eq!(guards[1].value(), &5);
        assert_ne!(guards[2].id(), old_ids[2]);
        assert_eq!(guards[2].value(), &6);
        assert_ne!(guards[3].id(), old_ids[3]);
        assert_eq!(guards[3].value(), &7);

        assert_no_drop!(registry, old_ids[1]);
        assert_no_drop!(registry, old_ids[2]);
        assert_no_drop!(registry, old_ids[3]);
        assert_drop_stats!(registry, { created: 11, dropped: 0 });
    }

    #[test]
    fn write_copying_from_slice_at() {
        let mut values: Vec<_> = (0..5).collect();
        let new_values: Vec<_> = (5..8).collect();

        let mut slice = VolatileWriteOnlySlice::from(&mut values[..]);
        slice.write_copying_from_slice_at(&new_values[..], 1);

        assert_eq!(values, &[0, 5, 6, 7, 4]);
    }
}