1// Copyright 2025 The safe-mmio Authors.
2// This project is dual-licensed under Apache 2.0 and MIT terms.
3// See LICENSE-APACHE and LICENSE-MIT for details.
4
5//! Types for safe MMIO device access, especially in systems with an MMU.
6
7#![no_std]
8#![deny(clippy::undocumented_unsafe_blocks)]
9#![deny(unsafe_op_in_unsafe_fn)]
10
11#[cfg(target_arch = "aarch64")]
12mod aarch64_mmio;
13pub mod fields;
14mod physical;
15#[cfg(not(target_arch = "aarch64"))]
16mod volatile_mmio;
17
18use crate::fields::{ReadOnly, ReadPure, ReadPureWrite, ReadWrite, WriteOnly};
19use core::{
20    array,
21    fmt::Debug,
22    marker::PhantomData,
23    ops::{Deref, Range},
24    ptr::{self, NonNull, slice_from_raw_parts_mut},
25};
26pub use physical::PhysicalInstance;
27use zerocopy::{FromBytes, Immutable, IntoBytes};
28
/// A unique owned pointer to the registers of some MMIO device.
///
/// It is guaranteed to be valid and unique; no other access to the MMIO space of the device may
/// happen for the lifetime `'a`.
///
/// A `UniqueMmioPointer` may be created from a mutable reference, but this should only be used for
/// testing purposes, as references should never be constructed for real MMIO address space.
///
/// Internally this wraps a [`SharedMmioPointer`]; the `Deref` impl below exposes the shared
/// (read-only) operations on the inner pointer.
pub struct UniqueMmioPointer<'a, T: ?Sized>(SharedMmioPointer<'a, T>);
37
// Implement Debug, Eq and PartialEq manually rather than deriving to avoid an unnecessary bound on
// T.
40
41impl<T: ?Sized> Debug for UniqueMmioPointer<'_, T> {
42    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
43        f.debug_tuple("UniqueMmioPointer")
44            .field(&self.0.regs)
45            .finish()
46    }
47}
48
49impl<T: ?Sized> PartialEq for UniqueMmioPointer<'_, T> {
50    fn eq(&self, other: &Self) -> bool {
51        self.0 == other.0
52    }
53}
54
55impl<T: ?Sized> Eq for UniqueMmioPointer<'_, T> {}
56
impl<T: ?Sized> UniqueMmioPointer<'_, T> {
    /// Creates a new `UniqueMmioPointer` from a non-null raw pointer.
    ///
    /// # Safety
    ///
    /// `regs` must be a properly aligned and valid pointer to some MMIO address space of type T,
    /// which is mapped as device memory and valid to read and write from any thread with volatile
    /// operations. There must not be any other aliases which are used to access the same MMIO
    /// region while this `UniqueMmioPointer` exists.
    ///
    /// If `T` contains any fields wrapped in [`ReadOnly`], [`WriteOnly`] or [`ReadWrite`] then they
    /// must indeed be safe to perform MMIO reads or writes on.
    pub const unsafe fn new(regs: NonNull<T>) -> Self {
        Self(SharedMmioPointer {
            regs,
            phantom: PhantomData,
        })
    }

    /// Creates a new `UniqueMmioPointer` with the same lifetime as this one.
    ///
    /// This is used internally by the [`field!`] macro and shouldn't be called directly.
    ///
    /// # Safety
    ///
    /// `regs` must be a properly aligned and valid pointer to some MMIO address space of type T,
    /// within the allocation that `self` points to.
    pub const unsafe fn child<U: ?Sized>(&mut self, regs: NonNull<U>) -> UniqueMmioPointer<'_, U> {
        // The child mutably borrows `self`, so the parent cannot be used to touch the same
        // registers until the child is dropped — this is what preserves uniqueness.
        UniqueMmioPointer(SharedMmioPointer {
            regs,
            phantom: PhantomData,
        })
    }

    /// Returns a raw mut pointer to the MMIO registers.
    ///
    /// Takes `&mut self` so only the unique owner can obtain a mutable pointer.
    pub const fn ptr_mut(&mut self) -> *mut T {
        self.0.regs.as_ptr()
    }

    /// Returns a `NonNull<T>` pointer to the MMIO registers.
    pub const fn ptr_nonnull(&mut self) -> NonNull<T> {
        self.0.regs
    }

    /// Returns a new `UniqueMmioPointer` with a lifetime no greater than this one.
    pub const fn reborrow(&mut self) -> UniqueMmioPointer<'_, T> {
        let ptr = self.ptr_nonnull();
        // SAFETY: `ptr` must be properly aligned and valid and within our allocation because it is
        // exactly our allocation.
        unsafe { self.child(ptr) }
    }
}
109
impl<'a, T: ?Sized> UniqueMmioPointer<'a, T> {
    /// Creates a new `UniqueMmioPointer` with the same lifetime as this one, but not tied to the
    /// lifetime this one is borrowed for.
    ///
    /// This is used internally by the [`split_fields!`] macro and shouldn't be called directly.
    ///
    /// # Safety
    ///
    /// `regs` must be a properly aligned and valid pointer to some MMIO address space of type T,
    /// within the allocation that `self` points to. `split_child` must not be called for the same
    /// child field more than once, and the original `UniqueMmioPointer` must not be used after
    /// `split_child` has been called for one or more of its fields.
    pub const unsafe fn split_child<U: ?Sized>(
        &mut self,
        regs: NonNull<U>,
    ) -> UniqueMmioPointer<'a, U> {
        // Unlike `child`, the result carries the full `'a` lifetime, so uniqueness relies entirely
        // on the caller upholding the contract above.
        UniqueMmioPointer(SharedMmioPointer {
            regs,
            phantom: PhantomData,
        })
    }
}
132
133impl<T: FromBytes + IntoBytes> UniqueMmioPointer<'_, ReadWrite<T>> {
134    /// Performs an MMIO read of the entire `T`.
135    pub fn read(&mut self) -> T {
136        // SAFETY: self.regs is always a valid and unique pointer to MMIO address space, and `T`
137        // being wrapped in `ReadWrite` implies that it is safe to read.
138        unsafe { self.read_unsafe().0 }
139    }
140}
141
142impl<T: Immutable + IntoBytes> UniqueMmioPointer<'_, ReadWrite<T>> {
143    /// Performs an MMIO write of the entire `T`.
144    pub fn write(&mut self, value: T) {
145        // SAFETY: self.regs is always a valid and unique pointer to MMIO address space, and `T`
146        // being wrapped in `ReadWrite` implies that it is safe to write.
147        unsafe {
148            self.write_unsafe(ReadWrite(value));
149        }
150    }
151}
152
153impl<T: FromBytes + Immutable + IntoBytes> UniqueMmioPointer<'_, ReadWrite<T>> {
154    /// Performs an MMIO read of the entire `T`, applies the given function to it, and then performs
155    /// an MMIO write of the resulting value.
156    ///
157    /// This is equivalent to calling [`read`](Self::read) then [`write`](Self::write).
158    pub fn modify(&mut self, f: impl FnOnce(T) -> T) {
159        let value = self.read();
160        self.write(f(value));
161    }
162
163    /// Performs an MMIO read of the entire `T`, calls the given function to modify it, and then
164    /// performs an MMIO write of the resulting value.
165    ///
166    /// This is equivalent to calling [`read`](Self::read) then [`write`](Self::write).
167    pub fn modify_mut(&mut self, f: impl FnOnce(&mut T)) {
168        let mut value = self.read();
169        f(&mut value);
170        self.write(value);
171    }
172}
173
174impl<T: Immutable + IntoBytes> UniqueMmioPointer<'_, ReadPureWrite<T>> {
175    /// Performs an MMIO write of the entire `T`.
176    pub fn write(&mut self, value: T) {
177        // SAFETY: self.regs is always a valid and unique pointer to MMIO address space, and `T`
178        // being wrapped in `ReadPureWrite` implies that it is safe to write.
179        unsafe {
180            self.write_unsafe(ReadPureWrite(value));
181        }
182    }
183}
184
185impl<T: FromBytes + Immutable + IntoBytes> UniqueMmioPointer<'_, ReadPureWrite<T>> {
186    /// Performs an MMIO read of the entire `T`, applies the given function to it, and then performs
187    /// an MMIO write of the resulting value.
188    ///
189    /// This is equivalent to calling [`read`](Self::read) then [`write`](Self::write).
190    pub fn modify(&mut self, f: impl FnOnce(T) -> T) {
191        let value = self.read();
192        self.write(f(value));
193    }
194
195    /// Performs an MMIO read of the entire `T`, calls the given function to modify it, and then
196    /// performs an MMIO write of the resulting value.
197    ///
198    /// This is equivalent to calling [`read`](Self::read) then [`write`](Self::write).
199    pub fn modify_mut(&mut self, f: impl FnOnce(&mut T)) {
200        let mut value = self.read();
201        f(&mut value);
202        self.write(value);
203    }
204}
205
206impl<T: FromBytes + IntoBytes> UniqueMmioPointer<'_, ReadOnly<T>> {
207    /// Performs an MMIO read of the entire `T`.
208    pub fn read(&mut self) -> T {
209        // SAFETY: self.regs is always a valid and unique pointer to MMIO address space, and `T`
210        // being wrapped in `ReadOnly` implies that it is safe to read.
211        unsafe { self.read_unsafe().0 }
212    }
213}
214
215impl<T: Immutable + IntoBytes> UniqueMmioPointer<'_, WriteOnly<T>> {
216    /// Performs an MMIO write of the entire `T`.
217    pub fn write(&mut self, value: T) {
218        // SAFETY: self.regs is always a valid and unique pointer to MMIO address space, and `T`
219        // being wrapped in `WriteOnly` implies that it is safe to write.
220        unsafe {
221            self.write_unsafe(WriteOnly(value));
222        }
223    }
224}
225
impl<'a, T> UniqueMmioPointer<'a, [T]> {
    /// Returns a `UniqueMmioPointer` to an element of this slice, or `None` if the index is out of
    /// bounds.
    ///
    /// # Example
    ///
    /// ```
    /// use safe_mmio::{UniqueMmioPointer, fields::ReadWrite};
    ///
    /// let mut slice: UniqueMmioPointer<[ReadWrite<u32>]>;
    /// # let mut fake = [ReadWrite(1), ReadWrite(2), ReadWrite(3)];
    /// # slice = UniqueMmioPointer::from(fake.as_mut_slice());
    /// let mut element = slice.get(1).unwrap();
    /// element.write(42);
    /// ```
    pub const fn get(&mut self, index: usize) -> Option<UniqueMmioPointer<'_, T>> {
        if index >= self.0.len() {
            return None;
        }
        // SAFETY: self.ptr_mut() is guaranteed to return a pointer that is valid for MMIO and
        // unique, as promised by the caller of `UniqueMmioPointer::new`. The bounds check above
        // ensures `index` is within the slice.
        let regs = NonNull::new(unsafe { &raw mut (*self.ptr_mut())[index] }).unwrap();
        // SAFETY: We created regs from the raw slice in self.regs, so it must also be valid, unique
        // and within the allocation of self.regs.
        Some(unsafe { self.child(regs) })
    }

    /// Returns a `UniqueMmioPointer` to a range of elements of this slice, or `None` if the range
    /// is out of bounds.
    ///
    /// # Example
    ///
    /// ```
    /// use safe_mmio::{UniqueMmioPointer, fields::ReadWrite};
    ///
    /// let mut slice: UniqueMmioPointer<[ReadWrite<u32>]>;
    /// # let mut fake = [ReadWrite(1), ReadWrite(2), ReadWrite(3)];
    /// # slice = UniqueMmioPointer::from(fake.as_mut_slice());
    /// let mut range = slice.get_range(1..3).unwrap();
    /// range.get(0).unwrap().write(100);
    /// range.get(1).unwrap().write(200);
    /// assert_eq!(None, range.get(2));
    /// assert_eq!(100, slice.get(1).unwrap().read());
    /// assert_eq!(200, slice.get(2).unwrap().read());
    /// ```
    pub fn get_range(&mut self, range: Range<usize>) -> Option<UniqueMmioPointer<'_, [T]>> {
        if range.start > range.end || range.end > self.0.len() {
            return None;
        }

        let regs_start = if !range.is_empty() {
            // SAFETY: self.ptr_mut() is guaranteed to return a pointer that is valid for MMIO and
            // unique, as promised by the caller of `UniqueMmioPointer::new`. range.start is within the
            // boundaries of the slice.
            unsafe { &raw mut (*self.ptr_mut())[range.start] }
        } else {
            // Based on the documentation of core::slice::from_raw_parts_mut, NonNull::dangling()
            // should be used for creating zero-length slices.
            NonNull::dangling().as_ptr()
        };

        let regs = NonNull::new(slice_from_raw_parts_mut(regs_start, range.len())).unwrap();

        // SAFETY: We created regs from the valid start address of regs_start and `range` is within
        // the boundaries of self.regs, so it must also be valid, unique and within the allocation
        // of self.regs.
        Some(unsafe { self.child(regs) })
    }

    /// Returns a new iterator of the items of the slice.
    ///
    /// The iterator borrows `self`, so the original pointer is usable again once iteration ends.
    pub fn iter(&mut self) -> UniqueMmioPointerIterator<'_, T> {
        UniqueMmioPointerIterator {
            tail: self.reborrow(),
        }
    }

    /// Returns a `UniqueMmioPointer` to an element of this slice, or `None` if the index is out of
    /// bounds.
    ///
    /// Unlike [`UniqueMmioPointer::get`] this takes ownership of the original pointer. This is
    /// useful when you want to store the resulting pointer without keeping the original pointer
    /// around.
    ///
    /// # Example
    ///
    /// ```
    /// use safe_mmio::{UniqueMmioPointer, fields::ReadWrite};
    ///
    /// let mut slice: UniqueMmioPointer<[ReadWrite<u32>]>;
    /// # let mut fake = [ReadWrite(1), ReadWrite(2), ReadWrite(3)];
    /// # slice = UniqueMmioPointer::from(fake.as_mut_slice());
    /// let mut element = slice.take(1).unwrap();
    /// element.write(42);
    /// // `slice` can no longer be used at this point.
    /// ```
    pub const fn take(mut self, index: usize) -> Option<UniqueMmioPointer<'a, T>> {
        if index >= self.0.len() {
            return None;
        }
        // SAFETY: self.ptr_mut() is guaranteed to return a pointer that is valid for MMIO and
        // unique, as promised by the caller of `UniqueMmioPointer::new`.
        let regs = NonNull::new(unsafe { &raw mut (*self.ptr_mut())[index] }).unwrap();
        // SAFETY: We created regs from the raw slice in self.regs, so it must also be valid, unique
        // and within the allocation of self.regs. `self` is dropped immediately after this and we
        // don't split out any other children.
        Some(unsafe { self.split_child(regs) })
    }
}
334
impl<'a, T, const LEN: usize> UniqueMmioPointer<'a, [T; LEN]> {
    /// Splits a `UniqueMmioPointer` to an array into an array of `UniqueMmioPointer`s.
    pub fn split(mut self) -> [UniqueMmioPointer<'a, T>; LEN] {
        array::from_fn(|i| {
            UniqueMmioPointer(SharedMmioPointer {
                // SAFETY: self.regs is always unique and valid for MMIO access. We make sure the
                // pointers we split it into don't overlap, so the same applies to each of them.
                regs: NonNull::new(unsafe { &raw mut (*self.ptr_mut())[i] }).unwrap(),
                phantom: PhantomData,
            })
        })
    }

    /// Splits a `UniqueMmioPointer` to an array into an array of `UniqueMmioPointer`s, taking only
    /// the `chosen` indices.
    ///
    /// Panics if `chosen` contains the same index more than once, or any index out of bounds.
    pub fn split_some<const N: usize>(
        mut self,
        chosen: [usize; N],
    ) -> [UniqueMmioPointer<'a, T>; N] {
        // O(N^2) duplicate check; N is a small compile-time constant.
        for (i, a) in chosen.iter().enumerate() {
            for (j, b) in chosen.iter().enumerate() {
                assert!(i == j || a != b, "chosen array must not contain duplicates");
            }
        }
        chosen.map(|chosen_index| {
            UniqueMmioPointer(SharedMmioPointer {
                // SAFETY: self.regs is always unique and valid for MMIO access. We checked that
                // `chosen` doesn't contain duplicates so the pointers we split it into don't
                // overlap, so the same applies to each of them.
                regs: NonNull::new(unsafe { &raw mut (*self.ptr_mut())[chosen_index] }).unwrap(),
                phantom: PhantomData,
            })
        })
    }

    /// Converts this array pointer to an equivalent slice pointer.
    pub const fn as_mut_slice(&mut self) -> UniqueMmioPointer<'_, [T]> {
        let regs = NonNull::new(self.ptr_mut()).unwrap();
        // SAFETY: We created regs from the raw array in self.regs, so it must also be valid, unique
        // and within the allocation of self.regs.
        unsafe { self.child(regs) }
    }

    /// Returns a `UniqueMmioPointer` to an element of this array, or `None` if the index is out of
    /// bounds.
    ///
    /// # Example
    ///
    /// ```
    /// use safe_mmio::{UniqueMmioPointer, fields::ReadWrite};
    ///
    /// let mut slice: UniqueMmioPointer<[ReadWrite<u32>; 3]>;
    /// # let mut fake = [ReadWrite(1), ReadWrite(2), ReadWrite(3)];
    /// # slice = UniqueMmioPointer::from(&mut fake);
    /// let mut element = slice.get(1).unwrap();
    /// element.write(42);
    /// slice.get(2).unwrap().write(100);
    /// ```
    pub const fn get(&mut self, index: usize) -> Option<UniqueMmioPointer<'_, T>> {
        if index >= LEN {
            return None;
        }
        // SAFETY: self.ptr_mut() is guaranteed to return a pointer that is valid for MMIO and
        // unique, as promised by the caller of `UniqueMmioPointer::new`. The bounds check above
        // ensures `index` is within the array.
        let regs = NonNull::new(unsafe { &raw mut (*self.ptr_mut())[index] }).unwrap();
        // SAFETY: We created regs from the raw array in self.regs, so it must also be valid, unique
        // and within the allocation of self.regs.
        Some(unsafe { self.child(regs) })
    }

    /// Returns a `UniqueMmioPointer` to a range of elements of this array, or `None` if the range
    /// is out of bounds.
    ///
    /// # Example
    ///
    /// ```
    /// use safe_mmio::{UniqueMmioPointer, fields::ReadWrite};
    ///
    /// let mut slice: UniqueMmioPointer<[ReadWrite<u32>; 3]>;
    /// # let mut fake = [ReadWrite(1), ReadWrite(2), ReadWrite(3)];
    /// # slice = UniqueMmioPointer::from(&mut fake);
    /// let mut range = slice.get_range(1..3).unwrap();
    /// range.get(0).unwrap().write(100);
    /// range.get(1).unwrap().write(200);
    /// assert_eq!(None, range.get(2));
    /// assert_eq!(100, slice.get(1).unwrap().read());
    /// assert_eq!(200, slice.get(2).unwrap().read());
    /// ```
    pub fn get_range(&mut self, range: Range<usize>) -> Option<UniqueMmioPointer<'_, [T]>> {
        if range.start > range.end || range.end > LEN {
            return None;
        }

        let regs_start = if !range.is_empty() {
            // SAFETY: self.ptr_mut() is guaranteed to return a pointer that is valid for MMIO and
            // unique, as promised by the caller of `UniqueMmioPointer::new`. range.start is within the
            // boundaries of the array.
            unsafe { &raw mut (*self.ptr_mut())[range.start] }
        } else {
            // Based on the documentation of core::slice::from_raw_parts_mut, NonNull::dangling()
            // should be used for creating zero-length slices.
            NonNull::dangling().as_ptr()
        };

        let regs = NonNull::new(slice_from_raw_parts_mut(regs_start, range.len())).unwrap();

        // SAFETY: We created regs from the valid start address of regs_start and `range` is within
        // the boundaries of self.regs, so it must also be valid, unique and within the allocation
        // of self.regs.
        Some(unsafe { self.child(regs) })
    }

    /// Returns a new iterator to the items of the array.
    pub fn iter(&mut self) -> UniqueMmioPointerIterator<'_, T> {
        UniqueMmioPointerIterator {
            tail: self.as_mut_slice(),
        }
    }

    /// Returns a `UniqueMmioPointer` to an element of this array, or `None` if the index is out of
    /// bounds.
    ///
    /// Unlike [`UniqueMmioPointer::get`] this takes ownership of the original pointer. This is
    /// useful when you want to store the resulting pointer without keeping the original pointer
    /// around.
    ///
    /// # Example
    ///
    /// ```
    /// use safe_mmio::{UniqueMmioPointer, fields::ReadWrite};
    ///
    /// let mut array: UniqueMmioPointer<[ReadWrite<u32>; 3]>;
    /// # let mut fake = [ReadWrite(1), ReadWrite(2), ReadWrite(3)];
    /// # array = UniqueMmioPointer::from(&mut fake);
    /// let mut element = array.take(1).unwrap();
    /// element.write(42);
    /// // `array` can no longer be used at this point.
    /// ```
    pub const fn take(mut self, index: usize) -> Option<UniqueMmioPointer<'a, T>> {
        if index >= LEN {
            return None;
        }
        // SAFETY: self.ptr_mut() is guaranteed to return a pointer that is valid for MMIO and
        // unique, as promised by the caller of `UniqueMmioPointer::new`.
        let regs = NonNull::new(unsafe { &raw mut (*self.ptr_mut())[index] }).unwrap();
        // SAFETY: We created regs from the raw array in self.regs, so it must also be valid, unique
        // and within the allocation of self.regs. `self` is dropped immediately after this and we
        // don't split out any other children.
        Some(unsafe { self.split_child(regs) })
    }
}
488
489impl<'a, T, const LEN: usize> From<UniqueMmioPointer<'a, [T; LEN]>> for UniqueMmioPointer<'a, [T]> {
490    fn from(mut value: UniqueMmioPointer<'a, [T; LEN]>) -> Self {
491        let regs = NonNull::new(value.ptr_mut()).unwrap();
492        // SAFETY: regs comes from a UniqueMmioPointer so already satisfies all the safety
493        // requirements.
494        unsafe { UniqueMmioPointer::new(regs) }
495    }
496}
497
498impl<'a, T> From<UniqueMmioPointer<'a, T>> for UniqueMmioPointer<'a, [T; 1]> {
499    fn from(mut value: UniqueMmioPointer<'a, T>) -> Self {
500        let regs = NonNull::new(value.ptr_mut()).unwrap().cast();
501        // SAFETY: regs comes from a UniqueMmioPointer so already satisfies all the safety
502        // requirements.
503        unsafe { UniqueMmioPointer::new(regs) }
504    }
505}
506
507impl<'a, T> From<UniqueMmioPointer<'a, T>> for UniqueMmioPointer<'a, [T]> {
508    fn from(mut value: UniqueMmioPointer<'a, T>) -> Self {
509        let array: *mut [T; 1] = value.ptr_mut().cast();
510        let regs = NonNull::new(array).unwrap();
511        // SAFETY: regs comes from a UniqueMmioPointer so already satisfies all the safety
512        // requirements.
513        unsafe { UniqueMmioPointer::new(regs) }
514    }
515}
516
517impl<'a, T, const LEN: usize> From<UniqueMmioPointer<'a, [T; LEN]>>
518    for [UniqueMmioPointer<'a, T>; LEN]
519{
520    fn from(mut value: UniqueMmioPointer<'a, [T; LEN]>) -> Self {
521        array::from_fn(|i| {
522            let item_pointer = value.get(i).unwrap().ptr_mut();
523            // SAFETY: `split_child` is called only once on each item and the original
524            // `UniqueMmioPointer` is consumed by this function.
525            unsafe { value.split_child(core::ptr::NonNull::new(item_pointer).unwrap()) }
526        })
527    }
528}
529
530impl<'a, T: ?Sized> From<&'a mut T> for UniqueMmioPointer<'a, T> {
531    fn from(r: &'a mut T) -> Self {
532        Self(SharedMmioPointer {
533            regs: r.into(),
534            phantom: PhantomData,
535        })
536    }
537}
538
539impl<'a, T: ?Sized> Deref for UniqueMmioPointer<'a, T> {
540    type Target = SharedMmioPointer<'a, T>;
541
542    fn deref(&self) -> &Self::Target {
543        &self.0
544    }
545}
546
547impl<'a, T> IntoIterator for UniqueMmioPointer<'a, [T]> {
548    type Item = UniqueMmioPointer<'a, T>;
549
550    type IntoIter = UniqueMmioPointerIterator<'a, T>;
551
552    fn into_iter(self) -> Self::IntoIter {
553        UniqueMmioPointerIterator { tail: self }
554    }
555}
556
557impl<'a, T, const LEN: usize> IntoIterator for UniqueMmioPointer<'a, [T; LEN]> {
558    type Item = UniqueMmioPointer<'a, T>;
559
560    type IntoIter = UniqueMmioPointerIterator<'a, T>;
561
562    fn into_iter(self) -> Self::IntoIter {
563        UniqueMmioPointerIterator { tail: self.into() }
564    }
565}
566
/// Iterator over a `UniqueMmioPointer` slice, yielding pointers to items.
///
/// This iterator advances by splitting off the head element and shortening the
/// remaining tail.
#[derive(Debug)]
pub struct UniqueMmioPointerIterator<'a, T> {
    // The not-yet-yielded remainder of the slice; shrinks from the front on each `next()`.
    tail: UniqueMmioPointer<'a, [T]>,
}
575
impl<'a, T> Iterator for UniqueMmioPointerIterator<'a, T> {
    type Item = UniqueMmioPointer<'a, T>;

    fn next(&mut self) -> Option<Self::Item> {
        if !self.tail.is_empty() {
            // SAFETY: self.ptr_mut() is guaranteed to return a pointer that is valid for MMIO and
            // unique, as promised by the caller of `UniqueMmioPointer::new` and the slice is
            // not empty.
            let regs_head = NonNull::new(unsafe { &raw mut (*self.tail.ptr_mut())[0] }).unwrap();

            // SAFETY: regs_head is created from self.tail so it is valid and within the range of
            // the original pointer. There are no further split_child calls for the same child and
            // self.tail is moved past it in the following lines.
            let head = unsafe { self.tail.split_child(regs_head) };

            // `wrapping_add` keeps this safe code; for the final element the result is
            // one-past-the-end, which is only ever used as the base of a zero-length tail slice.
            let regs_tail = NonNull::new(slice_from_raw_parts_mut(
                regs_head.as_ptr().wrapping_add(1),
                self.tail.len() - 1,
            ))
            .unwrap();

            // SAFETY: regs is created from self.tail so it is valid and within the range of the
            // original pointer. The new pointer overwrites the original so it cannot be used
            // afterwards, and there are no further calls to split_child().
            self.tail = unsafe { self.tail.split_child(regs_tail) };

            Some(head)
        } else {
            None
        }
    }

    // The remaining length is known exactly from the tail slice.
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.tail.len(), Some(self.tail.len()))
    }
}
612
/// A shared pointer to the registers of some MMIO device.
///
/// It is guaranteed to be valid but unlike [`UniqueMmioPointer`] may not be unique.
pub struct SharedMmioPointer<'a, T: ?Sized> {
    // Non-null pointer to the device's MMIO registers.
    regs: NonNull<T>,
    // Ties the pointer to the lifetime `'a` it was created with, without owning a `T`.
    phantom: PhantomData<&'a T>,
}
620
// Implement Debug, Eq and PartialEq manually rather than deriving to avoid an unnecessary bound on
// T.
623
624impl<T: ?Sized> Debug for SharedMmioPointer<'_, T> {
625    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
626        f.debug_tuple("SharedMmioPointer")
627            .field(&self.regs)
628            .finish()
629    }
630}
631
632impl<T: ?Sized> PartialEq for SharedMmioPointer<'_, T> {
633    fn eq(&self, other: &Self) -> bool {
634        ptr::eq(self.regs.as_ptr(), other.regs.as_ptr())
635    }
636}
637
638impl<T: ?Sized> Eq for SharedMmioPointer<'_, T> {}
639
640impl<T: ?Sized> Clone for SharedMmioPointer<'_, T> {
641    fn clone(&self) -> Self {
642        *self
643    }
644}
645
646impl<T: ?Sized> Copy for SharedMmioPointer<'_, T> {}
647
impl<'a, T: ?Sized> SharedMmioPointer<'a, T> {
    /// Creates a new `SharedMmioPointer` with the same lifetime as this one.
    ///
    /// This is used internally by the [`field_shared!`] macro and shouldn't be called directly.
    ///
    /// # Safety
    ///
    /// `regs` must be a properly aligned and valid pointer to some MMIO address space of type T,
    /// within the allocation that `self` points to.
    pub const unsafe fn child<U: ?Sized>(&self, regs: NonNull<U>) -> SharedMmioPointer<'a, U> {
        // Unlike `UniqueMmioPointer::child`, this only needs `&self`: shared children may coexist
        // with each other and with the parent.
        SharedMmioPointer {
            regs,
            phantom: PhantomData,
        }
    }

    /// Returns a raw const pointer to the MMIO registers.
    pub const fn ptr(&self) -> *const T {
        self.regs.as_ptr()
    }
}
669
// SAFETY: A `SharedMmioPointer` always originates either from a reference or from a
// `UniqueMmioPointer`. The caller of `UniqueMmioPointer::new` promises that the MMIO registers can
// be accessed from any thread.
// NOTE(review): no corresponding `Sync` impl is visible in this part of the file — confirm whether
// one is defined elsewhere or intentionally omitted.
unsafe impl<T: ?Sized + Send + Sync> Send for SharedMmioPointer<'_, T> {}
674
675impl<'a, T: ?Sized> From<&'a T> for SharedMmioPointer<'a, T> {
676    fn from(r: &'a T) -> Self {
677        Self {
678            regs: r.into(),
679            phantom: PhantomData,
680        }
681    }
682}
683
684impl<'a, T: ?Sized> From<UniqueMmioPointer<'a, T>> for SharedMmioPointer<'a, T> {
685    fn from(unique: UniqueMmioPointer<'a, T>) -> Self {
686        unique.0
687    }
688}
689
690impl<T: FromBytes + IntoBytes> SharedMmioPointer<'_, ReadPure<T>> {
691    /// Performs an MMIO read of the entire `T`.
692    pub fn read(&self) -> T {
693        // SAFETY: self.regs is always a valid and unique pointer to MMIO address space, and `T`
694        // being wrapped in `ReadPure` implies that it is safe to read from a shared reference
695        // because doing so has no side-effects.
696        unsafe { self.read_unsafe().0 }
697    }
698}
699
700impl<T: FromBytes + IntoBytes> SharedMmioPointer<'_, ReadPureWrite<T>> {
701    /// Performs an MMIO read of the entire `T`.
702    pub fn read(&self) -> T {
703        // SAFETY: self.regs is always a valid pointer to MMIO address space, and `T`
704        // being wrapped in `ReadPureWrite` implies that it is safe to read from a shared reference
705        // because doing so has no side-effects.
706        unsafe { self.read_unsafe().0 }
707    }
708}
709
710impl<'a, T> SharedMmioPointer<'a, [T]> {
711    /// Splits a `UniqueMmioPointer` to a slice into an array of `UniqueMmioPointer`s, taking only
712    /// the `chosen` indices.
713    ///
714    /// Panics if `chosen` contains the same index more than once, or any index out of bounds.
715    pub fn split_some<const N: usize>(self, chosen: [usize; N]) -> [UniqueMmioPointer<'a, T>; N] {
716        for (i, a) in chosen.iter().enumerate() {
717            for (j, b) in chosen.iter().enumerate() {
718                assert!(i == j || a != b, "chosen array must not contain duplicates");
719            }
720        }
721        chosen.map(|chosen_index| {
722            UniqueMmioPointer(SharedMmioPointer {
723                // SAFETY: self.regs is always unique and valid for MMIO access. We checked that
724                // `chosen` doesn't contain duplicates so the pointers we split it into don't
725                // overlap, so the same applies to each of them.
726                regs: NonNull::new(unsafe { &raw mut (*self.regs.as_ptr())[chosen_index] })
727                    .unwrap(),
728                phantom: PhantomData,
729            })
730        })
731    }
732
733    /// Returns a `SharedMmioPointer` to an element of this slice, or `None` if the index is out of
734    /// bounds.
735    pub const fn get(&self, index: usize) -> Option<SharedMmioPointer<'a, T>> {
736        if index >= self.len() {
737            return None;
738        }
739        // SAFETY: self.regs is always unique and valid for MMIO access.
740        let regs = NonNull::new(unsafe { &raw mut (*self.regs.as_ptr())[index] }).unwrap();
741        // SAFETY: We created regs from the raw slice in self.regs, so it must also be valid, unique
742        // and within the allocation of self.regs.
743        Some(unsafe { self.child(regs) })
744    }
745
746    /// Returns a `SharedMmioPointer` to a range of elements of this slice, or `None` if the range
747    /// is out of bounds.
748    pub fn get_range(&self, range: Range<usize>) -> Option<SharedMmioPointer<'_, [T]>> {
749        if range.start > range.end || range.end > self.len() {
750            return None;
751        }
752
753        let regs_start = if !range.is_empty() {
754            // SAFETY: self.ptr_mut() is guaranteed to return a pointer that is valid for MMIO and
755            // unique, as promised by the caller of `UniqueMmioPointer::new`. range.start is within the
756            // boundaries of the slice.
757            unsafe { &raw mut (*self.regs.as_ptr())[range.start] }
758        } else {
759            // Based on the documentation of core::slice::from_raw_parts_mut, NonNull::dangling()
760            // should be used for creating zero-length slices.
761            NonNull::dangling().as_ptr()
762        };
763
764        let regs = NonNull::new(slice_from_raw_parts_mut(regs_start, range.len())).unwrap();
765
766        // SAFETY: We created regs from the valid start address of regs_start and `range` is within
767        // the boundaries of self.regs, so it must also be valid, unique and within the allocation
768        // of self.regs.
769        Some(unsafe { self.child(regs) })
770    }
771
772    /// Returns a new iterator of the items of the slice.
773    pub fn iter(&self) -> SharedMmioPointerIterator<'_, T> {
774        SharedMmioPointerIterator { tail: *self }
775    }
776
    /// Returns the number of elements in the slice.
    ///
    /// This only reads the pointer metadata; no MMIO access is performed.
    pub const fn len(&self) -> usize {
        self.regs.len()
    }
781
782    /// Returns whether the slice is empty.
783    pub const fn is_empty(&self) -> bool {
784        self.regs.is_empty()
785    }
786}
787
788impl<'a, T, const LEN: usize> SharedMmioPointer<'a, [T; LEN]> {
789    /// Splits a `SharedMmioPointer` to an array into an array of `SharedMmioPointer`s.
790    pub fn split(self) -> [SharedMmioPointer<'a, T>; LEN] {
791        array::from_fn(|i| SharedMmioPointer {
792            // SAFETY: self.regs is always unique and valid for MMIO access. We make sure the
793            // pointers we split it into don't overlap, so the same applies to each of them.
794            regs: NonNull::new(unsafe { &raw mut (*self.regs.as_ptr())[i] }).unwrap(),
795            phantom: PhantomData,
796        })
797    }
798
799    /// Converts this array pointer to an equivalent slice pointer.
800    pub const fn as_slice(&self) -> SharedMmioPointer<'a, [T]> {
801        let regs = NonNull::new(self.regs.as_ptr()).unwrap();
802        // SAFETY: We created regs from the raw array in self.regs, so it must also be valid, unique
803        // and within the allocation of self.regs.
804        unsafe { self.child(regs) }
805    }
806
807    /// Returns a `SharedMmioPointer` to an element of this array, or `None` if the index is out of
808    /// bounds.
809    pub const fn get(&self, index: usize) -> Option<SharedMmioPointer<'a, T>> {
810        if index >= LEN {
811            return None;
812        }
813        // SAFETY: self.regs is always unique and valid for MMIO access.
814        let regs = NonNull::new(unsafe { &raw mut (*self.regs.as_ptr())[index] }).unwrap();
815        // SAFETY: We created regs from the raw array in self.regs, so it must also be valid, unique
816        // and within the allocation of self.regs.
817        Some(unsafe { self.child(regs) })
818    }
819
820    /// Returns a `SharedMmioPointer` to a range of elements of this array, or `None` if the range
821    /// is out of bounds.
822    pub fn get_range(&self, range: Range<usize>) -> Option<SharedMmioPointer<'_, [T]>> {
823        if range.start > range.end || range.end > LEN {
824            return None;
825        }
826
827        let regs_start = if !range.is_empty() {
828            // SAFETY: self.regs is always unique and valid for MMIO access. range.start is within the
829            // boundaries of the slice.
830            unsafe { &raw mut (*self.regs.as_ptr())[range.start] }
831        } else {
832            // Based on the documentation of core::slice::from_raw_parts_mut, NonNull::dangling()
833            // should be used for creating zero-length slices.
834            NonNull::dangling().as_ptr()
835        };
836
837        let regs = NonNull::new(slice_from_raw_parts_mut(regs_start, range.len())).unwrap();
838
839        // SAFETY: We created regs from the valid start address of regs_start and `range` is within
840        // the boundaries of self.regs, so it must also be valid, unique and within the allocation
841        // of self.regs.
842        Some(unsafe { self.child(regs) })
843    }
844
845    /// Returns a new iterator of the items of the array.
846    pub fn iter(&self) -> SharedMmioPointerIterator<'_, T> {
847        SharedMmioPointerIterator {
848            tail: self.as_slice(),
849        }
850    }
851}
852
853impl<'a, T, const LEN: usize> From<SharedMmioPointer<'a, [T; LEN]>> for SharedMmioPointer<'a, [T]> {
854    fn from(value: SharedMmioPointer<'a, [T; LEN]>) -> Self {
855        let regs = NonNull::new(value.regs.as_ptr()).unwrap();
856        SharedMmioPointer {
857            regs,
858            phantom: PhantomData,
859        }
860    }
861}
862
863impl<'a, T> From<SharedMmioPointer<'a, T>> for SharedMmioPointer<'a, [T; 1]> {
864    fn from(value: SharedMmioPointer<'a, T>) -> Self {
865        let regs = NonNull::new(value.regs.as_ptr()).unwrap().cast();
866        SharedMmioPointer {
867            regs,
868            phantom: PhantomData,
869        }
870    }
871}
872
873impl<'a, T> From<SharedMmioPointer<'a, T>> for SharedMmioPointer<'a, [T]> {
874    fn from(value: SharedMmioPointer<'a, T>) -> Self {
875        let array: *mut [T; 1] = value.regs.as_ptr().cast();
876        let regs = NonNull::new(array).unwrap();
877        SharedMmioPointer {
878            regs,
879            phantom: PhantomData,
880        }
881    }
882}
883
884impl<'a, T> IntoIterator for SharedMmioPointer<'a, [T]> {
885    type Item = SharedMmioPointer<'a, T>;
886
887    type IntoIter = SharedMmioPointerIterator<'a, T>;
888
889    fn into_iter(self) -> Self::IntoIter {
890        SharedMmioPointerIterator { tail: self }
891    }
892}
893
894impl<'a, T, const LEN: usize> IntoIterator for SharedMmioPointer<'a, [T; LEN]> {
895    type Item = SharedMmioPointer<'a, T>;
896
897    type IntoIter = SharedMmioPointerIterator<'a, T>;
898
899    fn into_iter(self) -> Self::IntoIter {
900        SharedMmioPointerIterator { tail: self.into() }
901    }
902}
903
/// Iterator over a `SharedMmioPointer` slice, yielding pointers to items.
///
/// This iterator advances by creating a head pointer and shortening the
/// remaining tail. It is cheap to copy, as it holds only a slice pointer.
#[derive(Clone, Copy, Debug)]
pub struct SharedMmioPointerIterator<'a, T> {
    // The not-yet-yielded suffix of the slice; shrinks from the front on each `next` call.
    tail: SharedMmioPointer<'a, [T]>,
}
912
impl<'a, T> Iterator for SharedMmioPointerIterator<'a, T> {
    type Item = SharedMmioPointer<'a, T>;

    /// Splits a pointer to the first remaining element off the front of the tail and returns it,
    /// or returns `None` once the tail is empty.
    fn next(&mut self) -> Option<Self::Item> {
        if !self.tail.is_empty() {
            // SAFETY: self.ptr_mut() is guaranteed to return a pointer that is valid for MMIO and
            // unique, as promised by the caller of `UniqueMmioPointer::new` and the slice is
            // not empty.
            let regs_head =
                NonNull::new(unsafe { &raw mut (*self.tail.regs.as_ptr())[0] }).unwrap();

            // SAFETY: regs_head is created from self.tail so it is valid and within the range of
            // the original pointer.
            let head = unsafe { self.tail.child(regs_head) };

            // The new tail starts one element further on and is one element shorter.
            // wrapping_add keeps the offset computation well-defined even when the new tail is
            // empty and its start is the one-past-the-end address.
            let regs_tail = NonNull::new(slice_from_raw_parts_mut(
                regs_head.as_ptr().wrapping_add(1),
                self.tail.len() - 1,
            ))
            .unwrap();

            // SAFETY: We created regs from the raw array in self.regs, so it must also be valid,
            // unique and within the allocation of self.regs.
            self.tail = unsafe { self.tail.child(regs_tail) };

            Some(head)
        } else {
            None
        }
    }

    /// The remaining length is known exactly from the tail slice's metadata.
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.tail.len(), Some(self.tail.len()))
    }
}
948
/// Gets a `UniqueMmioPointer` to a field of a type wrapped in a `UniqueMmioPointer`.
///
/// The expansion mutably borrows `$mmio_pointer` for the lifetime of the returned child pointer,
/// so only one field pointer obtained this way can be live at a time; use `split_fields!` to get
/// pointers to several fields at once.
#[macro_export]
macro_rules! field {
    ($mmio_pointer:expr, $field:ident) => {{
        // NOTE(review): this discarded borrow appears to check up front that `$mmio_pointer` is a
        // mutably borrowable place of the expected kind — confirm.
        _ = &mut $mmio_pointer;

        // SAFETY: ptr_mut is guaranteed to return a valid pointer for MMIO, so the pointer to the
        // field must also be valid. UniqueMmioPointer::child gives it the same lifetime as the
        // original pointer.
        unsafe {
            let child_pointer = core::ptr::NonNull::new(
                &raw mut (*$crate::UniqueMmioPointer::ptr_mut(&mut $mmio_pointer)).$field,
            )
            .unwrap();
            $crate::UniqueMmioPointer::child(&mut $mmio_pointer, child_pointer)
        }
    }};
}
967
/// Gets `UniqueMmioPointer`s to several fields of a type wrapped in a `UniqueMmioPointer`.
///
/// Takes ownership of `$mmio_pointer` and expands to a tuple containing one `UniqueMmioPointer`
/// per listed field, all derived from the same parent pointer.
///
/// # Safety
///
/// The same field name must not be passed more than once.
#[macro_export]
macro_rules! split_fields {
    ($mmio_pointer:expr, $( $field:ident ),+) => {{
        // Make sure $mmio_pointer is the right type, and take ownership of it.
        let mut mmio_pointer: $crate::UniqueMmioPointer<_> = $mmio_pointer;
        let pointer = mmio_pointer.ptr_mut();
        let ret = (
            $(
                // SAFETY: ptr_mut is guaranteed to return a valid pointer for MMIO, so the pointer
                // to the field must also be valid. `split_child` gives it the same lifetime as
                // the original pointer, and the caller of `split_fields!` promised not to pass the
                // same field more than once, so the resulting pointers don't alias each other.
                {
                    let child_pointer = core::ptr::NonNull::new(&raw mut (*pointer).$field).unwrap();
                    mmio_pointer.split_child(child_pointer)
                }
            ),+
        );
        ret
    }};
}
994
/// Gets a `SharedMmioPointer` to a field of a type wrapped in a `SharedMmioPointer`.
///
/// Unlike `field!`, this only takes a shared borrow of `$mmio_pointer`, so it can be applied to
/// the same pointer several times and invocations can be nested.
#[macro_export]
macro_rules! field_shared {
    ($mmio_pointer:expr, $field:ident) => {{
        // NOTE(review): this discarded borrow appears to check up front that `$mmio_pointer` is a
        // borrowable place of the expected kind — confirm.
        _ = &$mmio_pointer;

        // SAFETY: ptr is guaranteed to return a valid pointer for MMIO, so the pointer to the
        // field must also be valid. SharedMmioPointer::child gives it the same lifetime as the
        // original pointer.
        #[allow(unused_unsafe, reason = "May be nested")]
        unsafe {
            let child_pointer = core::ptr::NonNull::new(
                (&raw const (*$crate::SharedMmioPointer::ptr(&$mmio_pointer)).$field).cast_mut(),
            )
            .unwrap();
            $crate::SharedMmioPointer::child(&$mmio_pointer, child_pointer)
        }
    }};
}
1014
#[cfg(test)]
mod tests {
    use super::*;

    // Reading and writing struct fields through `field!` on a `UniqueMmioPointer`.
    #[test]
    fn fields() {
        #[repr(C)]
        struct Foo {
            a: ReadWrite<u32>,
            b: ReadOnly<u32>,
            c: ReadPure<u32>,
        }

        let mut foo = Foo {
            a: ReadWrite(1),
            b: ReadOnly(2),
            c: ReadPure(3),
        };
        let mut owned: UniqueMmioPointer<Foo> = UniqueMmioPointer::from(&mut foo);

        let mut owned_a: UniqueMmioPointer<ReadWrite<u32>> = field!(owned, a);
        assert_eq!(owned_a.read(), 1);
        owned_a.write(42);
        assert_eq!(owned_a.read(), 42);
        // `field!` can also be invoked inline, creating a fresh child pointer each time.
        field!(owned, a).write(44);
        assert_eq!(field!(owned, a).read(), 44);

        let mut owned_b: UniqueMmioPointer<ReadOnly<u32>> = field!(owned, b);
        assert_eq!(owned_b.read(), 2);

        let owned_c: UniqueMmioPointer<ReadPure<u32>> = field!(owned, c);
        assert_eq!(owned_c.read(), 3);
        assert_eq!(field!(owned, c).read(), 3);
    }

    // `field_shared!` on a `SharedMmioPointer` allows repeated pure reads of fields.
    #[test]
    fn shared_fields() {
        #[repr(C)]
        struct Foo {
            a: ReadPureWrite<u32>,
            b: ReadPure<u32>,
        }

        let foo = Foo {
            a: ReadPureWrite(1),
            b: ReadPure(2),
        };
        let shared: SharedMmioPointer<Foo> = SharedMmioPointer::from(&foo);

        let shared_a: SharedMmioPointer<ReadPureWrite<u32>> = field_shared!(shared, a);
        assert_eq!(shared_a.read(), 1);
        assert_eq!(field_shared!(shared, a).read(), 1);

        let shared_b: SharedMmioPointer<ReadPure<u32>> = field_shared!(shared, b);
        assert_eq!(shared_b.read(), 2);
    }

    // `field_shared!` also accepts a `UniqueMmioPointer`, yielding shared children.
    #[test]
    fn shared_from_unique() {
        #[repr(C)]
        struct Foo {
            a: ReadPureWrite<u32>,
            b: ReadPure<u32>,
        }

        let mut foo = Foo {
            a: ReadPureWrite(1),
            b: ReadPure(2),
        };
        let unique: UniqueMmioPointer<Foo> = UniqueMmioPointer::from(&mut foo);

        let shared_a: SharedMmioPointer<ReadPureWrite<u32>> = field_shared!(unique, a);
        assert_eq!(shared_a.read(), 1);

        let shared_b: SharedMmioPointer<ReadPure<u32>> = field_shared!(unique, b);
        assert_eq!(shared_b.read(), 2);
    }

    // Fields with restricted access modes: read-only, write-only, and a plain field that can only
    // be accessed through the unsafe read/write methods.
    #[test]
    fn restricted_fields() {
        #[repr(C)]
        struct Foo {
            r: ReadOnly<u32>,
            w: WriteOnly<u32>,
            u: u32,
        }

        let mut foo = Foo {
            r: ReadOnly(1),
            w: WriteOnly(2),
            u: 3,
        };
        let mut owned: UniqueMmioPointer<Foo> = UniqueMmioPointer::from(&mut foo);

        let mut owned_r: UniqueMmioPointer<ReadOnly<u32>> = field!(owned, r);
        assert_eq!(owned_r.read(), 1);

        let mut owned_w: UniqueMmioPointer<WriteOnly<u32>> = field!(owned, w);
        owned_w.write(42);

        let mut owned_u: UniqueMmioPointer<u32> = field!(owned, u);
        // SAFETY: 'u' is safe to read or write because it's just a fake.
        unsafe {
            assert_eq!(owned_u.read_unsafe(), 3);
            owned_u.write_unsafe(42);
            assert_eq!(owned_u.read_unsafe(), 42);
        }
    }

    // Splitting a unique array pointer into per-element pointers, with and without reborrowing.
    #[test]
    fn array() {
        let mut foo = [ReadWrite(1), ReadWrite(2), ReadWrite(3)];
        let mut owned = UniqueMmioPointer::from(&mut foo);

        let mut parts = owned.reborrow().split();
        assert_eq!(parts[0].read(), 1);
        assert_eq!(parts[1].read(), 2);
        assert_eq!(owned.split()[2].read(), 3);
    }

    // Splitting a shared array pointer; the original remains usable because it is `Copy`.
    #[test]
    fn array_shared() {
        let foo = [ReadPure(1), ReadPure(2), ReadPure(3)];
        let shared = SharedMmioPointer::from(&foo);

        let parts = shared.split();
        assert_eq!(parts[0].read(), 1);
        assert_eq!(parts[1].read(), 2);
        assert_eq!(shared.split()[2].read(), 3);
    }

    // Indexed access into a unique slice pointer, including the out-of-bounds case.
    #[test]
    fn slice() {
        let mut foo = [ReadWrite(1), ReadWrite(2), ReadWrite(3)];
        let mut owned = UniqueMmioPointer::from(foo.as_mut_slice());

        assert!(!owned.ptr().is_null());
        assert!(!owned.ptr_mut().is_null());

        assert!(!owned.is_empty());
        assert_eq!(owned.len(), 3);

        let mut first: UniqueMmioPointer<ReadWrite<i32>> = owned.get(0).unwrap();
        assert_eq!(first.read(), 1);

        let mut second: UniqueMmioPointer<ReadWrite<i32>> = owned.get(1).unwrap();
        assert_eq!(second.read(), 2);

        assert!(owned.get(3).is_none());
    }

    // Indexed access into a shared slice pointer, including the out-of-bounds case.
    #[test]
    fn slice_shared() {
        let foo = [ReadPure(1), ReadPure(2), ReadPure(3)];
        let shared = SharedMmioPointer::from(foo.as_slice());

        assert!(!shared.ptr().is_null());

        assert!(!shared.is_empty());
        assert_eq!(shared.len(), 3);

        let first: SharedMmioPointer<ReadPure<i32>> = shared.get(0).unwrap();
        assert_eq!(first.read(), 1);

        let second: SharedMmioPointer<ReadPure<i32>> = shared.get(1).unwrap();
        assert_eq!(second.read(), 2);

        assert!(shared.get(3).is_none());

        // Test that lifetime of pointer returned from `get` isn't tied to the lifetime of the slice
        // pointer.
        let second = {
            let shared_copy = shared;
            shared_copy.get(1).unwrap()
        };
        assert_eq!(second.read(), 2);
    }

    // An array nested as a struct field, accessed via `field!`/`field_shared!` plus `get`.
    #[test]
    fn array_field() {
        #[repr(C)]
        struct Regs {
            a: [ReadPureWrite<u32>; 4],
        }

        let mut foo = Regs {
            a: [const { ReadPureWrite(0) }; 4],
        };
        let mut owned: UniqueMmioPointer<Regs> = UniqueMmioPointer::from(&mut foo);

        field!(owned, a).get(0).unwrap().write(42);
        assert_eq!(field_shared!(owned, a).get(0).unwrap().read(), 42);
    }

    // An unsized (slice) field on a dynamically-sized struct, accessed the same way.
    #[test]
    fn slice_field() {
        #[repr(transparent)]
        struct Regs {
            s: [ReadPureWrite<u32>],
        }

        impl Regs {
            fn from_slice(slice: &mut [ReadPureWrite<u32>]) -> &mut Self {
                let regs_ptr: *mut Self = slice as *mut [ReadPureWrite<u32>] as *mut Self;
                // SAFETY: `Regs` is repr(transparent) so a reference to its field has the same
                // metadata as a reference to `Regs`.
                unsafe { &mut *regs_ptr }
            }
        }

        let mut foo: [ReadPureWrite<u32>; 1] = [ReadPureWrite(0)];
        let regs_mut = Regs::from_slice(foo.as_mut_slice());
        let mut owned: UniqueMmioPointer<Regs> = UniqueMmioPointer::from(regs_mut);

        field!(owned, s).get(0).unwrap().write(42);
        assert_eq!(field_shared!(owned, s).get(0).unwrap().read(), 42);
    }

    // `split_fields!` yields pointers to several fields at once from one parent pointer.
    #[test]
    fn multiple_fields() {
        #[repr(C)]
        struct Regs {
            first: ReadPureWrite<u32>,
            second: ReadPureWrite<u32>,
            third: ReadPureWrite<u32>,
        }

        let mut foo = Regs {
            first: ReadPureWrite(1),
            second: ReadPureWrite(2),
            third: ReadPureWrite(3),
        };
        let mut owned: UniqueMmioPointer<Regs> = UniqueMmioPointer::from(&mut foo);

        // SAFETY: We don't pass the same field name more than once.
        let (first, second) = unsafe { split_fields!(owned.reborrow(), first, second) };

        assert_eq!(first.read(), 1);
        assert_eq!(second.read(), 2);

        // Read again to check that reading doesn't consume or disturb the split pointers.
        assert_eq!(first.read(), 1);
        assert_eq!(second.read(), 2);

        assert_eq!(field!(owned, first).read(), 1);
    }

    // Converting a unique array pointer into an array of element pointers via `Into`.
    #[test]
    fn split_array() {
        let mut foo = [ReadWrite(1), ReadWrite(2), ReadWrite(3)];

        let mut parts: [UniqueMmioPointer<ReadWrite<i32>>; 3] = {
            let owned = UniqueMmioPointer::from(&mut foo);

            owned.into()
        };

        assert_eq!(parts[0].read(), 1);
        assert_eq!(parts[1].read(), 2);
    }

    // Nested field access: a field of a struct that is itself a field of another struct.
    #[test]
    fn subfield() {
        #[repr(C)]
        struct Regs {
            subregs: Subregs,
        }

        #[repr(C)]
        struct Subregs {
            field: ReadPureWrite<u32>,
        }

        let mut foo = Regs {
            subregs: Subregs {
                field: ReadPureWrite(0),
            },
        };
        let mut owned: UniqueMmioPointer<Regs> = UniqueMmioPointer::from(&mut foo);

        // `field_shared!` invocations can be nested directly.
        assert_eq!(
            field_shared!(field_shared!(owned, subregs), field).read(),
            0
        );

        // `field!` needs an intermediate binding for each level.
        let mut sub = field!(owned, subregs);
        let mut field = field!(sub, field);
        field.write(42);

        assert_eq!(foo.subregs.field.0, 42);
    }

    // `get_range` on a unique slice pointer: valid, empty, boundary and out-of-bounds ranges.
    #[test]
    fn get_range_slice() {
        let mut regs = [ReadWrite(1), ReadWrite(2), ReadWrite(3)];

        {
            let mut ptr = UniqueMmioPointer::from(&mut regs);
            let mut slice = ptr.as_mut_slice();

            let range = slice.get_range(100..200);
            assert!(range.is_none());

            let range = slice.get_range(0..3).unwrap();
            assert_eq!(range.len(), 3);

            let range = slice.get_range(1..3).unwrap();
            assert_eq!(range.len(), 2);

            let range = slice.get_range(0..0).unwrap();
            assert_eq!(range.len(), 0);

            let range = slice.get_range(2..2).unwrap();
            assert_eq!(range.len(), 0);

            // An empty range at exactly the end of the slice is still valid...
            let range = slice.get_range(3..3).unwrap();
            assert_eq!(range.len(), 0);

            // ...but an empty range past the end is not.
            let range = slice.get_range(4..4);
            assert!(range.is_none());

            let mut range = slice.get_range(3..3).unwrap();
            let nested_range = range.get_range(0..0).unwrap();
            assert_eq!(nested_range.len(), 0);

            let nested_range = range.get_range(1..1);
            assert!(nested_range.is_none());
        }
    }

    // Same as `get_range_slice` but going through the array pointer's `get_range`.
    #[test]
    fn get_range_array() {
        let mut regs = [ReadWrite(1), ReadWrite(2), ReadWrite(3)];

        {
            let mut ptr = UniqueMmioPointer::from(&mut regs);

            let range = ptr.get_range(100..200);
            assert!(range.is_none());

            let range = ptr.get_range(0..3).unwrap();
            assert_eq!(range.len(), 3);

            let range = ptr.get_range(1..3).unwrap();
            assert_eq!(range.len(), 2);

            let range = ptr.get_range(0..0).unwrap();
            assert_eq!(range.len(), 0);

            let range = ptr.get_range(2..2).unwrap();
            assert_eq!(range.len(), 0);

            // An empty range at exactly the end of the array is still valid...
            let range = ptr.get_range(3..3).unwrap();
            assert_eq!(range.len(), 0);

            // ...but an empty range past the end is not.
            let range = ptr.get_range(4..4);
            assert!(range.is_none());

            let mut range = ptr.get_range(3..3).unwrap();
            let nested_range = range.get_range(0..0).unwrap();
            assert_eq!(nested_range.len(), 0);

            let nested_range = range.get_range(1..1);
            assert!(nested_range.is_none());
        }
    }

    // `get_range` on a shared slice pointer; mirrors `get_range_slice`.
    #[test]
    fn shared_get_range_slice() {
        let regs = [ReadWrite(1), ReadWrite(2), ReadWrite(3)];

        {
            let ptr = SharedMmioPointer::from(&regs);
            let slice = ptr.as_slice();

            let range = slice.get_range(100..200);
            assert!(range.is_none());

            let range = slice.get_range(0..3).unwrap();
            assert_eq!(range.len(), 3);

            let range = slice.get_range(1..3).unwrap();
            assert_eq!(range.len(), 2);

            let range = slice.get_range(0..0).unwrap();
            assert_eq!(range.len(), 0);

            let range = slice.get_range(2..2).unwrap();
            assert_eq!(range.len(), 0);

            // An empty range at exactly the end of the slice is still valid...
            let range = slice.get_range(3..3).unwrap();
            assert_eq!(range.len(), 0);

            // ...but an empty range past the end is not.
            let range = slice.get_range(4..4);
            assert!(range.is_none());

            let range = slice.get_range(3..3).unwrap();
            let nested_range = range.get_range(0..0).unwrap();
            assert_eq!(nested_range.len(), 0);

            let nested_range = range.get_range(1..1);
            assert!(nested_range.is_none());
        }
    }

    // `get_range` on a shared array pointer; mirrors `get_range_array`.
    #[test]
    fn shared_get_range_array() {
        let regs = [ReadWrite(1), ReadWrite(2), ReadWrite(3)];

        {
            let ptr = SharedMmioPointer::from(&regs);

            let range = ptr.get_range(100..200);
            assert!(range.is_none());

            let range = ptr.get_range(0..3).unwrap();
            assert_eq!(range.len(), 3);

            let range = ptr.get_range(1..3).unwrap();
            assert_eq!(range.len(), 2);

            let range = ptr.get_range(0..0).unwrap();
            assert_eq!(range.len(), 0);

            let range = ptr.get_range(2..2).unwrap();
            assert_eq!(range.len(), 0);

            // An empty range at exactly the end of the array is still valid...
            let range = ptr.get_range(3..3).unwrap();
            assert_eq!(range.len(), 0);

            // ...but an empty range past the end is not.
            let range = ptr.get_range(4..4);
            assert!(range.is_none());

            let range = ptr.get_range(3..3).unwrap();
            let nested_range = range.get_range(0..0).unwrap();
            assert_eq!(nested_range.len(), 0);

            let nested_range = range.get_range(1..1);
            assert!(nested_range.is_none());
        }
    }

    // Iterating a unique slice pointer yields a writable pointer per element, in order.
    #[test]
    fn iterator_slice() {
        let mut regs = [ReadWrite(1), ReadWrite(2), ReadWrite(3)];

        {
            let mut ptr = UniqueMmioPointer::from(&mut regs);
            let mut slice = ptr.as_mut_slice();

            let mut iter = slice.iter();

            iter.next().unwrap().write(4);
            iter.next().unwrap().write(5);
            iter.next().unwrap().write(6);
            assert_eq!(iter.next(), None);
        }

        assert_eq!(regs[0].0, 4);
        assert_eq!(regs[1].0, 5);
        assert_eq!(regs[2].0, 6);
    }

    // Iterating a unique array pointer behaves the same as iterating the slice.
    #[test]
    fn iterator_array() {
        let mut regs = [ReadWrite(1), ReadWrite(2), ReadWrite(3)];

        {
            let mut ptr = UniqueMmioPointer::from(&mut regs);

            let mut iter = ptr.iter();

            iter.next().unwrap().write(4);
            iter.next().unwrap().write(5);
            iter.next().unwrap().write(6);
            assert_eq!(iter.next(), None);
        }

        assert_eq!(regs[0].0, 4);
        assert_eq!(regs[1].0, 5);
        assert_eq!(regs[2].0, 6);
    }

    // Iterating a shared slice pointer yields readable pointers in order.
    #[test]
    fn shared_iterator_slice() {
        let regs = [ReadPureWrite(1), ReadPureWrite(2), ReadPureWrite(3)];

        let ptr = SharedMmioPointer::from(&regs);
        let slice = ptr.as_slice();

        let mut iter = slice.iter();

        assert_eq!(iter.next().unwrap().read(), 1);
        assert_eq!(iter.next().unwrap().read(), 2);
        assert_eq!(iter.next().unwrap().read(), 3);
        assert_eq!(iter.next(), None);
    }

    // Iterating a shared array pointer behaves the same as iterating the slice.
    #[test]
    fn shared_iterator_array() {
        let regs = [ReadPureWrite(1), ReadPureWrite(2), ReadPureWrite(3)];

        let ptr = SharedMmioPointer::from(&regs);

        let mut iter = ptr.iter();

        assert_eq!(iter.next().unwrap().read(), 1);
        assert_eq!(iter.next().unwrap().read(), 2);
        assert_eq!(iter.next().unwrap().read(), 3);
        assert_eq!(iter.next(), None);
    }
}