// aarch32_cpu/mmu.rs

use arbitrary_int::{u12, u2, u3, u4};

#[derive(Debug, thiserror::Error)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[error("invalid L1 entry type {0:?}")]
pub struct InvalidL1EntryType(pub L1EntryType);

#[bitbybit::bitenum(u3, exhaustive = true)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[derive(Debug, PartialEq, Eq)]
pub enum AccessPermissions {
    PermissionFault = 0b000,
    PrivilegedOnly = 0b001,
    NoUserWrite = 0b010,
    FullAccess = 0b011,
    _Reserved1 = 0b100,
    PrivilegedReadOnly = 0b101,
    ReadOnly = 0b110,
    _Reserved2 = 0b111,
}

impl AccessPermissions {
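    /// Create the access permissions value from the raw APX and AP bits of an L1 entry.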
    #[inline]
    pub const fn new(apx: bool, ap: u2) -> Self {
        Self::new_with_raw_value(u3::new(((apx as u8) << 2) | ap.value()))
    }

    /// AP bits for the given access permission.
    #[inline]
    pub const fn ap(&self) -> u2 {
        u2::new((*self as u8) & 0b11)
    }

    /// APX bit for the given access permission.
    #[inline]
    pub const fn apx(&self) -> bool {
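        // Discriminants 0b100..=0b111 all have bit 2 (the APX bit) set, so any value greater
        // than FullAccess (0b011) implies APX = 1.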
        (*self as u8) > (AccessPermissions::FullAccess as u8)
    }
}

#[bitbybit::bitenum(u2, exhaustive = true)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[derive(Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum L1EntryType {
    /// Access generates an abort exception. Indicates an unmapped virtual address.
    Fault = 0b00,
    /// Entry points to an L2 translation table, allowing 1 MB of memory to be further divided.
    PageTable = 0b01,
    /// Maps a 1 MB region to a physical address.
    Section = 0b10,
    /// Special section entry which maps a 16 MB region and requires 16 consecutive entries in
    /// the translation table.
    Supersection = 0b11,
}

/// The ARM Cortex-A architecture reference manual p.1363 specifies these attributes in more detail.
///
/// The B (Bufferable), C (Cacheable), and TEX (Type extension) bit names are inherited from
/// earlier versions of the architecture. These names no longer adequately describe the function
/// of the B, C, and TEX bits.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct MemoryRegionAttributesRaw {
    /// TEX bits.
    type_extensions: u3,
    c: bool,
    b: bool,
}

impl MemoryRegionAttributesRaw {
    #[inline]
    pub const fn new(type_extensions: u3, c: bool, b: bool) -> Self {
        Self {
            type_extensions,
            c,
            b,
        }
    }
}

#[bitbybit::bitenum(u2, exhaustive = true)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[derive(Debug)]
pub enum CacheableMemoryAttribute {
    NonCacheable = 0b00,
    WriteBackWriteAlloc = 0b01,
    WriteThroughNoWriteAlloc = 0b10,
    WriteBackNoWriteAlloc = 0b11,
}

#[derive(Debug, Copy, Clone)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum MemoryRegionAttributes {
    StronglyOrdered,
    ShareableDevice,
    OuterAndInnerWriteThroughNoWriteAlloc,
    OuterAndInnerWriteBackNoWriteAlloc,
    OuterAndInnerNonCacheable,
    OuterAndInnerWriteBackWriteAlloc,
    NonShareableDevice,
    CacheableMemory {
        inner: CacheableMemoryAttribute,
        outer: CacheableMemoryAttribute,
    },
}

impl MemoryRegionAttributes {
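    /// Convert the high-level memory attribute description into the raw TEX, C and B bits used
    /// by the short-descriptor translation table format.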
    pub const fn as_raw(&self) -> MemoryRegionAttributesRaw {
        match self {
            MemoryRegionAttributes::StronglyOrdered => {
                MemoryRegionAttributesRaw::new(u3::new(0b000), false, false)
            }
            MemoryRegionAttributes::ShareableDevice => {
                MemoryRegionAttributesRaw::new(u3::new(0b000), false, true)
            }
            MemoryRegionAttributes::OuterAndInnerWriteThroughNoWriteAlloc => {
                MemoryRegionAttributesRaw::new(u3::new(0b000), true, false)
            }
            MemoryRegionAttributes::OuterAndInnerWriteBackNoWriteAlloc => {
                MemoryRegionAttributesRaw::new(u3::new(0b000), true, true)
            }
            MemoryRegionAttributes::OuterAndInnerNonCacheable => {
                MemoryRegionAttributesRaw::new(u3::new(0b001), false, false)
            }
            MemoryRegionAttributes::OuterAndInnerWriteBackWriteAlloc => {
                MemoryRegionAttributesRaw::new(u3::new(0b001), true, true)
            }
            MemoryRegionAttributes::NonShareableDevice => {
                MemoryRegionAttributesRaw::new(u3::new(0b010), false, false)
            }
            MemoryRegionAttributes::CacheableMemory { inner, outer } => {
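                // Cacheable normal memory: TEX[2] is set to 1, TEX[1:0] holds the outer cache
                // attributes, and the C and B bits hold the inner cache attributes.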
                MemoryRegionAttributesRaw::new(
                    u3::new((1 << 2) | (outer.raw_value().value())),
                    (*inner as u8 & 0b10) != 0,
                    (*inner as u8 & 0b01) != 0,
                )
            }
        }
    }
}

/// Individual section attributes for an L1 section.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SectionAttributes {
    /// NG (non-global) bit.
    pub non_global: bool,
    /// Implementation-defined bit.
    pub p_bit: bool,
    pub shareable: bool,
    /// Access permissions (APX and AP bits).
    pub access: AccessPermissions,
    pub memory_attrs: MemoryRegionAttributesRaw,
    pub domain: u4,
    /// XN (execute-never) bit.
    pub execute_never: bool,
}

impl SectionAttributes {
    /// Extract the section attributes from a raw L1 section entry.
    #[inline]
    fn from_raw(raw: u32) -> Result<Self, InvalidL1EntryType> {
        let section_type = L1EntryType::new_with_raw_value(u2::new((raw & 0b11) as u8));
        if section_type != L1EntryType::Section {
            return Err(InvalidL1EntryType(section_type));
        }
        Ok(Self::from_raw_unchecked(raw))
    }

    /// Retrieves the corresponding L1 section part without the section base address being set.
    const fn l1_section_part(&self) -> L1Section {
        L1Section::builder()
            .with_base_addr_upper_bits(u12::new(0))
            .with_ng(self.non_global)
            .with_s(self.shareable)
            .with_apx(self.access.apx())
            .with_tex(self.memory_attrs.type_extensions)
            .with_ap(self.access.ap())
            .with_p_bit(self.p_bit)
            .with_domain(self.domain)
            .with_xn(self.execute_never)
            .with_c(self.memory_attrs.c)
            .with_b(self.memory_attrs.b)
            .with_entry_type(L1EntryType::Section)
            .build()
    }

    /// Extract the section attributes without checking the entry type bits.
    #[inline]
    const fn from_raw_unchecked(raw: u32) -> Self {
        let l1 = L1Section::new_with_raw_value(raw);
        Self {
            non_global: l1.ng(),
            shareable: l1.s(),
            p_bit: l1.p_bit(),
            access: AccessPermissions::new(l1.apx(), l1.ap()),
            memory_attrs: MemoryRegionAttributesRaw::new(l1.tex(), l1.c(), l1.b()),
            domain: l1.domain(),
            execute_never: l1.xn(),
        }
    }
}

/// 1 MB section translation entry, mapping a 1 MB region to a physical address.
///
/// The ARM Cortex-A architecture programmer's manual chapter 9.4 (p.163) or the ARMv7-A and
/// ARMv7-R architecture reference manual p.1323 specify these attributes in more detail.
#[bitbybit::bitfield(u32, default = 0, defmt_fields(feature = "defmt"))]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[derive(PartialEq, Eq)]
pub struct L1Section {
    /// Section base address upper bits.
    #[bits(20..=31, rw)]
    base_addr_upper_bits: u12,
    /// Non-global bit.
    #[bit(17, rw)]
    ng: bool,
    /// Shareable bit.
    #[bit(16, rw)]
    s: bool,
    /// Access permissions extension bit.
    #[bit(15, rw)]
    apx: bool,
    /// Type extension bits.
    #[bits(12..=14, rw)]
    tex: u3,
    /// Access permission bits.
    #[bits(10..=11, rw)]
    ap: u2,
    /// Implementation-defined bit.
    #[bit(9, rw)]
    p_bit: bool,
    /// Domain field.
    #[bits(5..=8, rw)]
    domain: u4,
    /// Execute-never bit.
    #[bit(4, rw)]
    xn: bool,
    /// Cacheable bit.
    #[bit(3, rw)]
    c: bool,
    /// Bufferable bit.
    #[bit(2, rw)]
    b: bool,
    /// Entry type bits.
    #[bits(0..=1, rw)]
    entry_type: L1EntryType,
}

impl core::fmt::Debug for L1Section {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(
            f,
            "L1Section {{ base_addr={:#x} ng={} s={} apx={} tex={:#b} ap={:#b} domain={:#b} xn={} c={} b={} }}",
            self.base_addr_upper_bits(),
            self.ng() as u8,
            self.s() as u8,
            self.apx() as u8,
            self.tex(),
            self.ap(),
            self.domain(),
            self.xn() as u8,
            self.c() as u8,
            self.b() as u8,
        )
    }
}

impl L1Section {
    /// Generates a new L1 section from a physical address and section attributes.
    ///
    /// The uppermost 12 bits of the physical address form the section base address which is
    /// stored in the L1 section entry and selects the 1 MB of physical memory the section maps
    /// to. This address MUST be aligned to 1 MB.
    ///
    /// # Panics
    ///
    /// Panics if the physical address is not aligned to 1 MB.
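    ///
    /// # Example
    ///
    /// A minimal sketch, assuming this module is reachable as `aarch32_cpu::mmu` (adjust the
    /// import paths to the actual crate layout):
    ///
    /// ```ignore
    /// use aarch32_cpu::mmu::{
    ///     AccessPermissions, L1Section, MemoryRegionAttributes, SectionAttributes,
    /// };
    /// use arbitrary_int::u4;
    ///
    /// // Map the 1 MB region starting at 0x0010_0000 as shareable device memory.
    /// let attrs = SectionAttributes {
    ///     non_global: false,
    ///     p_bit: false,
    ///     shareable: false,
    ///     access: AccessPermissions::FullAccess,
    ///     memory_attrs: MemoryRegionAttributes::ShareableDevice.as_raw(),
    ///     domain: u4::new(0b1111),
    ///     execute_never: false,
    /// };
    /// let section = L1Section::new_with_addr_and_attrs(0x0010_0000, attrs);
    /// assert_eq!(section.raw_value(), 0x100DE6);
    /// ```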
    pub const fn new_with_addr_and_attrs(phys_addr: u32, section_attrs: SectionAttributes) -> Self {
        // Must be aligned to 1 MB.
        if phys_addr & 0x000F_FFFF != 0 {
            panic!("physical base address for L1 section must be aligned to 1 MB");
        }
        Self::new_with_addr_upper_bits_and_attrs(u12::new((phys_addr >> 20) as u16), section_attrs)
    }

    /// Retrieve the section attributes.
    #[inline]
    pub fn section_attrs(&self) -> Result<SectionAttributes, InvalidL1EntryType> {
        SectionAttributes::from_raw(self.raw_value())
    }

    /// Set the section attributes without changing the address.
    #[inline]
    pub fn set_section_attrs(&mut self, section_attrs: SectionAttributes) {
        *self = Self::new_with_addr_upper_bits_and_attrs(self.base_addr_upper_bits(), section_attrs)
    }

    /// Create a new L1 section with the given upper 12 bits of the address and section attributes.
    #[inline]
    pub const fn new_with_addr_upper_bits_and_attrs(
        addr_upper_twelve_bits: u12,
        section_attrs: SectionAttributes,
    ) -> Self {
        let attrs = section_attrs.l1_section_part();
        L1Section::builder()
            .with_base_addr_upper_bits(addr_upper_twelve_bits)
            .with_ng(attrs.ng())
            .with_s(attrs.s())
            .with_apx(attrs.apx())
            .with_tex(attrs.tex())
            .with_ap(attrs.ap())
            .with_p_bit(attrs.p_bit())
            .with_domain(attrs.domain())
            .with_xn(attrs.xn())
            .with_c(attrs.c())
            .with_b(attrs.b())
            .with_entry_type(attrs.entry_type())
            .build()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    const SECTION_ATTRS_DEVICE_PERIPHERAL: SectionAttributes = SectionAttributes {
        non_global: false,
        p_bit: false,
        shareable: false,
        // APX false, AP 0b11
        access: AccessPermissions::FullAccess,
        // TEX 0b000, c false, b true
        memory_attrs: MemoryRegionAttributes::ShareableDevice.as_raw(),
        domain: u4::new(0b1111),
        execute_never: false,
    };
    /// Address upper 12 bits: 0b1
    const L1_SECTION_PERIPHERAL: L1Section =
        L1Section::new_with_addr_and_attrs(0x100000, SECTION_ATTRS_DEVICE_PERIPHERAL);

    // Fully cacheable normal memory (for example DDR with L1 and L2 cache).
    const SECTION_ATTRS_FULL_CACHEABLE: SectionAttributes = SectionAttributes {
        non_global: false,
        p_bit: false,
        shareable: true,
        // APX false, AP 0b11
        access: AccessPermissions::FullAccess,
        // TEX 0b101, c false, b true
        memory_attrs: MemoryRegionAttributes::CacheableMemory {
            inner: CacheableMemoryAttribute::WriteBackWriteAlloc,
            outer: CacheableMemoryAttribute::WriteBackWriteAlloc,
        }
        .as_raw(),
        domain: u4::new(0b1010),
        execute_never: false,
    };
    /// Address upper 12 bits: 0b10
    const L1_SECTION_MEMORY: L1Section =
        L1Section::new_with_addr_and_attrs(0x200000, SECTION_ATTRS_FULL_CACHEABLE);

    #[test]
    pub fn basic_test_peripheral_memory() {
        assert_eq!(L1_SECTION_PERIPHERAL.raw_value(), 0x100DE6);
        assert_eq!(L1_SECTION_PERIPHERAL.base_addr_upper_bits(), u12::new(0b1));
        assert_eq!(
            L1_SECTION_PERIPHERAL
                .section_attrs()
                .expect("invalid type field"),
            SECTION_ATTRS_DEVICE_PERIPHERAL
        );
        assert!(!L1_SECTION_PERIPHERAL.ng());
        assert!(!L1_SECTION_PERIPHERAL.p_bit());
        assert!(!L1_SECTION_PERIPHERAL.s());
        assert!(!L1_SECTION_PERIPHERAL.apx());
        assert_eq!(L1_SECTION_PERIPHERAL.ap(), u2::new(0b11));
        assert_eq!(L1_SECTION_PERIPHERAL.tex(), u3::new(0b000));
        assert!(!L1_SECTION_PERIPHERAL.c());
        assert!(L1_SECTION_PERIPHERAL.b());
        assert_eq!(L1_SECTION_PERIPHERAL.domain(), u4::new(0b1111));
        assert!(!L1_SECTION_PERIPHERAL.xn());
    }

    #[test]
    pub fn basic_test_normal_memory() {
        assert_eq!(L1_SECTION_MEMORY.raw_value(), 0x215D46);
        assert_eq!(L1_SECTION_MEMORY.base_addr_upper_bits(), u12::new(0b10));
        assert_eq!(
            L1_SECTION_MEMORY
                .section_attrs()
                .expect("invalid type field"),
            SECTION_ATTRS_FULL_CACHEABLE
        );
        assert!(!L1_SECTION_MEMORY.ng());
        assert!(!L1_SECTION_MEMORY.p_bit());
        assert!(L1_SECTION_MEMORY.s());
        assert!(!L1_SECTION_MEMORY.apx());
        assert_eq!(L1_SECTION_MEMORY.ap(), u2::new(0b11));
        assert_eq!(L1_SECTION_MEMORY.tex(), u3::new(0b101));
        assert!(!L1_SECTION_MEMORY.c());
        assert!(L1_SECTION_MEMORY.b());
        assert_eq!(L1_SECTION_MEMORY.domain(), u4::new(0b1010));
        assert!(!L1_SECTION_MEMORY.xn());
    }

    #[test]
    pub fn update_fields() {
        let mut l1 = L1_SECTION_MEMORY;
        let new_attrs = SectionAttributes {
            non_global: true,
            p_bit: true,
            shareable: false,
            // APX true, AP 0b10
            access: AccessPermissions::ReadOnly,
            // TEX 0b000, c false, b false
            memory_attrs: MemoryRegionAttributes::StronglyOrdered.as_raw(),
            domain: u4::new(0b1001),
            execute_never: true,
        };
        l1.set_section_attrs(new_attrs);
        assert_eq!(l1.raw_value(), 0x228B32);
        assert_eq!(l1.base_addr_upper_bits(), u12::new(0b10));
        assert_eq!(l1.section_attrs().unwrap(), new_attrs);
        assert!(l1.ng());
        assert!(l1.p_bit());
        assert!(!l1.s());
        assert!(l1.apx());
        assert_eq!(l1.ap(), u2::new(0b10));
        assert_eq!(l1.tex(), u3::new(0b000));
        assert!(!l1.c());
        assert!(!l1.b());
        assert_eq!(l1.domain(), u4::new(0b1001));
        assert!(l1.xn());
    }

    #[test]
    #[should_panic(expected = "physical base address for L1 section must be aligned to 1 MB")]
    pub fn unaligned_section_address() {
        L1Section::new_with_addr_and_attrs(0x100001, SECTION_ATTRS_DEVICE_PERIPHERAL);
    }
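
    // Additional sketch exercising the APX/AP encoding helpers; the expected values follow
    // directly from the AccessPermissions bit layout above.
    #[test]
    pub fn access_permission_encoding() {
        assert_eq!(
            AccessPermissions::new(false, u2::new(0b11)),
            AccessPermissions::FullAccess
        );
        assert_eq!(
            AccessPermissions::new(true, u2::new(0b10)),
            AccessPermissions::ReadOnly
        );
        assert_eq!(AccessPermissions::FullAccess.ap(), u2::new(0b11));
        assert!(!AccessPermissions::FullAccess.apx());
        assert_eq!(AccessPermissions::ReadOnly.ap(), u2::new(0b10));
        assert!(AccessPermissions::ReadOnly.apx());
    }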
}