1use crate::register;
9
10use arbitrary_int::{u2, u3};
11
12#[doc(inline)]
13pub use register::drsr::RegionSize;
14
/// Errors that [`Mpu::configure`] can return.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum Error {
    /// The config supplied more instruction or data regions than the
    /// hardware implements (see [`Mpu::num_iregions`] / [`Mpu::num_dregions`]).
    TooManyRegions,
    /// A region's base address is not aligned to its size
    /// (per `RegionSize::is_aligned`); carries the offending base pointer.
    UnalignedRegion(*mut u8),
}
24
25pub struct Mpu();
27
28impl Mpu {
29 pub unsafe fn new() -> Mpu {
36 Mpu()
37 }
38
39 pub fn num_iregions(&self) -> u8 {
41 register::Mpuir::read().iregions()
42 }
43
44 pub fn num_dregions(&self) -> u8 {
46 register::Mpuir::read().dregions()
47 }
48
49 pub fn is_unified(&self) -> bool {
51 !register::Mpuir::read().non_unified()
52 }
53
54 pub fn get_iregion(&mut self, idx: u8) -> Option<Region> {
56 if idx >= self.num_iregions() {
57 return None;
58 }
59 register::Rgnr::write(register::Rgnr(idx as u32));
60 let base = register::Irbar::read().0 as *mut u8;
61 let rsr = register::Irsr::read();
62 let racr = register::Iracr::read();
63
64 let mem_attr_bits = MemAttrBits {
65 tex: racr.tex(),
66 c: racr.c(),
67 b: racr.b(),
68 s: racr.s(),
69 };
70
71 let mem_attr = mem_attr_bits.decode()?;
72
73 Some(Region {
74 base,
75 size: rsr.region_size(),
76 subregion_mask: rsr.subregion_mask(),
77 enabled: rsr.enabled(),
78 no_exec: racr.nx(),
79 mem_attr,
80 })
81 }
82
83 pub fn get_dregion(&mut self, idx: u8) -> Option<Region> {
85 if idx >= self.num_dregions() {
86 return None;
87 }
88 register::Rgnr::write(register::Rgnr(idx as u32));
89 let base = register::Drbar::read().0 as *mut u8;
90 let rsr = register::Drsr::read();
91 let racr = register::Dracr::read();
92
93 let mem_attr_bits = MemAttrBits {
94 tex: racr.tex(),
95 c: racr.c(),
96 b: racr.b(),
97 s: racr.s(),
98 };
99 let mem_attr = mem_attr_bits.decode()?;
100
101 Some(Region {
102 base,
103 size: rsr.region_size(),
104 subregion_mask: rsr.subregion_mask(),
105 enabled: rsr.enabled(),
106 no_exec: racr.nx(),
107 mem_attr,
108 })
109 }
110
111 pub fn configure(&mut self, config: &Config) -> Result<(), Error> {
113 if config.iregions.len() > self.num_iregions() as usize {
114 return Err(Error::TooManyRegions);
115 }
116 if config.dregions.len() > self.num_dregions() as usize {
117 return Err(Error::TooManyRegions);
118 }
119 for (idx, region) in config.iregions.iter().enumerate() {
120 register::Rgnr::write(register::Rgnr(idx as u32));
121 if !region.size.is_aligned(region.base) {
122 return Err(Error::UnalignedRegion(region.base));
123 }
124 register::Irbar::write(register::Irbar(region.base as u32));
125 register::Irsr::write({
126 let mut out = register::Irsr::new_with_raw_value(0);
127 out.set_enabled(region.enabled);
128 out.set_region_size(region.size);
129 out.set_subregion_mask(region.subregion_mask);
130 out
131 });
132 register::Iracr::write({
133 let mut out = register::Iracr::new_with_raw_value(0);
134 let mem_attr_bits = region.mem_attr.to_bits();
135 out.set_tex(mem_attr_bits.tex);
136 out.set_c(mem_attr_bits.c);
137 out.set_b(mem_attr_bits.b);
138 out.set_s(mem_attr_bits.s);
139 out.set_nx(region.no_exec);
140 out
142 });
143 }
144 for (idx, region) in config.dregions.iter().enumerate() {
145 if !region.size.is_aligned(region.base) {
146 return Err(Error::UnalignedRegion(region.base));
147 }
148 register::Rgnr::write(register::Rgnr(idx as u32));
149 register::Drbar::write(register::Drbar(region.base as u32));
150 register::Drsr::write({
151 let mut out = register::Drsr::new_with_raw_value(0);
152 out.set_enabled(region.enabled);
153 out.set_region_size(region.size);
154 out.set_subregion_mask(region.subregion_mask);
155 out
156 });
157 register::Dracr::write({
158 let mut out = register::Dracr::new_with_raw_value(0);
159 let mem_attr_bits = region.mem_attr.to_bits();
160 out.set_tex(mem_attr_bits.tex);
161 out.set_c(mem_attr_bits.c);
162 out.set_b(mem_attr_bits.b);
163 out.set_s(mem_attr_bits.s);
164 out.set_nx(region.no_exec);
165 out
167 });
168 }
169 register::Sctlr::modify(|r| {
170 r.set_br(config.background_config);
171 });
172 Ok(())
173 }
174
175 pub fn enable(&mut self) {
177 register::Sctlr::modify(|r| {
178 r.set_m(true);
179 });
180 }
181
182 pub fn disable(&mut self) {
184 register::Sctlr::modify(|r| {
185 r.set_m(false);
186 });
187 }
188}
189
/// A complete MPU configuration, applied by [`Mpu::configure`].
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct Config<'a> {
    /// Value written to SCTLR.BR (background-region bit) — presumably enables
    /// the default background memory map; TODO confirm against the SCTLR docs.
    pub background_config: bool,
    /// Instruction regions; programmed to region numbers `0..iregions.len()`.
    pub iregions: &'a [Region],
    /// Data (or unified) regions; programmed to region numbers `0..dregions.len()`.
    pub dregions: &'a [Region],
}
203
/// Description of a single MPU region, as programmed into the
/// base/size/access-control register triple.
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct Region {
    /// Base address; must be aligned to `size` (checked by [`Mpu::configure`]).
    pub base: *mut u8,
    /// Region size, written to the region-size register.
    pub size: RegionSize,
    /// Subregion mask written to the size register — presumably each set bit
    /// disables one of the 8 subregions; TODO confirm against the DRSR docs.
    pub subregion_mask: u8,
    /// Whether the region is enabled.
    pub enabled: bool,
    /// Execute-never bit (XN) for this region.
    pub no_exec: bool,
    /// Memory type / cacheability / shareability attributes.
    pub mem_attr: MemAttr,
}
230
231unsafe impl Sync for Region {}
234
/// Memory type / cacheability attributes for a region, encoded into the
/// TEX/C/B/S bits by [`MemAttr::to_bits`] and recovered by `decode`.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum MemAttr {
    /// Strongly-ordered memory (always treated as shareable).
    StronglyOrdered,
    /// Device memory.
    Device {
        shareable: bool,
    },
    /// Normal memory, write-through cacheable, no write-allocate.
    WriteThroughNoWriteAlloc {
        shareable: bool,
    },
    /// Normal memory, write-back cacheable, no write-allocate.
    WriteBackNoWriteAlloc {
        shareable: bool,
    },
    /// Normal memory, non-cacheable.
    NonCacheable {
        shareable: bool,
    },
    /// Implementation-defined attributes.
    ImplementationDefined {
        shareable: bool,
    },
    /// Normal memory, write-back cacheable, write-allocate.
    WriteBackWriteAlloc {
        shareable: bool,
    },
    /// Normal memory with independently selectable outer and inner
    /// cache policies (TEX[2] set).
    Cacheable {
        outer: CachePolicy,
        inner: CachePolicy,
        shareable: bool,
    },
}
281
282impl MemAttr {
283 const fn to_bits(&self) -> MemAttrBits {
285 match self {
286 MemAttr::StronglyOrdered => MemAttrBits {
287 tex: u3::from_u8(0b000),
288 c: false,
289 b: false,
290 s: true,
291 },
292 MemAttr::Device { shareable: true } => MemAttrBits {
293 tex: u3::from_u8(0b000),
294 c: false,
295 b: true,
296 s: true,
297 },
298 MemAttr::Device { shareable: false } => MemAttrBits {
299 tex: u3::from_u8(0b010),
300 c: false,
301 b: false,
302 s: false,
303 },
304 MemAttr::WriteThroughNoWriteAlloc { shareable } => MemAttrBits {
305 tex: u3::from_u8(0b000),
306 c: true,
307 b: false,
308 s: *shareable,
309 },
310 MemAttr::WriteBackNoWriteAlloc { shareable } => MemAttrBits {
311 tex: u3::from_u8(0b000),
312 c: true,
313 b: true,
314 s: *shareable,
315 },
316 MemAttr::NonCacheable { shareable } => MemAttrBits {
317 tex: u3::from_u8(0b001),
318 c: false,
319 b: false,
320 s: *shareable,
321 },
322 MemAttr::ImplementationDefined { shareable } => MemAttrBits {
323 tex: u3::from_u8(0b001),
324 c: true,
325 b: false,
326 s: *shareable,
327 },
328 MemAttr::WriteBackWriteAlloc { shareable } => MemAttrBits {
329 tex: u3::from_u8(0b000),
330 c: true,
331 b: true,
332 s: *shareable,
333 },
334 MemAttr::Cacheable {
335 outer,
336 inner,
337 shareable,
338 } => {
339 let outer = *outer as u8;
340 let inner = *inner as u8;
341 MemAttrBits {
342 tex: u3::from_u8(0b100 | outer),
343 c: (inner & 0b10) != 0,
344 b: (inner & 0b01) != 0,
345 s: *shareable,
346 }
347 }
348 }
349 }
350}
351
/// Raw TEX/C/B/S attribute bits as read from / written to the region
/// access-control registers (IRACR/DRACR).
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
struct MemAttrBits {
    // Type-extension field, TEX[2:0].
    tex: u3,
    // Cacheable bit.
    c: bool,
    // Bufferable bit.
    b: bool,
    // Shareable bit.
    s: bool,
}
361
362impl MemAttrBits {
363 const fn decode(&self) -> Option<MemAttr> {
364 match (self.tex.value(), self.c, self.b) {
365 (0b000, false, false) => Some(MemAttr::StronglyOrdered),
366 (0b000, false, true) => Some(MemAttr::Device { shareable: true }),
367 (0b000, true, false) => Some(MemAttr::WriteThroughNoWriteAlloc { shareable: self.s }),
368 (0b000, true, true) => Some(MemAttr::WriteBackNoWriteAlloc { shareable: self.s }),
369 (0b001, false, false) => Some(MemAttr::NonCacheable { shareable: self.s }),
370 (0b001, true, false) => Some(MemAttr::ImplementationDefined { shareable: self.s }),
371 (0b001, true, true) => Some(MemAttr::WriteBackWriteAlloc { shareable: self.s }),
372 (0b010, false, false) => Some(MemAttr::Device { shareable: false }),
373 (tex, c, b) if tex >= 0b100 => {
374 let outer = tex & 0b11;
375 let inner = ((c as u8) << 1) | (b as u8);
376 Some(MemAttr::Cacheable {
377 outer: CachePolicy::new_with_raw_value(u2::from_u8(outer)),
378 inner: CachePolicy::new_with_raw_value(u2::from_u8(inner)),
379 shareable: self.s,
380 })
381 }
382 _ => {
383 None
385 }
386 }
387 }
388}
389
/// 2-bit cache policy used for the outer (TEX[1:0]) and inner (C:B) fields
/// of the [`MemAttr::Cacheable`] encoding.
#[bitbybit::bitenum(u2, exhaustive = true)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[derive(Debug, PartialEq, Eq)]
pub enum CachePolicy {
    /// Non-cacheable.
    NonCacheable = 0b00,
    /// Write-back, write-allocate.
    WriteBackWriteAlloc = 0b01,
    /// Write-through, no write-allocate.
    WriteThroughNoWriteAlloc = 0b10,
    /// Write-back, no write-allocate.
    WriteBackNoWriteAlloc = 0b11,
}
405
#[cfg(test)]
mod test {
    use super::*;

    /// Strongly-ordered: fixed encoding and round trip through `decode`.
    #[test]
    fn mem_attr_strong() {
        let mem_attr = MemAttr::StronglyOrdered;
        let mem_attr_bits = mem_attr.to_bits();
        assert_eq!(
            mem_attr_bits,
            MemAttrBits {
                tex: u3::from_u8(0),
                c: false,
                b: false,
                s: true
            }
        );
        let mem_attr2 = mem_attr_bits.decode();
        assert_eq!(Some(mem_attr), mem_attr2);
    }

    /// Programmable (TEX[2]=1) encoding: outer/inner policies land in
    /// TEX[1:0] and C:B respectively, and round-trip.
    #[test]
    fn mem_attr_complex() {
        let mem_attr = MemAttr::Cacheable {
            outer: CachePolicy::WriteBackWriteAlloc,
            inner: CachePolicy::WriteThroughNoWriteAlloc,
            shareable: true,
        };
        let mem_attr_bits = mem_attr.to_bits();
        assert_eq!(
            mem_attr_bits,
            MemAttrBits {
                tex: u3::from_u8(0b101),
                c: true,
                b: false,
                s: true
            }
        );
        let mem_attr2 = mem_attr_bits.decode();
        assert_eq!(Some(mem_attr), mem_attr2);
    }

    /// Round-trip coverage for the remaining fixed TEX/C/B encodings:
    /// every attribute must survive `to_bits` -> `decode` unchanged.
    #[test]
    fn mem_attr_roundtrip_fixed_encodings() {
        let attrs = [
            MemAttr::Device { shareable: true },
            MemAttr::Device { shareable: false },
            MemAttr::NonCacheable { shareable: true },
            MemAttr::NonCacheable { shareable: false },
            MemAttr::WriteThroughNoWriteAlloc { shareable: true },
            MemAttr::WriteBackNoWriteAlloc { shareable: false },
            MemAttr::ImplementationDefined { shareable: true },
        ];
        for attr in attrs {
            assert_eq!(Some(attr.clone()), attr.to_bits().decode());
        }
    }
}
449}