//! Memory Protection Unit (MPU) configuration.

use crate::register;

use arbitrary_int::{u2, u3};

#[doc(inline)]
pub use register::drsr::RegionSize;

/// Errors that can occur while configuring the MPU.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum Error {
    /// The configuration contains more regions than the MPU supports.
    TooManyRegions,
    /// The region starting at this base address is not aligned to its size.
    UnalignedRegion(*mut u8),
}
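
/// Handle to the Memory Protection Unit.
///
/// A minimal usage sketch (hypothetical values; assumes exclusive access to
/// the MPU registers on an Arm PMSA core):
///
/// ```ignore
/// // SAFETY: no other code is concurrently driving the MPU.
/// let mut mpu = unsafe { Mpu::new() };
/// assert!(mpu.num_dregions() > 0);
/// ```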
pub struct Mpu();

impl Mpu {
    /// Creates a handle to the MPU.
    ///
    /// # Safety
    ///
    /// The MPU registers are global hardware state; the caller must ensure
    /// that no other code drives the MPU while this handle is in use.
    pub unsafe fn new() -> Mpu {
        Mpu()
    }

    /// Returns the number of supported instruction regions.
    pub fn num_iregions(&self) -> u8 {
        register::Mpuir::read().iregions()
    }

    /// Returns the number of supported data regions.
    pub fn num_dregions(&self) -> u8 {
        register::Mpuir::read().dregions()
    }

    /// Returns `true` if the MPU is unified, i.e. it has no separate
    /// instruction and data regions.
    pub fn is_unified(&self) -> bool {
        !register::Mpuir::read().non_unified()
    }

    /// Reads back instruction region `idx`.
    ///
    /// Returns `None` if `idx` is out of range or the region's attribute
    /// bits hold a reserved encoding.
    pub fn get_iregion(&mut self, idx: u8) -> Option<Region> {
        if idx >= self.num_iregions() {
            return None;
        }
        // Select the region, then read its base, size, and access control.
        register::Rgnr::write(register::Rgnr(idx as u32));
        let base = register::Irbar::read().0 as *mut u8;
        let rsr = register::Irsr::read();
        let racr = register::Iracr::read();

        let mem_attr_bits = MemAttrBits {
            tex: racr.tex(),
            c: racr.c(),
            b: racr.b(),
            s: racr.s(),
        };

        let mem_attr = mem_attr_bits.decode()?;

        Some(Region {
            base,
            size: rsr.region_size(),
            subregion_mask: rsr.subregion_mask(),
            enabled: rsr.enabled(),
            no_exec: racr.nx(),
            mem_attr,
        })
    }

    /// Reads back data region `idx`.
    ///
    /// Returns `None` if `idx` is out of range or the region's attribute
    /// bits hold a reserved encoding.
    pub fn get_dregion(&mut self, idx: u8) -> Option<Region> {
        if idx >= self.num_dregions() {
            return None;
        }
        // Select the region, then read its base, size, and access control.
        register::Rgnr::write(register::Rgnr(idx as u32));
        let base = register::Drbar::read().0 as *mut u8;
        let rsr = register::Drsr::read();
        let racr = register::Dracr::read();

        let mem_attr_bits = MemAttrBits {
            tex: racr.tex(),
            c: racr.c(),
            b: racr.b(),
            s: racr.s(),
        };
        let mem_attr = mem_attr_bits.decode()?;

        Some(Region {
            base,
            size: rsr.region_size(),
            subregion_mask: rsr.subregion_mask(),
            enabled: rsr.enabled(),
            no_exec: racr.nx(),
            mem_attr,
        })
    }

    /// Programs the given instruction and data regions and the background
    /// region setting.
    ///
    /// Fails if the configuration holds more regions than the MPU supports
    /// or if any region's base address is not aligned to its size. Regions
    /// with indices beyond those in `config` are left untouched.
    pub fn configure(&mut self, config: &Config) -> Result<(), Error> {
        if config.iregions.len() > self.num_iregions() as usize {
            return Err(Error::TooManyRegions);
        }
        if config.dregions.len() > self.num_dregions() as usize {
            return Err(Error::TooManyRegions);
        }
        for (idx, region) in config.iregions.iter().enumerate() {
            if !region.size.is_aligned(region.base) {
                return Err(Error::UnalignedRegion(region.base));
            }
            register::Rgnr::write(register::Rgnr(idx as u32));
            register::Irbar::write(register::Irbar(region.base as u32));
            register::Irsr::write({
                let mut out = register::Irsr::new_with_raw_value(0);
                out.set_enabled(region.enabled);
                out.set_region_size(region.size);
                out.set_subregion_mask(region.subregion_mask);
                out
            });
            register::Iracr::write({
                let mut out = register::Iracr::new_with_raw_value(0);
                let mem_attr_bits = region.mem_attr.to_bits();
                out.set_tex(mem_attr_bits.tex);
                out.set_c(mem_attr_bits.c);
                out.set_b(mem_attr_bits.b);
                out.set_s(mem_attr_bits.s);
                out.set_nx(region.no_exec);
                out
            });
        }
        for (idx, region) in config.dregions.iter().enumerate() {
            if !region.size.is_aligned(region.base) {
                return Err(Error::UnalignedRegion(region.base));
            }
            register::Rgnr::write(register::Rgnr(idx as u32));
            register::Drbar::write(register::Drbar(region.base as u32));
            register::Drsr::write({
                let mut out = register::Drsr::new_with_raw_value(0);
                out.set_enabled(region.enabled);
                out.set_region_size(region.size);
                out.set_subregion_mask(region.subregion_mask);
                out
            });
            register::Dracr::write({
                let mut out = register::Dracr::new_with_raw_value(0);
                let mem_attr_bits = region.mem_attr.to_bits();
                out.set_tex(mem_attr_bits.tex);
                out.set_c(mem_attr_bits.c);
                out.set_b(mem_attr_bits.b);
                out.set_s(mem_attr_bits.s);
                out.set_nx(region.no_exec);
                out
            });
        }
        register::Sctlr::modify(|r| {
            r.set_br(config.background_config);
        });
        Ok(())
    }

    /// Enables the MPU (sets SCTLR.M).
    pub fn enable(&mut self) {
        register::Sctlr::modify(|r| {
            r.set_m(true);
        });
    }

    /// Disables the MPU (clears SCTLR.M).
    pub fn disable(&mut self) {
        register::Sctlr::modify(|r| {
            r.set_m(false);
        });
    }
}
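
/// A complete MPU configuration.
///
/// # Example
///
/// A minimal sketch of programming one data region; the address, size
/// variant, and attributes below are illustrative assumptions, not values
/// prescribed by this crate:
///
/// ```ignore
/// let dregions = [Region {
///     base: 0x2000_0000 as *mut u8,  // hypothetical RAM base
///     size: RegionSize::K64,         // variant name assumed
///     subregion_mask: 0x00,          // all eight subregions enabled
///     enabled: true,
///     no_exec: true,
///     mem_attr: MemAttr::WriteBackWriteAllocate { shareable: false },
/// }];
/// let config = Config {
///     background_config: true,
///     iregions: &[],
///     dregions: &dregions,
/// };
/// // SAFETY: no other code is concurrently driving the MPU.
/// let mut mpu = unsafe { Mpu::new() };
/// mpu.configure(&config).unwrap();
/// mpu.enable();
/// ```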
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Config<'a> {
    /// Value programmed into SCTLR.BR: when `true`, privileged accesses
    /// that hit no enabled region fall back to the default memory map.
    pub background_config: bool,
    /// Instruction regions, programmed in index order.
    pub iregions: &'a [Region],
    /// Data regions, programmed in index order.
    pub dregions: &'a [Region],
}

/// A single MPU region.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Region {
    /// Base address; must be aligned to `size`.
    pub base: *mut u8,
    /// Size of the region.
    pub size: RegionSize,
    /// Subregion disable bits: the region is split into eight equal
    /// subregions, and setting bit `n` disables subregion `n`.
    pub subregion_mask: u8,
    /// Whether the region is enabled.
    pub enabled: bool,
    /// Whether execution from this region is forbidden.
    pub no_exec: bool,
    /// Memory type and cacheability attributes.
    pub mem_attr: MemAttr,
}

// SAFETY: `base` is only ever used as an address to program into the MPU;
// it is never dereferenced through a shared `Region`.
unsafe impl Sync for Region {}
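
/// Memory type and cacheability attributes for a region, mapped onto the
/// TEX/C/B/S encodings.
///
/// A small illustrative sketch of a split outer/inner policy:
///
/// ```ignore
/// // Outer write-back write-allocate, inner non-cacheable, not shared.
/// let attr = MemAttr::Cacheable {
///     outer: CacheablePolicy::WriteBackWriteAllocate,
///     inner: CacheablePolicy::NonCacheable,
///     shareable: false,
/// };
/// ```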
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum MemAttr {
    /// Strongly-ordered memory; always shareable.
    StronglyOrdered,
    /// Device memory.
    Device { shareable: bool },
    /// Normal memory, write-through, no write-allocate.
    WriteThroughNoWriteAllocate { shareable: bool },
    /// Normal memory, write-back, no write-allocate.
    WriteBackNoWriteAllocate { shareable: bool },
    /// Normal memory, non-cacheable.
    NonCacheable { shareable: bool },
    /// Implementation-defined attributes.
    ImplementationDefined { shareable: bool },
    /// Normal memory, write-back, write-allocate.
    WriteBackWriteAllocate { shareable: bool },
    /// Normal memory with independent outer and inner cache policies.
    Cacheable {
        outer: CacheablePolicy,
        inner: CacheablePolicy,
        shareable: bool,
    },
}

impl MemAttr {
    /// Encodes this attribute into its raw TEX/C/B/S bits.
    const fn to_bits(&self) -> MemAttrBits {
        match self {
            MemAttr::StronglyOrdered => MemAttrBits {
                tex: u3::from_u8(0b000),
                c: false,
                b: false,
                s: true,
            },
            MemAttr::Device { shareable: true } => MemAttrBits {
                tex: u3::from_u8(0b000),
                c: false,
                b: true,
                s: true,
            },
            MemAttr::Device { shareable: false } => MemAttrBits {
                tex: u3::from_u8(0b010),
                c: false,
                b: false,
                s: false,
            },
            MemAttr::WriteThroughNoWriteAllocate { shareable } => MemAttrBits {
                tex: u3::from_u8(0b000),
                c: true,
                b: false,
                s: *shareable,
            },
            MemAttr::WriteBackNoWriteAllocate { shareable } => MemAttrBits {
                tex: u3::from_u8(0b000),
                c: true,
                b: true,
                s: *shareable,
            },
            MemAttr::NonCacheable { shareable } => MemAttrBits {
                tex: u3::from_u8(0b001),
                c: false,
                b: false,
                s: *shareable,
            },
            MemAttr::ImplementationDefined { shareable } => MemAttrBits {
                tex: u3::from_u8(0b001),
                c: true,
                b: false,
                s: *shareable,
            },
            // TEX=0b001, C=1, B=1: write-back, write and read allocate
            // (matches the decoding in `MemAttrBits::decode`).
            MemAttr::WriteBackWriteAllocate { shareable } => MemAttrBits {
                tex: u3::from_u8(0b001),
                c: true,
                b: true,
                s: *shareable,
            },
            MemAttr::Cacheable {
                outer,
                inner,
                shareable,
            } => {
                // TEX[1:0] carries the outer policy; C and B carry the inner.
                let outer = *outer as u8;
                let inner = *inner as u8;
                MemAttrBits {
                    tex: u3::from_u8(0b100 | outer),
                    c: (inner & 0b10) != 0,
                    b: (inner & 0b01) != 0,
                    s: *shareable,
                }
            }
        }
    }
}

/// Raw TEX, C, B, and S bits as held in a region access control register.
#[derive(Debug, Clone, PartialEq, Eq)]
struct MemAttrBits {
    tex: u3,
    c: bool,
    b: bool,
    s: bool,
}

impl MemAttrBits {
    /// Decodes raw TEX/C/B/S bits back into a [`MemAttr`], returning `None`
    /// for reserved encodings.
    const fn decode(&self) -> Option<MemAttr> {
        match (self.tex.value(), self.c, self.b) {
            (0b000, false, false) => Some(MemAttr::StronglyOrdered),
            (0b000, false, true) => Some(MemAttr::Device { shareable: true }),
            (0b000, true, false) => {
                Some(MemAttr::WriteThroughNoWriteAllocate { shareable: self.s })
            }
            (0b000, true, true) => Some(MemAttr::WriteBackNoWriteAllocate { shareable: self.s }),
            (0b001, false, false) => Some(MemAttr::NonCacheable { shareable: self.s }),
            (0b001, true, false) => Some(MemAttr::ImplementationDefined { shareable: self.s }),
            (0b001, true, true) => Some(MemAttr::WriteBackWriteAllocate { shareable: self.s }),
            (0b010, false, false) => Some(MemAttr::Device { shareable: false }),
            (tex, c, b) if tex >= 0b100 => {
                let outer = tex & 0b11;
                let inner = ((c as u8) << 1) | (b as u8);
                Some(MemAttr::Cacheable {
                    outer: CacheablePolicy::new_with_raw_value(u2::from_u8(outer)),
                    inner: CacheablePolicy::new_with_raw_value(u2::from_u8(inner)),
                    shareable: self.s,
                })
            }
            // All remaining encodings are reserved.
            _ => None,
        }
    }
}

/// Cache policy for one cache level, used by [`MemAttr::Cacheable`].
#[derive(Debug, PartialEq, Eq)]
#[bitbybit::bitenum(u2, exhaustive = true)]
pub enum CacheablePolicy {
    NonCacheable = 0b00,
    WriteBackWriteAllocate = 0b01,
    WriteThroughNoWriteAllocate = 0b10,
    WriteBackNoWriteAllocate = 0b11,
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn mem_attr_strong() {
        let mem_attr = MemAttr::StronglyOrdered;
        let mem_attr_bits = mem_attr.to_bits();
        assert_eq!(
            mem_attr_bits,
            MemAttrBits {
                tex: u3::from_u8(0),
                c: false,
                b: false,
                s: true
            }
        );
        let mem_attr2 = mem_attr_bits.decode();
        assert_eq!(Some(mem_attr), mem_attr2);
    }

    #[test]
    fn mem_attr_complex() {
        let mem_attr = MemAttr::Cacheable {
            outer: CacheablePolicy::WriteBackWriteAllocate,
            inner: CacheablePolicy::WriteThroughNoWriteAllocate,
            shareable: true,
        };
        let mem_attr_bits = mem_attr.to_bits();
        assert_eq!(
            mem_attr_bits,
            MemAttrBits {
                tex: u3::from_u8(0b101),
                c: true,
                b: false,
                s: true
            }
        );
        let mem_attr2 = mem_attr_bits.decode();
        assert_eq!(Some(mem_attr), mem_attr2);
    }
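
    // A sketch of a round-trip check for write-back, write-allocate
    // (TEX=0b001, C=1, B=1), mirroring the tests above.
    #[test]
    fn mem_attr_wb_wa_roundtrip() {
        let mem_attr = MemAttr::WriteBackWriteAllocate { shareable: false };
        let mem_attr_bits = mem_attr.to_bits();
        assert_eq!(
            mem_attr_bits,
            MemAttrBits {
                tex: u3::from_u8(0b001),
                c: true,
                b: true,
                s: false
            }
        );
        assert_eq!(Some(mem_attr), mem_attr_bits.decode());
    }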
}