use arbitrary_int::{u26, u3};

use crate::register;

#[doc(inline)]
pub use register::hprbar::{AccessPerms as El2AccessPerms, Shareability as El2Shareability};
#[doc(inline)]
pub use register::prbar::{AccessPerms as El1AccessPerms, Shareability as El1Shareability};

/// Errors that can occur when configuring MPU regions.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Error {
    /// More regions were supplied than the MPU implements.
    TooManyRegions,
    /// The MAIR attribute index was out of range (must be `0..=7`).
    InvalidMair(u8),
    /// The region was not aligned to the 64-byte region granule.
    UnalignedRegion(core::ops::RangeInclusive<*mut u8>),
}

/// A handle to the EL1-controlled Memory Protection Unit.
pub struct El1Mpu();

impl El1Mpu {
    /// Create a handle to the EL1 MPU.
    ///
    /// # Safety
    ///
    /// Only create one of these at a time, as the methods on this type
    /// read-modify-write shared CPU registers (e.g. via `PRSELR`).
    pub unsafe fn new() -> El1Mpu {
        El1Mpu()
    }

    /// How many MPU regions does this core support?
    pub fn num_regions(&self) -> u8 {
        register::Mpuir::read().dregions()
    }

    /// Read back the configuration of the given MPU region, if it exists.
    pub fn get_region(&mut self, idx: u8) -> Option<El1Region> {
        if idx >= self.num_regions() {
            return None;
        }
        // Select the region, then read its base and limit registers.
        register::Prselr::write(register::Prselr(idx as u32));
        let prbar = register::Prbar::read();
        let prlar = register::Prlar::read();
        // Base and limit hold bits [31:6] of the address; the bottom six
        // bits of the inclusive limit address are always 0b111111.
        let start_addr = (prbar.base().value() << 6) as *mut u8;
        let end_addr = ((prlar.limit().value() << 6) | 0x3F) as *mut u8;
        Some(El1Region {
            range: start_addr..=end_addr,
            shareability: prbar.shareability(),
            access: prbar.access_perms(),
            no_exec: prbar.nx(),
            mair: prlar.mair().value(),
            enable: prlar.enabled(),
        })
    }

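    /// Configure the given MPU region.
    ///
    /// The start of the region must be 64-byte aligned, and the inclusive
    /// end address must be the last byte of a 64-byte block (i.e. its
    /// bottom six bits must all be set).
    ///
    /// A minimal sketch of a round-trip edit (the addresses are made up for
    /// illustration, and `mpu` is assumed to be an `El1Mpu` handle):
    ///
    /// ```ignore
    /// // Take region 0's current settings, but move it to a new
    /// // 1 KiB window and enable it.
    /// let mut region = mpu.get_region(0).unwrap();
    /// region.range = (0x2000_0000 as *mut u8)..=(0x2000_03FF as *mut u8);
    /// region.enable = true;
    /// mpu.set_region(0, &region)?;
    /// ```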
    pub fn set_region(&mut self, idx: u8, region: &El1Region) -> Result<(), Error> {
        // The start address must sit on a 64-byte boundary...
        let start = *(region.range.start()) as usize as u32;
        if start & 0x3F != 0 {
            return Err(Error::UnalignedRegion(region.range.clone()));
        }
        // ...and the inclusive end address must be the last byte of a
        // 64-byte block.
        let end = *(region.range.end()) as usize as u32;
        if end & 0x3F != 0x3F {
            return Err(Error::UnalignedRegion(region.range.clone()));
        }
        // Only MAIR attribute indexes 0..=7 exist.
        if region.mair > 7 {
            return Err(Error::InvalidMair(region.mair));
        }
        register::Prselr::write(register::Prselr(idx as u32));
        register::Prbar::write({
            let mut bar = register::Prbar::new_with_raw_value(0);
            bar.set_base(u26::from_u32(start >> 6));
            bar.set_access_perms(region.access);
            bar.set_nx(region.no_exec);
            bar.set_shareability(region.shareability);
            bar
        });
        register::Prlar::write({
            let mut lar = register::Prlar::new_with_raw_value(0);
            lar.set_limit(u26::from_u32(end >> 6));
            lar.set_enabled(region.enable);
            lar.set_mair(u3::from_u8(region.mair));
            lar
        });

        Ok(())
    }

    /// Configure multiple MPU regions, starting at the given region index.
    pub fn set_regions(
        &mut self,
        regions_starting_idx: u8,
        regions: &[El1Region],
    ) -> Result<(), Error> {
        if regions.len().saturating_add(regions_starting_idx as usize) > self.num_regions() as usize
        {
            return Err(Error::TooManyRegions);
        }

        for (idx, region) in regions.iter().enumerate() {
            self.set_region(idx as u8 + regions_starting_idx, region)?;
        }

        Ok(())
    }

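    /// Program the MAIR0/MAIR1 memory attribute registers.
    ///
    /// Entry `i` of the slice becomes memory attribute `i`, which is what a
    /// region's `mair` field refers to. Missing entries are programmed as
    /// zero, and entries past index 7 are ignored.
    ///
    /// A short sketch of the pairing between slice index and `mair`:
    ///
    /// ```ignore
    /// mpu.set_attributes(&[
    ///     // Referenced by regions with `mair: 0`.
    ///     MemAttr::DeviceMemory,
    ///     // Referenced by regions with `mair: 1`.
    ///     MemAttr::NormalMemory {
    ///         outer: Cacheable::NonCacheable,
    ///         inner: Cacheable::NonCacheable,
    ///     },
    /// ]);
    /// ```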
    pub fn set_attributes(&mut self, memattrs: &[MemAttr]) {
        // Attributes 0..=3 live in MAIR0, one byte each.
        let mem_attr0 = memattrs.get(0).map(|m| m.to_bits()).unwrap_or(0) as u32;
        let mem_attr1 = memattrs.get(1).map(|m| m.to_bits()).unwrap_or(0) as u32;
        let mem_attr2 = memattrs.get(2).map(|m| m.to_bits()).unwrap_or(0) as u32;
        let mem_attr3 = memattrs.get(3).map(|m| m.to_bits()).unwrap_or(0) as u32;
        let mair0 = mem_attr3 << 24 | mem_attr2 << 16 | mem_attr1 << 8 | mem_attr0;
        unsafe {
            register::Mair0::write(register::Mair0(mair0));
        }
        // Attributes 4..=7 live in MAIR1.
        let mem_attr4 = memattrs.get(4).map(|m| m.to_bits()).unwrap_or(0) as u32;
        let mem_attr5 = memattrs.get(5).map(|m| m.to_bits()).unwrap_or(0) as u32;
        let mem_attr6 = memattrs.get(6).map(|m| m.to_bits()).unwrap_or(0) as u32;
        let mem_attr7 = memattrs.get(7).map(|m| m.to_bits()).unwrap_or(0) as u32;
        let mair1 = mem_attr7 << 24 | mem_attr6 << 16 | mem_attr5 << 8 | mem_attr4;
        unsafe {
            register::Mair1::write(register::Mair1(mair1));
        }
    }

    /// Enable or disable the background region (the `SCTLR.BR` bit).
    pub fn background_region_enable(&mut self, enable: bool) {
        register::Sctlr::modify(|r| {
            r.set_br(enable);
        });
    }

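    /// Apply a whole MPU configuration: the regions (starting at index 0),
    /// the memory attributes, and the background-region enable bit.
    ///
    /// A minimal end-to-end sketch (`mpu` and the `REGIONS` table are
    /// hypothetical; the regions must satisfy the alignment rules of
    /// [`Self::set_region`]):
    ///
    /// ```ignore
    /// mpu.configure(&El1Config {
    ///     background_config: true,
    ///     regions: &REGIONS,
    ///     memory_attributes: &[MemAttr::DeviceMemory],
    /// })?;
    /// // The MPU is configured but not yet turned on.
    /// mpu.enable();
    /// ```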
    pub fn configure(&mut self, config: &El1Config) -> Result<(), Error> {
        self.set_regions(0, config.regions)?;

        self.set_attributes(config.memory_attributes);

        self.background_region_enable(config.background_config);

        Ok(())
    }

    /// Turn the MPU on (the `SCTLR.M` bit).
    pub fn enable(&mut self) {
        register::Sctlr::modify(|r| {
            r.set_m(true);
        });
    }

    /// Turn the MPU off.
    pub fn disable(&mut self) {
        register::Sctlr::modify(|r| {
            r.set_m(false);
        });
    }
}

/// A handle to the EL2-controlled Memory Protection Unit.
pub struct El2Mpu();

impl El2Mpu {
    /// Create a handle to the EL2 MPU.
    ///
    /// # Safety
    ///
    /// Only create one of these at a time, as the methods on this type
    /// read-modify-write shared CPU registers (e.g. via `HPRSELR`).
    pub unsafe fn new() -> El2Mpu {
        El2Mpu()
    }

    /// How many EL2 MPU regions does this core support?
    pub fn num_regions(&self) -> u8 {
        register::Hmpuir::read().region()
    }

    /// Read back the configuration of the given MPU region, if it exists.
    pub fn get_region(&mut self, idx: u8) -> Option<El2Region> {
        if idx >= self.num_regions() {
            return None;
        }
        // Select the region, then read its base and limit registers.
        register::Hprselr::write(register::Hprselr(idx as u32));
        let hprbar = register::Hprbar::read();
        let hprlar = register::Hprlar::read();
        // Base and limit hold bits [31:6] of the address; the bottom six
        // bits of the inclusive limit address are always 0b111111.
        let start_addr = (hprbar.base().value() << 6) as *mut u8;
        let end_addr = ((hprlar.limit().value() << 6) | 0x3F) as *mut u8;
        Some(El2Region {
            range: start_addr..=end_addr,
            shareability: hprbar.shareability(),
            access: hprbar.access_perms(),
            no_exec: hprbar.nx(),
            mair: hprlar.mair().value(),
            enable: hprlar.enabled(),
        })
    }

    /// Configure the given MPU region.
    ///
    /// The same alignment and `mair` rules apply as for
    /// [`El1Mpu::set_region`].
    pub fn set_region(&mut self, idx: u8, region: &El2Region) -> Result<(), Error> {
        let start = *(region.range.start()) as usize as u32;
        if start & 0x3F != 0 {
            return Err(Error::UnalignedRegion(region.range.clone()));
        }
        let end = *(region.range.end()) as usize as u32;
        if end & 0x3F != 0x3F {
            return Err(Error::UnalignedRegion(region.range.clone()));
        }
        if region.mair > 7 {
            return Err(Error::InvalidMair(region.mair));
        }
        register::Hprselr::write(register::Hprselr(idx as u32));
        register::Hprbar::write({
            let mut bar = register::Hprbar::new_with_raw_value(0);
            bar.set_base(u26::from_u32(start >> 6));
            bar.set_access_perms(region.access);
            bar.set_nx(region.no_exec);
            bar.set_shareability(region.shareability);
            bar
        });
        register::Hprlar::write({
            let mut lar = register::Hprlar::new_with_raw_value(0);
            lar.set_limit(u26::from_u32(end >> 6));
            lar.set_enabled(region.enable);
            lar.set_mair(u3::from_u8(region.mair));
            lar
        });

        Ok(())
    }

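    /// Configure multiple MPU regions, starting at the given region index.
    ///
    /// A short sketch (`mpu`, `region_a` and `region_b` are hypothetical;
    /// regions 0 and 1 are assumed to be managed elsewhere):
    ///
    /// ```ignore
    /// // Program regions 2 and 3 in one call, leaving 0 and 1 untouched.
    /// mpu.set_regions(2, &[region_a, region_b])?;
    /// ```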
    pub fn set_regions(
        &mut self,
        regions_starting_idx: u8,
        regions: &[El2Region],
    ) -> Result<(), Error> {
        if regions.len().saturating_add(regions_starting_idx as usize) > self.num_regions() as usize
        {
            return Err(Error::TooManyRegions);
        }

        for (idx, region) in regions.iter().enumerate() {
            self.set_region(idx as u8 + regions_starting_idx, region)?;
        }

        Ok(())
    }

    /// Program the HMAIR0/HMAIR1 memory attribute registers.
    ///
    /// Entry `i` of the slice becomes memory attribute `i`; missing entries
    /// are written as zero, and entries past index 7 are ignored.
    pub fn set_attributes(&mut self, memattrs: &[MemAttr]) {
        // Attributes 0..=3 live in HMAIR0, one byte each.
        let mem_attr0 = memattrs.get(0).map(|m| m.to_bits()).unwrap_or(0) as u32;
        let mem_attr1 = memattrs.get(1).map(|m| m.to_bits()).unwrap_or(0) as u32;
        let mem_attr2 = memattrs.get(2).map(|m| m.to_bits()).unwrap_or(0) as u32;
        let mem_attr3 = memattrs.get(3).map(|m| m.to_bits()).unwrap_or(0) as u32;
        let hmair0 = mem_attr3 << 24 | mem_attr2 << 16 | mem_attr1 << 8 | mem_attr0;
        unsafe {
            register::Hmair0::write(register::Hmair0(hmair0));
        }
        // Attributes 4..=7 live in HMAIR1.
        let mem_attr4 = memattrs.get(4).map(|m| m.to_bits()).unwrap_or(0) as u32;
        let mem_attr5 = memattrs.get(5).map(|m| m.to_bits()).unwrap_or(0) as u32;
        let mem_attr6 = memattrs.get(6).map(|m| m.to_bits()).unwrap_or(0) as u32;
        let mem_attr7 = memattrs.get(7).map(|m| m.to_bits()).unwrap_or(0) as u32;
        let hmair1 = mem_attr7 << 24 | mem_attr6 << 16 | mem_attr5 << 8 | mem_attr4;
        unsafe {
            register::Hmair1::write(register::Hmair1(hmair1));
        }
    }

    /// Enable or disable the background region (the `HSCTLR.BR` bit).
    pub fn background_region_enable(&mut self, enable: bool) {
        register::Hsctlr::modify(|r| {
            r.set_br(enable);
        });
    }

    /// Apply a whole MPU configuration: the regions (starting at index 0),
    /// the memory attributes, and the background-region enable bit.
    pub fn configure(&mut self, config: &El2Config) -> Result<(), Error> {
        self.set_regions(0, config.regions)?;

        self.set_attributes(config.memory_attributes);

        self.background_region_enable(config.background_config);

        Ok(())
    }

    /// Turn the MPU on (the `HSCTLR.M` bit).
    pub fn enable(&mut self) {
        register::Hsctlr::modify(|r| {
            r.set_m(true);
        });
    }

    /// Turn the MPU off.
    pub fn disable(&mut self) {
        register::Hsctlr::modify(|r| {
            r.set_m(false);
        });
    }
}

/// A complete configuration for the EL1 MPU.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct El1Config<'a> {
    /// Whether to enable the background region (`SCTLR.BR`).
    pub background_config: bool,
    /// Regions to program, starting at region index 0.
    pub regions: &'a [El1Region],
    /// Memory attributes to program into MAIR0/MAIR1 (up to eight).
    pub memory_attributes: &'a [MemAttr],
}

/// Describes a single EL1 MPU region.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct El1Region {
    /// Inclusive address range; the start must be 64-byte aligned and the
    /// end must be the last byte of a 64-byte block.
    pub range: core::ops::RangeInclusive<*mut u8>,
    /// Shareability of the region.
    pub shareability: El1Shareability,
    /// Access permissions for the region.
    pub access: El1AccessPerms,
    /// Whether instruction execution from this region is forbidden.
    pub no_exec: bool,
    /// Index (`0..=7`) of the memory attribute set via `set_attributes`.
    pub mair: u8,
    /// Whether the region is enabled.
    pub enable: bool,
}

// Safety: the raw pointers in `range` are only ever used as addresses and
// are never dereferenced.
unsafe impl Sync for El1Region {}

/// A complete configuration for the EL2 MPU.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct El2Config<'a> {
    /// Whether to enable the background region (`HSCTLR.BR`).
    pub background_config: bool,
    /// Regions to program, starting at region index 0.
    pub regions: &'a [El2Region],
    /// Memory attributes to program into HMAIR0/HMAIR1 (up to eight).
    pub memory_attributes: &'a [MemAttr],
}

/// Describes a single EL2 MPU region.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct El2Region {
    /// Inclusive address range; the start must be 64-byte aligned and the
    /// end must be the last byte of a 64-byte block.
    pub range: core::ops::RangeInclusive<*mut u8>,
    /// Shareability of the region.
    pub shareability: El2Shareability,
    /// Access permissions for the region.
    pub access: El2AccessPerms,
    /// Whether instruction execution from this region is forbidden.
    pub no_exec: bool,
    /// Index (`0..=7`) of the memory attribute set via `set_attributes`.
    pub mair: u8,
    /// Whether the region is enabled.
    pub enable: bool,
}

// Safety: the raw pointers in `range` are only ever used as addresses and
// are never dereferenced.
unsafe impl Sync for El2Region {}

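/// A memory attribute, as encoded into one byte of an MAIR register.
///
/// A worked example of the encoding produced by the (private) `to_bits`
/// helper: the outer policy fills the high nibble and the inner policy the
/// low nibble.
///
/// ```ignore
/// let attr = MemAttr::NormalMemory {
///     outer: Cacheable::WriteBackNonTransient(RwAllocPolicy::RW),
///     inner: Cacheable::NonCacheable,
/// };
/// // outer = 0b1100 | 0b11 = 0b1111, inner = 0b0100
/// assert_eq!(attr.to_bits(), 0b1111_0100);
/// ```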
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum MemAttr {
    /// Device memory, with all accesses strongly ordered (encoded as 0x00).
    StronglyOrdered,
    /// Device memory (encoded as 0x04).
    DeviceMemory,
    /// Normal memory, with separate outer and inner cache policies.
    NormalMemory {
        /// Cache policy for the outer domain.
        outer: Cacheable,
        /// Cache policy for the inner domain.
        inner: Cacheable,
    },
}

impl MemAttr {
    /// Encode this attribute as one byte of an MAIR register.
    const fn to_bits(&self) -> u8 {
        match self {
            MemAttr::StronglyOrdered => 0b0000_0000,
            MemAttr::DeviceMemory => 0b0000_0100,
            MemAttr::NormalMemory { outer, inner } => {
                let outer_bits = outer.to_bits();
                let inner_bits = inner.to_bits();
                outer_bits << 4 | inner_bits
            }
        }
    }
}

/// How is a Normal memory region cached?
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Cacheable {
    /// Write-through, transient, with the given allocation policy.
    WriteThroughTransient(RwAllocPolicy),
    /// Write-back, transient, with the given allocation policy.
    WriteBackTransient(RwAllocPolicy),
    /// Write-through, non-transient, with the given allocation policy.
    WriteThroughNonTransient(RwAllocPolicy),
    /// Write-back, non-transient, with the given allocation policy.
    WriteBackNonTransient(RwAllocPolicy),
    /// Not cached at all.
    NonCacheable,
}

impl Cacheable {
    /// Encode this cache policy as an MAIR nibble.
    const fn to_bits(&self) -> u8 {
        match self {
            Cacheable::WriteThroughTransient(rw_alloc) => 0b0000 | (*rw_alloc as u8),
            Cacheable::WriteBackTransient(rw_alloc) => 0b0100 | (*rw_alloc as u8),
            Cacheable::WriteThroughNonTransient(rw_alloc) => 0b1000 | (*rw_alloc as u8),
            Cacheable::WriteBackNonTransient(rw_alloc) => 0b1100 | (*rw_alloc as u8),
            Cacheable::NonCacheable => 0b0100,
        }
    }
}

/// Read/write allocation policy for a cacheable region.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum RwAllocPolicy {
    /// Allocate on writes.
    W = 0b01,
    /// Allocate on reads.
    R = 0b10,
    /// Allocate on both reads and writes.
    RW = 0b11,
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn mem_attr_strong() {
        let mem_attr = MemAttr::StronglyOrdered;
        assert_eq!(mem_attr.to_bits(), 0b0000_0000);
    }

    #[test]
    fn mem_attr_device() {
        let mem_attr = MemAttr::DeviceMemory;
        assert_eq!(mem_attr.to_bits(), 0b0000_0100);
    }

    #[test]
    fn mem_attr_normal() {
        let mem_attr = MemAttr::NormalMemory {
            outer: Cacheable::NonCacheable,
            inner: Cacheable::WriteBackNonTransient(RwAllocPolicy::W),
        };
        assert_eq!(
            mem_attr.to_bits(),
            0b0100_1101,
            "0b{:08b}",
            mem_attr.to_bits()
        );
    }
}