#![allow(unused)]

use core::{
    alloc::GlobalAlloc,
    ops::Range,
    ptr::{NonNull, null_mut, slice_from_raw_parts_mut},
    sync::atomic::{AtomicUsize, Ordering},
};

use buddy_system_allocator::Heap;
use log::debug;
use page_table_generic::PagingError;
use spin::{Mutex, Once};

use crate::{
    globals::global_val,
    hal_al::mmu::MapConfig,
    irq::NoIrqGuard,
    mem::{
        mmu::{AccessSetting, BootMemoryKind, BootRegion, CacheSetting, LINER_OFFSET},
        once::OnceStatic,
    },
    platform::{self, kstack_size},
    println,
};

pub use crate::platform::page_size;

mod addr;
mod cache;
pub mod mmu;
pub mod once;
pub mod region;
pub use addr::*;

#[cfg(target_os = "none")]
#[global_allocator]
static ALLOCATOR: KAllocator = KAllocator {
    inner: Mutex::new(Heap::empty()),
};

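// When non-zero, `init` ends the kernel heap at this physical address (plus
// LINER_OFFSET) rather than at the end of main memory, so pages handed out
// for the temporary boot page tables are not handed back to the allocator.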
static mut TMP_PAGE_ALLOC_ADDR: usize = 0;

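/// Kernel heap allocator: a buddy-system heap guarded by a spinlock.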
pub struct KAllocator {
    pub(crate) inner: Mutex<Heap<32>>,
}

impl KAllocator {
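    /// Re-initializes the heap so it manages exactly `memory`,
    /// discarding anything previously tracked.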
    pub fn reset(&self, memory: &mut [u8]) {
        let mut g = self.inner.lock();

        let mut h = Heap::empty();

        unsafe { h.init(memory.as_mut_ptr() as usize, memory.len()) };

        *g = h;
    }

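    /// Adds `memory` to the heap, keeping everything it already manages.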
    pub fn add_to_heap(&self, memory: &mut [u8]) {
        let mut g = self.inner.lock();
        let range = memory.as_mut_ptr_range();

        unsafe { g.add_to_heap(range.start as usize, range.end as usize) };
    }
}

unsafe impl GlobalAlloc for KAllocator {
    unsafe fn alloc(&self, layout: core::alloc::Layout) -> *mut u8 {
        // Keep interrupts off while the lock is held so an interrupt
        // handler cannot re-enter the allocator and deadlock.
        let g = NoIrqGuard::new();
        if let Ok(p) = self.inner.lock().alloc(layout) {
            drop(g);
            p.as_ptr()
        } else {
            drop(g);
            null_mut()
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: core::alloc::Layout) {
        let g = NoIrqGuard::new();
        self.inner
            .lock()
            .dealloc(unsafe { NonNull::new_unchecked(ptr) }, layout);
        drop(g);
    }
}

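/// Initializes the memory subsystem: brings up the temporary page table,
/// seeds the kernel heap from main memory, switches to the final tables,
/// then adds the remaining RAM regions and initializes the cache layer.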
pub(crate) fn init() {
    let range = global_val().main_memory.clone();
    mmu::init_with_tmp_table();

    let start = VirtAddr::from(range.start.raw() + LINER_OFFSET);
    let mut end = VirtAddr::from(range.end.raw() + LINER_OFFSET);

    unsafe {
        if TMP_PAGE_ALLOC_ADDR != 0 {
            end = VirtAddr::from(TMP_PAGE_ALLOC_ADDR + LINER_OFFSET);
        }
    }

    println!("heap add memory [{}, {})", start, end);
    #[cfg(target_os = "none")]
    ALLOCATOR.add_to_heap(unsafe { &mut *slice_from_raw_parts_mut(start.into(), end - start) });

    println!("heap initialized");

    mmu::init();

    add_all_ram();

    cache::init();
}

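/// Adds every other RAM boot region to the heap. Main memory is skipped
/// because `init` already added it.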
fn add_all_ram() {
    let main = global_val().main_memory.clone();

    for region in platform::boot_regions() {
        if !matches!(region.kind, BootMemoryKind::Ram) {
            continue;
        }

        if region.range.to_range().contains(&main.start) {
            continue;
        }

        let start = VirtAddr::from(region.range.start.raw() + LINER_OFFSET);
        let end = VirtAddr::from(region.range.end.raw() + LINER_OFFSET);
        let len = end - start;

        println!("Heap add memory [{}, {})", start, end);
        #[cfg(target_os = "none")]
        ALLOCATOR.add_to_heap(unsafe { &mut *slice_from_raw_parts_mut(start.into(), len) });
    }
}

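/// Picks the main memory region: subtracts every non-RAM boot region from
/// each RAM region, then selects the lowest-addressed remaining range of at
/// least 16 MiB. Returns `None` if no range qualifies or a scratch vector
/// overflows.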
pub(crate) fn find_main_memory() -> Option<BootRegion> {
    let mut ram_regions = heapless::Vec::<_, 32>::new();
    let mut non_ram_regions = heapless::Vec::<_, 32>::new();

    for r in platform::boot_regions() {
        if matches!(r.kind, BootMemoryKind::Ram) {
            ram_regions.push(r).ok()?;
        } else {
            non_ram_regions.push(r).ok()?;
        }
    }

    let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

    for ram in &ram_regions {
        let mut current_ranges = heapless::Vec::<PhysCRange, 32>::new();
        current_ranges.push(ram.range).ok()?;

        for non_ram in &non_ram_regions {
            let mut new_ranges = heapless::Vec::<PhysCRange, 32>::new();

            for current_range in &current_ranges {
                let overlap_start = current_range.start.raw().max(non_ram.range.start.raw());
                let overlap_end = current_range.end.raw().min(non_ram.range.end.raw());

                if overlap_start < overlap_end {
                    if current_range.start.raw() < overlap_start {
                        new_ranges
                            .push(PhysCRange {
                                start: current_range.start,
                                end: PhysAddr::new(overlap_start),
                            })
                            .ok()?;
                    }
                    if overlap_end < current_range.end.raw() {
                        new_ranges
                            .push(PhysCRange {
                                start: PhysAddr::new(overlap_end),
                                end: current_range.end,
                            })
                            .ok()?;
                    }
                } else {
                    new_ranges.push(*current_range).ok()?;
                }
            }
            current_ranges = new_ranges;
        }

        for range in current_ranges {
            available_regions.push(range).ok()?;
        }
    }

    const MIN_SIZE: usize = 16 * 1024 * 1024;
    let mut best_region: Option<PhysCRange> = None;

    for region in &available_regions {
        let size = region.end.raw() - region.start.raw();
        if size >= MIN_SIZE {
            match best_region {
                None => best_region = Some(*region),
                Some(current_best) => {
                    if region.start.raw() < current_best.start.raw() {
                        best_region = Some(*region);
                    }
                }
            }
        }
    }

    if let Some(main_range) = best_region {
        println!(
            "Selected main memory: {:?}, size: {}MB",
            main_range,
            (main_range.end.raw() - main_range.start.raw()) / (1024 * 1024)
        );

        let first_ram = ram_regions.first()?;
        Some(BootRegion {
            range: main_range,
            name: c"main memory".as_ptr() as _,
            access: first_ram.access,
            cache: first_ram.cache,
            kind: BootMemoryKind::Ram,
        })
    } else {
        println!("no suitable main memory region found (>= 16MB)");
        None
    }
}

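/// Maps a region into the kernel page table according to `config`.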
pub fn map(config: &MapConfig) -> Result<(), PagingError> {
    mmu::map(config)
}

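/// Maps `size` bytes of MMIO at `paddr` as device memory in the linear
/// mapping window and returns the corresponding virtual pointer. A range
/// that is already mapped is not an error; any other failure panics.
///
/// ```ignore
/// // Hypothetical device base address; adjust for the target platform.
/// let uart = iomap(PhysAddr::new(0x0900_0000), 0x1000);
/// unsafe { uart.as_ptr().write_volatile(b'A') };
/// ```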
pub fn iomap(paddr: PhysAddr, size: usize) -> NonNull<u8> {
    let vaddr = VirtAddr::from(paddr.raw() + LINER_OFFSET);
    match mmu::map(&MapConfig {
        name: "iomap",
        va_start: vaddr,
        pa_start: paddr,
        size,
        access: AccessSetting::ReadWrite,
        cache: CacheSetting::Device,
    }) {
        Ok(_) => {}
        Err(e) => match e {
            PagingError::AlreadyMapped => {}
            _ => panic!("iomap failed: {:?}", e),
        },
    }

    let ptr: *mut u8 = vaddr.into();
    unsafe { NonNull::new_unchecked(ptr) }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::hal_al::mmu::{AccessSetting, CacheSetting};
    use core::ffi::CStr;

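    /// Builds a `BootRegion` for tests. The `name` pointer is not
    /// NUL-terminated, which is fine as long as nothing reads it as a C
    /// string.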
    fn create_test_region(
        start: usize,
        end: usize,
        name: &'static str,
        kind: BootMemoryKind,
    ) -> BootRegion {
        BootRegion {
            range: PhysCRange {
                start: PhysAddr::new(start),
                end: PhysAddr::new(end),
            },
            name: name.as_ptr(),
            access: AccessSetting::Read | AccessSetting::Write,
            cache: CacheSetting::Normal,
            kind,
        }
    }

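    /// Stand-in for `platform::boot_regions` in tests.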
    fn mock_boot_regions(regions: &[BootRegion]) -> impl Iterator<Item = BootRegion> + '_ {
        regions.iter().copied()
    }

    #[test]
    fn test_find_main_memory_simple_case() {
        let regions = [
            create_test_region(0x40000000, 0x60000000, "ram", BootMemoryKind::Ram),
        ];

        let mut ram_regions = heapless::Vec::<_, 32>::new();
        let mut non_ram_regions = heapless::Vec::<_, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            } else {
                non_ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            let mut current_ranges = heapless::Vec::<PhysCRange, 32>::new();
            current_ranges.push(ram.range).unwrap();

            for non_ram in &non_ram_regions {
                let mut new_ranges = heapless::Vec::<PhysCRange, 32>::new();

                for current_range in &current_ranges {
                    let overlap_start = current_range.start.raw().max(non_ram.range.start.raw());
                    let overlap_end = current_range.end.raw().min(non_ram.range.end.raw());

                    if overlap_start < overlap_end {
                        if current_range.start.raw() < overlap_start {
                            new_ranges
                                .push(PhysCRange {
                                    start: current_range.start,
                                    end: PhysAddr::new(overlap_start),
                                })
                                .unwrap();
                        }
                        if overlap_end < current_range.end.raw() {
                            new_ranges
                                .push(PhysCRange {
                                    start: PhysAddr::new(overlap_end),
                                    end: current_range.end,
                                })
                                .unwrap();
                        }
                    } else {
                        new_ranges.push(*current_range).unwrap();
                    }
                }
                current_ranges = new_ranges;
            }

            for range in current_ranges {
                available_regions.push(range).unwrap();
            }
        }

        assert_eq!(available_regions.len(), 1);
        assert_eq!(available_regions[0].start.raw(), 0x40000000);
        assert_eq!(available_regions[0].end.raw(), 0x60000000);

        const MIN_SIZE: usize = 16 * 1024 * 1024;
        let size = available_regions[0].end.raw() - available_regions[0].start.raw();
        assert!(size >= MIN_SIZE);
    }

    #[test]
    fn test_find_main_memory_with_overlap() {
        let regions = [
            create_test_region(0x40000000, 0x60000000, "ram", BootMemoryKind::Ram),
            create_test_region(0x45000000, 0x46000000, "reserved", BootMemoryKind::Reserved),
        ];

        let mut ram_regions = heapless::Vec::<_, 32>::new();
        let mut non_ram_regions = heapless::Vec::<_, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            } else {
                non_ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            let mut current_ranges = heapless::Vec::<PhysCRange, 32>::new();
            current_ranges.push(ram.range).unwrap();

            for non_ram in &non_ram_regions {
                let mut new_ranges = heapless::Vec::<PhysCRange, 32>::new();

                for current_range in &current_ranges {
                    let overlap_start = current_range.start.raw().max(non_ram.range.start.raw());
                    let overlap_end = current_range.end.raw().min(non_ram.range.end.raw());

                    if overlap_start < overlap_end {
                        if current_range.start.raw() < overlap_start {
                            new_ranges
                                .push(PhysCRange {
                                    start: current_range.start,
                                    end: PhysAddr::new(overlap_start),
                                })
                                .unwrap();
                        }
                        if overlap_end < current_range.end.raw() {
                            new_ranges
                                .push(PhysCRange {
                                    start: PhysAddr::new(overlap_end),
                                    end: current_range.end,
                                })
                                .unwrap();
                        }
                    } else {
                        new_ranges.push(*current_range).unwrap();
                    }
                }
                current_ranges = new_ranges;
            }

            for range in current_ranges {
                available_regions.push(range).unwrap();
            }
        }

        assert_eq!(available_regions.len(), 2);

        let region1 = available_regions
            .iter()
            .find(|r| r.start.raw() == 0x40000000)
            .unwrap();
        assert_eq!(region1.end.raw(), 0x45000000);

        let region2 = available_regions
            .iter()
            .find(|r| r.start.raw() == 0x46000000)
            .unwrap();
        assert_eq!(region2.end.raw(), 0x60000000);

        const MIN_SIZE: usize = 16 * 1024 * 1024;
        for region in &available_regions {
            let size = region.end.raw() - region.start.raw();
            assert!(size >= MIN_SIZE);
        }
    }

    #[test]
    fn test_find_main_memory_multiple_overlaps() {
        let regions = [
            create_test_region(0x40000000, 0x80000000, "ram", BootMemoryKind::Ram),
            create_test_region(
                0x45000000,
                0x46000000,
                "reserved1",
                BootMemoryKind::Reserved,
            ),
            create_test_region(
                0x50000000,
                0x52000000,
                "reserved2",
                BootMemoryKind::Reserved,
            ),
            create_test_region(0x70000000, 0x71000000, "kimage", BootMemoryKind::KImage),
        ];

        let mut ram_regions = heapless::Vec::<_, 32>::new();
        let mut non_ram_regions = heapless::Vec::<_, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            } else {
                non_ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            let mut current_ranges = heapless::Vec::<PhysCRange, 32>::new();
            current_ranges.push(ram.range).unwrap();

            for non_ram in &non_ram_regions {
                let mut new_ranges = heapless::Vec::<PhysCRange, 32>::new();

                for current_range in &current_ranges {
                    let overlap_start = current_range.start.raw().max(non_ram.range.start.raw());
                    let overlap_end = current_range.end.raw().min(non_ram.range.end.raw());

                    if overlap_start < overlap_end {
                        if current_range.start.raw() < overlap_start {
                            new_ranges
                                .push(PhysCRange {
                                    start: current_range.start,
                                    end: PhysAddr::new(overlap_start),
                                })
                                .unwrap();
                        }
                        if overlap_end < current_range.end.raw() {
                            new_ranges
                                .push(PhysCRange {
                                    start: PhysAddr::new(overlap_end),
                                    end: current_range.end,
                                })
                                .unwrap();
                        }
                    } else {
                        new_ranges.push(*current_range).unwrap();
                    }
                }
                current_ranges = new_ranges;
            }

            for range in current_ranges {
                available_regions.push(range).unwrap();
            }
        }

        assert_eq!(available_regions.len(), 4);

        let expected_regions = [
            (0x40000000, 0x45000000),
            (0x46000000, 0x50000000),
            (0x52000000, 0x70000000),
            (0x71000000, 0x80000000),
        ];

        for (start, end) in expected_regions {
            let region = available_regions
                .iter()
                .find(|r| r.start.raw() == start)
                .unwrap();
            assert_eq!(region.end.raw(), end);
        }
    }

    #[test]
    fn test_find_main_memory_select_lowest_address() {
        let regions = [
            create_test_region(0x80000000, 0x90000000, "ram1", BootMemoryKind::Ram),
            create_test_region(0x40000000, 0x50000000, "ram2", BootMemoryKind::Ram),
        ];

        let mut ram_regions = heapless::Vec::<_, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            available_regions.push(ram.range).unwrap();
        }

        const MIN_SIZE: usize = 16 * 1024 * 1024;
        let mut best_region: Option<PhysCRange> = None;

        for region in &available_regions {
            let size = region.end.raw() - region.start.raw();
            if size >= MIN_SIZE {
                match best_region {
                    None => best_region = Some(*region),
                    Some(current_best) => {
                        if region.start.raw() < current_best.start.raw() {
                            best_region = Some(*region);
                        }
                    }
                }
            }
        }

        assert!(best_region.is_some());
        let selected = best_region.unwrap();
        assert_eq!(selected.start.raw(), 0x40000000);
        assert_eq!(selected.end.raw(), 0x50000000);
    }

    #[test]
    fn test_find_main_memory_no_suitable_region() {
        // Both RAM regions are smaller than the 16 MiB minimum.
        let regions = [
            create_test_region(0x40000000, 0x40800000, "ram1", BootMemoryKind::Ram),
            create_test_region(0x50000000, 0x50400000, "ram2", BootMemoryKind::Ram),
        ];

        let mut ram_regions = heapless::Vec::<_, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            available_regions.push(ram.range).unwrap();
        }

        const MIN_SIZE: usize = 16 * 1024 * 1024;
        let mut best_region: Option<PhysCRange> = None;

        for region in &available_regions {
            let size = region.end.raw() - region.start.raw();
            if size >= MIN_SIZE {
                match best_region {
                    None => best_region = Some(*region),
                    Some(current_best) => {
                        if region.start.raw() < current_best.start.raw() {
                            best_region = Some(*region);
                        }
                    }
                }
            }
        }

        assert!(best_region.is_none());
    }

    #[test]
    fn test_find_main_memory_edge_case_exact_overlap() {
        let regions = [
            create_test_region(0x40000000, 0x50000000, "ram", BootMemoryKind::Ram),
            create_test_region(0x40000000, 0x50000000, "reserved", BootMemoryKind::Reserved),
        ];

        let mut ram_regions = heapless::Vec::<_, 32>::new();
        let mut non_ram_regions = heapless::Vec::<_, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            } else {
                non_ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            let mut current_ranges = heapless::Vec::<PhysCRange, 32>::new();
            current_ranges.push(ram.range).unwrap();

            for non_ram in &non_ram_regions {
                let mut new_ranges = heapless::Vec::<PhysCRange, 32>::new();

                for current_range in &current_ranges {
                    let overlap_start = current_range.start.raw().max(non_ram.range.start.raw());
                    let overlap_end = current_range.end.raw().min(non_ram.range.end.raw());

                    if overlap_start < overlap_end {
                        if current_range.start.raw() < overlap_start {
                            new_ranges
                                .push(PhysCRange {
                                    start: current_range.start,
                                    end: PhysAddr::new(overlap_start),
                                })
                                .unwrap();
                        }
                        if overlap_end < current_range.end.raw() {
                            new_ranges
                                .push(PhysCRange {
                                    start: PhysAddr::new(overlap_end),
                                    end: current_range.end,
                                })
                                .unwrap();
                        }
                    } else {
                        new_ranges.push(*current_range).unwrap();
                    }
                }
                current_ranges = new_ranges;
            }

            for range in current_ranges {
                available_regions.push(range).unwrap();
            }
        }

        assert_eq!(available_regions.len(), 0);
    }
}