#![allow(unused)]
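//! Kernel memory: the global heap allocator, MMU mapping helpers, and
//! discovery of the main memory region from the platform's boot map.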

use core::{
    alloc::GlobalAlloc,
    ops::Range,
    ptr::{NonNull, null_mut, slice_from_raw_parts_mut},
    sync::atomic::{AtomicUsize, Ordering},
};

use buddy_system_allocator::Heap;
use log::debug;
use page_table_generic::PagingError;
use spin::{Mutex, Once};

use crate::{
    globals::global_val,
    hal_al::mmu::MapConfig,
    mem::{
        mmu::{AccessSetting, BootMemoryKind, BootRegion, CacheSetting, LINER_OFFSET},
        once::OnceStatic,
    },
    platform::{self, kstack_size},
    println,
};

pub use crate::platform::page_size;

mod addr;
mod cache;
pub mod mmu;
pub mod once;
pub mod region;
pub use addr::*;

#[cfg(target_os = "none")]
#[global_allocator]
static ALLOCATOR: KAllocator = KAllocator {
    inner: Mutex::new(Heap::empty()),
};

// Physical address at which the temporary page-table allocator begins
// (0 if unused); `init` stops the heap below it so the two never overlap.
static mut TMP_PAGE_ALLOC_ADDR: usize = 0;

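/// Kernel heap allocator: a buddy-system [`Heap`] (max order 32) behind a
/// spinlock.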
pub struct KAllocator {
    pub(crate) inner: Mutex<Heap<32>>,
}

impl KAllocator {
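    /// Discard the current heap and re-initialize it over `memory`.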
    pub fn reset(&self, memory: &mut [u8]) {
        let mut g = self.inner.lock();

        let mut h = Heap::empty();

        unsafe { h.init(memory.as_mut_ptr() as usize, memory.len()) };

        *g = h;
    }

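    /// Grow the heap with the byte range covered by `memory`.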
    pub fn add_to_heap(&self, memory: &mut [u8]) {
        let mut g = self.inner.lock();
        let range = memory.as_mut_ptr_range();

        unsafe { g.add_to_heap(range.start as usize, range.end as usize) };
    }
}

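// The allocator hooks forward to the buddy heap, translating its `Result`
// into the null-pointer convention `GlobalAlloc` expects.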
unsafe impl GlobalAlloc for KAllocator {
    unsafe fn alloc(&self, layout: core::alloc::Layout) -> *mut u8 {
        if let Ok(p) = self.inner.lock().alloc(layout) {
            p.as_ptr()
        } else {
            null_mut()
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: core::alloc::Layout) {
        self.inner
            .lock()
            .dealloc(unsafe { NonNull::new_unchecked(ptr) }, layout);
    }
}

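/// Bring up the kernel heap: map main memory through a temporary page
/// table, seed the allocator with it, switch to the final tables, then
/// add the remaining RAM regions.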
pub(crate) fn init() {
    let range = global_val().main_memory.clone();
    mmu::init_with_tmp_table();

    let start = VirtAddr::from(range.start.raw() + LINER_OFFSET);
    let mut end = VirtAddr::from(range.end.raw() + LINER_OFFSET);

    unsafe {
        // Stop below the temporary page-table allocations so the heap
        // never hands out memory that already backs page tables.
        if TMP_PAGE_ALLOC_ADDR != 0 {
            end = VirtAddr::from(TMP_PAGE_ALLOC_ADDR + LINER_OFFSET);
        }
    }

    println!("Heap add memory [{}, {})", start, end);
    #[cfg(target_os = "none")]
    ALLOCATOR.add_to_heap(unsafe { &mut *slice_from_raw_parts_mut(start.into(), end - start) });

    println!("heap initialized");

    mmu::init();

    add_all_ram();
}

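/// Add every other RAM boot region to the heap; the region holding main
/// memory was already added by [`init`].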
fn add_all_ram() {
    let main = global_val().main_memory.clone();

    for region in platform::boot_regions() {
        if !matches!(region.kind, BootMemoryKind::Ram) {
            continue;
        }

        // Skip the region that contains main memory.
        if region.range.to_range().contains(&main.start) {
            continue;
        }

        let start = VirtAddr::from(region.range.start.raw() + LINER_OFFSET);
        let end = VirtAddr::from(region.range.end.raw() + LINER_OFFSET);
        let len = end - start;

        println!("Heap add memory [{}, {})", start, end);
        #[cfg(target_os = "none")]
        ALLOCATOR.add_to_heap(unsafe { &mut *slice_from_raw_parts_mut(start.into(), len) });
    }
}

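/// Choose the main memory region from the platform's boot map.
///
/// Every non-RAM region (reserved, kernel image, ...) is carved out of the
/// RAM regions; of the fragments that survive, the lowest-addressed one of
/// at least 16 MiB is selected.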
pub(crate) fn find_main_memory() -> Option<BootRegion> {
    let mut ram_regions = heapless::Vec::<_, 32>::new();
    let mut non_ram_regions = heapless::Vec::<_, 32>::new();

    for r in platform::boot_regions() {
        if matches!(r.kind, BootMemoryKind::Ram) {
            ram_regions.push(r).ok()?;
        } else {
            non_ram_regions.push(r).ok()?;
        }
    }

    let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

    // Carve every non-RAM region out of each RAM region, collecting the
    // fragments that remain.
    for ram in &ram_regions {
        let mut current_ranges = heapless::Vec::<PhysCRange, 32>::new();
        current_ranges.push(ram.range).ok()?;

        for non_ram in &non_ram_regions {
            let mut new_ranges = heapless::Vec::<PhysCRange, 32>::new();

            for current_range in &current_ranges {
                let overlap_start = current_range.start.raw().max(non_ram.range.start.raw());
                let overlap_end = current_range.end.raw().min(non_ram.range.end.raw());

                if overlap_start < overlap_end {
                    // Keep the part below the overlap, if any.
                    if current_range.start.raw() < overlap_start {
                        new_ranges
                            .push(PhysCRange {
                                start: current_range.start,
                                end: PhysAddr::new(overlap_start),
                            })
                            .ok()?;
                    }
                    // Keep the part above the overlap, if any.
                    if overlap_end < current_range.end.raw() {
                        new_ranges
                            .push(PhysCRange {
                                start: PhysAddr::new(overlap_end),
                                end: current_range.end,
                            })
                            .ok()?;
                    }
                } else {
                    // No overlap: the range survives unchanged.
                    new_ranges.push(*current_range).ok()?;
                }
            }
            current_ranges = new_ranges;
        }

        for range in current_ranges {
            available_regions.push(range).ok()?;
        }
    }

    // Of the fragments of at least 16 MiB, prefer the lowest start address.
    const MIN_SIZE: usize = 16 * 1024 * 1024;
    let mut best_region: Option<PhysCRange> = None;

    for region in &available_regions {
        let size = region.end.raw() - region.start.raw();
        if size >= MIN_SIZE {
            match best_region {
                None => best_region = Some(*region),
                Some(current_best) => {
                    if region.start.raw() < current_best.start.raw() {
                        best_region = Some(*region);
                    }
                }
            }
        }
    }

    if let Some(main_range) = best_region {
        println!(
            "Selected main memory: {:?}, size: {}MB",
            main_range,
            (main_range.end.raw() - main_range.start.raw()) / (1024 * 1024)
        );

        // Main memory inherits access and cache settings from the first
        // RAM region.
        let first_ram = ram_regions.first()?;
        Some(BootRegion {
            range: main_range,
            name: c"main memory".as_ptr() as _,
            access: first_ram.access,
            cache: first_ram.cache,
            kind: BootMemoryKind::Ram,
        })
    } else {
        println!("no suitable main memory region found (>= 16MB)");
        None
    }
}

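/// Map a virtual range as described by `config`.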
pub fn map(config: &MapConfig) -> Result<(), PagingError> {
    mmu::map(config)
}

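/// Map `size` bytes of device memory at `paddr` into the linear-mapping
/// window and return a pointer to it. Mapping a range twice is tolerated;
/// any other paging error panics.
///
/// A usage sketch (the UART base address here is made up):
///
/// ```ignore
/// let uart = iomap(PhysAddr::new(0x0900_0000), 0x1000);
/// ```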
pub fn iomap(paddr: PhysAddr, size: usize) -> NonNull<u8> {
    let vaddr = VirtAddr::from(paddr.raw() + LINER_OFFSET);
    match mmu::map(&MapConfig {
        name: "iomap",
        va_start: vaddr,
        pa_start: paddr,
        size,
        access: AccessSetting::ReadWrite,
        cache: CacheSetting::Device,
    }) {
        Ok(_) => {}
        Err(e) => match e {
            // Remapping the same device range is harmless.
            PagingError::AlreadyMapped => {}
            _ => panic!("iomap failed: {:?}", e),
        },
    }

    let ptr: *mut u8 = vaddr.into();
    unsafe { NonNull::new_unchecked(ptr) }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::hal_al::mmu::{AccessSetting, CacheSetting};
    use core::ffi::CStr;

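    /// Build a `BootRegion` covering `[start, end)` for use in tests.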
    fn create_test_region(
        start: usize,
        end: usize,
        name: &'static str,
        kind: BootMemoryKind,
    ) -> BootRegion {
        BootRegion {
            range: PhysCRange {
                start: PhysAddr::new(start),
                end: PhysAddr::new(end),
            },
            name: name.as_ptr(),
            access: AccessSetting::Read | AccessSetting::Write,
            cache: CacheSetting::Normal,
            kind,
        }
    }

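    /// Iterate over a fixed slice of regions, standing in for
    /// `platform::boot_regions` in tests.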
    fn mock_boot_regions(regions: &[BootRegion]) -> impl Iterator<Item = BootRegion> + '_ {
        regions.iter().copied()
    }

    #[test]
    fn test_find_main_memory_simple_case() {
        let regions = [
            create_test_region(0x40000000, 0x60000000, "ram", BootMemoryKind::Ram),
        ];

        let mut ram_regions = heapless::Vec::<_, 32>::new();
        let mut non_ram_regions = heapless::Vec::<_, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            } else {
                non_ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        // Mirror the carve-out logic from `find_main_memory`.
        for ram in &ram_regions {
            let mut current_ranges = heapless::Vec::<PhysCRange, 32>::new();
            current_ranges.push(ram.range).unwrap();

            for non_ram in &non_ram_regions {
                let mut new_ranges = heapless::Vec::<PhysCRange, 32>::new();

                for current_range in &current_ranges {
                    let overlap_start = current_range.start.raw().max(non_ram.range.start.raw());
                    let overlap_end = current_range.end.raw().min(non_ram.range.end.raw());

                    if overlap_start < overlap_end {
                        if current_range.start.raw() < overlap_start {
                            new_ranges
                                .push(PhysCRange {
                                    start: current_range.start,
                                    end: PhysAddr::new(overlap_start),
                                })
                                .unwrap();
                        }
                        if overlap_end < current_range.end.raw() {
                            new_ranges
                                .push(PhysCRange {
                                    start: PhysAddr::new(overlap_end),
                                    end: current_range.end,
                                })
                                .unwrap();
                        }
                    } else {
                        new_ranges.push(*current_range).unwrap();
                    }
                }
                current_ranges = new_ranges;
            }

            for range in current_ranges {
                available_regions.push(range).unwrap();
            }
        }

        // With no non-RAM regions, the single RAM region survives intact.
        assert_eq!(available_regions.len(), 1);
        assert_eq!(available_regions[0].start.raw(), 0x40000000);
        assert_eq!(available_regions[0].end.raw(), 0x60000000);

        const MIN_SIZE: usize = 16 * 1024 * 1024;
        let size = available_regions[0].end.raw() - available_regions[0].start.raw();
        assert!(size >= MIN_SIZE);
    }

    #[test]
    fn test_find_main_memory_with_overlap() {
        let regions = [
            create_test_region(0x40000000, 0x60000000, "ram", BootMemoryKind::Ram),
            create_test_region(0x45000000, 0x46000000, "reserved", BootMemoryKind::Reserved),
        ];

        let mut ram_regions = heapless::Vec::<_, 32>::new();
        let mut non_ram_regions = heapless::Vec::<_, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            } else {
                non_ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            let mut current_ranges = heapless::Vec::<PhysCRange, 32>::new();
            current_ranges.push(ram.range).unwrap();

            for non_ram in &non_ram_regions {
                let mut new_ranges = heapless::Vec::<PhysCRange, 32>::new();

                for current_range in &current_ranges {
                    let overlap_start = current_range.start.raw().max(non_ram.range.start.raw());
                    let overlap_end = current_range.end.raw().min(non_ram.range.end.raw());

                    if overlap_start < overlap_end {
                        if current_range.start.raw() < overlap_start {
                            new_ranges
                                .push(PhysCRange {
                                    start: current_range.start,
                                    end: PhysAddr::new(overlap_start),
                                })
                                .unwrap();
                        }
                        if overlap_end < current_range.end.raw() {
                            new_ranges
                                .push(PhysCRange {
                                    start: PhysAddr::new(overlap_end),
                                    end: current_range.end,
                                })
                                .unwrap();
                        }
                    } else {
                        new_ranges.push(*current_range).unwrap();
                    }
                }
                current_ranges = new_ranges;
            }

            for range in current_ranges {
                available_regions.push(range).unwrap();
            }
        }

        // The reserved region splits the RAM into two fragments.
        assert_eq!(available_regions.len(), 2);

        let region1 = available_regions
            .iter()
            .find(|r| r.start.raw() == 0x40000000)
            .unwrap();
        assert_eq!(region1.end.raw(), 0x45000000);

        let region2 = available_regions
            .iter()
            .find(|r| r.start.raw() == 0x46000000)
            .unwrap();
        assert_eq!(region2.end.raw(), 0x60000000);

        const MIN_SIZE: usize = 16 * 1024 * 1024;
        for region in &available_regions {
            let size = region.end.raw() - region.start.raw();
            assert!(size >= MIN_SIZE);
        }
    }

    #[test]
    fn test_find_main_memory_multiple_overlaps() {
        let regions = [
            create_test_region(0x40000000, 0x80000000, "ram", BootMemoryKind::Ram),
            create_test_region(
                0x45000000,
                0x46000000,
                "reserved1",
                BootMemoryKind::Reserved,
            ),
            create_test_region(
                0x50000000,
                0x52000000,
                "reserved2",
                BootMemoryKind::Reserved,
            ),
            create_test_region(0x70000000, 0x71000000, "kimage", BootMemoryKind::KImage),
        ];

        let mut ram_regions = heapless::Vec::<_, 32>::new();
        let mut non_ram_regions = heapless::Vec::<_, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            } else {
                non_ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            let mut current_ranges = heapless::Vec::<PhysCRange, 32>::new();
            current_ranges.push(ram.range).unwrap();

            for non_ram in &non_ram_regions {
                let mut new_ranges = heapless::Vec::<PhysCRange, 32>::new();

                for current_range in &current_ranges {
                    let overlap_start = current_range.start.raw().max(non_ram.range.start.raw());
                    let overlap_end = current_range.end.raw().min(non_ram.range.end.raw());

                    if overlap_start < overlap_end {
                        if current_range.start.raw() < overlap_start {
                            new_ranges
                                .push(PhysCRange {
                                    start: current_range.start,
                                    end: PhysAddr::new(overlap_start),
                                })
                                .unwrap();
                        }
                        if overlap_end < current_range.end.raw() {
                            new_ranges
                                .push(PhysCRange {
                                    start: PhysAddr::new(overlap_end),
                                    end: current_range.end,
                                })
                                .unwrap();
                        }
                    } else {
                        new_ranges.push(*current_range).unwrap();
                    }
                }
                current_ranges = new_ranges;
            }

            for range in current_ranges {
                available_regions.push(range).unwrap();
            }
        }

        // Three carve-outs split the RAM into four fragments.
        assert_eq!(available_regions.len(), 4);

        let expected_regions = [
            (0x40000000, 0x45000000),
            (0x46000000, 0x50000000),
            (0x52000000, 0x70000000),
            (0x71000000, 0x80000000),
        ];

        for (start, end) in expected_regions {
            let region = available_regions
                .iter()
                .find(|r| r.start.raw() == start)
                .unwrap();
            assert_eq!(region.end.raw(), end);
        }
    }

    #[test]
    fn test_find_main_memory_select_lowest_address() {
        let regions = [
            create_test_region(0x80000000, 0x90000000, "ram1", BootMemoryKind::Ram),
            create_test_region(0x40000000, 0x50000000, "ram2", BootMemoryKind::Ram),
        ];

        let mut ram_regions = heapless::Vec::<_, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            available_regions.push(ram.range).unwrap();
        }

        const MIN_SIZE: usize = 16 * 1024 * 1024;
        let mut best_region: Option<PhysCRange> = None;

        for region in &available_regions {
            let size = region.end.raw() - region.start.raw();
            if size >= MIN_SIZE {
                match best_region {
                    None => best_region = Some(*region),
                    Some(current_best) => {
                        if region.start.raw() < current_best.start.raw() {
                            best_region = Some(*region);
                        }
                    }
                }
            }
        }

        // Both regions qualify; the lower-addressed one wins.
        assert!(best_region.is_some());
        let selected = best_region.unwrap();
        assert_eq!(selected.start.raw(), 0x40000000);
        assert_eq!(selected.end.raw(), 0x50000000);
    }

    #[test]
    fn test_find_main_memory_no_suitable_region() {
        let regions = [
            create_test_region(0x40000000, 0x40800000, "ram1", BootMemoryKind::Ram),
            create_test_region(0x50000000, 0x50400000, "ram2", BootMemoryKind::Ram),
        ];

        let mut ram_regions = heapless::Vec::<_, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            available_regions.push(ram.range).unwrap();
        }

        const MIN_SIZE: usize = 16 * 1024 * 1024;
        let mut best_region: Option<PhysCRange> = None;

        for region in &available_regions {
            let size = region.end.raw() - region.start.raw();
            if size >= MIN_SIZE {
                match best_region {
                    None => best_region = Some(*region),
                    Some(current_best) => {
                        if region.start.raw() < current_best.start.raw() {
                            best_region = Some(*region);
                        }
                    }
                }
            }
        }

        // The 8 MiB and 4 MiB regions are both below the 16 MiB minimum.
        assert!(best_region.is_none());
    }

    #[test]
    fn test_find_main_memory_edge_case_exact_overlap() {
        let regions = [
            create_test_region(0x40000000, 0x50000000, "ram", BootMemoryKind::Ram),
            create_test_region(0x40000000, 0x50000000, "reserved", BootMemoryKind::Reserved),
        ];

        let mut ram_regions = heapless::Vec::<_, 32>::new();
        let mut non_ram_regions = heapless::Vec::<_, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            } else {
                non_ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            let mut current_ranges = heapless::Vec::<PhysCRange, 32>::new();
            current_ranges.push(ram.range).unwrap();

            for non_ram in &non_ram_regions {
                let mut new_ranges = heapless::Vec::<PhysCRange, 32>::new();

                for current_range in &current_ranges {
                    let overlap_start = current_range.start.raw().max(non_ram.range.start.raw());
                    let overlap_end = current_range.end.raw().min(non_ram.range.end.raw());

                    if overlap_start < overlap_end {
                        if current_range.start.raw() < overlap_start {
                            new_ranges
                                .push(PhysCRange {
                                    start: current_range.start,
                                    end: PhysAddr::new(overlap_start),
                                })
                                .unwrap();
                        }
                        if overlap_end < current_range.end.raw() {
                            new_ranges
                                .push(PhysCRange {
                                    start: PhysAddr::new(overlap_end),
                                    end: current_range.end,
                                })
                                .unwrap();
                        }
                    } else {
                        new_ranges.push(*current_range).unwrap();
                    }
                }
                current_ranges = new_ranges;
            }

            for range in current_ranges {
                available_regions.push(range).unwrap();
            }
        }

        // The reserved region covers the RAM exactly, so nothing survives.
        assert_eq!(available_regions.len(), 0);
    }
}