#![allow(unused)]

use core::{
    alloc::GlobalAlloc,
    ops::Range,
    ptr::{NonNull, null_mut, slice_from_raw_parts_mut},
    sync::atomic::{AtomicUsize, Ordering},
};

use buddy_system_allocator::Heap;
use log::debug;
use page_table_generic::PagingError;
use spin::{Mutex, Once};

use crate::{
    globals::global_val,
    hal_al::mmu::MapConfig,
    irq::NoIrqGuard,
    mem::{
        mmu::{AccessSetting, BootMemoryKind, BootRegion, CacheSetting, LINER_OFFSET},
        once::OnceStatic,
    },
    platform::{self, kstack_size},
    println,
};

pub use crate::platform::page_size;

mod addr;
mod cache;
pub mod mmu;
pub mod once;
pub mod region;
pub use addr::*;

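/// The kernel's global heap allocator. Registered only on bare-metal builds;
/// host-side builds (e.g. `cargo test`) keep the system allocator.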
#[cfg(target_os = "none")]
#[global_allocator]
static ALLOCATOR: KAllocator = KAllocator {
    inner: Mutex::new(Heap::empty()),
};

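/// Physical boundary recorded by the temporary boot page allocator. When
/// non-zero, [`init`] caps the initial heap below this address so pages
/// already handed out before the heap existed are not added to it.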
static mut TMP_PAGE_ALLOC_ADDR: usize = 0;

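/// Kernel heap allocator: a buddy-system heap behind a spinlock.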
pub struct KAllocator {
    pub(crate) inner: Mutex<Heap<32>>,
}

impl KAllocator {
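    /// Discard the current heap and rebuild it with `memory` as its only
    /// backing region.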
    pub fn reset(&self, memory: &mut [u8]) {
        let mut g = self.inner.lock();

        let mut h = Heap::empty();

        unsafe { h.init(memory.as_mut_ptr() as usize, memory.len()) };

        *g = h;
    }

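    /// Donate an additional region of memory to the existing heap.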
    pub fn add_to_heap(&self, memory: &mut [u8]) {
        let mut g = self.inner.lock();
        let range = memory.as_mut_ptr_range();

        unsafe { g.add_to_heap(range.start as usize, range.end as usize) };
    }
}

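// Interrupts are masked while the heap lock is held (see `NoIrqGuard`) so the
// allocator can be used from IRQ context without deadlocking on its own lock.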
unsafe impl GlobalAlloc for KAllocator {
    unsafe fn alloc(&self, layout: core::alloc::Layout) -> *mut u8 {
        let _g = NoIrqGuard::new();
        match self.inner.lock().alloc(layout) {
            Ok(p) => p.as_ptr(),
            Err(_) => null_mut(),
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: core::alloc::Layout) {
        let _g = NoIrqGuard::new();
        self.inner
            .lock()
            .dealloc(unsafe { NonNull::new_unchecked(ptr) }, layout);
    }
}

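/// Initialize the kernel heap.
///
/// Installs the temporary page table, seeds the heap with main memory (minus
/// anything already taken by the temporary page allocator), then runs
/// `mmu::init` and donates the remaining RAM regions.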
pub(crate) fn init() {
    let range = global_val().main_memory.clone();
    mmu::init_with_tmp_table();

    let start = VirtAddr::from(range.start.raw() + LINER_OFFSET);
    let mut end = VirtAddr::from(range.end.raw() + LINER_OFFSET);

    unsafe {
        if TMP_PAGE_ALLOC_ADDR != 0 {
            end = VirtAddr::from(TMP_PAGE_ALLOC_ADDR + LINER_OFFSET);
        }
    }

    println!("heap add memory [{}, {})", start, end);
    #[cfg(target_os = "none")]
    ALLOCATOR.add_to_heap(unsafe { &mut *slice_from_raw_parts_mut(start.into(), end - start) });

    println!("heap initialized");

    mmu::init();

    add_all_ram();
}

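/// Donate every other RAM boot region to the heap. The region containing main
/// memory is skipped because [`init`] has already added it.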
fn add_all_ram() {
    let main = global_val().main_memory.clone();

    for region in platform::boot_regions() {
        if !matches!(region.kind, BootMemoryKind::Ram) {
            continue;
        }

        if region.range.to_range().contains(&main.start) {
            continue;
        }

        let start = VirtAddr::from(region.range.start.raw() + LINER_OFFSET);
        let end = VirtAddr::from(region.range.end.raw() + LINER_OFFSET);
        let len = end - start;

        println!("heap add memory [{}, {})", start, end);
        #[cfg(target_os = "none")]
        ALLOCATOR.add_to_heap(unsafe { &mut *slice_from_raw_parts_mut(start.into(), len) });
    }
}

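/// Choose the boot-time main memory region.
///
/// Every non-RAM boot region (reserved ranges, the kernel image, ...) is
/// carved out of the RAM regions; among the fragments of at least 16 MiB that
/// survive, the one at the lowest physical address wins.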
pub(crate) fn find_main_memory() -> Option<BootRegion> {
    let mut ram_regions = heapless::Vec::<_, 32>::new();
    let mut non_ram_regions = heapless::Vec::<_, 32>::new();

    for r in platform::boot_regions() {
        if matches!(r.kind, BootMemoryKind::Ram) {
            ram_regions.push(r).ok()?;
        } else {
            non_ram_regions.push(r).ok()?;
        }
    }

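    // Carve every non-RAM region out of each RAM region, keeping the
    // fragments that remain.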
    let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

    for ram in &ram_regions {
        let mut current_ranges = heapless::Vec::<PhysCRange, 32>::new();
        current_ranges.push(ram.range).ok()?;

        for non_ram in &non_ram_regions {
            let mut new_ranges = heapless::Vec::<PhysCRange, 32>::new();

            for current_range in &current_ranges {
                let overlap_start = current_range.start.raw().max(non_ram.range.start.raw());
                let overlap_end = current_range.end.raw().min(non_ram.range.end.raw());

                if overlap_start < overlap_end {
                    // Keep whatever sticks out on either side of the overlap.
                    if current_range.start.raw() < overlap_start {
                        new_ranges
                            .push(PhysCRange {
                                start: current_range.start,
                                end: PhysAddr::new(overlap_start),
                            })
                            .ok()?;
                    }
                    if overlap_end < current_range.end.raw() {
                        new_ranges
                            .push(PhysCRange {
                                start: PhysAddr::new(overlap_end),
                                end: current_range.end,
                            })
                            .ok()?;
                    }
                } else {
                    // No overlap: the range survives unchanged.
                    new_ranges.push(*current_range).ok()?;
                }
            }
            current_ranges = new_ranges;
        }

        for range in current_ranges {
            available_regions.push(range).ok()?;
        }
    }

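    // Of the surviving fragments, prefer the lowest-addressed one that is
    // large enough.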
    const MIN_SIZE: usize = 16 * 1024 * 1024;

    let mut best_region: Option<PhysCRange> = None;

    for region in &available_regions {
        let size = region.end.raw() - region.start.raw();
        if size >= MIN_SIZE {
            match best_region {
                None => best_region = Some(*region),
                Some(current_best) => {
                    if region.start.raw() < current_best.start.raw() {
                        best_region = Some(*region);
                    }
                }
            }
        }
    }

    if let Some(main_range) = best_region {
        println!(
            "Selected main memory: {:?}, size: {}MB",
            main_range,
            (main_range.end.raw() - main_range.start.raw()) / (1024 * 1024)
        );

        let first_ram = ram_regions.first()?;

        Some(BootRegion {
            range: main_range,
            name: c"main memory".as_ptr() as _,
            access: first_ram.access,
            cache: first_ram.cache,
            kind: BootMemoryKind::Ram,
        })
    } else {
        println!("no suitable main memory region found (>= 16MB)");
        None
    }
}

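/// Map the range described by `config`; thin wrapper over [`mmu::map`].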
pub fn map(config: &MapConfig) -> Result<(), PagingError> {
    mmu::map(config)
}

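/// Map `size` bytes of device memory at `paddr` into the linear-offset window
/// and return the corresponding virtual pointer. A region that is already
/// mapped is reused; any other mapping error is fatal.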
pub fn iomap(paddr: PhysAddr, size: usize) -> NonNull<u8> {
    let vaddr = VirtAddr::from(paddr.raw() + LINER_OFFSET);
    if let Err(e) = mmu::map(&MapConfig {
        name: "iomap",
        va_start: vaddr,
        pa_start: paddr,
        size,
        access: AccessSetting::ReadWrite,
        cache: CacheSetting::Device,
    }) {
        if !matches!(e, PagingError::AlreadyMapped) {
            panic!("iomap failed: {:?}", e);
        }
    }

    let ptr: *mut u8 = vaddr.into();
    unsafe { NonNull::new_unchecked(ptr) }
}

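// `platform::boot_regions` cannot be mocked, so the tests below replay the
// splitting and selection logic of `find_main_memory` on synthetic regions.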
#[cfg(test)]
mod tests {
    use super::*;
    use crate::hal_al::mmu::{AccessSetting, CacheSetting};

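    /// Build a test `BootRegion` spanning `[start, end)`.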
    fn create_test_region(
        start: usize,
        end: usize,
        name: &'static str,
        kind: BootMemoryKind,
    ) -> BootRegion {
        BootRegion {
            range: PhysCRange {
                start: PhysAddr::new(start),
                end: PhysAddr::new(end),
            },
            name: name.as_ptr(),
            access: AccessSetting::Read | AccessSetting::Write,
            cache: CacheSetting::Normal,
            kind,
        }
    }

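    /// Stand-in for `platform::boot_regions`, driven by a slice.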
    fn mock_boot_regions(regions: &[BootRegion]) -> impl Iterator<Item = BootRegion> + '_ {
        regions.iter().copied()
    }

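    // A single RAM region with nothing to carve out must survive intact.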
    #[test]
    fn test_find_main_memory_simple_case() {
        let regions = [
            create_test_region(0x40000000, 0x60000000, "ram", BootMemoryKind::Ram),
        ];

        let mut ram_regions = heapless::Vec::<_, 32>::new();
        let mut non_ram_regions = heapless::Vec::<_, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            } else {
                non_ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            let mut current_ranges = heapless::Vec::<PhysCRange, 32>::new();
            current_ranges.push(ram.range).unwrap();

            for non_ram in &non_ram_regions {
                let mut new_ranges = heapless::Vec::<PhysCRange, 32>::new();

                for current_range in &current_ranges {
                    let overlap_start = current_range.start.raw().max(non_ram.range.start.raw());
                    let overlap_end = current_range.end.raw().min(non_ram.range.end.raw());

                    if overlap_start < overlap_end {
                        if current_range.start.raw() < overlap_start {
                            new_ranges
                                .push(PhysCRange {
                                    start: current_range.start,
                                    end: PhysAddr::new(overlap_start),
                                })
                                .unwrap();
                        }
                        if overlap_end < current_range.end.raw() {
                            new_ranges
                                .push(PhysCRange {
                                    start: PhysAddr::new(overlap_end),
                                    end: current_range.end,
                                })
                                .unwrap();
                        }
                    } else {
                        new_ranges.push(*current_range).unwrap();
                    }
                }
                current_ranges = new_ranges;
            }

            for range in current_ranges {
                available_regions.push(range).unwrap();
            }
        }

        assert_eq!(available_regions.len(), 1);
        assert_eq!(available_regions[0].start.raw(), 0x40000000);
        assert_eq!(available_regions[0].end.raw(), 0x60000000);

        const MIN_SIZE: usize = 16 * 1024 * 1024;
        let size = available_regions[0].end.raw() - available_regions[0].start.raw();
        assert!(size >= MIN_SIZE);
    }

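    // A reserved region in the middle of RAM must split it into two fragments.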
    #[test]
    fn test_find_main_memory_with_overlap() {
        let regions = [
            create_test_region(0x40000000, 0x60000000, "ram", BootMemoryKind::Ram),
            create_test_region(0x45000000, 0x46000000, "reserved", BootMemoryKind::Reserved),
        ];

        let mut ram_regions = heapless::Vec::<_, 32>::new();
        let mut non_ram_regions = heapless::Vec::<_, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            } else {
                non_ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            let mut current_ranges = heapless::Vec::<PhysCRange, 32>::new();
            current_ranges.push(ram.range).unwrap();

            for non_ram in &non_ram_regions {
                let mut new_ranges = heapless::Vec::<PhysCRange, 32>::new();

                for current_range in &current_ranges {
                    let overlap_start = current_range.start.raw().max(non_ram.range.start.raw());
                    let overlap_end = current_range.end.raw().min(non_ram.range.end.raw());

                    if overlap_start < overlap_end {
                        if current_range.start.raw() < overlap_start {
                            new_ranges
                                .push(PhysCRange {
                                    start: current_range.start,
                                    end: PhysAddr::new(overlap_start),
                                })
                                .unwrap();
                        }
                        if overlap_end < current_range.end.raw() {
                            new_ranges
                                .push(PhysCRange {
                                    start: PhysAddr::new(overlap_end),
                                    end: current_range.end,
                                })
                                .unwrap();
                        }
                    } else {
                        new_ranges.push(*current_range).unwrap();
                    }
                }
                current_ranges = new_ranges;
            }

            for range in current_ranges {
                available_regions.push(range).unwrap();
            }
        }

        assert_eq!(available_regions.len(), 2);

        let region1 = available_regions
            .iter()
            .find(|r| r.start.raw() == 0x40000000)
            .unwrap();
        assert_eq!(region1.end.raw(), 0x45000000);

        let region2 = available_regions
            .iter()
            .find(|r| r.start.raw() == 0x46000000)
            .unwrap();
        assert_eq!(region2.end.raw(), 0x60000000);

        const MIN_SIZE: usize = 16 * 1024 * 1024;
        for region in &available_regions {
            let size = region.end.raw() - region.start.raw();
            assert!(size >= MIN_SIZE);
        }
    }

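    // Multiple carve-outs must leave one fragment per gap, four in total.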
    #[test]
    fn test_find_main_memory_multiple_overlaps() {
        let regions = [
            create_test_region(0x40000000, 0x80000000, "ram", BootMemoryKind::Ram),
            create_test_region(
                0x45000000,
                0x46000000,
                "reserved1",
                BootMemoryKind::Reserved,
            ),
            create_test_region(
                0x50000000,
                0x52000000,
                "reserved2",
                BootMemoryKind::Reserved,
            ),
            create_test_region(0x70000000, 0x71000000, "kimage", BootMemoryKind::KImage),
        ];

        let mut ram_regions = heapless::Vec::<_, 32>::new();
        let mut non_ram_regions = heapless::Vec::<_, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            } else {
                non_ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            let mut current_ranges = heapless::Vec::<PhysCRange, 32>::new();
            current_ranges.push(ram.range).unwrap();

            for non_ram in &non_ram_regions {
                let mut new_ranges = heapless::Vec::<PhysCRange, 32>::new();

                for current_range in &current_ranges {
                    let overlap_start = current_range.start.raw().max(non_ram.range.start.raw());
                    let overlap_end = current_range.end.raw().min(non_ram.range.end.raw());

                    if overlap_start < overlap_end {
                        if current_range.start.raw() < overlap_start {
                            new_ranges
                                .push(PhysCRange {
                                    start: current_range.start,
                                    end: PhysAddr::new(overlap_start),
                                })
                                .unwrap();
                        }
                        if overlap_end < current_range.end.raw() {
                            new_ranges
                                .push(PhysCRange {
                                    start: PhysAddr::new(overlap_end),
                                    end: current_range.end,
                                })
                                .unwrap();
                        }
                    } else {
                        new_ranges.push(*current_range).unwrap();
                    }
                }
                current_ranges = new_ranges;
            }

            for range in current_ranges {
                available_regions.push(range).unwrap();
            }
        }

        assert_eq!(available_regions.len(), 4);

        let expected_regions = [
            (0x40000000, 0x45000000),
            (0x46000000, 0x50000000),
            (0x52000000, 0x70000000),
            (0x71000000, 0x80000000),
        ];

        for (start, end) in expected_regions {
            let region = available_regions
                .iter()
                .find(|r| r.start.raw() == start)
                .unwrap();
            assert_eq!(region.end.raw(), end);
        }
    }

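    // Selection must prefer the lowest-addressed qualifying region.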
    #[test]
    fn test_find_main_memory_select_lowest_address() {
        let regions = [
            create_test_region(0x80000000, 0x90000000, "ram1", BootMemoryKind::Ram),
            create_test_region(0x40000000, 0x50000000, "ram2", BootMemoryKind::Ram),
        ];

        let mut ram_regions = heapless::Vec::<BootRegion, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            available_regions.push(ram.range).unwrap();
        }

        const MIN_SIZE: usize = 16 * 1024 * 1024;
        let mut best_region: Option<PhysCRange> = None;

        for region in &available_regions {
            let size = region.end.raw() - region.start.raw();
            if size >= MIN_SIZE {
                match best_region {
                    None => best_region = Some(*region),
                    Some(current_best) => {
                        if region.start.raw() < current_best.start.raw() {
                            best_region = Some(*region);
                        }
                    }
                }
            }
        }

        assert!(best_region.is_some());
        let selected = best_region.unwrap();
        assert_eq!(selected.start.raw(), 0x40000000);
        assert_eq!(selected.end.raw(), 0x50000000);
    }

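    // Regions smaller than 16 MiB must be rejected, leaving no candidate.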
    #[test]
    fn test_find_main_memory_no_suitable_region() {
        let regions = [
            create_test_region(0x40000000, 0x40800000, "ram1", BootMemoryKind::Ram),
            create_test_region(0x50000000, 0x50400000, "ram2", BootMemoryKind::Ram),
        ];

        let mut ram_regions = heapless::Vec::<BootRegion, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            available_regions.push(ram.range).unwrap();
        }

        const MIN_SIZE: usize = 16 * 1024 * 1024;
        let mut best_region: Option<PhysCRange> = None;

        for region in &available_regions {
            let size = region.end.raw() - region.start.raw();
            if size >= MIN_SIZE {
                match best_region {
                    None => best_region = Some(*region),
                    Some(current_best) => {
                        if region.start.raw() < current_best.start.raw() {
                            best_region = Some(*region);
                        }
                    }
                }
            }
        }

        assert!(best_region.is_none());
    }

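    // A reserved region covering all of RAM must leave nothing available.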
    #[test]
    fn test_find_main_memory_edge_case_exact_overlap() {
        let regions = [
            create_test_region(0x40000000, 0x50000000, "ram", BootMemoryKind::Ram),
            create_test_region(0x40000000, 0x50000000, "reserved", BootMemoryKind::Reserved),
        ];

        let mut ram_regions = heapless::Vec::<_, 32>::new();
        let mut non_ram_regions = heapless::Vec::<_, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            } else {
                non_ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            let mut current_ranges = heapless::Vec::<PhysCRange, 32>::new();
            current_ranges.push(ram.range).unwrap();

            for non_ram in &non_ram_regions {
                let mut new_ranges = heapless::Vec::<PhysCRange, 32>::new();

                for current_range in &current_ranges {
                    let overlap_start = current_range.start.raw().max(non_ram.range.start.raw());
                    let overlap_end = current_range.end.raw().min(non_ram.range.end.raw());

                    if overlap_start < overlap_end {
                        if current_range.start.raw() < overlap_start {
                            new_ranges
                                .push(PhysCRange {
                                    start: current_range.start,
                                    end: PhysAddr::new(overlap_start),
                                })
                                .unwrap();
                        }
                        if overlap_end < current_range.end.raw() {
                            new_ranges
                                .push(PhysCRange {
                                    start: PhysAddr::new(overlap_end),
                                    end: current_range.end,
                                })
                                .unwrap();
                        }
                    } else {
                        new_ranges.push(*current_range).unwrap();
                    }
                }
                current_ranges = new_ranges;
            }

            for range in current_ranges {
                available_regions.push(range).unwrap();
            }
        }

        assert_eq!(available_regions.len(), 0);
    }
}