#![allow(unused)]

use core::{
    alloc::GlobalAlloc,
    ops::Range,
    ptr::{NonNull, null_mut, slice_from_raw_parts_mut},
    sync::atomic::{AtomicUsize, Ordering},
};

use buddy_system_allocator::Heap;
use log::debug;
use page_table_generic::PagingError;
use spin::{Mutex, Once};

use crate::{
    globals::global_val,
    hal_al::mmu::MapConfig,
    irq::NoIrqGuard,
    mem::{
        mmu::{AccessSetting, BootMemoryKind, BootRegion, CacheSetting, LINER_OFFSET},
        once::OnceStatic,
    },
    platform::{self, kstack_size},
    println,
};

pub use crate::platform::page_size;

mod addr;
mod cache;
pub mod mmu;
pub mod once;
pub mod region;
pub use addr::*;

#[cfg(target_os = "none")]
#[global_allocator]
static ALLOCATOR: KAllocator = KAllocator {
    heap32: Mutex::new(Heap::empty()),
    heap64: Mutex::new(Heap::empty()),
};

static mut TMP_PAGE_ALLOC_ADDR: usize = 0;

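/// Kernel heap allocator backed by two buddy heaps: `heap32` holds memory whose
/// physical addresses fit below 4 GiB (usable with 32-bit DMA masks), `heap64`
/// holds everything else.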
pub struct KAllocator {
    heap32: Mutex<Heap<32>>,
    heap64: Mutex<Heap<64>>,
}

impl KAllocator {
    pub fn reset(&self, memory: &mut [u8]) {
        let range = memory.as_mut_ptr_range();
        let start = range.start as usize;
        let end = range.end as usize;
        let len = memory.len();

        {
            let mut heap32 = self.heap32.lock();
            *heap32 = Heap::empty();
        }
        {
            let mut heap64 = self.heap64.lock();
            *heap64 = Heap::empty();
        }

        if Self::range_within_u32(start, end) {
            let mut heap32 = self.heap32.lock();
            unsafe { heap32.init(start, len) };
        } else {
            let mut heap64 = self.heap64.lock();
            unsafe { heap64.init(start, len) };
        }
    }

    pub fn add_to_heap(&self, memory: &mut [u8]) {
        let range = memory.as_mut_ptr_range();
        let start = range.start as usize;
        let end = range.end as usize;

        if Self::range_within_u32(start, end) {
            let mut heap32 = self.heap32.lock();
            unsafe { heap32.add_to_heap(start, end) };
        } else {
            let mut heap64 = self.heap64.lock();
            unsafe { heap64.add_to_heap(start, end) };
        }
    }

    pub(crate) fn lock_heap32(&self) -> spin::MutexGuard<'_, Heap<32>> {
        self.heap32.lock()
    }

    pub(crate) fn lock_heap64(&self) -> spin::MutexGuard<'_, Heap<64>> {
        self.heap64.lock()
    }

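    /// Allocates memory whose physical address satisfies `dma_mask`: masks at or
    /// below `u32::MAX` are served from the 32-bit heap only, wider masks prefer
    /// the 64-bit heap and fall back to the 32-bit one.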
    pub(crate) unsafe fn alloc_with_mask(
        &self,
        layout: core::alloc::Layout,
        dma_mask: u64,
    ) -> *mut u8 {
        let guard = NoIrqGuard::new();
        let result = if dma_mask <= u32::MAX as u64 {
            Self::try_alloc(&self.heap32, layout)
        } else {
            Self::try_alloc(&self.heap64, layout).or_else(|| Self::try_alloc(&self.heap32, layout))
        };
        drop(guard);

        result.map_or(null_mut(), |ptr| ptr.as_ptr())
    }

    #[inline]
    fn try_alloc<const BITS: usize>(
        heap: &Mutex<Heap<BITS>>,
        layout: core::alloc::Layout,
    ) -> Option<NonNull<u8>> {
        let mut guard = heap.lock();
        guard.alloc(layout).ok()
    }

    #[inline]
    fn range_within_u32(start: usize, end: usize) -> bool {
        if start >= end {
            return false;
        }

        let last = end - 1;

        let ps = PhysAddr::from(VirtAddr::from(start));
        let pe = PhysAddr::from(VirtAddr::from(last));

        let limit = PhysAddr::from(u32::MAX as usize);
        ps <= limit && pe <= limit
    }

    #[inline]
    fn ptr_in_32bit(ptr: *mut u8) -> bool {
        let phys = PhysAddr::from(VirtAddr::from(ptr as usize));
        phys <= PhysAddr::from(u32::MAX as usize)
    }
}

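// Global allocation prefers the 64-bit heap and falls back to the 32-bit heap;
// deallocation routes the pointer back to the heap that owns its physical address.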
unsafe impl GlobalAlloc for KAllocator {
    unsafe fn alloc(&self, layout: core::alloc::Layout) -> *mut u8 {
        let guard = NoIrqGuard::new();
        let result =
            Self::try_alloc(&self.heap64, layout).or_else(|| Self::try_alloc(&self.heap32, layout));
        drop(guard);

        result.map_or(null_mut(), |ptr| ptr.as_ptr())
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: core::alloc::Layout) {
        let guard = NoIrqGuard::new();
        let nn = unsafe { NonNull::new_unchecked(ptr) };

        if Self::ptr_in_32bit(ptr) {
            self.heap32.lock().dealloc(nn, layout);
        } else {
            self.heap64.lock().dealloc(nn, layout);
        }
        drop(guard);
    }
}

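/// Brings up the kernel heap: sets up the temporary page tables, seeds the
/// allocator with the main memory region (capped at `TMP_PAGE_ALLOC_ADDR` when
/// the boot stage used it), runs the full MMU initialization, then adds the
/// remaining RAM regions and initializes the cache layer.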
pub(crate) fn init() {
    let range = global_val().main_memory.clone();
    mmu::init_with_tmp_table();

    let mut start = VirtAddr::from(range.start.raw() + LINER_OFFSET);
    let mut end = VirtAddr::from(range.end.raw() + LINER_OFFSET);

    unsafe {
        if TMP_PAGE_ALLOC_ADDR != 0 {
            end = VirtAddr::from(TMP_PAGE_ALLOC_ADDR + LINER_OFFSET);
        }
    }

    println!("heap add memory [{}, {})", start, end);
    #[cfg(target_os = "none")]
    ALLOCATOR.add_to_heap(unsafe { &mut *slice_from_raw_parts_mut(start.into(), end - start) });

    println!("heap initialized");

    mmu::init();

    add_all_ram();

    cache::init();
}

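/// Adds every boot-time RAM region to the kernel heap, skipping the region that
/// contains the main memory already added in `init`.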
fn add_all_ram() {
    let main = global_val().main_memory.clone();

    for region in platform::boot_regions() {
        if !matches!(region.kind, BootMemoryKind::Ram) {
            continue;
        }

        if region.range.to_range().contains(&main.start) {
            continue;
        }

        let start = VirtAddr::from(region.range.start.raw() + LINER_OFFSET);
        let end = VirtAddr::from(region.range.end.raw() + LINER_OFFSET);
        let len = end - start;

        println!("Heap add memory [{}, {})", start, end);
        #[cfg(target_os = "none")]
        ALLOCATOR.add_to_heap(unsafe { &mut *slice_from_raw_parts_mut(start.into(), len) });
    }
}

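/// Picks the main memory region for the kernel: subtracts every non-RAM boot
/// region from the RAM regions, then selects the lowest-addressed remaining
/// range that is at least `MIN_SIZE` (16 MiB).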
pub(crate) fn find_main_memory() -> Option<BootRegion> {
    let mut ram_regions = heapless::Vec::<_, 32>::new();
    let mut non_ram_regions = heapless::Vec::<_, 32>::new();

    for r in platform::boot_regions() {
        if matches!(r.kind, BootMemoryKind::Ram) {
            ram_regions.push(r).ok()?;
        } else {
            non_ram_regions.push(r).ok()?;
        }
    }

    let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

    for ram in &ram_regions {
        let mut current_ranges = heapless::Vec::<PhysCRange, 32>::new();
        current_ranges.push(ram.range).ok()?;

        for non_ram in &non_ram_regions {
            let mut new_ranges = heapless::Vec::<PhysCRange, 32>::new();

            for current_range in &current_ranges {
                let overlap_start = current_range.start.raw().max(non_ram.range.start.raw());
                let overlap_end = current_range.end.raw().min(non_ram.range.end.raw());

                if overlap_start < overlap_end {
                    if current_range.start.raw() < overlap_start {
                        new_ranges
                            .push(PhysCRange {
                                start: current_range.start,
                                end: PhysAddr::new(overlap_start),
                            })
                            .ok()?;
                    }
                    if overlap_end < current_range.end.raw() {
                        new_ranges
                            .push(PhysCRange {
                                start: PhysAddr::new(overlap_end),
                                end: current_range.end,
                            })
                            .ok()?;
                    }
                } else {
                    new_ranges.push(*current_range).ok()?;
                }
            }
            current_ranges = new_ranges;
        }

        for range in current_ranges {
            available_regions.push(range).ok()?;
        }
    }

    const MIN_SIZE: usize = 16 * 1024 * 1024;

    let mut best_region: Option<PhysCRange> = None;

    for region in &available_regions {
        let size = region.end.raw() - region.start.raw();
        if size >= MIN_SIZE {
            match best_region {
                None => best_region = Some(*region),
                Some(current_best) => {
                    if region.start.raw() < current_best.start.raw() {
                        best_region = Some(*region);
                    }
                }
            }
        }
    }

    if let Some(main_range) = best_region {
        println!(
            "Selected main memory: {:?}, size: {}MB",
            main_range,
            (main_range.end.raw() - main_range.start.raw()) / (1024 * 1024)
        );

        let first_ram = ram_regions.first()?;

        Some(BootRegion {
            range: main_range,
            name: c"main memory".as_ptr() as _,
            access: first_ram.access,
            cache: first_ram.cache,
            kind: BootMemoryKind::Ram,
        })
    } else {
        println!("no suitable main memory region found (>= 16MB)");
        None
    }
}

pub fn map(config: &MapConfig) -> Result<(), PagingError> {
    mmu::map(config)
}

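/// Maps `size` bytes of MMIO at `paddr` into the linear-offset window as
/// read/write device memory and returns the corresponding virtual pointer.
/// An `AlreadyMapped` result is treated as success.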
pub fn iomap(paddr: PhysAddr, size: usize) -> NonNull<u8> {
    let vaddr = VirtAddr::from(paddr.raw() + LINER_OFFSET);
    match mmu::map(&MapConfig {
        name: "iomap",
        va_start: vaddr,
        pa_start: paddr,
        size,
        access: AccessSetting::ReadWrite,
        cache: CacheSetting::Device,
    }) {
        Ok(_) => {}
        Err(e) => match e {
            PagingError::AlreadyMapped => {}
            _ => panic!("iomap failed: {:?}", e),
        },
    }

    let ptr: *mut u8 = vaddr.into();
    unsafe { NonNull::new_unchecked(ptr) }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::hal_al::mmu::{AccessSetting, CacheSetting};
    use core::ffi::CStr;

    fn create_test_region(
        start: usize,
        end: usize,
        name: &'static str,
        kind: BootMemoryKind,
    ) -> BootRegion {
        BootRegion {
            range: PhysCRange {
                start: PhysAddr::new(start),
                end: PhysAddr::new(end),
            },
            name: name.as_ptr(),
            access: AccessSetting::Read | AccessSetting::Write,
            cache: CacheSetting::Normal,
            kind,
        }
    }

    fn mock_boot_regions(regions: &[BootRegion]) -> impl Iterator<Item = BootRegion> + '_ {
        regions.iter().copied()
    }

    #[test]
    fn test_find_main_memory_simple_case() {
        let regions = [
            create_test_region(0x40000000, 0x60000000, "ram", BootMemoryKind::Ram),
        ];

        let mut ram_regions = heapless::Vec::<_, 32>::new();
        let mut non_ram_regions = heapless::Vec::<_, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            } else {
                non_ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            let mut current_ranges = heapless::Vec::<PhysCRange, 32>::new();
            current_ranges.push(ram.range).unwrap();

            for non_ram in &non_ram_regions {
                let mut new_ranges = heapless::Vec::<PhysCRange, 32>::new();

                for current_range in &current_ranges {
                    let overlap_start = current_range.start.raw().max(non_ram.range.start.raw());
                    let overlap_end = current_range.end.raw().min(non_ram.range.end.raw());

                    if overlap_start < overlap_end {
                        if current_range.start.raw() < overlap_start {
                            new_ranges
                                .push(PhysCRange {
                                    start: current_range.start,
                                    end: PhysAddr::new(overlap_start),
                                })
                                .unwrap();
                        }
                        if overlap_end < current_range.end.raw() {
                            new_ranges
                                .push(PhysCRange {
                                    start: PhysAddr::new(overlap_end),
                                    end: current_range.end,
                                })
                                .unwrap();
                        }
                    } else {
                        new_ranges.push(*current_range).unwrap();
                    }
                }
                current_ranges = new_ranges;
            }

            for range in current_ranges {
                available_regions.push(range).unwrap();
            }
        }

        assert_eq!(available_regions.len(), 1);
        assert_eq!(available_regions[0].start.raw(), 0x40000000);
        assert_eq!(available_regions[0].end.raw(), 0x60000000);

        const MIN_SIZE: usize = 16 * 1024 * 1024;

        let size = available_regions[0].end.raw() - available_regions[0].start.raw();
        assert!(size >= MIN_SIZE);
    }

    #[test]
    fn test_find_main_memory_with_overlap() {
        let regions = [
            create_test_region(0x40000000, 0x60000000, "ram", BootMemoryKind::Ram),
            create_test_region(0x45000000, 0x46000000, "reserved", BootMemoryKind::Reserved),
        ];

        let mut ram_regions = heapless::Vec::<_, 32>::new();
        let mut non_ram_regions = heapless::Vec::<_, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            } else {
                non_ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            let mut current_ranges = heapless::Vec::<PhysCRange, 32>::new();
            current_ranges.push(ram.range).unwrap();

            for non_ram in &non_ram_regions {
                let mut new_ranges = heapless::Vec::<PhysCRange, 32>::new();

                for current_range in &current_ranges {
                    let overlap_start = current_range.start.raw().max(non_ram.range.start.raw());
                    let overlap_end = current_range.end.raw().min(non_ram.range.end.raw());

                    if overlap_start < overlap_end {
                        if current_range.start.raw() < overlap_start {
                            new_ranges
                                .push(PhysCRange {
                                    start: current_range.start,
                                    end: PhysAddr::new(overlap_start),
                                })
                                .unwrap();
                        }
                        if overlap_end < current_range.end.raw() {
                            new_ranges
                                .push(PhysCRange {
                                    start: PhysAddr::new(overlap_end),
                                    end: current_range.end,
                                })
                                .unwrap();
                        }
                    } else {
                        new_ranges.push(*current_range).unwrap();
                    }
                }
                current_ranges = new_ranges;
            }

            for range in current_ranges {
                available_regions.push(range).unwrap();
            }
        }

        assert_eq!(available_regions.len(), 2);

        let region1 = available_regions
            .iter()
            .find(|r| r.start.raw() == 0x40000000)
            .unwrap();
        assert_eq!(region1.end.raw(), 0x45000000);

        let region2 = available_regions
            .iter()
            .find(|r| r.start.raw() == 0x46000000)
            .unwrap();
        assert_eq!(region2.end.raw(), 0x60000000);

        const MIN_SIZE: usize = 16 * 1024 * 1024;

        for region in &available_regions {
            let size = region.end.raw() - region.start.raw();
            assert!(size >= MIN_SIZE);
        }
    }

    #[test]
    fn test_find_main_memory_multiple_overlaps() {
        let regions = [
            create_test_region(0x40000000, 0x80000000, "ram", BootMemoryKind::Ram),
            create_test_region(
                0x45000000,
                0x46000000,
                "reserved1",
                BootMemoryKind::Reserved,
            ),
            create_test_region(
                0x50000000,
                0x52000000,
                "reserved2",
                BootMemoryKind::Reserved,
            ),
            create_test_region(0x70000000, 0x71000000, "kimage", BootMemoryKind::KImage),
        ];

        let mut ram_regions = heapless::Vec::<_, 32>::new();
        let mut non_ram_regions = heapless::Vec::<_, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            } else {
                non_ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            let mut current_ranges = heapless::Vec::<PhysCRange, 32>::new();
            current_ranges.push(ram.range).unwrap();

            for non_ram in &non_ram_regions {
                let mut new_ranges = heapless::Vec::<PhysCRange, 32>::new();

                for current_range in &current_ranges {
                    let overlap_start = current_range.start.raw().max(non_ram.range.start.raw());
                    let overlap_end = current_range.end.raw().min(non_ram.range.end.raw());

                    if overlap_start < overlap_end {
                        if current_range.start.raw() < overlap_start {
                            new_ranges
                                .push(PhysCRange {
                                    start: current_range.start,
                                    end: PhysAddr::new(overlap_start),
                                })
                                .unwrap();
                        }
                        if overlap_end < current_range.end.raw() {
                            new_ranges
                                .push(PhysCRange {
                                    start: PhysAddr::new(overlap_end),
                                    end: current_range.end,
                                })
                                .unwrap();
                        }
                    } else {
                        new_ranges.push(*current_range).unwrap();
                    }
                }
                current_ranges = new_ranges;
            }

            for range in current_ranges {
                available_regions.push(range).unwrap();
            }
        }

        assert_eq!(available_regions.len(), 4);

        let expected_regions = [
            (0x40000000, 0x45000000),
            (0x46000000, 0x50000000),
            (0x52000000, 0x70000000),
            (0x71000000, 0x80000000),
        ];

        for (start, end) in expected_regions {
            let region = available_regions
                .iter()
                .find(|r| r.start.raw() == start)
                .unwrap();
            assert_eq!(region.end.raw(), end);
        }
    }

    #[test]
    fn test_find_main_memory_select_lowest_address() {
        let regions = [
            create_test_region(0x80000000, 0x90000000, "ram1", BootMemoryKind::Ram),
            create_test_region(0x40000000, 0x50000000, "ram2", BootMemoryKind::Ram),
        ];

        let mut ram_regions = heapless::Vec::<_, 32>::new();
        let mut non_ram_regions = heapless::Vec::<BootRegion, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            available_regions.push(ram.range).unwrap();
        }

        const MIN_SIZE: usize = 16 * 1024 * 1024;

        let mut best_region: Option<PhysCRange> = None;

        for region in &available_regions {
            let size = region.end.raw() - region.start.raw();
            if size >= MIN_SIZE {
                match best_region {
                    None => best_region = Some(*region),
                    Some(current_best) => {
                        if region.start.raw() < current_best.start.raw() {
                            best_region = Some(*region);
                        }
                    }
                }
            }
        }

        assert!(best_region.is_some());

        let selected = best_region.unwrap();
        assert_eq!(selected.start.raw(), 0x40000000);
        assert_eq!(selected.end.raw(), 0x50000000);
    }

    #[test]
    fn test_find_main_memory_no_suitable_region() {
        let regions = [
            create_test_region(0x40000000, 0x40800000, "ram1", BootMemoryKind::Ram),
            create_test_region(0x50000000, 0x50400000, "ram2", BootMemoryKind::Ram),
        ];

        let mut ram_regions = heapless::Vec::<_, 32>::new();
        let non_ram_regions = heapless::Vec::<BootRegion, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            available_regions.push(ram.range).unwrap();
        }

        const MIN_SIZE: usize = 16 * 1024 * 1024;

        let mut best_region: Option<PhysCRange> = None;

        for region in &available_regions {
            let size = region.end.raw() - region.start.raw();
            if size >= MIN_SIZE {
                match best_region {
                    None => best_region = Some(*region),
                    Some(current_best) => {
                        if region.start.raw() < current_best.start.raw() {
                            best_region = Some(*region);
                        }
                    }
                }
            }
        }

        assert!(best_region.is_none());
    }

    #[test]
    fn test_find_main_memory_edge_case_exact_overlap() {
        let regions = [
            create_test_region(0x40000000, 0x50000000, "ram", BootMemoryKind::Ram),
            create_test_region(0x40000000, 0x50000000, "reserved", BootMemoryKind::Reserved),
        ];

        let mut ram_regions = heapless::Vec::<_, 32>::new();
        let mut non_ram_regions = heapless::Vec::<_, 32>::new();

        for r in mock_boot_regions(&regions) {
            if matches!(r.kind, BootMemoryKind::Ram) {
                ram_regions.push(r).unwrap();
            } else {
                non_ram_regions.push(r).unwrap();
            }
        }

        let mut available_regions = heapless::Vec::<PhysCRange, 64>::new();

        for ram in &ram_regions {
            let mut current_ranges = heapless::Vec::<PhysCRange, 32>::new();
            current_ranges.push(ram.range).unwrap();

            for non_ram in &non_ram_regions {
                let mut new_ranges = heapless::Vec::<PhysCRange, 32>::new();

                for current_range in &current_ranges {
                    let overlap_start = current_range.start.raw().max(non_ram.range.start.raw());
                    let overlap_end = current_range.end.raw().min(non_ram.range.end.raw());

                    if overlap_start < overlap_end {
                        if current_range.start.raw() < overlap_start {
                            new_ranges
                                .push(PhysCRange {
                                    start: current_range.start,
                                    end: PhysAddr::new(overlap_start),
                                })
                                .unwrap();
                        }
                        if overlap_end < current_range.end.raw() {
                            new_ranges
                                .push(PhysCRange {
                                    start: PhysAddr::new(overlap_end),
                                    end: current_range.end,
                                })
                                .unwrap();
                        }
                    } else {
                        new_ranges.push(*current_range).unwrap();
                    }
                }
                current_ranges = new_ranges;
            }

            for range in current_ranges {
                available_regions.push(range).unwrap();
            }
        }

        assert_eq!(available_regions.len(), 0);
    }
}