use aarch64_cpu::{asm::barrier, registers::*};
use memory_addr::PhysAddr;
/// Drops the CPU from EL2 or EL3 down to EL1 (if currently above EL1).
///
/// If the core booted at EL1 this is a no-op (apart from selecting `SP_ELx`
/// and zeroing `SP_EL0`). Otherwise it programs the higher-EL control
/// registers so EL1 runs AArch64 with interrupts masked, then performs an
/// `eret` that "returns" to this function's own caller (`LR`) at EL1, with
/// the current stack pointer carried over via `SP_EL1`.
///
/// # Safety
/// Must be called exactly once during early boot, before the MMU and
/// exception vectors are set up, with a valid `LR`/`SP` for the EL1 context.
pub unsafe fn switch_to_el1() {
    // Use the per-EL stack pointer (SP_ELx) rather than SP_EL0.
    SPSel.write(SPSel::SP::ELx);
    SP_EL0.set(0);
    let current_el = CurrentEL.read(CurrentEL::EL);
    if current_el >= 2 {
        if current_el == 3 {
            // Configure EL3: lower ELs are non-secure, HVC is allowed,
            // and the next lower EL (EL2) executes in AArch64 state.
            SCR_EL3.write(
                SCR_EL3::NS::NonSecure + SCR_EL3::HCE::HvcEnabled + SCR_EL3::RW::NextELIsAarch64,
            );
            // On eret from EL3: land in EL1h (SP_EL1) with D/A/I/F masked.
            SPSR_EL3.write(
                SPSR_EL3::M::EL1h
                    + SPSR_EL3::D::Masked
                    + SPSR_EL3::A::Masked
                    + SPSR_EL3::I::Masked
                    + SPSR_EL3::F::Masked,
            );
            // Return address for the EL3 eret: our caller.
            // NOTE(review): only ELR_EL3 is set here; the EL3 path appears to
            // rely on the subsequent EL2 setup plus a single eret — confirm
            // the intended EL3 -> EL1 transition path on real EL3 firmware.
            ELR_EL3.set(LR.get());
        }
        // Allow EL1 direct access to the physical counter and timer
        // (EL1PCEN/EL1PCTEN), and zero the virtual counter offset.
        CNTHCTL_EL2.modify(CNTHCTL_EL2::EL1PCEN::SET + CNTHCTL_EL2::EL1PCTEN::SET);
        CNTVOFF_EL2.set(0);
        // EL1 executes in AArch64 state; no virtualization traps enabled.
        HCR_EL2.write(HCR_EL2::RW::EL1IsAarch64);
        // On eret from EL2: land in EL1h with D/A/I/F masked.
        SPSR_EL2.write(
            SPSR_EL2::M::EL1h
                + SPSR_EL2::D::Masked
                + SPSR_EL2::A::Masked
                + SPSR_EL2::I::Masked
                + SPSR_EL2::F::Masked,
        );
        // Carry the current stack and return address into the EL1 context,
        // so execution resumes transparently in the caller at EL1.
        SP_EL1.set(SP.get());
        ELR_EL2.set(LR.get());
        aarch64_cpu::asm::eret();
    }
}
/// Enables the EL1 MMU with `root_paddr` as the root page table for both
/// the low (TTBR0) and high (TTBR1) halves of the address space.
///
/// Configures memory attributes (MAIR), translation control (TCR: 4 KiB
/// granule, 48-bit VA via T0SZ/T1SZ = 16, 48-bit IPA, inner-shareable
/// write-back cacheable walks), installs the table, flushes the TLB, and
/// finally turns on the MMU plus data/instruction caches in SCTLR_EL1.
///
/// # Safety
/// `root_paddr` must point to a valid, identity-reachable page table; the
/// caller must be executing at EL1 with a mapping that stays valid across
/// the MMU enable.
pub unsafe fn init_mmu(root_paddr: PhysAddr) {
    use page_table_entry::aarch64::MemAttr;
    // Memory attribute indirection: indices used by the page-table entries.
    MAIR_EL1.set(MemAttr::MAIR_VALUE);
    // TTBR0 (low half): walks enabled, 4K granule, inner shareable,
    // write-back read/write-allocate cacheable, 48-bit VA (64 - 16).
    let tcr_flags0 = TCR_EL1::EPD0::EnableTTBR0Walks
        + TCR_EL1::TG0::KiB_4
        + TCR_EL1::SH0::Inner
        + TCR_EL1::ORGN0::WriteBack_ReadAlloc_WriteAlloc_Cacheable
        + TCR_EL1::IRGN0::WriteBack_ReadAlloc_WriteAlloc_Cacheable
        + TCR_EL1::T0SZ.val(16);
    // TTBR1 (high half): same configuration as the low half.
    let tcr_flags1 = TCR_EL1::EPD1::EnableTTBR1Walks
        + TCR_EL1::TG1::KiB_4
        + TCR_EL1::SH1::Inner
        + TCR_EL1::ORGN1::WriteBack_ReadAlloc_WriteAlloc_Cacheable
        + TCR_EL1::IRGN1::WriteBack_ReadAlloc_WriteAlloc_Cacheable
        + TCR_EL1::T1SZ.val(16);
    // 48-bit intermediate physical address size.
    TCR_EL1.write(TCR_EL1::IPS::Bits_48 + tcr_flags0 + tcr_flags1);
    // Ensure TCR/MAIR writes are visible before installing the tables.
    barrier::isb(barrier::SY);
    // Same root table serves both halves of the address space initially.
    let root_paddr = root_paddr.as_usize() as u64;
    TTBR0_EL1.set(root_paddr);
    TTBR1_EL1.set(root_paddr);
    // Discard any stale translations before enabling translation.
    crate::asm::flush_tlb(None);
    // Turn on the MMU (M), data cache (C), and instruction cache (I).
    SCTLR_EL1.modify(SCTLR_EL1::M::Enable + SCTLR_EL1::C::Cacheable + SCTLR_EL1::I::Cacheable);
    // Synchronize: subsequent instructions fetch through the enabled MMU.
    barrier::isb(barrier::SY);
}
/// Installs the exception vector table and clears the user page table root.
///
/// Resolves the address of the linker-provided `exception_vector_base`
/// symbol, writes it to the exception vector base register via the
/// architecture helper, and points the user page-table root at physical
/// address 0 (i.e. no user address space mapped yet).
pub fn init_trap() {
    unsafe extern "C" {
        fn exception_vector_base();
    }
    // The symbol's address is the base of the vector table, not a callable fn.
    let vector_base = exception_vector_base as *const () as usize;
    // SAFETY: the symbol is provided by the linker script and the helpers
    // expect exactly these early-boot values.
    unsafe {
        crate::asm::write_exception_vector_base(vector_base);
        crate::asm::write_user_page_table(0.into());
    }
}