use core::{
marker::PhantomData,
mem::{ManuallyDrop, MaybeUninit},
sync::atomic::{AtomicPtr, Ordering},
};
#[instability::unstable]
pub use crate::soc::cpu_control::is_running;
use crate::{
peripherals::CPU_CTRL,
soc::cpu_control::{internal_park_core, start_core1_init},
system::Cpu,
};
/// A stack allocation for the second (APP) CPU core.
///
/// `SIZE` must be a multiple of 16; this is enforced at compile time by
/// [`Stack::new`] and matches the 16-byte alignment requested below.
#[repr(C, align(16))]
#[instability::unstable]
pub struct Stack<const SIZE: usize> {
    // Backing storage. Deliberately left uninitialized — the APP core writes
    // to it as its stack grows, so pre-initializing would be wasted work.
    pub mem: MaybeUninit<[u8; SIZE]>,
}
impl<const SIZE: usize> Default for Stack<SIZE> {
fn default() -> Self {
Self::new()
}
}
#[allow(clippy::len_without_is_empty)]
impl<const SIZE: usize> Stack<SIZE> {
    /// Constructs a stack of `SIZE` bytes without initializing its memory.
    ///
    /// `SIZE` is validated at compile time to be a multiple of 16, matching
    /// the `#[repr(C, align(16))]` alignment of the type.
    #[instability::unstable]
    pub const fn new() -> Stack<SIZE> {
        const {
            ::core::assert!(SIZE % 16 == 0);
        }
        Stack {
            mem: MaybeUninit::uninit(),
        }
    }

    /// Total size of the stack, in bytes.
    #[instability::unstable]
    pub const fn len(&self) -> usize {
        SIZE
    }

    /// Pointer to the lowest address of the stack memory.
    #[instability::unstable]
    pub fn bottom(&mut self) -> *mut u32 {
        self.mem.as_mut_ptr().cast::<u32>()
    }

    /// Pointer one past the highest `u32` word of the stack memory.
    #[instability::unstable]
    pub fn top(&mut self) -> *mut u32 {
        // SIZE is a multiple of 16, so it divides evenly into u32 words.
        let words = SIZE / 4;
        // SAFETY: `words * 4 == SIZE`, so the offset pointer is one past the
        // end of the same allocation that `bottom()` points into.
        unsafe { self.bottom().add(words) }
    }
}
// Type-erased pointer to the APP core's entry closure (a `ManuallyDrop<F>`
// written into the stack by `setup_second_core_stack`, moved out by
// `CpuControl::start_core1_run` via an Acquire load).
pub(crate) static START_CORE1_FUNCTION: AtomicPtr<()> = AtomicPtr::new(core::ptr::null_mut());
// Initial stack pointer for the APP core (one past the stack's highest word).
pub(crate) static APP_CORE_STACK_TOP: AtomicPtr<u32> = AtomicPtr::new(core::ptr::null_mut());
// Address of the APP core's stack-guard word; null when stack guard
// monitoring is disabled.
pub(crate) static APP_CORE_STACK_GUARD: AtomicPtr<u32> = AtomicPtr::new(core::ptr::null_mut());
/// Guard returned when starting the APP core.
///
/// Dropping this guard parks the APP core again.
#[must_use = "Dropping this guard will park the APP core"]
#[instability::unstable]
pub struct AppCoreGuard<'a> {
    // Ties the guard to the `'a` lifetime of the entry closure it represents.
    phantom: PhantomData<&'a ()>,
}
impl Drop for AppCoreGuard<'_> {
    // Parks the APP core when the guard goes out of scope; the second
    // argument selects "park" (true) vs "unpark" (false), mirroring
    // `CpuControl::park_core` / `unpark_core`.
    fn drop(&mut self) {
        unsafe { internal_park_core(Cpu::AppCpu, true) };
    }
}
/// Errors returned by [`CpuControl`] operations.
#[derive(Debug, Clone, Copy, PartialEq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[instability::unstable]
pub enum Error {
    /// The APP core is already running, so it cannot be started again.
    CoreAlreadyRunning,
}
/// Control over the application (second) CPU core: parking, unparking and
/// starting it with a user-provided entry closure.
#[procmacros::doc_replace]
#[instability::unstable]
pub struct CpuControl<'d> {
    // Held purely for exclusive ownership of the CPU-control peripheral;
    // never read.
    _cpu_control: CPU_CTRL<'d>,
}
impl<'d> CpuControl<'d> {
    /// Creates a new `CpuControl`, taking ownership of the CPU-control
    /// peripheral.
    #[instability::unstable]
    pub fn new(cpu_control: CPU_CTRL<'d>) -> CpuControl<'d> {
        CpuControl {
            _cpu_control: cpu_control,
        }
    }

    /// Parks the given core.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `core` is not the core currently executing
    /// this code — parking the current core would stop execution here.
    #[instability::unstable]
    pub unsafe fn park_core(&mut self, core: Cpu) {
        unsafe { internal_park_core(core, true) };
    }

    /// Unparks the given core.
    #[instability::unstable]
    pub fn unpark_core(&mut self, core: Cpu) {
        // Unparking (as opposed to parking) cannot stall the current core.
        unsafe { internal_park_core(core, false) };
    }

    /// Trampoline run on the APP core: fetches the entry closure published by
    /// `setup_second_core_stack`, runs it, then parks the core forever.
    ///
    /// # Safety
    ///
    /// `START_CORE1_FUNCTION` must hold a pointer to a live `ManuallyDrop<F>`
    /// before this runs, and `F` must be the same type that was stored.
    #[inline(never)]
    pub(crate) unsafe fn start_core1_run<F>() -> !
    where
        F: FnOnce(),
    {
        // Acquire pairs with the Release store in `setup_second_core_stack`,
        // making the closure bytes written on the other core visible here.
        let entry = START_CORE1_FUNCTION.load(Ordering::Acquire);
        debug_assert!(!entry.is_null());
        unsafe {
            // Move the closure out of its stack slot; `ManuallyDrop::take`
            // ensures its destructor runs exactly once (here, via `entry()`).
            let entry = ManuallyDrop::take(&mut *entry.cast::<ManuallyDrop<F>>());
            entry();
            // The entry closure returned — nothing left to run, so park.
            // Looping guards against spurious wakeups re-entering dead code.
            loop {
                internal_park_core(Cpu::current(), true);
            }
        }
    }

    /// Starts the APP (second) core running `entry`.
    ///
    /// The stack-guard offset is taken from the
    /// `ESP_HAL_CONFIG_STACK_GUARD_OFFSET` config option when stack guard
    /// monitoring is enabled, and disabled otherwise.
    ///
    /// Returns [`Error::CoreAlreadyRunning`] if the core is already running.
    /// Dropping the returned guard parks the core again.
    #[instability::unstable]
    pub fn start_app_core<'a, const SIZE: usize, F>(
        &mut self,
        stack: &'static mut Stack<SIZE>,
        entry: F,
    ) -> Result<AppCoreGuard<'a>, Error>
    where
        F: FnOnce(),
        F: Send + 'a,
    {
        cfg_if::cfg_if! {
            // `all(...)` around a single predicate was redundant
            // (clippy::non_minimal_cfg); the bare predicate is equivalent.
            if #[cfg(stack_guard_monitoring)] {
                let stack_guard_offset = Some(esp_config::esp_config_int!(
                    usize,
                    "ESP_HAL_CONFIG_STACK_GUARD_OFFSET"
                ));
            } else {
                let stack_guard_offset = None;
            }
        };
        self.start_app_core_with_stack_guard_offset(stack, stack_guard_offset, entry)
    }

    /// Starts the APP (second) core running `entry` with an explicit
    /// stack-guard word offset (in bytes from the stack bottom), or `None` to
    /// disable the stack guard.
    ///
    /// Returns [`Error::CoreAlreadyRunning`] if the core is already running.
    /// Dropping the returned guard parks the core again.
    #[instability::unstable]
    pub fn start_app_core_with_stack_guard_offset<'a, const SIZE: usize, F>(
        &mut self,
        stack: &'static mut Stack<SIZE>,
        stack_guard_offset: Option<usize>,
        entry: F,
    ) -> Result<AppCoreGuard<'a>, Error>
    where
        F: FnOnce(),
        F: Send + 'a,
    {
        // The running-core check is skipped while a debugger is attached —
        // presumably the debugger can leave the APP core in a "running" state
        // even though it was never started by us.
        if !crate::debugger::debugger_connected() && is_running(Cpu::AppCpu) {
            return Err(Error::CoreAlreadyRunning);
        }
        // Publish the entry closure, stack top and guard before releasing
        // the core.
        setup_second_core_stack(stack, stack_guard_offset, entry);
        crate::soc::cpu_control::start_core1(start_core1_init::<F> as *const u32);
        self.unpark_core(Cpu::AppCpu);
        Ok(AppCoreGuard {
            phantom: PhantomData,
        })
    }
}
/// Prepares the APP core's stack: optionally reserves a stack-guard word,
/// copies the entry closure into the bottom of the stack, and publishes the
/// closure pointer, stack top and guard address through the shared atomics
/// for the APP core to pick up in `CpuControl::start_core1_run`.
fn setup_second_core_stack<'a, F, const SIZE: usize>(
    stack: &'static mut Stack<SIZE>,
    stack_guard_offset: Option<usize>,
    entry: F,
) where
    F: FnOnce(),
    F: Send + 'a,
{
    // Wrap in ManuallyDrop so this function does not drop the closure; the
    // APP core takes ownership of it via `ManuallyDrop::take`.
    let entry = ManuallyDrop::new(entry);
    unsafe {
        let stack_bottom = stack.bottom().cast::<u8>();

        // Compute the guard word's address (if any) and the first usable
        // stack byte, which sits directly above the 4-byte guard.
        let (stack_guard, stack_bottom_above_guard) =
            if let Some(stack_guard_offset) = stack_guard_offset {
                // The guard word must be 4-byte aligned and lie fully inside
                // the stack (`len() - 4` leaves room for the word itself).
                assert!(stack_guard_offset.is_multiple_of(4));
                assert!(stack_guard_offset <= stack.len() - 4);
                (
                    stack_bottom.byte_add(stack_guard_offset),
                    stack_bottom.byte_add(stack_guard_offset).byte_add(4),
                )
            } else {
                (core::ptr::null_mut(), stack_bottom)
            };

        // Place the closure at the lowest suitably-aligned address above the
        // guard. The APP core's stack pointer starts at `top()` (see
        // `APP_CORE_STACK_TOP`), so this slot at the bottom survives until
        // `start_core1_run` moves the closure out — assuming the APP core's
        // stack never grows all the way down to it.
        let align_offset = stack_bottom_above_guard.align_offset(core::mem::align_of::<F>());
        let entry_dst = stack_bottom_above_guard
            .add(align_offset)
            .cast::<ManuallyDrop<F>>();
        entry_dst.write(entry);

        let entry_fn = entry_dst.cast::<()>();
        // Release pairs with the Acquire load in `start_core1_run`, making
        // the closure bytes written above visible to the APP core.
        START_CORE1_FUNCTION.store(entry_fn, Ordering::Release);
        APP_CORE_STACK_TOP.store(stack.top(), Ordering::Release);
        APP_CORE_STACK_GUARD.store(stack_guard.cast(), Ordering::Release);
    }
}