use fugit::HertzU32;
use strum::FromRepr;
use crate::{
clock::{clocks_ll::regi2c_write_mask, Clock, XtalClock},
peripherals::{LPWR, LP_AON, PCR, PMU, TIMG0},
};
// Analog-I2C ("regi2c") address of the PMU block and the host id used when
// poking its registers through `regi2c_write_mask`.
const I2C_PMU: u8 = 0x6d;
const I2C_PMU_HOSTID: u8 = 0;
// Per-field descriptors for `regi2c_write_mask`: the register number inside
// the PMU block, then the MSB/LSB of the bit-field within that register.
// All fields below are single-bit (MSB == LSB).
// NOTE(review): values mirror esp-idf's pmu regi2c definitions — confirm
// against the matching esp-idf header for this chip.
const I2C_PMU_EN_I2C_RTC_DREG: u8 = 8;
const I2C_PMU_EN_I2C_RTC_DREG_MSB: u8 = 0;
const I2C_PMU_EN_I2C_RTC_DREG_LSB: u8 = 0;
const I2C_PMU_EN_I2C_DIG_DREG: u8 = 8;
const I2C_PMU_EN_I2C_DIG_DREG_MSB: u8 = 1;
const I2C_PMU_EN_I2C_DIG_DREG_LSB: u8 = 1;
const I2C_PMU_EN_I2C_RTC_DREG_SLP: u8 = 8;
const I2C_PMU_EN_I2C_RTC_DREG_SLP_MSB: u8 = 2;
const I2C_PMU_EN_I2C_RTC_DREG_SLP_LSB: u8 = 2;
const I2C_PMU_EN_I2C_DIG_DREG_SLP: u8 = 8;
const I2C_PMU_EN_I2C_DIG_DREG_SLP_MSB: u8 = 3;
const I2C_PMU_EN_I2C_DIG_DREG_SLP_LSB: u8 = 3;
const I2C_PMU_OR_XPD_RTC_REG: u8 = 8;
const I2C_PMU_OR_XPD_RTC_REG_MSB: u8 = 4;
const I2C_PMU_OR_XPD_RTC_REG_LSB: u8 = 4;
const I2C_PMU_OR_XPD_DIG_REG: u8 = 8;
const I2C_PMU_OR_XPD_DIG_REG_MSB: u8 = 5;
const I2C_PMU_OR_XPD_DIG_REG_LSB: u8 = 5;
// OR_XPD_TRX lives in a different PMU register (15), bit 2.
const I2C_PMU_OR_XPD_TRX: u8 = 15;
const I2C_PMU_OR_XPD_TRX_MSB: u8 = 2;
const I2C_PMU_OR_XPD_TRX_LSB: u8 = 2;
// PMU power-down control registers, addressed as raw pointers because the
// PAC does not expose them.
const DR_REG_PMU_BASE: u32 = 0x600B0000;
const PMU_POWER_PD_TOP_CNTL_REG: u32 = DR_REG_PMU_BASE + 0xf4;
const PMU_POWER_PD_HPAON_CNTL_REG: u32 = DR_REG_PMU_BASE + 0xf8;
const PMU_POWER_PD_HPCPU_CNTL_REG: u32 = DR_REG_PMU_BASE + 0xfc;
const PMU_POWER_PD_HPPERI_RESERVE_REG: u32 = DR_REG_PMU_BASE + 0x100;
const PMU_POWER_PD_HPWIFI_CNTL_REG: u32 = DR_REG_PMU_BASE + 0x104;
const PMU_POWER_PD_LPPERI_CNTL_REG: u32 = DR_REG_PMU_BASE + 0x108;
/// One-time PMU / regulator initialisation.
///
/// Clears the analog-I2C override bits for the RTC/digital regulators,
/// powers up all PMU-controlled power domains, and programs regulator bias
/// levels and wake-up wait times.
///
/// NOTE(review): the statement order follows esp-idf's pmu init sequence —
/// do not reorder the register writes.
pub(crate) fn init() {
    unsafe {
        // Write 0 to each EN_I2C_* / OR_XPD_* override field, handing
        // regulator control back to the PMU hardware.
        regi2c_write_mask(
            I2C_PMU,
            I2C_PMU_HOSTID,
            I2C_PMU_EN_I2C_RTC_DREG,
            I2C_PMU_EN_I2C_RTC_DREG_MSB,
            I2C_PMU_EN_I2C_RTC_DREG_LSB,
            0,
        );
        regi2c_write_mask(
            I2C_PMU,
            I2C_PMU_HOSTID,
            I2C_PMU_EN_I2C_DIG_DREG,
            I2C_PMU_EN_I2C_DIG_DREG_MSB,
            I2C_PMU_EN_I2C_DIG_DREG_LSB,
            0,
        );
        regi2c_write_mask(
            I2C_PMU,
            I2C_PMU_HOSTID,
            I2C_PMU_EN_I2C_RTC_DREG_SLP,
            I2C_PMU_EN_I2C_RTC_DREG_SLP_MSB,
            I2C_PMU_EN_I2C_RTC_DREG_SLP_LSB,
            0,
        );
        regi2c_write_mask(
            I2C_PMU,
            I2C_PMU_HOSTID,
            I2C_PMU_EN_I2C_DIG_DREG_SLP,
            I2C_PMU_EN_I2C_DIG_DREG_SLP_MSB,
            I2C_PMU_EN_I2C_DIG_DREG_SLP_LSB,
            0,
        );
        regi2c_write_mask(
            I2C_PMU,
            I2C_PMU_HOSTID,
            I2C_PMU_OR_XPD_RTC_REG,
            I2C_PMU_OR_XPD_RTC_REG_MSB,
            I2C_PMU_OR_XPD_RTC_REG_LSB,
            0,
        );
        regi2c_write_mask(
            I2C_PMU,
            I2C_PMU_HOSTID,
            I2C_PMU_OR_XPD_DIG_REG,
            I2C_PMU_OR_XPD_DIG_REG_MSB,
            I2C_PMU_OR_XPD_DIG_REG_LSB,
            0,
        );
        regi2c_write_mask(
            I2C_PMU,
            I2C_PMU_HOSTID,
            I2C_PMU_OR_XPD_TRX,
            I2C_PMU_OR_XPD_TRX_MSB,
            I2C_PMU_OR_XPD_TRX_LSB,
            0,
        );
        // Write 0 (no power-down bits set) to every PMU power-domain
        // control register. Raw volatile writes: these registers are not
        // exposed by the PAC (see the PMU_POWER_PD_* constants above).
        (PMU_POWER_PD_TOP_CNTL_REG as *mut u32).write_volatile(0);
        (PMU_POWER_PD_HPAON_CNTL_REG as *mut u32).write_volatile(0);
        (PMU_POWER_PD_HPCPU_CNTL_REG as *mut u32).write_volatile(0);
        (PMU_POWER_PD_HPPERI_RESERVE_REG as *mut u32).write_volatile(0);
        (PMU_POWER_PD_HPWIFI_CNTL_REG as *mut u32).write_volatile(0);
        (PMU_POWER_PD_LPPERI_CNTL_REG as *mut u32).write_volatile(0);
        let pmu = &*PMU::ptr();
        // Regulator bias levels for the HP-active and HP-sleep states.
        // NOTE(review): 25 / 26 are magic bias codes taken from the vendor
        // init sequence — confirm against esp-idf's pmu parameter tables.
        pmu.hp_active_hp_regulator0()
            .modify(|_, w| w.hp_active_hp_regulator_dbias().bits(25));
        pmu.hp_sleep_lp_regulator0()
            .modify(|_, w| w.hp_sleep_lp_regulator_dbias().bits(26));
        // Wake-up wait targets: cycles to wait for the LP analog blocks
        // and the overall analog power-up after sleep.
        pmu.slp_wakeup_cntl5()
            .modify(|_, w| w.lp_ana_wait_target().bits(15));
        pmu.slp_wakeup_cntl7()
            .modify(|_, w| w.ana_wait_target().bits(1700));
    }
}
/// Select the boot-time clock sources and store the slow-clock calibration
/// value where the ROM/timekeeping code expects it (LP_AON store1).
pub(crate) fn configure_clock() {
    // This chip is only supported with a 32 MHz crystal.
    assert!(matches!(
        RtcClock::get_xtal_freq(),
        XtalClock::RtcXtalFreq32M
    ));

    RtcClock::set_fast_freq(RtcFastClock::RtcFastClockRcFast);

    // Re-select RC_SLOW and re-run calibration until a non-zero (i.e.
    // non-timed-out) measurement is obtained.
    let mut cal_val: u32 = 0;
    while cal_val == 0 {
        RtcClock::set_slow_freq(RtcSlowClock::RtcSlowClockRcSlow);
        cal_val = RtcClock::calibrate(RtcCalSel::RtcCalRtcMux, 1024);
    }

    // SAFETY: LP_AON store1 is a scratch register reserved for the
    // calibration value; writing it has no other side effect.
    unsafe {
        (*LP_AON::ptr()).store1().modify(|_, w| w.bits(cal_val));
    }
}
/// Reset reason codes reported by the SoC.
///
/// Discriminants match the raw hardware reset-cause values, so a value read
/// from the chip can be converted back via `strum::FromRepr`.
/// NOTE(review): the gaps (0x02, 0x04, 0x06, 0x0A, 0x0E) are codes this
/// chip does not report — confirm against the TRM.
#[derive(Debug, Clone, Copy, PartialEq, Eq, FromRepr)]
pub enum SocResetReason {
    /// Power-on reset
    ChipPowerOn = 0x01,
    /// Software reset of the whole core
    CoreSw = 0x03,
    /// Wake-up from deep sleep
    CoreDeepSleep = 0x05,
    /// Main watchdog timer 0 reset (core)
    CoreMwdt0 = 0x07,
    /// Main watchdog timer 1 reset (core)
    CoreMwdt1 = 0x08,
    /// RTC watchdog reset (core)
    CoreRtcWdt = 0x09,
    /// Main watchdog timer 0 reset (CPU only)
    Cpu0Mwdt0 = 0x0B,
    /// Software reset of the CPU
    Cpu0Sw = 0x0C,
    /// RTC watchdog reset (CPU only)
    Cpu0RtcWdt = 0x0D,
    /// Brown-out reset
    SysBrownOut = 0x0F,
    /// RTC watchdog reset (whole system)
    SysRtcWdt = 0x10,
    /// Main watchdog timer 1 reset (CPU only)
    Cpu0Mwdt1 = 0x11,
    /// Super watchdog reset
    SysSuperWdt = 0x12,
    /// Clock-glitch detector reset
    SysClkGlitch = 0x13,
    /// eFuse CRC error reset
    CoreEfuseCrc = 0x14,
    /// USB-UART triggered reset
    CoreUsbUart = 0x15,
    /// USB-JTAG triggered reset
    CoreUsbJtag = 0x16,
    /// Power-glitch detector reset
    CorePwrGlitch = 0x17,
}
/// Selectable sources for the RTC_FAST clock.
///
/// Discriminants match the `fast_clk_sel` mux values written in
/// `RtcClock::set_fast_freq`.
#[derive(Debug, Clone, Copy)]
pub(crate) enum RtcFastClock {
    /// Internal fast RC oscillator.
    RtcFastClockRcFast = 0,
    /// Main crystal divided by 2.
    #[allow(dead_code)]
    RtcFastClockXtalD2 = 1,
}
impl Clock for RtcFastClock {
    /// Nominal frequency of the selected RTC_FAST source.
    ///
    /// NOTE(review): RC_FAST is reported as 8 MHz here while calibration
    /// uses a ~17.5 MHz approximation (`SOC_CLK_RC_FAST_FREQ_APPROX`) —
    /// confirm which figure matches this chip.
    fn frequency(&self) -> HertzU32 {
        let hz = match self {
            RtcFastClock::RtcFastClockRcFast => 8_000_000,
            RtcFastClock::RtcFastClockXtalD2 => 16_000_000,
        };
        HertzU32::Hz(hz)
    }
}
extern "C" {
    /// Busy-wait delay provided by the ROM, in microseconds.
    fn ets_delay_us(us: u32);
}
/// Selectable sources for the RTC_SLOW clock.
///
/// Discriminants match the `slow_clk_sel` mux values used in
/// `RtcClock::set_slow_freq` / `get_slow_freq`.
#[allow(clippy::enum_variant_names)]
#[derive(Debug, Clone, Copy, PartialEq)]
pub(crate) enum RtcSlowClock {
    /// Internal slow RC oscillator (~150 kHz nominal).
    RtcSlowClockRcSlow = 0,
    /// External 32.768 kHz crystal.
    RtcSlowClock32kXtal = 1,
    /// Internal 32 kHz RC oscillator.
    RtcSlowClock32kRc = 2,
    /// External slow clock input (OSC_SLOW pin).
    RtcSlowOscSlow = 3,
}
impl Clock for RtcSlowClock {
    /// Nominal frequency of the selected RTC_SLOW source; every 32k-class
    /// source reports the same 32 768 Hz figure.
    fn frequency(&self) -> HertzU32 {
        match self {
            RtcSlowClock::RtcSlowClockRcSlow => HertzU32::Hz(150_000),
            RtcSlowClock::RtcSlowClock32kXtal
            | RtcSlowClock::RtcSlowClock32kRc
            | RtcSlowClock::RtcSlowOscSlow => HertzU32::Hz(32_768),
        }
    }
}
/// Clock source to calibrate against the XTAL clock.
///
/// `RtcCalRtcMux` (-1) means "whatever the RTC slow-clock mux currently
/// selects" and is resolved to a concrete source inside
/// `RtcClock::calibrate_internal` before use.
#[derive(Debug, Clone, Copy, PartialEq)]
pub(crate) enum RtcCalSel {
    RtcCalRtcMux = -1,
    RtcCalRcSlow = 0,
    RtcCal32kXtal = 1,
    RtcCal32kRc = 2,
    RtcCal32kOscSlow = 3,
    // Implicit discriminant: previous + 1 = 4.
    RtcCalRcFast,
}
/// Reference-clock selection for the TIMG0 RTC calibration unit; the
/// discriminant is written verbatim to the `rtc_cali_clk_sel` field.
///
/// Derives `Copy` (plus `Debug`/`PartialEq`/`Eq`) because this is a plain
/// C-like selector — the original `Clone`-only derive forced a needless
/// `.clone()` at the use site.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum RtcCaliClkSel {
    CaliClkRcSlow = 0,
    CaliClkRcFast = 1,
    CaliClk32k = 2,
}
/// Driver for the RTC slow/fast clock sources and the TIMG0-based clock
/// calibration unit.
pub struct RtcClock;
impl RtcClock {
    // Fixed-point position of calibration results: values are Q13.19
    // (19 fractional bits), microseconds per slow-clock cycle.
    const CAL_FRACT: u32 = 19;
/// Read the crystal frequency recorded in LP_AON store4.
///
/// The bootloader stores the frequency (in MHz) duplicated in the low and
/// high 16-bit halves; if the register fails that consistency check the
/// 32 MHz default is assumed.
fn get_xtal_freq() -> XtalClock {
    let raw = unsafe { &*LP_AON::PTR }.store4().read().bits();
    let low = raw & 0xffff;
    let high = (raw >> 16) & 0xffff;

    // Invalid / unprogrammed register: fall back to the default crystal.
    if raw == 0 || raw == u32::MAX || low != high {
        return XtalClock::RtcXtalFreq32M;
    }

    if low == 32 {
        XtalClock::RtcXtalFreq32M
    } else {
        XtalClock::RtcXtalFreqOther(low)
    }
}
/// Switch the RTC_FAST clock source and wait for the mux to settle.
fn set_fast_freq(fast_freq: RtcFastClock) {
    // Mux encoding for `fast_clk_sel`.
    let sel: u8 = match fast_freq {
        RtcFastClock::RtcFastClockRcFast => 0b00,
        RtcFastClock::RtcFastClockXtalD2 => 0b01,
    };
    unsafe {
        (*LPWR::PTR)
            .lp_clk_conf()
            .modify(|_, w| w.fast_clk_sel().bits(sel));
        // Short settle delay after switching, as in esp-idf.
        ets_delay_us(3);
    }
}
/// Switch the RTC_SLOW clock source and update the HP-domain clock gates
/// for the two digital 32k consumers.
///
/// Bug fix: the original `clk_to_hp` modify wrote `icg_hp_xtal32k` twice
/// with the same value and never touched `icg_hp_osc32k`, so the RC32K
/// ICG was left unconfigured. Per esp-idf's `rtc_clk_slow_src_set`, the
/// XTAL32K gate follows the 32k-XTAL selection and the OSC32K (RC32K)
/// gate follows the 32k-RC selection.
fn set_slow_freq(slow_freq: RtcSlowClock) {
    unsafe {
        let lp_clkrst = &*LPWR::PTR;
        lp_clkrst
            .lp_clk_conf()
            .modify(|_, w| w.slow_clk_sel().bits(slow_freq as u8));
        lp_clkrst.clk_to_hp().modify(|_, w| {
            w.icg_hp_xtal32k()
                .bit(matches!(slow_freq, RtcSlowClock::RtcSlowClock32kXtal))
                .icg_hp_osc32k()
                .bit(matches!(slow_freq, RtcSlowClock::RtcSlowClock32kRc))
        });
    }
}
/// Read back the currently selected RTC_SLOW clock source.
pub(crate) fn get_slow_freq() -> RtcSlowClock {
    let sel = unsafe { &*LPWR::ptr() }
        .lp_clk_conf()
        .read()
        .slow_clk_sel()
        .bits();
    match sel {
        0 => RtcSlowClock::RtcSlowClockRcSlow,
        1 => RtcSlowClock::RtcSlowClock32kXtal,
        2 => RtcSlowClock::RtcSlowClock32kRc,
        3 => RtcSlowClock::RtcSlowOscSlow,
        // The mux field is two bits wide, so no other value can occur.
        _ => unreachable!(),
    }
}
/// Calibrate `cal_clk` and return its period in microseconds as a Q13.19
/// fixed-point value (see `CAL_FRACT`). Returns 0 if the underlying
/// measurement timed out.
fn calibrate(cal_clk: RtcCalSel, slowclk_cycles: u32) -> u32 {
    let xtal = RtcClock::get_xtal_freq();
    // XTAL cycles counted over `slowclk_cycles` periods of `cal_clk`.
    let measured = RtcClock::calibrate_internal(cal_clk, slowclk_cycles) as u64;
    // period = measured / (xtal_mhz * cycles), scaled into Q13.19.
    let denom = xtal.mhz() as u64 * slowclk_cycles as u64;
    let q13_19 = ((measured << RtcClock::CAL_FRACT) + denom / 2 - 1) / denom;
    (q13_19 & u64::from(u32::MAX)) as u32
}
/// Count XTAL clock cycles over `slowclk_cycles` periods of `cal_clk`
/// using the TIMG0 RTC calibration unit. Returns 0 on timeout.
///
/// NOTE(review): this mirrors esp-idf's `rtc_clk_cal_internal`; the
/// enable / measure / restore ordering is significant, so the code is
/// annotated rather than restructured.
fn calibrate_internal(mut cal_clk: RtcCalSel, slowclk_cycles: u32) -> u32 {
    // Nominal source frequencies — used only to size the hardware timeout
    // and the pre-poll busy-wait, never for the result itself.
    const SOC_CLK_RC_FAST_FREQ_APPROX: u32 = 17_500_000;
    const SOC_CLK_RC_SLOW_FREQ_APPROX: u32 = 136_000;
    const SOC_CLK_XTAL32K_FREQ_APPROX: u32 = 32768;
    // First resolution of the RTC_MUX pseudo-source: only the two 32k
    // sources are mapped here; anything else stays RtcCalRtcMux for the
    // pass below.
    if cal_clk == RtcCalSel::RtcCalRtcMux {
        cal_clk = match cal_clk {
            RtcCalSel::RtcCalRtcMux => match RtcClock::get_slow_freq() {
                RtcSlowClock::RtcSlowClock32kXtal => RtcCalSel::RtcCal32kXtal,
                RtcSlowClock::RtcSlowClock32kRc => RtcCalSel::RtcCal32kRc,
                _ => cal_clk,
            },
            // Unreachable inside this `if` (cal_clk is RtcCalRtcMux here);
            // kept as-is for this documentation-only change.
            RtcCalSel::RtcCal32kOscSlow => RtcCalSel::RtcCalRtcMux,
            _ => cal_clk,
        };
    }
    let lp_clkrst = unsafe { &*LPWR::ptr() };
    let pcr = unsafe { &*PCR::ptr() };
    let pmu = unsafe { &*PMU::ptr() };
    let clk_src = RtcClock::get_slow_freq();
    // Second resolution: exhaustively maps the remaining RC_SLOW /
    // OSC_SLOW cases, so cal_clk is concrete after this block.
    if cal_clk == RtcCalSel::RtcCalRtcMux {
        cal_clk = match clk_src {
            RtcSlowClock::RtcSlowClockRcSlow => RtcCalSel::RtcCalRcSlow,
            RtcSlowClock::RtcSlowClock32kXtal => RtcCalSel::RtcCal32kXtal,
            RtcSlowClock::RtcSlowClock32kRc => RtcCalSel::RtcCal32kRc,
            RtcSlowClock::RtcSlowOscSlow => RtcCalSel::RtcCal32kOscSlow,
        };
    }
    let cali_clk_sel;
    // NOTE(review): dead code — cal_clk can no longer be RtcCalRtcMux
    // after the exhaustive remap above; it also maps OSC_SLOW differently
    // (to RcSlow) than the previous block. Left untouched.
    if cal_clk == RtcCalSel::RtcCalRtcMux {
        cal_clk = match clk_src {
            RtcSlowClock::RtcSlowClockRcSlow => RtcCalSel::RtcCalRcSlow,
            RtcSlowClock::RtcSlowClock32kXtal => RtcCalSel::RtcCal32kXtal,
            RtcSlowClock::RtcSlowClock32kRc => RtcCalSel::RtcCal32kRc,
            RtcSlowClock::RtcSlowOscSlow => RtcCalSel::RtcCalRcSlow,
        }
    }
    // Pick the calibration unit's reference input; for 32k-class sources,
    // first route the requested oscillator onto the shared 32k line.
    if cal_clk == RtcCalSel::RtcCalRcFast {
        cali_clk_sel = RtcCaliClkSel::CaliClkRcFast;
    } else if cal_clk == RtcCalSel::RtcCalRcSlow {
        cali_clk_sel = RtcCaliClkSel::CaliClkRcSlow;
    } else {
        cali_clk_sel = RtcCaliClkSel::CaliClk32k;
        match cal_clk {
            RtcCalSel::RtcCalRtcMux | RtcCalSel::RtcCalRcSlow | RtcCalSel::RtcCalRcFast => (),
            RtcCalSel::RtcCal32kRc => pcr
                .ctrl_32k_conf()
                .modify(|_, w| unsafe { w.clk_32k_sel().bits(0) }),
            RtcCalSel::RtcCal32kXtal => pcr
                .ctrl_32k_conf()
                .modify(|_, w| unsafe { w.clk_32k_sel().bits(1) }),
            RtcCalSel::RtcCal32kOscSlow => pcr
                .ctrl_32k_conf()
                .modify(|_, w| unsafe { w.clk_32k_sel().bits(2) }),
        }
    }
    // Remember the XTAL32K ICG state so it can be restored afterwards,
    // then make sure the clock is gated through while calibrating it.
    let dig_32k_xtal_enabled = lp_clkrst.clk_to_hp().read().icg_hp_xtal32k().bit_is_set();
    if cal_clk == RtcCalSel::RtcCal32kXtal && !dig_32k_xtal_enabled {
        lp_clkrst
            .clk_to_hp()
            .modify(|_, w| w.icg_hp_xtal32k().set_bit());
    }
    // NOTE(review): this unconditional enable makes the conditional one
    // just above redundant and opens the XTAL32K ICG for every source —
    // confirm whether it should be conditional.
    lp_clkrst
        .clk_to_hp()
        .modify(|_, w| w.icg_hp_xtal32k().set_bit());
    // Power up both low-power 32k oscillators for the measurement.
    pmu.hp_sleep_lp_ck_power()
        .modify(|_, w| w.hp_sleep_xpd_xtal32k().set_bit());
    pmu.hp_sleep_lp_ck_power()
        .modify(|_, w| w.hp_sleep_xpd_rc32k().set_bit());
    // RC_FAST: power up and un-gate if needed, with the analog settle
    // delays (50 us power-up, 5 us ICG).
    let rc_fast_enabled = pmu
        .hp_sleep_lp_ck_power()
        .read()
        .hp_sleep_xpd_fosc_clk()
        .bit_is_set();
    let dig_rc_fast_enabled = lp_clkrst.clk_to_hp().read().icg_hp_fosc().bit_is_set();
    if cal_clk == RtcCalSel::RtcCalRcFast {
        if !rc_fast_enabled {
            pmu.hp_sleep_lp_ck_power()
                .modify(|_, w| w.hp_sleep_xpd_fosc_clk().set_bit());
            unsafe {
                ets_delay_us(50);
            }
        }
        if !dig_rc_fast_enabled {
            lp_clkrst
                .clk_to_hp()
                .modify(|_, w| w.icg_hp_fosc().set_bit());
            unsafe {
                ets_delay_us(5);
            }
        }
    }
    // Same for the 32k RC oscillator (300 us settle time).
    let rc32k_enabled = pmu
        .hp_sleep_lp_ck_power()
        .read()
        .hp_sleep_xpd_rc32k()
        .bit_is_set();
    let dig_rc32k_enabled = lp_clkrst.clk_to_hp().read().icg_hp_osc32k().bit_is_set();
    if cal_clk == RtcCalSel::RtcCal32kRc {
        if !rc32k_enabled {
            pmu.hp_sleep_lp_ck_power()
                .modify(|_, w| w.hp_sleep_xpd_rc32k().set_bit());
            unsafe {
                ets_delay_us(300);
            }
        }
        if !dig_rc32k_enabled {
            lp_clkrst
                .clk_to_hp()
                .modify(|_, w| w.icg_hp_osc32k().set_bit());
        }
    }
    // If a continuous (cycling) calibration is in flight, shorten its
    // timeout and wait for it to finish before reprogramming the unit.
    let timg0 = unsafe { &*TIMG0::ptr() };
    if timg0
        .rtccalicfg()
        .read()
        .rtc_cali_start_cycling()
        .bit_is_set()
    {
        timg0
            .rtccalicfg2()
            .modify(|_, w| unsafe { w.rtc_cali_timeout_thres().bits(1) });
        while !timg0.rtccalicfg().read().rtc_cali_rdy().bit_is_set()
            && !timg0.rtccalicfg2().read().rtc_cali_timeout().bit_is_set()
        {}
    }
    // Configure a one-shot measurement over `slowclk_cycles` reference
    // periods.
    timg0
        .rtccalicfg()
        .modify(|_, w| unsafe { w.rtc_cali_clk_sel().bits(cali_clk_sel.clone() as u8) });
    timg0
        .rtccalicfg()
        .modify(|_, w| w.rtc_cali_start_cycling().clear_bit());
    // NOTE(review): `slowclk_cycles as u16` silently truncates values
    // above 65535 — callers in this file pass at most 1024.
    timg0
        .rtccalicfg()
        .modify(|_, w| unsafe { w.rtc_cali_max().bits(slowclk_cycles as u16) });
    // Per-source timeout threshold (in XTAL cycles) plus the nominal
    // frequency used to size the busy-wait below.
    let expected_freq = match cali_clk_sel {
        RtcCaliClkSel::CaliClk32k => {
            timg0.rtccalicfg2().modify(|_, w| unsafe {
                w.rtc_cali_timeout_thres().bits(slowclk_cycles << 12)
            });
            SOC_CLK_XTAL32K_FREQ_APPROX
        }
        RtcCaliClkSel::CaliClkRcFast => {
            timg0
                .rtccalicfg2()
                .modify(|_, w| unsafe { w.rtc_cali_timeout_thres().bits(0x01FFFFFF) });
            SOC_CLK_RC_FAST_FREQ_APPROX
        }
        _ => {
            timg0.rtccalicfg2().modify(|_, w| unsafe {
                w.rtc_cali_timeout_thres().bits(slowclk_cycles << 10)
            });
            SOC_CLK_RC_SLOW_FREQ_APPROX
        }
    };
    // Expected measurement duration in microseconds
    // (cycles [in MHz units] / frequency [Hz] -> us).
    let us_time_estimate = (HertzU32::MHz(slowclk_cycles) / expected_freq).to_Hz();
    // Rising edge on the start bit kicks off the measurement.
    timg0
        .rtccalicfg()
        .modify(|_, w| w.rtc_cali_start().clear_bit());
    timg0
        .rtccalicfg()
        .modify(|_, w| w.rtc_cali_start().set_bit());
    // Wait out the expected duration, then poll for ready or timeout.
    unsafe {
        ets_delay_us(us_time_estimate);
    }
    let cal_val = loop {
        if timg0.rtccalicfg().read().rtc_cali_rdy().bit_is_set() {
            break timg0.rtccalicfg1().read().rtc_cali_value().bits();
        }
        if timg0.rtccalicfg2().read().rtc_cali_timeout().bit_is_set() {
            // Timed out — the selected calibration clock is probably not
            // running; 0 signals failure to the caller.
            break 0;
        }
    };
    timg0
        .rtccalicfg()
        .modify(|_, w| w.rtc_cali_start().clear_bit());
    // Restore the XTAL32K ICG to its pre-measurement state.
    if cal_clk == RtcCalSel::RtcCal32kXtal && !dig_32k_xtal_enabled {
        lp_clkrst
            .clk_to_hp()
            .modify(|_, w| w.icg_hp_xtal32k().clear_bit());
    }
    // NOTE(review): the two restore blocks below re-assert set_bit when
    // the clock was already enabled (a no-op) and leave it enabled when it
    // was not — nothing is ever powered back down. Confirm against
    // esp-idf whether the disabled paths should clear instead.
    if cal_clk == RtcCalSel::RtcCalRcFast {
        if rc_fast_enabled {
            pmu.hp_sleep_lp_ck_power()
                .modify(|_, w| w.hp_sleep_xpd_fosc_clk().set_bit());
            unsafe {
                ets_delay_us(50);
            }
        }
        if dig_rc_fast_enabled {
            lp_clkrst
                .clk_to_hp()
                .modify(|_, w| w.icg_hp_fosc().set_bit());
            unsafe {
                ets_delay_us(5);
            }
        }
    }
    if cal_clk == RtcCalSel::RtcCal32kRc {
        if rc32k_enabled {
            pmu.hp_sleep_lp_ck_power()
                .modify(|_, w| w.hp_sleep_xpd_rc32k().set_bit());
            unsafe {
                ets_delay_us(300);
            }
        }
        if dig_rc32k_enabled {
            lp_clkrst
                .clk_to_hp()
                .modify(|_, w| w.icg_hp_osc32k().set_bit());
        }
    }
    cal_val
}
/// Number of RTC_SLOW clock cycles in one millisecond, derived from a
/// fresh 1024-cycle calibration of the currently selected slow clock.
pub(crate) fn cycles_to_1ms() -> u16 {
    // Map the active slow-clock source to the matching calibration input.
    let source = match RtcClock::get_slow_freq() {
        RtcSlowClock::RtcSlowClockRcSlow => RtcCalSel::RtcCalRtcMux,
        RtcSlowClock::RtcSlowClock32kXtal => RtcCalSel::RtcCal32kXtal,
        RtcSlowClock::RtcSlowClock32kRc => RtcCalSel::RtcCal32kRc,
        RtcSlowClock::RtcSlowOscSlow => RtcCalSel::RtcCal32kOscSlow,
    };
    // Q13.19 fixed-point period of the slow clock (see CAL_FRACT).
    let period_13q19 = RtcClock::calibrate(source, 1024);
    // Drop the fixed point, then convert period to cycles-per-millisecond.
    let period = (100_000_000 * period_13q19 as u64) / (1 << RtcClock::CAL_FRACT);
    (100_000_000 * 1000 / period) as u16
}
/// Rough estimate of the XTAL frequency in MHz, obtained by counting XTAL
/// cycles over 100 periods of the nominal ~150 kHz RC_SLOW clock.
pub(crate) fn estimate_xtal_frequency() -> u32 {
    let timg0 = unsafe { crate::peripherals::TIMG0::steal() };
    // Wait for any in-flight calibration to finish first.
    while timg0.rtccalicfg().read().rtc_cali_rdy().bit_is_clear() {}
    timg0.rtccalicfg().modify(|_, w| {
        w.rtc_cali_clk_sel()
            .variant(0) // 0 = RC_SLOW as the reference clock
            .rtc_cali_max()
            .variant(100) // measure over 100 reference periods
            .rtc_cali_start_cycling()
            .clear_bit()
            .rtc_cali_start()
            .set_bit() // one-shot measurement
    });
    // Busy-wait for the measurement to complete.
    while timg0.rtccalicfg().read().rtc_cali_rdy().bit_is_clear() {}
    // cali_value = XTAL cycles per 100 RC_SLOW periods; scale by the
    // nominal per-period rate (150_000 / 100 Hz) to get Hz, then to MHz.
    (timg0.rtccalicfg1().read().rtc_cali_value().bits()
        * (RtcSlowClock::RtcSlowClockRcSlow.frequency().to_Hz() / 100))
        / 1_000_000
}
}