// tudelft_quadrupel/time.rs

use core::arch::asm;
use core::ops::Sub;
use core::sync::atomic::{AtomicBool, AtomicU32, Ordering};
use core::time::Duration;
use nrf51_pac::interrupt;

use crate::mutex::Mutex;
use crate::once_cell::OnceCell;
pub use cortex_m::asm::delay as assembly_delay;
use cortex_m::peripheral::NVIC;
use nrf51_hal::rtc::{RtcCompareReg, RtcInterrupt};
use nrf51_hal::Rtc;
use nrf51_pac::RTC0;

/// A point in time, expressed in nanoseconds since the timer was initialized.
#[derive(Debug, Copy, Clone)]
pub struct Instant {
    time: u64,
}

impl Instant {
    /// Returns the current time as an `Instant`.
    #[must_use]
    pub fn now() -> Self {
        Self {
            time: get_time_ns(),
        }
    }

    /// Returns the `Duration` elapsed between `other` and `self`.
    ///
    /// # Panics
    /// Panics if `other` is later than `self`.
    #[must_use]
    pub fn duration_since(self, other: Self) -> Duration {
        assert!(self.time >= other.time);
        Duration::from_nanos(self.time - other.time)
    }

    /// Returns a new `Instant` that lies `d` after `self`.
    #[must_use]
    pub fn add_duration(self, d: Duration) -> Self {
        Self {
            time: self.time + d.as_nanos() as u64,
        }
    }

    /// Returns `true` if `self` is strictly later than `other`.
    #[must_use]
    pub fn is_later_than(self, other: Self) -> bool {
        self.time > other.time
    }

    /// Returns the number of nanoseconds between timer initialization and this `Instant`.
    #[must_use]
    pub fn ns_since_start(&self) -> u64 {
        self.time
    }
}

impl Sub<Self> for Instant {
    type Output = Duration;

    fn sub(self, rhs: Self) -> Self::Output {
        self.duration_since(rhs)
    }
}

impl Eq for Instant {}

impl PartialEq<Self> for Instant {
    fn eq(&self, other: &Self) -> bool {
        self.time == other.time
    }
}

impl PartialOrd<Self> for Instant {
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for Instant {
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        self.time.cmp(&other.time)
    }
}
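
// --- Illustrative usage sketch (added, not part of the original module) ---
// Shows how the `Instant` API above is typically combined: the function name and
// the cycle count passed to `assembly_delay` are placeholders, and `Instant::now`
// assumes `initialize` has already been called by the crate's setup code.
#[allow(dead_code)]
fn example_measure_elapsed() -> Duration {
    let start = Instant::now();
    assembly_delay(10_000); // stand-in for real work (~10k CPU cycles)
    let end = Instant::now();
    debug_assert!(!start.is_later_than(end));
    end - start // the `Sub` impl above yields a `Duration` via `duration_since`
}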

static RTC: Mutex<OnceCell<Rtc<RTC0>>> = Mutex::new(OnceCell::uninitialized());

/// Prescaler for RTC0; 0 means the RTC runs at the full 32.768 kHz of the low-frequency clock.
const PRESCALER: u32 = 0;
/// Length of one RTC tick in nanoseconds (1_000_000_000 / 32_768, rounded down).
const PERIOD: u64 = 30517;
/// The RTC counter is 24 bits wide, so it wraps around at this value.
const COUNTER_MAX: u32 = 1 << 24;

/// Set by the RTC0 interrupt when a tick has elapsed; cleared by `wait_for_next_tick`.
static TIMER_FLAG: AtomicBool = AtomicBool::new(false);

/// Total number of RTC ticks accumulated up to the last compare interrupt.
static GLOBAL_TIME: Mutex<u64> = Mutex::new(0);

/// RTC counter value observed at the last compare interrupt.
static PREV_COUNTER: AtomicU32 = AtomicU32::new(0);

/// Number of RTC ticks between two compare interrupts, as configured by `set_tick_frequency`.
static COUNTER_PERIOD: AtomicU32 = AtomicU32::new(0);
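
// Added commentary on the timekeeping scheme used below (assuming the nRF51's
// 32.768 kHz low-frequency clock): the 24-bit counter on its own wraps after
// (1 << 24) * 30_517 ns ≈ 512 s (roughly 8.5 minutes), so the driver keeps a running
// tick total in GLOBAL_TIME. Each compare interrupt folds the ticks elapsed since
// PREV_COUNTER into GLOBAL_TIME, and `get_time_ns` converts the total to nanoseconds
// by multiplying with PERIOD.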

/// Initializes the RTC0 peripheral: installs it in the `RTC` cell, enables the
/// compare-0 event and interrupt, and starts the counter (crate-internal setup).
pub(crate) fn initialize(clock_instance: RTC0, nvic: &mut NVIC) {
    RTC.modify(|rtc| {
        rtc.initialize(Rtc::new(clock_instance, PRESCALER).unwrap());
        rtc.enable_event(RtcInterrupt::Compare0);
        rtc.enable_interrupt(RtcInterrupt::Compare0, Some(nvic));
        rtc.enable_counter();
    });
}

/// Returns the number of nanoseconds since the timer was initialized.
fn get_time_ns() -> u64 {
    GLOBAL_TIME.modify(|global_time| {
        let counter = RTC.modify(|counter| counter.get_counter());

        let prev_counter = PREV_COUNTER.load(Ordering::SeqCst);

        // Ticks accumulated at the last interrupt, plus the ticks since then,
        // converted to nanoseconds.
        (*global_time + u64::from(counter_diff(prev_counter, counter))) * PERIOD
    })
}

/// Computes the number of ticks between two RTC counter readings, accounting for
/// the 24-bit counter wrapping around between `prev` and `curr`.
fn counter_diff(prev: u32, curr: u32) -> u32 {
    if curr < prev {
        // The counter wrapped: count the ticks up to the wrap point, then those after it.
        (COUNTER_MAX - prev) + curr
    } else {
        curr - prev
    }
}
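
// --- Illustrative check (added, not part of the original module) ---
// Demonstrates the wrap-around handling in `counter_diff`: once the 24-bit counter
// overflows, the elapsed tick count is still computed correctly.
#[allow(dead_code)]
fn example_counter_diff_wraps() {
    // No wrap: 300 - 100 = 200 ticks.
    debug_assert_eq!(counter_diff(100, 300), 200);
    // Wrap: (16_777_216 - 16_777_000) + 100 = 316 ticks.
    debug_assert_eq!(counter_diff(16_777_000, 100), 316);
}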

/// RTC0 interrupt handler: fires on every compare-0 match (one timer tick).
#[interrupt]
unsafe fn RTC0() {
    // Inside the interrupt handler the locks are taken without a critical section.
    let rtc = RTC.no_critical_section_lock_mut();
    let global_time = GLOBAL_TIME.no_critical_section_lock_mut();

    if rtc.is_event_triggered(RtcInterrupt::Compare0) {
        let counter = rtc.get_counter();
        let prev_counter = PREV_COUNTER.load(Ordering::SeqCst);

        // Fold the ticks elapsed since the previous interrupt into the global tick count.
        *global_time += u64::from(counter_diff(prev_counter, counter));
        PREV_COUNTER.store(counter, Ordering::SeqCst);

        // Schedule the next compare interrupt COUNTER_PERIOD ticks from now, wrapping at 24 bits.
        let mut new_counter = counter + COUNTER_PERIOD.load(Ordering::SeqCst);
        if new_counter >= COUNTER_MAX {
            new_counter -= COUNTER_MAX;
        }

        rtc.set_compare(RtcCompareReg::Compare0, new_counter)
            .unwrap();
        rtc.reset_event(RtcInterrupt::Compare0);
        TIMER_FLAG.store(true, Ordering::SeqCst);
    }
}

/// Blocks until the next timer tick (the next compare-0 interrupt) has occurred.
pub fn wait_for_next_tick() {
    // If a compare event is already pending, consume it and return immediately.
    if RTC.modify(|rtc| {
        if rtc.is_event_triggered(RtcInterrupt::Compare0) {
            TIMER_FLAG.store(false, Ordering::SeqCst);
            true
        } else {
            false
        }
    }) {
        return;
    }

    // Otherwise sleep until the interrupt handler sets TIMER_FLAG.
    while !TIMER_FLAG.load(Ordering::SeqCst) {
        cortex_m::asm::wfi();
    }
    TIMER_FLAG.store(false, Ordering::SeqCst);
}

/// Sets the tick frequency of the timer to `hz` ticks per second, so that
/// `wait_for_next_tick` returns roughly `hz` times per second.
#[allow(clippy::missing_panics_doc)]
pub fn set_tick_frequency(hz: u64) {
    RTC.modify(|rtc| {
        // Number of RTC ticks per requested tick period.
        let counter_setting = (1_000_000_000 / hz) / PERIOD;
        debug_assert!(counter_setting < (1 << 24), "counter period should be less than 1<<24 (roughly 8.5 minutes with the default PRESCALER setting)");

        COUNTER_PERIOD.store(counter_setting as u32, Ordering::SeqCst);

        rtc.set_compare(RtcCompareReg::Compare0, counter_setting as u32)
            .unwrap();
        rtc.clear_counter();
        PREV_COUNTER.store(0, Ordering::SeqCst);
    });

    // Brief delay after reconfiguring the RTC.
    delay_us_assembly(500);
}
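
// --- Illustrative usage sketch (added, not part of the original module) ---
// A fixed-rate loop built from `set_tick_frequency` and `wait_for_next_tick`.
// At 100 Hz the compare offset works out to (1_000_000_000 / 100) / 30_517 ≈ 327
// RTC ticks, i.e. a period of roughly 9.98 ms. The function name and the loop body
// are placeholders.
#[allow(dead_code)]
fn example_control_loop() -> ! {
    set_tick_frequency(100); // 100 loop iterations per second
    loop {
        wait_for_next_tick();
        // ... run one iteration of the control loop here ...
    }
}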

/// Busy-waits for approximately `number_of_us` microseconds using a counted
/// instruction loop (no timer involved).
#[allow(unused_assignments)]
pub fn delay_us_assembly(mut number_of_us: u32) {
    unsafe {
        asm!(
            "1:",
            "subs {}, #1",
            "nop",
            "nop",
            "nop",
            "nop",
            "nop",
            "nop",
            "nop",
            "nop",
            "nop",
            "nop",
            "nop",
            "nop",
            "bne 1b",
            inout(reg) number_of_us,
            options(nomem, nostack)
        );
    }
}
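
// Added commentary: each iteration of the loop above is one `subs`, twelve `nop`s and
// a taken `bne` (roughly 16 cycles on a Cortex-M0), so assuming the nRF51's 16 MHz core
// clock one iteration takes about 1 µs. The delay is therefore only approximate and
// scales with the actual CPU frequency.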

/// Busy-waits for approximately `number_of_ms` milliseconds by repeatedly calling
/// `delay_us_assembly`.
pub fn delay_ms_assembly(number_of_ms: u32) {
    for _ in 0..number_of_ms {
        delay_us_assembly(999);
    }
}