persistent_buff/lib.rs
//! [crates.io](https://crates.io/crates/persistent-buff) [docs.rs](https://docs.rs/persistent-buff)
//!
//! A buffer that persists between boots.
//! Inspired by [panic-persist](https://github.com/jamesmunns/panic-persist).
//!
//! A region in RAM is reserved for this buffer.
//! Your linker script should make sure the start and end of the buffer are outside of other sections.
//!
//! ## Usage
//!
//! ### Linker script
//! You need to create a new reserved section for the buffer and make sure it's
//! outside of other sections to avoid zero initialization.
//!
//! #### Example
//! `memory.x` file before modification:
//!
//! ```text
//! MEMORY
//! {
//!     /* NOTE 1 K = 1 KiBi = 1024 bytes */
//!     FLASH : ORIGIN = 0x00000000, LENGTH = 1024K
//!     RAM : ORIGIN = 0x20000000, LENGTH = 128K
//! }
//! ```
//!
//! `memory.x` file after modification to hold a 1K region:
//! ```text
//! MEMORY
//! {
//!     /* NOTE 1 K = 1 KiBi = 1024 bytes */
//!     FLASH : ORIGIN = 0x00000000, LENGTH = 1024K
//!     RAM : ORIGIN = 0x20000000, LENGTH = 128K - 1K
//!     PERSISTENT_BUFF : ORIGIN = ORIGIN(RAM) + LENGTH(RAM), LENGTH = 1K
//! }
//! _persistent_buff_start = ORIGIN(PERSISTENT_BUFF);
//! _persistent_buff_end = ORIGIN(PERSISTENT_BUFF) + LENGTH(PERSISTENT_BUFF);
//! ```
//!
//! ### Program
//!
//! ```ignore
//! #![no_std]
//! #![no_main]
//!
//! #[entry]
//! fn main() -> ! {
//!     let mut pbuff = persistent_buff::PersistentBuff::take_managed().unwrap();
//!
//!     // A trivial way to initialize the buffer is to fill it with 0
//!     let buff = pbuff.validate(|b| b.fill(0));
//!
//!     buff[0] = (buff[0] % 255) + 1;
//!     info!("Value is now {}", buff[0]);
//!
//!     loop {}
//! }
//! ```
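//!
//! If you prefer to hand the buffer out as a plain `&'static mut [u8]` (for example to a task or
//! a logger), the consuming variants can be used instead. A minimal sketch, assuming the same
//! linker setup as above:
//!
//! ```ignore
//! #[entry]
//! fn main() -> ! {
//!     let pbuff = persistent_buff::PersistentBuff::take_managed().unwrap();
//!
//!     // Zero the buffer only when it does not already contain valid data,
//!     // then consume the wrapper and keep the raw `'static` slice around.
//!     let buff: &'static mut [u8] = pbuff.take_validate(|b| b.fill(0));
//!
//!     // `buff` can now be moved into whatever needs long-lived storage.
//!     loop {}
//! }
//! ```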
//!
//! ## License
//! Licensed under either of
//! - Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or
//!   <http://www.apache.org/licenses/LICENSE-2.0>)
//! - MIT license ([LICENSE-MIT](LICENSE-MIT) or <http://opensource.org/licenses/MIT>)
//!
//! at your option.
//!
//! ## Contribution
//! Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion
//! in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above,
//! without any additional terms or conditions.

#![no_std]
#![no_main]
#![deny(missing_docs)]

use core::sync::atomic::{AtomicBool, Ordering};

const MAGIC_NUMBER: u32 = 0xFAB42069;
static PERSISTENT_BUFF_TAKEN: AtomicBool = AtomicBool::new(false);

/// Struct to request the persistent buff and manage it somewhat "safely".
/// When acquiring the buffer you need to validate/init it to a known state.
pub struct PersistentBuff {
    magic: *mut u32,
    buff: &'static mut [u8],
}

impl PersistentBuff {
    /// Take a managed version of the persistent buff.
    /// Allows checking whether the buffer is valid before usage.
    /// Note that compared to the [Self::take_raw] function, you will lose some bytes for storage of the marker.
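    ///
    /// A minimal usage sketch (not compiled here; it assumes the linker script from the
    /// crate-level docs and a single call early at boot):
    /// ```ignore
    /// // Succeeds the first time it is called, returns `None` afterwards.
    /// let pbuff = persistent_buff::PersistentBuff::take_managed().unwrap();
    /// assert!(persistent_buff::PersistentBuff::take_managed().is_none());
    /// ```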
    pub fn take_managed() -> Option<Self> {
        Self::take_raw().map(|b| Self {
            magic: b.as_mut_ptr().cast::<u32>(),
            buff: &mut b[core::mem::size_of::<u32>()..],
        })
    }

    /// Steal a managed version of the persistent buff without checking if it was already taken.
    /// See [Self::take_managed]
    ///
    /// # Safety
    /// Calling this function could create two mutable references to the same buffer.
    /// Make sure to only have one reference at a time to avoid aliased mutable references.
    pub unsafe fn steal_managed() -> Self {
        let b = Self::steal();
        Self {
            magic: b.as_mut_ptr().cast::<u32>(),
            buff: &mut b[core::mem::size_of::<u32>()..],
        }
    }

    /// Get the raw persistent slice, or `None` if it was already taken.
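    ///
    /// Note that the raw slice carries no validity marker: after the very first power-up its
    /// contents are whatever happened to be in RAM. A hedged sketch:
    /// ```ignore
    /// if let Some(raw) = persistent_buff::PersistentBuff::take_raw() {
    ///     // The caller is responsible for deciding whether `raw` holds meaningful data.
    ///     raw.fill(0);
    /// }
    /// ```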
    pub fn take_raw() -> Option<&'static mut [u8]> {
        unsafe {
            if PERSISTENT_BUFF_TAKEN.swap(true, Ordering::Relaxed) {
                None
            } else {
                Some(Self::steal())
            }
        }
    }

    /// Steal the raw persistent slice.
    /// Ignores whether it was already taken or not.
    ///
    /// # Safety
    /// Calling this function could create two mutable references to the same buffer.
    /// Make sure to only have one reference at a time to avoid aliased mutable references.
    pub unsafe fn steal() -> &'static mut [u8] {
        PERSISTENT_BUFF_TAKEN.store(true, Ordering::SeqCst);
        extern "C" {
            static mut _persistent_buff_start: u8;
            static mut _persistent_buff_end: u8;
        }
        // Take the addresses of the linker-provided symbols without creating references to them.
        let start = core::ptr::addr_of_mut!(_persistent_buff_start);
        let end = core::ptr::addr_of_mut!(_persistent_buff_end);
        let len = end as usize - start as usize;

        core::slice::from_raw_parts_mut(start, len)
    }

    /// Mark the persistent buffer as containing valid data.
    fn mark(&mut self) {
        unsafe {
            self.magic.write_unaligned(MAGIC_NUMBER);
        }
    }

    /// Unmark the persistent buffer so it is no longer considered to contain valid data.
    fn unmark(&mut self) {
        unsafe {
            self.magic.write_unaligned(0);
        }
    }

    /// Verify if the persistent buffer has valid data in it.
    pub fn valid(&self) -> bool {
        unsafe { self.magic.read_unaligned() == MAGIC_NUMBER }
    }

    /// Take the static internal buffer from the managed buff if it is valid.
    pub fn take(self) -> Option<&'static mut [u8]> {
        if self.valid() {
            Some(self.buff)
        } else {
            None
        }
    }

    /// Force-reset the buffer to a known state via the closure, mark it as valid for the next boot,
    /// then take the static buff from the managed buff.
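    ///
    /// A hedged sketch, for when the previous contents should be discarded unconditionally:
    /// ```ignore
    /// let pbuff = persistent_buff::PersistentBuff::take_managed().unwrap();
    /// // Wipe whatever was there before and get the `'static` slice back.
    /// let buff = pbuff.take_reset(|b| b.fill(0xFF));
    /// ```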
    pub fn take_reset<F>(mut self, f: F) -> &'static mut [u8]
    where
        F: FnOnce(&mut [u8]),
    {
        f(self.buff);
        self.mark();
        self.buff
    }

    /// Check if the buffer is valid; if not, call the provided closure to initialize it,
    /// then mark the buffer as valid and take the static buff from the managed buff.
    /// This makes sure the data in it is always "valid" and not garbage after a power loss.
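    ///
    /// A hedged sketch, for data that should survive reboots once initialized:
    /// ```ignore
    /// let pbuff = persistent_buff::PersistentBuff::take_managed().unwrap();
    /// // Zeroed only on the first boot (or after corruption); kept as-is otherwise.
    /// let buff = pbuff.take_validate(|b| b.fill(0));
    /// ```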
    pub fn take_validate<F>(mut self, f: F) -> &'static mut [u8]
    where
        F: FnOnce(&mut [u8]),
    {
        if !self.valid() {
            f(self.buff)
        }
        self.mark();
        self.buff
    }

    /// Get the buffer if the data is valid, otherwise return `None`.
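    ///
    /// A hedged sketch for reading back previously stored data:
    /// ```ignore
    /// let mut pbuff = persistent_buff::PersistentBuff::take_managed().unwrap();
    /// if let Some(buff) = pbuff.get() {
    ///     // Data from the previous boot is available here.
    ///     let _first = buff[0];
    /// }
    /// ```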
    pub fn get(&mut self) -> Option<&mut [u8]> {
        if self.valid() {
            Some(self.buff)
        } else {
            None
        }
    }

    /// Force-reset the buffer to a known state via the closure, mark it as valid and return the buffer.
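    ///
    /// A hedged sketch, e.g. for clearing stored data on demand while keeping the managed wrapper:
    /// ```ignore
    /// let mut pbuff = persistent_buff::PersistentBuff::take_managed().unwrap();
    /// let buff = pbuff.reset(|b| b.fill(0));
    /// ```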
    pub fn reset<F>(&mut self, f: F) -> &mut [u8]
    where
        F: FnOnce(&mut [u8]),
    {
        f(self.buff);
        self.mark();
        self.buff
    }

    /// Check if the buffer is valid; if not, call the provided closure to initialize it,
    /// then mark the buffer as valid.
    /// This makes sure the data in it is always "valid" and not garbage after a power loss.
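    ///
    /// A hedged sketch mirroring the crate-level example:
    /// ```ignore
    /// let mut pbuff = persistent_buff::PersistentBuff::take_managed().unwrap();
    /// // Fills with zeros only if the magic marker is missing, then marks the buffer valid.
    /// let buff = pbuff.validate(|b| b.fill(0));
    /// buff[0] = buff[0].wrapping_add(1);
    /// ```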
    pub fn validate<F>(&mut self, f: F) -> &mut [u8]
    where
        F: FnOnce(&mut [u8]),
    {
        if !self.valid() {
            f(self.buff)
        }
        self.mark();
        self.buff
    }

    /// Mark the buffer as invalid.
    pub fn invalidate(&mut self) {
        self.unmark();
    }
}