1#![cfg_attr(not(any(test, doctest, feature = "std")), no_std)]
2#![warn(missing_docs)]
3#![doc = include_str!("../README.md")]
4#![allow(clippy::cast_possible_truncation)]
5
6use core::num::NonZeroUsize;
7use core::{
8 fmt::Debug,
9 marker::PhantomData,
10 ops::{Deref, DerefMut, Range},
11};
12use embedded_storage_async::nor_flash::NorFlash;
13use map::SerializationError;
14
15#[cfg(feature = "alloc")]
16mod alloc_impl;
17#[cfg(feature = "arrayvec")]
18mod arrayvec_impl;
19pub mod cache;
20#[cfg(feature = "heapless-09")]
21mod heapless_09_impl;
22#[cfg(feature = "heapless")]
23mod heapless_impl;
24mod item;
25pub mod map;
26pub mod queue;
27
28#[cfg(any(test, doctest, feature = "_test"))]
29pub mod mock_flash;
31
/// Largest flash word size (in bytes) this crate supports; used to size
/// stack buffers that must hold at least one full word.
const MAX_WORD_SIZE: usize = 32;
37
/// Couples a NOR-flash device, the absolute address range it may use,
/// and a cache of page states read from that range.
struct GenericStorage<S: NorFlash, C: CacheImpl> {
    /// The flash device all reads, writes and erases go through.
    flash: S,
    /// Absolute address range in `flash` owned by this storage instance.
    flash_range: Range<u32>,
    /// Cache consulted before re-reading page state markers from flash.
    cache: C,
}
51
impl<S: NorFlash, C: CacheImpl> GenericStorage<S, C> {
    /// Erase the entire owned flash range in a single driver call.
    ///
    /// Driver errors are wrapped in [`Error::Storage`].
    pub async fn erase_all(&mut self) -> Result<(), Error<S::Error>> {
        self.flash
            .erase(self.flash_range.start, self.flash_range.end)
            .await
            .map_err(|e| Error::Storage {
                value: e,
                #[cfg(feature = "_test")]
                backtrace: std::backtrace::Backtrace::capture(),
            })
    }

    /// Per-item flash overhead in bytes: the data address of an item placed
    /// at address 0, i.e. everything stored before the item's own data.
    #[must_use]
    pub const fn item_overhead_size() -> u32 {
        item::ItemHeader::data_address::<S>(0)
    }

    /// Best-effort recovery: erase (re-open) every page whose state marker
    /// reads back as [`Error::Corrupted`]. Pages with a decodable state are
    /// left untouched.
    async fn try_general_repair(&mut self) -> Result<(), Error<S::Error>> {
        for page_index in self.get_pages(0) {
            if matches!(
                self.get_page_state(page_index).await,
                Err(Error::Corrupted { .. })
            ) {
                self.open_page(page_index).await?;
            }
        }

        #[cfg(fuzzing_repro)]
        eprintln!("General repair has been called");

        Ok(())
    }

    /// Find the first page in the given state, scanning every page starting
    /// at `starting_page_index` and wrapping around. Returns `Ok(None)` if
    /// no page currently has that state.
    async fn find_first_page(
        &mut self,
        starting_page_index: usize,
        page_state: PageState,
    ) -> Result<Option<usize>, Error<S::Error>> {
        for page_index in self.get_pages(starting_page_index) {
            if page_state == self.get_page_state(page_index).await? {
                return Ok(Some(page_index));
            }
        }

        Ok(None)
    }

    /// Number of erase pages in the flash range, clamped to at least 1.
    fn page_count(&self) -> NonZeroUsize {
        let page_count = self.flash_range.len() / S::ERASE_SIZE;
        // `max(1)` guarantees the `NonZeroUsize` construction cannot fail,
        // even if the range is smaller than a single erase page.
        NonZeroUsize::new(page_count.max(1)).unwrap()
    }

    /// Iterator over all page indices, beginning at `starting_page_index`
    /// and wrapping around past the last page.
    fn get_pages(
        &self,
        starting_page_index: usize,
    ) -> impl DoubleEndedIterator<Item = usize> + use<S, C> {
        let page_count = self.page_count();
        (0..page_count.get()).map(move |index| (index + starting_page_index) % page_count)
    }

    /// Index of the page after `page_index`, wrapping to 0 after the last.
    fn next_page(&self, page_index: usize) -> usize {
        let page_count = self.page_count();
        (page_index + 1) % page_count
    }

    /// Index of the page before `page_index`, wrapping to the last page
    /// when `page_index` is 0.
    fn previous_page(&self, page_index: usize) -> usize {
        let page_count = self.page_count();

        match page_index.checked_sub(1) {
            Some(new_page_index) => new_page_index,
            None => page_count.get() - 1,
        }
    }

    /// Determine a page's state from its start and end marker words,
    /// consulting the cache first and updating it with the result.
    async fn get_page_state(&mut self, page_index: usize) -> Result<PageState, Error<S::Error>> {
        if let Some(cached_page_state) = self.cache.get_page_state(page_index) {
            return Ok(cached_page_state);
        }

        let page_address = calculate_page_address::<S>(self.flash_range.clone(), page_index);
        // A written marker word is all zero bits (`MARKER` == 0). Rather than
        // requiring an exact match, a marker counts as "set" when at least
        // this many zero bits are present, which tolerates some corruption.
        const HALF_MARKER_BITS: u32 = 4;

        let mut buffer = [0; MAX_WORD_SIZE];
        self.flash
            .read(page_address, &mut buffer[..S::READ_SIZE])
            .await
            .map_err(|e| Error::Storage {
                value: e,
                #[cfg(feature = "_test")]
                backtrace: std::backtrace::Backtrace::capture(),
            })?;
        let start_marked = buffer[..S::READ_SIZE]
            .iter()
            .map(|marker_byte| marker_byte.count_zeros())
            .sum::<u32>()
            >= HALF_MARKER_BITS;

        // The end marker is read from the last `READ_SIZE` bytes of the page.
        self.flash
            .read(
                page_address + (S::ERASE_SIZE - S::READ_SIZE) as u32,
                &mut buffer[..S::READ_SIZE],
            )
            .await
            .map_err(|e| Error::Storage {
                value: e,
                #[cfg(feature = "_test")]
                backtrace: std::backtrace::Backtrace::capture(),
            })?;
        let end_marked = buffer[..S::READ_SIZE]
            .iter()
            .map(|marker_byte| marker_byte.count_zeros())
            .sum::<u32>()
            >= HALF_MARKER_BITS;

        let discovered_state = match (start_marked, end_marked) {
            (true, true) => PageState::Closed,
            (true, false) => PageState::PartialOpen,
            (false, true) => {
                // An end marker without a start marker never occurs in the
                // normal Open -> PartialOpen -> Closed progression.
                return Err(Error::Corrupted {
                    #[cfg(feature = "_test")]
                    backtrace: std::backtrace::Backtrace::capture(),
                });
            }
            (false, false) => PageState::Open,
        };

        // Record the freshly discovered state. The third argument is `false`
        // here, unlike the mutation paths below which pass `true` —
        // NOTE(review): presumably a dirty flag; confirm against `CacheImpl`.
        self.cache
            .notice_page_state(page_index, discovered_state, false);

        Ok(discovered_state)
    }

    /// Erase the page so it becomes [`PageState::Open`] again.
    async fn open_page(&mut self, page_index: usize) -> Result<(), Error<S::Error>> {
        // The cache is updated before the erase is issued.
        self.cache
            .notice_page_state(page_index, PageState::Open, true);

        let page_address = calculate_page_address::<S>(self.flash_range.clone(), page_index);
        let page_end_address =
            calculate_page_end_address::<S>(self.flash_range.clone(), page_index);

        self.flash
            .erase(page_address, page_end_address)
            .await
            .map_err(|e| Error::Storage {
                value: e,
                #[cfg(feature = "_test")]
                backtrace: std::backtrace::Backtrace::capture(),
            })?;

        Ok(())
    }

    /// Fully close a page by writing its end marker word. A page that is
    /// neither `Open` nor `PartialOpen` is left untouched.
    async fn close_page(&mut self, page_index: usize) -> Result<(), Error<S::Error>> {
        let current_state = self.partial_close_page(page_index).await?;

        // Only a page that is now partially open gets its end marker written.
        if current_state != PageState::PartialOpen {
            return Ok(());
        }

        self.cache
            .notice_page_state(page_index, PageState::Closed, true);

        // The end marker is one all-`MARKER` word at the very end of the page.
        let buffer = AlignedBuf([MARKER; MAX_WORD_SIZE]);
        let page_end_address =
            calculate_page_end_address::<S>(self.flash_range.clone(), page_index)
                - S::WORD_SIZE as u32;
        self.flash
            .write(page_end_address, &buffer[..S::WORD_SIZE])
            .await
            .map_err(|e| Error::Storage {
                value: e,
                #[cfg(feature = "_test")]
                backtrace: std::backtrace::Backtrace::capture(),
            })?;

        Ok(())
    }

    /// Write the start marker of an `Open` page, moving it to `PartialOpen`.
    /// A page in any other state is left untouched; the (possibly new) state
    /// is returned either way.
    async fn partial_close_page(
        &mut self,
        page_index: usize,
    ) -> Result<PageState, Error<S::Error>> {
        let current_state = self.get_page_state(page_index).await?;

        if current_state != PageState::Open {
            return Ok(current_state);
        }

        // Only `Open` reaches this point, so `new_state` is always
        // `PartialOpen`; the match spells out the full transition table.
        let new_state = match current_state {
            PageState::Closed => PageState::Closed,
            PageState::PartialOpen | PageState::Open => PageState::PartialOpen,
        };

        self.cache.notice_page_state(page_index, new_state, true);

        // The start marker is one all-`MARKER` word at the start of the page.
        let buffer = AlignedBuf([MARKER; MAX_WORD_SIZE]);
        let page_start_address = calculate_page_address::<S>(self.flash_range.clone(), page_index);
        self.flash
            .write(page_start_address, &buffer[..S::WORD_SIZE])
            .await
            .map_err(|e| Error::Storage {
                value: e,
                #[cfg(feature = "_test")]
                backtrace: std::backtrace::Backtrace::capture(),
            })?;

        Ok(new_state)
    }

    /// Debug helper: render every page's state and every item stored on it
    /// into a `String`. Panics if an item header is found but the item
    /// itself cannot be read.
    #[cfg(any(test, feature = "std"))]
    pub async fn print_items(&mut self) -> String {
        use crate::NorFlashExt;
        use std::fmt::Write;

        let mut buf = [0; 1024 * 16];

        let mut s = String::new();

        writeln!(s, "Items in flash:").unwrap();

        for page_index in self.get_pages(0) {
            writeln!(
                s,
                " Page {page_index} ({}):",
                match self.get_page_state(page_index).await {
                    Ok(value) => format!("{value:?}"),
                    Err(e) => format!("Error ({e:?})"),
                }
            )
            .unwrap();
            // Item data lives between the two marker words of the page.
            let page_data_start =
                crate::calculate_page_address::<S>(self.flash_range.clone(), page_index)
                    + S::WORD_SIZE as u32;
            let page_data_end =
                crate::calculate_page_end_address::<S>(self.flash_range.clone(), page_index)
                    - S::WORD_SIZE as u32;

            let mut it = crate::item::ItemHeaderIter::new(page_data_start, page_data_end);
            // NOTE(review): the `|_, _| false` callback presumably tells
            // `traverse` to never stop early — confirm against `ItemHeaderIter`.
            while let (Some(header), item_address) =
                it.traverse(&mut self.flash, |_, _| false).await.unwrap()
            {
                let next_item_address = header.next_item_address::<S>(item_address);
                let maybe_item = match header
                    .read_item(&mut self.flash, &mut buf, item_address, page_data_end)
                    .await
                {
                    Ok(maybe_item) => maybe_item,
                    Err(e) => {
                        writeln!(
                            s,
                            " Item COULD NOT BE READ at {item_address}..{next_item_address}"
                        )
                        .unwrap();

                        println!("{s}");
                        panic!("{e:?}");
                    }
                };

                writeln!(
                    s,
                    " Item {maybe_item:?} at {item_address}..{next_item_address}"
                )
                .unwrap();
            }
        }

        s
    }

    /// Consume the storage and hand back the flash device and the cache.
    pub fn destroy(self) -> (S, C) {
        (self.flash, self.cache)
    }

    /// Mutable access to the underlying flash device.
    pub const fn flash(&mut self) -> &mut S {
        &mut self.flash
    }

    /// The absolute flash address range this storage operates on.
    pub const fn flash_range(&self) -> Range<u32> {
        self.flash_range.start..self.flash_range.end
    }
}
367
368const fn round_up_to_alignment<S: NorFlash>(value: u32) -> u32 {
371 value.next_multiple_of(S::WORD_SIZE as u32)
372}
373
374const fn round_up_to_alignment_usize<S: NorFlash>(value: usize) -> usize {
377 value.next_multiple_of(S::WORD_SIZE)
378}
379
380const fn round_down_to_alignment<S: NorFlash>(value: u32) -> u32 {
383 let alignment = S::WORD_SIZE as u32;
384 (value / alignment) * alignment
385}
386
387const fn round_down_to_alignment_usize<S: NorFlash>(value: usize) -> usize {
390 round_down_to_alignment::<S>(value as u32) as usize
391}
392
393const fn calculate_page_address<S: NorFlash>(flash_range: Range<u32>, page_index: usize) -> u32 {
395 flash_range.start + (S::ERASE_SIZE * page_index) as u32
396}
397
398const fn calculate_page_end_address<S: NorFlash>(
400 flash_range: Range<u32>,
401 page_index: usize,
402) -> u32 {
403 flash_range.start + (S::ERASE_SIZE * (page_index + 1)) as u32
404}
405
406const fn calculate_page_index<S: NorFlash>(flash_range: Range<u32>, address: u32) -> usize {
408 (address - flash_range.start) as usize / S::ERASE_SIZE
409}
410
411const fn calculate_page_size<S: NorFlash>() -> usize {
412 S::ERASE_SIZE - S::WORD_SIZE * 2
414}
415
/// Byte value written to flash to set a page marker: all bits zero.
const MARKER: u8 = 0;
418
/// Lifecycle state of a flash page, derived from its start/end marker words.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
enum PageState {
    /// Both the start and the end marker are written.
    Closed,
    /// Only the start marker is written.
    PartialOpen,
    /// Neither marker is written (the page has been erased).
    Open,
}
430
431#[allow(dead_code)]
432impl PageState {
433 #[must_use]
437 fn is_closed(self) -> bool {
438 matches!(self, Self::Closed)
439 }
440
441 #[must_use]
445 fn is_partial_open(self) -> bool {
446 matches!(self, Self::PartialOpen)
447 }
448
449 #[must_use]
453 fn is_open(self) -> bool {
454 matches!(self, Self::Open)
455 }
456}
457
/// The error type of this crate.
#[non_exhaustive]
#[derive(Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum Error<S> {
    /// An error from the underlying flash driver.
    Storage {
        /// The driver's error value.
        value: S,
        /// Backtrace captured where the error was created (`_test` builds only).
        #[cfg(feature = "_test")]
        backtrace: std::backtrace::Backtrace,
    },
    /// The storage is full.
    FullStorage,
    /// The storage is corrupted: on-flash data could not be interpreted.
    Corrupted {
        /// Backtrace captured where the corruption was detected (`_test` builds only).
        #[cfg(feature = "_test")]
        backtrace: std::backtrace::Backtrace,
    },
    /// An internal logic error was hit — presumably a bug in this crate.
    LogicBug {
        /// Backtrace captured where the bug was detected (`_test` builds only).
        #[cfg(feature = "_test")]
        backtrace: std::backtrace::Backtrace,
    },
    /// A provided buffer was too big to be used.
    BufferTooBig,
    /// A provided buffer was too small; the payload is the required size.
    BufferTooSmall(usize),
    /// An error during (de)serialization of a map value.
    SerializationError(SerializationError),
    /// The item is too big to fit in the flash.
    ItemTooBig,
}
499
500impl<S> From<SerializationError> for Error<S> {
501 fn from(v: SerializationError) -> Self {
502 Self::SerializationError(v)
503 }
504}
505
506impl<S: PartialEq> PartialEq for Error<S> {
507 fn eq(&self, other: &Self) -> bool {
508 match (self, other) {
509 (Self::Storage { value: l_value, .. }, Self::Storage { value: r_value, .. }) => {
510 l_value == r_value
511 }
512 (Self::BufferTooSmall(l0), Self::BufferTooSmall(r0)) => l0 == r0,
513 _ => core::mem::discriminant(self) == core::mem::discriminant(other),
514 }
515 }
516}
517
518impl<S> core::fmt::Display for Error<S>
519where
520 S: core::fmt::Display,
521{
522 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
523 match self {
524 Error::Storage { value, .. } => write!(f, "Storage error: {value}"),
525 Error::FullStorage => write!(f, "Storage is full"),
526 #[cfg(not(feature = "_test"))]
527 Error::Corrupted { .. } => write!(f, "Storage is corrupted"),
528 #[cfg(feature = "_test")]
529 Error::Corrupted { backtrace } => write!(f, "Storage is corrupted\n{backtrace}"),
530 #[cfg(not(feature = "_test"))]
531 Error::LogicBug { .. } => write!(f, "Logic bug"),
532 #[cfg(feature = "_test")]
533 Error::LogicBug { backtrace } => write!(f, "Logic bug\n{backtrace}"),
534 Error::BufferTooBig => write!(f, "A provided buffer was to big to be used"),
535 Error::BufferTooSmall(needed) => write!(
536 f,
537 "A provided buffer was to small to be used. Needed was {needed}"
538 ),
539 Error::SerializationError(value) => write!(f, "Map value error: {value}"),
540 Error::ItemTooBig => write!(f, "The item is too big to fit in the flash"),
541 }
542 }
543}
544
// Marker impl of the standard `Error` trait; behavior comes entirely from
// the `Debug`/`Display` bounds (no `source` is provided).
impl<S> core::error::Error for Error<S> where S: core::fmt::Display + core::fmt::Debug {}
546
/// Byte buffer whose storage is forced onto a 4-byte boundary, for use where
/// word-aligned buffers are handed to the flash driver.
#[repr(align(4))]
pub(crate) struct AlignedBuf<const SIZE: usize>(pub(crate) [u8; SIZE]);
550impl<const SIZE: usize> Deref for AlignedBuf<SIZE> {
551 type Target = [u8];
552 fn deref(&self) -> &Self::Target {
553 &self.0
554 }
555}
556
557impl<const SIZE: usize> DerefMut for AlignedBuf<SIZE> {
558 fn deref_mut(&mut self) -> &mut Self::Target {
559 &mut self.0
560 }
561}
562
/// Extension trait that adds a combined word size to any [`NorFlash`] device.
trait NorFlashExt {
    /// The larger of the device's read and write sizes; used as the
    /// alignment/granularity unit throughout this file.
    const WORD_SIZE: usize;
}
568
569impl<S: NorFlash> NorFlashExt for S {
570 const WORD_SIZE: usize = {
571 assert_read_write_sizes(Self::WRITE_SIZE, Self::READ_SIZE);
572
573 if Self::WRITE_SIZE > Self::READ_SIZE {
574 Self::WRITE_SIZE
575 } else {
576 Self::READ_SIZE
577 }
578 };
579}
580
/// Check that a flash's read and write sizes are compatible: one must evenly
/// divide the other. Panics otherwise (at const-evaluation time when called
/// from a const context).
#[track_caller]
const fn assert_read_write_sizes(write_size: usize, read_size: usize) {
    let compatible =
        write_size.is_multiple_of(read_size) || read_size.is_multiple_of(write_size);
    assert!(
        compatible,
        "Only flash with read and write sizes that are multiple of each other are supported"
    );
}
588
/// Runs `$function`; if it fails with [`Error::Corrupted`], runs
/// `$repair_function` and then retries `$function` exactly once. Any other
/// result (success or a different error) is returned unchanged.
macro_rules! run_with_auto_repair {
    (function = $function:expr, repair = $repair_function:expr) => {
        match $function {
            Err(Error::Corrupted {
                #[cfg(feature = "_test")]
                backtrace: _backtrace,
                ..
            }) => {
                // Typo fix in the fuzz-repro log: "curruption" -> "corruption".
                #[cfg(all(feature = "_test", fuzzing_repro))]
                eprintln!(
                    "### Encountered corruption! Repairing now. Originated from:\n{_backtrace:#}"
                );
                $repair_function;
                $function
            }
            val => val,
        }
    };
}
608
609pub(crate) use run_with_auto_repair;
610
611use crate::cache::CacheImpl;
612
#[cfg(test)]
mod tests {
    use crate::cache::NoCache;

    use super::*;
    use futures_test::test;

    /// Mock flash with 4 pages, 4-byte words and 64 words per page
    /// (256 bytes per page, 0x400 bytes total).
    type MockFlash = mock_flash::MockFlashBase<4, 4, 64>;

    /// Write `bytes` to `offset` through a word-aligned scratch buffer.
    async fn write_aligned(
        flash: &mut MockFlash,
        offset: u32,
        bytes: &[u8],
    ) -> Result<(), mock_flash::MockFlashError> {
        let mut buf = AlignedBuf([0; 256]);
        buf[..bytes.len()].copy_from_slice(bytes);
        flash.write(offset, &buf[..bytes.len()]).await
    }

    #[test]
    async fn test_find_pages() {
        let mut flash = MockFlash::default();

        // Page 0: start and end markers written -> Closed.
        write_aligned(&mut flash, 0x000, &[MARKER, 0, 0, 0])
            .await
            .unwrap();
        write_aligned(&mut flash, 0x100 - 4, &[0, 0, 0, MARKER])
            .await
            .unwrap();
        // Page 1: start and end markers written -> Closed.
        write_aligned(&mut flash, 0x100, &[MARKER, 0, 0, 0])
            .await
            .unwrap();
        write_aligned(&mut flash, 0x200 - 4, &[0, 0, 0, MARKER])
            .await
            .unwrap();
        // Page 2: only the start marker written -> PartialOpen.
        // Page 3: untouched -> Open.
        write_aligned(&mut flash, 0x200, &[MARKER, 0, 0, 0])
            .await
            .unwrap();

        let mut storage = GenericStorage {
            // Fixed redundant field init (`flash: flash`) to use shorthand.
            flash,
            flash_range: 0x000..0x400,
            cache: NoCache::new(),
        };

        assert_eq!(
            storage.find_first_page(0, PageState::Open).await.unwrap(),
            Some(3)
        );
        assert_eq!(
            storage
                .find_first_page(0, PageState::PartialOpen)
                .await
                .unwrap(),
            Some(2)
        );
        assert_eq!(
            storage
                .find_first_page(1, PageState::PartialOpen)
                .await
                .unwrap(),
            Some(2)
        );
        assert_eq!(
            storage
                .find_first_page(2, PageState::PartialOpen)
                .await
                .unwrap(),
            Some(2)
        );
        assert_eq!(
            storage.find_first_page(3, PageState::Open).await.unwrap(),
            Some(3)
        );

        // Shrink the range to the first two (closed) pages: no PartialOpen.
        storage.flash_range = 0x000..0x200;
        assert_eq!(
            storage
                .find_first_page(0, PageState::PartialOpen)
                .await
                .unwrap(),
            None
        );
        storage.flash_range = 0x000..0x400;

        assert_eq!(
            storage.find_first_page(0, PageState::Closed).await.unwrap(),
            Some(0)
        );
        assert_eq!(
            storage.find_first_page(1, PageState::Closed).await.unwrap(),
            Some(1)
        );
        assert_eq!(
            storage.find_first_page(2, PageState::Closed).await.unwrap(),
            Some(0)
        );
        assert_eq!(
            storage.find_first_page(3, PageState::Closed).await.unwrap(),
            Some(0)
        );

        // Shrink the range to the last two pages: no Closed page in view.
        storage.flash_range = 0x200..0x400;
        assert_eq!(
            storage.find_first_page(0, PageState::Closed).await.unwrap(),
            None
        );
    }

    #[test]
    async fn read_write_sizes() {
        // All combinations where one size divides the other must not panic.
        assert_read_write_sizes(1, 1);
        assert_read_write_sizes(1, 4);
        assert_read_write_sizes(4, 4);
        assert_read_write_sizes(4, 1);
    }
}