use crate::{
cache::Cache,
item::{find_next_free_item_spot, Item, ItemHeader, ItemIter},
};
use self::cache::{CacheImpl, PrivateCacheImpl};
use super::*;
/// Fetch the newest stored item with the given key.
///
/// Returns `Ok(None)` when no item with `search_key` is present.
/// `data_buffer` must be big enough to hold the largest serialized item.
pub async fn fetch_item<I: StorageItem, S: NorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    mut cache: impl CacheImpl,
    data_buffer: &mut [u8],
    search_key: I::Key,
) -> Result<Option<I>, MapError<I::Error, S::Error>> {
    let located =
        fetch_item_with_location(flash, flash_range, cache.inner(), data_buffer, search_key)
            .await?;
    // The caller only wants the item; drop its flash address and header.
    Ok(located.map(|(item, _address, _header)| item))
}
/// Fetch the newest item with `search_key`, together with its absolute flash
/// address and its parsed [`ItemHeader`].
///
/// Returns `Ok(None)` when the key is not present in any page.
#[allow(clippy::type_complexity)]
async fn fetch_item_with_location<I: StorageItem, S: NorFlash>(
flash: &mut S,
flash_range: Range<u32>,
cache: &mut Cache<impl PageStatesCache>,
data_buffer: &mut [u8],
search_key: I::Key,
) -> Result<Option<(I, u32, ItemHeader)>, MapError<I::Error, S::Error>> {
// Geometry preconditions: the map region must be erase-aligned, at least
// two pages big, and a page must have room for the page-state marker words.
assert_eq!(flash_range.start % S::ERASE_SIZE as u32, 0);
assert_eq!(flash_range.end % S::ERASE_SIZE as u32, 0);
assert!(flash_range.end - flash_range.start >= S::ERASE_SIZE as u32 * 2);
assert!(S::ERASE_SIZE >= S::WORD_SIZE * 3);
assert!(S::WORD_SIZE <= MAX_WORD_SIZE);
// A dirty cache means a previous operation was interrupted, so its
// contents can no longer be trusted.
if cache.is_dirty() {
cache.invalidate_cache_state();
}
// The partial-open page is the one currently being written, i.e. the
// page holding the newest data.
let mut last_used_page =
find_first_page(flash, flash_range.clone(), cache, 0, PageState::PartialOpen).await?;
if last_used_page.is_none() {
// No partial-open page. If an open page exists whose predecessor is
// closed, that closed predecessor is the most recently written page.
if let Some(first_open_page) =
find_first_page(flash, flash_range.clone(), cache, 0, PageState::Open).await?
{
let previous_page = previous_page::<S>(flash_range.clone(), first_open_page);
if get_page_state(flash, flash_range.clone(), cache, previous_page)
.await?
.is_closed()
{
last_used_page = Some(previous_page);
} else {
// Predecessor isn't closed either: the storage holds no data,
// so the key cannot be present.
cache.unmark_dirty();
return Ok(None);
}
} else {
// Neither a partial-open nor an open page exists; this state is
// never produced by the store path, so treat it as corruption.
return Err(MapError::Corrupted {
#[cfg(feature = "_test")]
backtrace: std::backtrace::Backtrace::capture(),
});
}
}
// Walk the pages from newest to oldest until the key is found.
let mut current_page_to_check = last_used_page.unwrap();
let mut newest_found_item = None;
loop {
// The usable data area of a page excludes the page-state words at
// both ends.
let page_data_start_address =
calculate_page_address::<S>(flash_range.clone(), current_page_to_check)
+ S::WORD_SIZE as u32;
let page_data_end_address =
calculate_page_end_address::<S>(flash_range.clone(), current_page_to_check)
- S::WORD_SIZE as u32;
let mut it = ItemIter::new(page_data_start_address, page_data_end_address);
while let Some((item, address)) = it.next(flash, data_buffer).await? {
// Within a page, later items are newer: keep overwriting the
// candidate so the last match in the page wins.
if I::deserialize_key_only(item.data()).map_err(MapError::Item)? == search_key {
newest_found_item = Some((
I::deserialize_from(item.data()).map_err(MapError::Item)?,
address,
item.header,
));
}
}
if newest_found_item.is_some() {
break;
}
// Not in this page; step back to the previous (older) page, but only
// while pages are still closed — a non-closed page means we've walked
// past all stored data.
let previous_page = previous_page::<S>(flash_range.clone(), current_page_to_check);
if get_page_state(flash, flash_range.clone(), cache, previous_page).await?
!= PageState::Closed
{
cache.unmark_dirty();
return Ok(None);
}
current_page_to_check = previous_page;
}
cache.unmark_dirty();
Ok(newest_found_item)
}
/// Store `item` in flash. A later store with the same key shadows earlier
/// values; `fetch_item` always returns the most recently stored value.
pub async fn store_item<I: StorageItem, S: NorFlash>(
flash: &mut S,
flash_range: Range<u32>,
mut cache: impl CacheImpl,
data_buffer: &mut [u8],
item: &I,
) -> Result<(), MapError<I::Error, S::Error>> {
// Same geometry preconditions as `fetch_item_with_location`.
assert_eq!(flash_range.start % S::ERASE_SIZE as u32, 0);
assert_eq!(flash_range.end % S::ERASE_SIZE as u32, 0);
assert!(flash_range.end - flash_range.start >= S::ERASE_SIZE as u32 * 2);
assert!(S::ERASE_SIZE >= S::WORD_SIZE * 3);
assert!(S::WORD_SIZE <= MAX_WORD_SIZE);
let cache = cache.inner();
if cache.is_dirty() {
cache.invalidate_cache_state();
}
// Each loop iteration either writes the item or advances/frees a page.
// Looping more times than there are pages means nothing can ever fit.
let mut recursion_level = 0;
loop {
if recursion_level == get_pages::<S>(flash_range.clone(), 0).count() {
cache.unmark_dirty();
return Err(MapError::FullStorage);
}
// If a partial-open (active) page exists, try appending to it.
let next_page_to_use = if let Some(partial_open_page) =
find_first_page(flash, flash_range.clone(), cache, 0, PageState::PartialOpen).await?
{
// Invariant: the page after the active one is kept open as the
// migration buffer. If it isn't open, the storage is corrupt.
if !get_page_state(
flash,
flash_range.clone(),
cache,
next_page::<S>(flash_range.clone(), partial_open_page),
)
.await?
.is_open()
{
return Err(MapError::Corrupted {
#[cfg(feature = "_test")]
backtrace: std::backtrace::Backtrace::capture(),
});
}
// Usable data area of the active page (between the page-state words).
let page_data_start_address =
calculate_page_address::<S>(flash_range.clone(), partial_open_page)
+ S::WORD_SIZE as u32;
let page_data_end_address =
calculate_page_end_address::<S>(flash_range.clone(), partial_open_page)
- S::WORD_SIZE as u32;
let item_data_length = item.serialize_into(data_buffer).map_err(MapError::Item)?;
let free_spot_address = find_next_free_item_spot(
flash,
page_data_start_address,
page_data_end_address,
item_data_length as u32,
)
.await?;
match free_spot_address {
Some(free_spot_address) => {
// There is room: write the item and we're done.
Item::write_new(flash, free_spot_address, &data_buffer[..item_data_length])
.await?;
cache.unmark_dirty();
return Ok(());
}
None => {
// The active page is full: close it and continue with its
// successor.
close_page(flash, flash_range.clone(), cache, partial_open_page).await?;
Some(next_page::<S>(flash_range.clone(), partial_open_page))
}
}
} else {
None
};
match next_page_to_use {
Some(next_page_to_use) => {
// Activate the successor page; it must still be open.
let next_page_state =
get_page_state(flash, flash_range.clone(), cache, next_page_to_use).await?;
if !next_page_state.is_open() {
return Err(MapError::Corrupted {
#[cfg(feature = "_test")]
backtrace: std::backtrace::Backtrace::capture(),
});
}
partial_close_page(flash, flash_range.clone(), cache, next_page_to_use).await?;
// Restore the buffer-page invariant: if the page after the newly
// activated one is not open, migrate its still-current items into
// the newly activated page so that page can be freed.
let next_buffer_page = next_page::<S>(flash_range.clone(), next_page_to_use);
let next_buffer_page_state =
get_page_state(flash, flash_range.clone(), cache, next_buffer_page).await?;
if !next_buffer_page_state.is_open() {
migrate_items::<I, _>(
flash,
flash_range.clone(),
cache,
data_buffer,
next_buffer_page,
next_page_to_use,
)
.await?;
}
}
None => {
// No page is active yet (e.g. a freshly erased flash): activate
// the first open page and retry the store on the next iteration.
let first_open_page =
match find_first_page(flash, flash_range.clone(), cache, 0, PageState::Open)
.await?
{
Some(first_open_page) => first_open_page,
None => {
// No open page either — nothing can be activated.
return Err(MapError::Corrupted {
#[cfg(feature = "_test")]
backtrace: std::backtrace::Backtrace::capture(),
});
}
};
partial_close_page(flash, flash_range.clone(), cache, first_open_page).await?;
}
}
recursion_level += 1;
}
}
/// A key-value item that can be stored in the map.
pub trait StorageItem {
/// The key type. Equality on keys decides which stored values shadow
/// each other.
type Key: Eq;
/// Error type returned by (de)serialization.
type Error;
/// Serialize this item into `buffer`, returning the number of bytes used.
fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, Self::Error>;
/// Deserialize a full item from `buffer`.
fn deserialize_from(buffer: &[u8]) -> Result<Self, Self::Error>
where
Self: Sized;
/// Deserialize only the key from `buffer`.
///
/// The default implementation deserializes the entire item and extracts
/// the key; override it if the key can be read more cheaply.
fn deserialize_key_only(buffer: &[u8]) -> Result<Self::Key, Self::Error>
where
Self: Sized,
{
Ok(Self::deserialize_from(buffer)?.key())
}
/// The key of this item.
fn key(&self) -> Self::Key;
}
/// The errors that the map operations can produce.
#[non_exhaustive]
#[derive(Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum MapError<I, S> {
/// An error from the item's (de)serialization (`StorageItem::Error`).
Item(I),
/// An error from the underlying flash storage.
Storage {
value: S,
// Only captured in test builds to aid debugging.
#[cfg(feature = "_test")]
backtrace: std::backtrace::Backtrace,
},
/// The map region is full; the item could not be stored.
FullStorage,
/// The on-flash state is inconsistent and cannot be processed.
Corrupted {
#[cfg(feature = "_test")]
backtrace: std::backtrace::Backtrace,
},
/// Propagated from the lower storage layer: a buffer was too big.
BufferTooBig,
/// A provided buffer was too small; the payload is the required size.
BufferTooSmall(usize),
}
// Lift a lower-level storage `Error` into a `MapError`. Each variant maps to
// its namesake (including the test-only backtrace field); `Item` has no
// lower-level counterpart and is never produced here.
impl<S, I> From<super::Error<S>> for MapError<I, S> {
fn from(value: super::Error<S>) -> Self {
match value {
Error::Storage {
value,
#[cfg(feature = "_test")]
backtrace,
} => Self::Storage {
value,
#[cfg(feature = "_test")]
backtrace,
},
Error::FullStorage => Self::FullStorage,
Error::Corrupted {
#[cfg(feature = "_test")]
backtrace,
} => Self::Corrupted {
#[cfg(feature = "_test")]
backtrace,
},
Error::BufferTooBig => Self::BufferTooBig,
Error::BufferTooSmall(needed) => Self::BufferTooSmall(needed),
}
}
}
// Equality for errors: data-carrying variants compare their payloads (the
// test-only backtrace is deliberately ignored); all other variants compare
// by discriminant only.
impl<S: PartialEq, I: PartialEq> PartialEq for MapError<I, S> {
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (Self::Item(left), Self::Item(right)) => left == right,
            (
                Self::Storage { value: left, .. },
                Self::Storage { value: right, .. },
            ) => left == right,
            (Self::BufferTooSmall(left), Self::BufferTooSmall(right)) => left == right,
            // Remaining variants carry no comparable data; two errors are
            // equal exactly when they are the same variant.
            (left, right) => core::mem::discriminant(left) == core::mem::discriminant(right),
        }
    }
}
/// Copy every item on `source_page` that is still the newest version of its
/// key into `target_page`, then open `source_page` again.
async fn migrate_items<I: StorageItem, S: NorFlash>(
flash: &mut S,
flash_range: Range<u32>,
cache: &mut Cache<impl PageStatesCache>,
data_buffer: &mut [u8],
source_page: usize,
target_page: usize,
) -> Result<(), MapError<I::Error, S::Error>> {
// Start writing just past the page-state word of the target page.
let mut next_page_write_address =
calculate_page_address::<S>(flash_range.clone(), target_page) + S::WORD_SIZE as u32;
let mut it = ItemIter::new(
calculate_page_address::<S>(flash_range.clone(), source_page) + S::WORD_SIZE as u32,
calculate_page_end_address::<S>(flash_range.clone(), source_page) - S::WORD_SIZE as u32,
);
while let Some((item, item_address)) = it.next(flash, data_buffer).await? {
let key = I::deserialize_key_only(item.data()).map_err(MapError::Item)?;
// Split off the header so the borrowed buffer can be reused for the
// lookup below (shadows the outer `data_buffer` on purpose).
let (item_header, data_buffer) = item.destruct();
// The newest version of this key must exist somewhere; if the lookup
// finds nothing the storage is corrupt.
let Some((_, found_address, _)) =
fetch_item_with_location::<I, S>(flash, flash_range.clone(), cache, data_buffer, key)
.await?
else {
return Err(MapError::Corrupted {
#[cfg(feature = "_test")]
backtrace: std::backtrace::Backtrace::capture(),
});
};
// Only migrate this item if it is itself the newest version of its
// key; shadowed (stale) versions are simply dropped here.
if found_address == item_address {
// Re-read the item: the buffer was clobbered by the fetch above.
let item = item_header
.read_item(flash, data_buffer, item_address, u32::MAX)
.await?
.unwrap()?;
item.write(flash, next_page_write_address).await?;
next_page_write_address = item.header.next_item_address::<S>(next_page_write_address);
}
}
// Everything worth keeping has been copied; reopen the source page.
open_page(flash, flash_range.clone(), cache, source_page).await?;
Ok(())
}
/// Try to repair the map after an interrupted operation (e.g. power loss).
///
/// Runs the general (item-level) repair first, then finishes any page
/// migration that was cut short.
pub async fn try_repair<I: StorageItem, S: NorFlash>(
flash: &mut S,
flash_range: Range<u32>,
mut cache: impl CacheImpl,
data_buffer: &mut [u8],
) -> Result<(), MapError<I::Error, S::Error>> {
// The cache cannot be trusted after a crash; wipe it. The borrow is then
// dropped so the rest of this function can pass fresh `NoCache` handles
// (the generic `cache` borrow would otherwise conflict).
let cache = cache.inner();
cache.invalidate_cache_state();
#[allow(dropping_references)]
drop(cache);
crate::try_general_repair(flash, flash_range.clone()).await?;
// If a partial-open page exists but the page after it (the migration
// buffer) is not open, a migration was interrupted mid-way.
if let Some(partial_open_page) = find_first_page(
flash,
flash_range.clone(),
cache::NoCache::new().inner(),
0,
PageState::PartialOpen,
)
.await?
{
let buffer_page = next_page::<S>(flash_range.clone(), partial_open_page);
if !get_page_state(
flash,
flash_range.clone(),
cache::NoCache::new().inner(),
buffer_page,
)
.await?
.is_open()
{
// Reset the interrupted target page, re-mark it as the active
// page, then redo the migration from the still-intact buffer page.
// NOTE(review): this presumes `open_page` clears the page so the
// migration can rewrite it — confirm against `open_page`'s impl.
open_page(
flash,
flash_range.clone(),
cache::NoCache::new().inner(),
partial_open_page,
)
.await?;
partial_close_page(
flash,
flash_range.clone(),
cache::NoCache::new().inner(),
partial_open_page,
)
.await?;
migrate_items::<I, _>(
flash,
flash_range.clone(),
cache::NoCache::new().inner(),
data_buffer,
buffer_page,
partial_open_page,
)
.await?;
}
}
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
use futures_test::test;
// Mock flashes — used with ranges of 0x1000 (4 pages x 1024 B) and
// 0x40 (2 pages x 32 B) respectively in the tests below.
type MockFlashBig = mock_flash::MockFlashBase<4, 4, 256>;
type MockFlashTiny = mock_flash::MockFlashBase<2, 1, 32>;
/// Test key-value item: a `u8` key with a variable-length byte value.
#[derive(Debug, PartialEq, Eq)]
struct MockStorageItem {
// 0xFF is treated as an invalid key by the (de)serializer below.
key: u8,
value: Vec<u8>,
}
/// Errors produced by `MockStorageItem`'s (de)serialization.
#[derive(Debug, PartialEq, Eq)]
enum MockStorageItemError {
BufferTooSmall,
InvalidKey,
BufferTooBig,
}
// Wire format of the mock item: [key, len, value[0..len]].
impl StorageItem for MockStorageItem {
    type Key = u8;
    type Error = MockStorageItemError;

    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, Self::Error> {
        let total_len = 2 + self.value.len();
        // Check order matters for the reported error: buffer size first,
        // then value length, then key validity.
        if buffer.len() < total_len {
            return Err(MockStorageItemError::BufferTooSmall);
        }
        if self.value.len() > 255 {
            return Err(MockStorageItemError::BufferTooBig);
        }
        if self.key == 0xFF {
            return Err(MockStorageItemError::InvalidKey);
        }
        buffer[0] = self.key;
        buffer[1] = self.value.len() as u8;
        buffer[2..total_len].copy_from_slice(&self.value);
        Ok(total_len)
    }

    fn deserialize_from(buffer: &[u8]) -> Result<Self, Self::Error>
    where
        Self: Sized,
    {
        // Need at least the two header bytes (key + length).
        let [key, len] = match buffer {
            [key, len, ..] => [*key, *len],
            _ => return Err(MockStorageItemError::BufferTooSmall),
        };
        if key == 0xFF {
            return Err(MockStorageItemError::InvalidKey);
        }
        // `get` returns None when the declared length overruns the buffer.
        match buffer.get(2..2 + usize::from(len)) {
            Some(value) => Ok(Self {
                key,
                value: value.to_vec(),
            }),
            None => Err(MockStorageItemError::BufferTooSmall),
        }
    }

    fn key(&self) -> Self::Key {
        self.key
    }
}
// End-to-end smoke test: fetching from empty flash, storing/shadowing keys,
// and surviving thousands of stores (forcing page closes and migrations).
#[test]
async fn store_and_fetch() {
let mut flash = MockFlashBig::default();
let flash_range = 0x000..0x1000;
let mut data_buffer = AlignedBuf([0; 128]);
// An empty flash yields None for any key...
let item = fetch_item::<MockStorageItem, _>(
&mut flash,
flash_range.clone(),
cache::NoCache::new(),
&mut data_buffer,
0,
)
.await
.unwrap();
assert_eq!(item, None);
let item = fetch_item::<MockStorageItem, _>(
&mut flash,
flash_range.clone(),
cache::NoCache::new(),
&mut data_buffer,
60,
)
.await
.unwrap();
assert_eq!(item, None);
// ...including the key the mock item treats as invalid (0xFF).
let item = fetch_item::<MockStorageItem, _>(
&mut flash,
flash_range.clone(),
cache::NoCache::new(),
&mut data_buffer,
0xFF,
)
.await
.unwrap();
assert_eq!(item, None);
// Store key 0 twice; the second value must shadow the first.
store_item::<_, _>(
&mut flash,
flash_range.clone(),
cache::NoCache::new(),
&mut data_buffer,
&MockStorageItem {
key: 0,
value: vec![5],
},
)
.await
.unwrap();
store_item::<_, _>(
&mut flash,
flash_range.clone(),
cache::NoCache::new(),
&mut data_buffer,
&MockStorageItem {
key: 0,
value: vec![5, 6],
},
)
.await
.unwrap();
let item = fetch_item::<MockStorageItem, _>(
&mut flash,
flash_range.clone(),
cache::NoCache::new(),
&mut data_buffer,
0,
)
.await
.unwrap()
.unwrap();
assert_eq!(item.key, 0);
assert_eq!(item.value, vec![5, 6]);
// A different key must not disturb key 0.
store_item::<_, _>(
&mut flash,
flash_range.clone(),
cache::NoCache::new(),
&mut data_buffer,
&MockStorageItem {
key: 1,
value: vec![2, 2, 2, 2, 2, 2],
},
)
.await
.unwrap();
let item = fetch_item::<MockStorageItem, _>(
&mut flash,
flash_range.clone(),
cache::NoCache::new(),
&mut data_buffer,
0,
)
.await
.unwrap()
.unwrap();
assert_eq!(item.key, 0);
assert_eq!(item.value, vec![5, 6]);
let item = fetch_item::<MockStorageItem, _>(
&mut flash,
flash_range.clone(),
cache::NoCache::new(),
&mut data_buffer,
1,
)
.await
.unwrap()
.unwrap();
assert_eq!(item.key, 1);
assert_eq!(item.value, vec![2, 2, 2, 2, 2, 2]);
// Hammer the map with 4000 stores over 10 rotating keys so pages are
// closed and migrated many times over.
for index in 0..4000 {
store_item::<_, _>(
&mut flash,
flash_range.clone(),
cache::NoCache::new(),
&mut data_buffer,
&MockStorageItem {
key: (index % 10) as u8,
value: vec![(index % 10) as u8 * 2; index % 10],
},
)
.await
.unwrap();
}
// Each of the ten keys must resolve to its latest stored value.
for i in 0..10 {
let item = fetch_item::<MockStorageItem, _>(
&mut flash,
flash_range.clone(),
cache::NoCache::new(),
&mut data_buffer,
i,
)
.await
.unwrap()
.unwrap();
assert_eq!(item.key, i);
assert_eq!(item.value, vec![(i % 10) as u8 * 2; (i % 10) as usize]);
}
// Churn an unrelated key 4000 more times...
for _ in 0..4000 {
store_item::<_, _>(
&mut flash,
flash_range.clone(),
cache::NoCache::new(),
&mut data_buffer,
&MockStorageItem {
key: 11,
value: vec![0; 10],
},
)
.await
.unwrap();
}
// ...and verify the original ten keys survive the churn intact.
for i in 0..10 {
let item = fetch_item::<MockStorageItem, _>(
&mut flash,
flash_range.clone(),
cache::NoCache::new(),
&mut data_buffer,
i,
)
.await
.unwrap()
.unwrap();
assert_eq!(item.key, i);
assert_eq!(item.value, vec![(i % 10) as u8 * 2; (i % 10) as usize]);
}
println!(
"Erases: {}, reads: {}, writes: {}",
flash.erases, flash.reads, flash.writes
);
}
// Fill the tiny two-page flash to capacity and verify the overflow store
// fails with `FullStorage` while earlier items stay readable.
#[test]
async fn store_too_many_items() {
    const UPPER_BOUND: u8 = 2;
    let mut tiny_flash = MockFlashTiny::default();
    let mut data_buffer = AlignedBuf([0; 128]);

    // `i` is already a `u8`, so no cast is needed for the key or value byte
    // (the original `i as u8` casts were redundant).
    for i in 0..UPPER_BOUND {
        let item = MockStorageItem {
            key: i,
            value: vec![i; i as usize],
        };
        println!("Storing {item:?}");
        store_item::<_, _>(
            &mut tiny_flash,
            0x00..0x40,
            cache::NoCache::new(),
            &mut data_buffer,
            &item,
        )
        .await
        .unwrap();
    }

    // One more item no longer fits anywhere.
    assert_eq!(
        store_item::<_, _>(
            &mut tiny_flash,
            0x00..0x40,
            cache::NoCache::new(),
            &mut data_buffer,
            &MockStorageItem {
                key: UPPER_BOUND,
                value: vec![0; UPPER_BOUND as usize],
            },
        )
        .await,
        Err(MapError::FullStorage)
    );

    // Everything stored before the overflow is still retrievable.
    for i in 0..UPPER_BOUND {
        let item = fetch_item::<MockStorageItem, _>(
            &mut tiny_flash,
            0x00..0x40,
            cache::NoCache::new(),
            &mut data_buffer,
            i,
        )
        .await
        .unwrap()
        .unwrap();
        println!("Fetched {item:?}");
        assert_eq!(item.value, vec![i; i as usize]);
    }
}
// Same overflow scenario as `store_too_many_items`, but on the big flash
// with enough items (of growing size) to fill all four pages.
#[test]
async fn store_too_many_items_big() {
    const UPPER_BOUND: u8 = 67;
    let mut big_flash = MockFlashBig::default();
    let mut data_buffer = AlignedBuf([0; 128]);

    // `i` is already a `u8`; the original `i as u8` casts were redundant.
    for i in 0..UPPER_BOUND {
        let item = MockStorageItem {
            key: i,
            value: vec![i; i as usize],
        };
        println!("Storing {item:?}");
        store_item::<_, _>(
            &mut big_flash,
            0x0000..0x1000,
            cache::NoCache::new(),
            &mut data_buffer,
            &item,
        )
        .await
        .unwrap();
    }

    // The next item no longer fits.
    assert_eq!(
        store_item::<_, _>(
            &mut big_flash,
            0x0000..0x1000,
            cache::NoCache::new(),
            &mut data_buffer,
            &MockStorageItem {
                key: UPPER_BOUND,
                value: vec![0; UPPER_BOUND as usize],
            },
        )
        .await,
        Err(MapError::FullStorage)
    );

    // Everything stored before the overflow is still retrievable.
    for i in 0..UPPER_BOUND {
        let item = fetch_item::<MockStorageItem, _>(
            &mut big_flash,
            0x0000..0x1000,
            cache::NoCache::new(),
            &mut data_buffer,
            i,
        )
        .await
        .unwrap()
        .unwrap();
        println!("Fetched {item:?}");
        assert_eq!(item.value, vec![i; i as usize]);
    }
}
// Repeatedly shadow 24 keys (100 rounds) on a wide flash and verify every
// key still resolves to its expected value afterwards.
#[test]
async fn store_many_items_big() {
    let mut flash = mock_flash::MockFlashBase::<4, 1, 4096>::default();
    let mut data_buffer = AlignedBuf([0; 128]);

    // Value length stored for each of the 24 keys.
    // (Renamed from the misspelled `LENGHT_PER_KEY`.)
    const LENGTH_PER_KEY: [usize; 24] = [
        11, 13, 6, 13, 13, 10, 2, 3, 5, 36, 1, 65, 4, 6, 1, 15, 10, 7, 3, 15, 9, 3, 4, 5,
    ];

    for _ in 0..100 {
        for i in 0..24 {
            let item = MockStorageItem {
                key: i as u8,
                value: vec![i as u8; LENGTH_PER_KEY[i]],
            };
            store_item::<_, _>(
                &mut flash,
                0x0000..0x4000,
                cache::NoCache::new(),
                &mut data_buffer,
                &item,
            )
            .await
            .unwrap();
        }
    }

    // Every key must resolve to its (unchanged) latest value.
    for i in 0..24 {
        let item = fetch_item::<MockStorageItem, _>(
            &mut flash,
            0x0000..0x4000,
            cache::NoCache::new(),
            &mut data_buffer,
            i as u8,
        )
        .await
        .unwrap()
        .unwrap();
        println!("Fetched {item:?}");
        assert_eq!(item.value, vec![i as u8; LENGTH_PER_KEY[i]]);
    }
}
}