use core::{marker::PhantomData, mem::size_of};
use embedded_storage_async::nor_flash::MultiwriteNorFlash;
#[cfg(feature = "postcard")]
use serde::{Deserialize, Serialize};
use crate::item::{Item, ItemHeader, ItemIter};
use self::{
cache::KeyCacheImpl,
item::{ItemHeaderIter, ItemUnborrowed},
};
use super::{
Debug, Error, GenericStorage, MAX_WORD_SIZE, NorFlash, NorFlashExt, PageState, Range, cache,
calculate_page_address, calculate_page_end_address, calculate_page_index, calculate_page_size,
item, run_with_auto_repair,
};
/// Configuration for a [`MapStorage`]: the flash address range the map lives in.
///
/// Construct via [`MapConfig::new`] / [`MapConfig::try_new`], which validate
/// the range against the flash geometry of `S`.
pub struct MapConfig<S> {
    // Byte range in flash reserved for the map (validated in `try_new`).
    flash_range: Range<u32>,
    // Ties the config to a specific flash type without storing a flash value.
    _phantom: PhantomData<S>,
}
impl<S: NorFlash> MapConfig<S> {
    /// Creates a map configuration for the given flash address range.
    ///
    /// # Panics
    ///
    /// Panics when the range violates any invariant checked by [`Self::try_new`].
    #[must_use]
    pub const fn new(flash_range: Range<u32>) -> Self {
        Self::try_new(flash_range).expect("Map config must be correct")
    }

    /// Fallible variant of [`Self::new`].
    ///
    /// Returns `None` when:
    /// - either bound of `flash_range` is not aligned to `S::ERASE_SIZE`,
    /// - the range spans fewer than two erase pages,
    /// - a page cannot hold three words, or the word size exceeds
    ///   [`MAX_WORD_SIZE`].
    #[must_use]
    pub const fn try_new(flash_range: Range<u32>) -> Option<Self> {
        let erase_size = S::ERASE_SIZE as u32;
        // Both ends of the range must fall on an erase-page boundary.
        if !flash_range.start.is_multiple_of(erase_size)
            || !flash_range.end.is_multiple_of(erase_size)
        {
            return None;
        }
        // The map needs at least two erase pages to operate.
        if flash_range.end - flash_range.start < erase_size * 2 {
            return None;
        }
        // Sanity limits on the flash geometry itself.
        if S::ERASE_SIZE < S::WORD_SIZE * 3 || S::WORD_SIZE > MAX_WORD_SIZE {
            return None;
        }
        Some(Self {
            flash_range,
            _phantom: PhantomData,
        })
    }
}
/// A key-value map stored in NOR flash.
///
/// The newest write for a key shadows older ones; old entries are cleaned up
/// when pages are migrated.
pub struct MapStorage<K: Key, S: NorFlash, C: KeyCacheImpl<K>> {
    // Shared page/cache machinery used by the storage implementations.
    inner: GenericStorage<S, C>,
    // The key type only appears in method signatures.
    _phantom: PhantomData<K>,
}
impl<S: NorFlash, C: KeyCacheImpl<K>, K: Key> MapStorage<K, S, C> {
/// Creates a new map storage from a flash driver, a validated config and a
/// key-location cache.
pub const fn new(storage: S, config: MapConfig<S>, cache: C) -> Self {
    Self {
        inner: GenericStorage {
            flash: storage,
            flash_range: config.flash_range,
            cache,
        },
        _phantom: PhantomData,
    }
}
/// Fetches the newest value stored under `search_key`, or `None` when the
/// key is not present.
///
/// `data_buffer` must be large enough for the serialized key + value; the
/// returned value may borrow from it.
pub async fn fetch_item<'d, V: Value<'d>>(
    &mut self,
    data_buffer: &'d mut [u8],
    search_key: &K,
) -> Result<Option<V>, Error<S::Error>> {
    // Run the search; on detected corruption, repair the storage and retry.
    let result = run_with_auto_repair!(
        function = self.fetch_item_with_location(data_buffer, search_key).await,
        repair = self.try_repair(data_buffer).await?
    );
    let Some((item, _, item_key_len)) = result? else {
        return Ok(None);
    };
    let data_len = item.header.length as usize;
    // The key length may already be known from the scan; otherwise derive it
    // from the serialized data.
    let item_key_len = match item_key_len {
        Some(item_key_len) => item_key_len,
        None => K::get_len(&data_buffer[..data_len])?,
    };
    // The value is stored directly after the key inside the item data.
    let (value, _size) =
        V::deserialize_from(&data_buffer[item_key_len..][..data_len - item_key_len])
            .map_err(Error::SerializationError)?;
    Ok(Some(value))
}
/// Searches flash for the newest item stored under `search_key`.
///
/// Returns the (unborrowed) item, the flash address it was found at, and the
/// serialized key length when known from the scan (`None` when the item came
/// from the location cache and the key was never deserialized).
#[allow(clippy::type_complexity)]
async fn fetch_item_with_location(
    &mut self,
    data_buffer: &mut [u8],
    search_key: &K,
) -> Result<Option<(ItemUnborrowed, u32, Option<usize>)>, Error<S::Error>> {
    // A dirty cache may hold state from an interrupted operation.
    if self.inner.cache.is_dirty() {
        self.inner.cache.invalidate_cache_state();
    }
    // Fast path: try the cached location for this key, if any.
    'cache: {
        if let Some(cached_location) = self.inner.cache.key_location(search_key) {
            let page_index = calculate_page_index::<S>(self.flash_range(), cached_location);
            let page_data_end_address =
                calculate_page_end_address::<S>(self.flash_range(), page_index)
                    - S::WORD_SIZE as u32;
            let Some(header) = ItemHeader::read_new(
                &mut self.inner.flash,
                cached_location,
                page_data_end_address,
            )
            .await?
            else {
                // No header at the cached address: the cache is stale; fall
                // back to the full scan below.
                #[expect(clippy::assertions_on_constants, reason = "Clippy is wrong here")]
                {
                    assert!(
                        !cfg!(feature = "_test"),
                        "Wrong cache value. Addr: {cached_location}"
                    );
                }
                self.inner.cache.invalidate_cache_state();
                break 'cache;
            };
            let item = header
                .read_item(
                    &mut self.inner.flash,
                    data_buffer,
                    cached_location,
                    page_data_end_address,
                )
                .await?;
            match item {
                item::MaybeItem::Corrupted(_, _) | item::MaybeItem::Erased(_, _) => {
                    // The cached address does not hold a valid item either.
                    #[expect(clippy::assertions_on_constants, reason = "Clippy is wrong here")]
                    {
                        assert!(
                            !cfg!(feature = "_test"),
                            "Wrong cache value. Addr: {cached_location}"
                        );
                    }
                    self.inner.cache.invalidate_cache_state();
                    break 'cache;
                }
                item::MaybeItem::Present(item) => {
                    // Cache hit. The key was never deserialized, hence `None`.
                    return Ok(Some((item.unborrow(), cached_location, None)));
                }
            }
        }
    }
    // Slow path: find the most recently used page.
    let mut last_used_page = self
        .inner
        .find_first_page(0, PageState::PartialOpen)
        .await?;
    if last_used_page.is_none() {
        // No partially open page: either the page before the first open page
        // is closed (that one is the newest), or the storage is empty.
        if let Some(first_open_page) = self.inner.find_first_page(0, PageState::Open).await? {
            let previous_page = self.inner.previous_page(first_open_page);
            if self.inner.get_page_state(previous_page).await?.is_closed() {
                last_used_page = Some(previous_page);
            } else {
                // Storage is empty; there is nothing to find.
                self.inner.cache.unmark_dirty();
                return Ok(None);
            }
        } else {
            // No page open at all: the storage is in an invalid state.
            return Err(Error::Corrupted {
                #[cfg(feature = "_test")]
                backtrace: std::backtrace::Backtrace::capture(),
            });
        }
    }
    // Walk pages from newest to oldest, scanning every item on each page.
    let mut current_page_to_check = last_used_page.unwrap();
    let mut newest_found_item_data = None;
    loop {
        let page_data_start_address =
            calculate_page_address::<S>(self.flash_range(), current_page_to_check)
                + S::WORD_SIZE as u32;
        let page_data_end_address =
            calculate_page_end_address::<S>(self.flash_range(), current_page_to_check)
                - S::WORD_SIZE as u32;
        let mut it = ItemIter::new(page_data_start_address, page_data_end_address);
        // Later matches overwrite earlier ones, so after the loop this holds
        // the newest match on the page.
        while let Some((item, address)) = it.next(&mut self.inner.flash, data_buffer).await? {
            let (found_key, found_key_len) = K::deserialize_from(item.data())?;
            if found_key == *search_key {
                newest_found_item_data = Some((address, found_key_len));
            }
        }
        if let Some((newest_found_item_address, _)) = newest_found_item_data.as_ref() {
            self.inner
                .cache
                .notice_key_location(search_key, *newest_found_item_address, false);
            break;
        }
        // Not on this page; continue with the previous (older) page as long
        // as it is closed.
        let previous_page = self.inner.previous_page(current_page_to_check);
        if self.inner.get_page_state(previous_page).await? != PageState::Closed {
            self.inner.cache.unmark_dirty();
            return Ok(None);
        }
        current_page_to_check = previous_page;
    }
    self.inner.cache.unmark_dirty();
    if let Some((newest_found_item_address, newest_found_item_key_len)) = newest_found_item_data
    {
        // Re-read the found item. Bounds were already checked during the
        // scan, so `u32::MAX` is used as the end address here.
        let item =
            ItemHeader::read_new(&mut self.inner.flash, newest_found_item_address, u32::MAX)
                .await?
                .ok_or_else(|| {
                    // The header was just read during the scan, so it must exist.
                    Error::Corrupted {
                        #[cfg(feature = "_test")]
                        backtrace: std::backtrace::Backtrace::capture(),
                    }
                })?
                .read_item(
                    &mut self.inner.flash,
                    data_buffer,
                    newest_found_item_address,
                    u32::MAX,
                )
                .await?;
        Ok(Some((
            item.unwrap()?.unborrow(),
            newest_found_item_address,
            Some(newest_found_item_key_len),
        )))
    } else {
        Ok(None)
    }
}
/// Stores `item` under `key`. The new value shadows any previously stored
/// value for the same key.
pub async fn store_item<'d, V: Value<'d>>(
    &mut self,
    data_buffer: &mut [u8],
    key: &K,
    item: &V,
) -> Result<(), Error<S::Error>> {
    // On detected corruption, repair the storage and retry the store.
    run_with_auto_repair!(
        function = self.store_item_inner(data_buffer, key, item).await,
        repair = self.try_repair(data_buffer).await?
    )
}
/// Implementation of [`Self::store_item`] without the auto-repair wrapper.
///
/// Loops at most once per page: each iteration either writes into the
/// partially open page, or closes it and prepares the next page (migrating
/// the following page's live items when needed) before trying again.
async fn store_item_inner(
    &mut self,
    data_buffer: &mut [u8],
    key: &K,
    item: &dyn Value<'_>,
) -> Result<(), Error<S::Error>> {
    if self.inner.cache.is_dirty() {
        self.inner.cache.invalidate_cache_state();
    }
    let mut recursion_level = 0;
    loop {
        // Every page has been tried without finding room: storage is full.
        if recursion_level == self.inner.get_pages(0).count() {
            self.inner.cache.unmark_dirty();
            return Err(Error::FullStorage);
        }
        let next_page_to_use = if let Some(partial_open_page) = self
            .inner
            .find_first_page(0, PageState::PartialOpen)
            .await?
        {
            // The page after the active one must be open; anything else is
            // an invalid on-flash state.
            if !self
                .inner
                .get_page_state(self.inner.next_page(partial_open_page))
                .await?
                .is_open()
            {
                return Err(Error::Corrupted {
                    #[cfg(feature = "_test")]
                    backtrace: std::backtrace::Backtrace::capture(),
                });
            }
            let page_data_start_address =
                calculate_page_address::<S>(self.flash_range(), partial_open_page)
                    + S::WORD_SIZE as u32;
            let page_data_end_address =
                calculate_page_end_address::<S>(self.flash_range(), partial_open_page)
                    - S::WORD_SIZE as u32;
            // Serialize the key followed directly by the value.
            let key_len = key.serialize_into(data_buffer)?;
            let item_data_length = key_len
                + item
                    .serialize_into(&mut data_buffer[key_len..])
                    .map_err(Error::SerializationError)?;
            // An item must fit the u16 length field and a single page.
            if item_data_length > u16::MAX as usize
                || item_data_length
                    > calculate_page_size::<S>()
                        .saturating_sub(ItemHeader::data_address::<S>(0) as usize)
            {
                self.inner.cache.unmark_dirty();
                return Err(Error::ItemTooBig);
            }
            let free_spot_address = self
                .inner
                .find_next_free_item_spot(
                    page_data_start_address,
                    page_data_end_address,
                    item_data_length as u32,
                )
                .await?;
            if let Some(free_spot_address) = free_spot_address {
                self.inner
                    .cache
                    .notice_key_location(key, free_spot_address, true);
                Item::write_new(
                    &mut self.inner.flash,
                    self.inner.flash_range.clone(),
                    &mut self.inner.cache,
                    free_spot_address,
                    &data_buffer[..item_data_length],
                )
                .await?;
                self.inner.cache.unmark_dirty();
                return Ok(());
            }
            // Page is full: close it and move on to the next page.
            self.inner.close_page(partial_open_page).await?;
            Some(self.inner.next_page(partial_open_page))
        } else {
            None
        };
        if let Some(next_page_to_use) = next_page_to_use {
            let next_page_state = self.inner.get_page_state(next_page_to_use).await?;
            if !next_page_state.is_open() {
                return Err(Error::Corrupted {
                    #[cfg(feature = "_test")]
                    backtrace: std::backtrace::Backtrace::capture(),
                });
            }
            self.inner.partial_close_page(next_page_to_use).await?;
            // If the page after the new active page is not open, migrate its
            // live items into the new active page so it can be erased.
            let next_buffer_page = self.inner.next_page(next_page_to_use);
            let next_buffer_page_state = self.inner.get_page_state(next_buffer_page).await?;
            if !next_buffer_page_state.is_open() {
                self.migrate_items(data_buffer, next_buffer_page, next_page_to_use)
                    .await?;
            }
        } else {
            // No page is partially open yet: start using the first open page.
            let Some(first_open_page) = self.inner.find_first_page(0, PageState::Open).await?
            else {
                return Err(Error::Corrupted {
                    #[cfg(feature = "_test")]
                    backtrace: std::backtrace::Backtrace::capture(),
                });
            };
            self.inner.partial_close_page(first_open_page).await?;
        }
        recursion_level += 1;
    }
}
/// Removes (erases) every stored occurrence of `search_key`.
///
/// Requires a flash that allows multiple writes to the same word
/// (`MultiwriteNorFlash`).
pub async fn remove_item(
    &mut self,
    data_buffer: &mut [u8],
    search_key: &K,
) -> Result<(), Error<S::Error>>
where
    S: MultiwriteNorFlash,
{
    // On detected corruption, repair the storage and retry the removal.
    run_with_auto_repair!(
        function = self.remove_item_inner(data_buffer, Some(search_key)).await,
        repair = self.try_repair(data_buffer).await?
    )
}
/// Removes (erases) every item in the map, regardless of key.
pub async fn remove_all_items(&mut self, data_buffer: &mut [u8]) -> Result<(), Error<S::Error>>
where
    S: MultiwriteNorFlash,
{
    // `None` as the search key means "match every item".
    run_with_auto_repair!(
        function = self.remove_item_inner(data_buffer, None).await,
        repair = self.try_repair(data_buffer).await?
    )
}
/// Erases items: all items matching `search_key`, or every item when `None`.
async fn remove_item_inner(
    &mut self,
    data_buffer: &mut [u8],
    search_key: Option<&K>,
) -> Result<(), Error<S::Error>>
where
    S: MultiwriteNorFlash,
{
    // Keep the cache consistent with what is about to be erased.
    if let Some(key) = &search_key {
        self.inner.cache.notice_key_erased(key);
    } else {
        self.inner.cache.invalidate_cache_state();
    }
    // Start right after the most recently used page so the wrap-around scan
    // visits every page exactly once.
    let last_used_page = self
        .inner
        .find_first_page(0, PageState::PartialOpen)
        .await?
        .unwrap_or_default();
    for page_index in self.inner.get_pages(self.inner.next_page(last_used_page)) {
        // Open pages hold no data.
        if self.inner.get_page_state(page_index).await?.is_open() {
            continue;
        }
        let page_data_start_address =
            calculate_page_address::<S>(self.flash_range(), page_index) + S::WORD_SIZE as u32;
        let page_data_end_address =
            calculate_page_end_address::<S>(self.flash_range(), page_index)
                - S::WORD_SIZE as u32;
        let mut item_headers =
            ItemHeaderIter::new(page_data_start_address, page_data_end_address);
        while let (Some(item_header), item_address) =
            item_headers.next(&mut self.inner.flash).await?
        {
            let item = item_header
                .read_item(
                    &mut self.inner.flash,
                    data_buffer,
                    item_address,
                    page_data_end_address,
                )
                .await?;
            match item {
                // Already erased or unreadable: nothing to do here.
                item::MaybeItem::Corrupted(_, _) | item::MaybeItem::Erased(_, _) => continue,
                item::MaybeItem::Present(item) => {
                    // `None` search key matches everything.
                    let item_match = match search_key {
                        Some(search_key) => K::deserialize_from(item.data())?.0 == *search_key,
                        _ => true,
                    };
                    if item_match {
                        item.header
                            .erase_data(
                                &mut self.inner.flash,
                                self.inner.flash_range.clone(),
                                &mut self.inner.cache,
                                item_address,
                            )
                            .await?;
                    }
                }
            }
        }
    }
    self.inner.cache.unmark_dirty();
    Ok(())
}
/// Returns an iterator over every item currently stored in the map,
/// starting at the oldest page.
///
/// A key that was written multiple times is yielded once per surviving
/// write; the last yielded occurrence is the current value.
pub async fn fetch_all_items(
    &mut self,
    data_buffer: &mut [u8],
) -> Result<MapItemIter<'_, K, S, C>, Error<S::Error>> {
    // Determine the page to start iterating from.
    let first_page = run_with_auto_repair!(
        function = {
            match self
                .inner
                .find_first_page(0, PageState::PartialOpen)
                .await?
            {
                Some(last_used_page) => {
                    // The oldest data lives right after the newest page.
                    Ok(self.inner.next_page(last_used_page))
                }
                None => {
                    if let Some(first_open_page) =
                        self.inner.find_first_page(0, PageState::Open).await?
                    {
                        let previous_page = self.inner.previous_page(first_open_page);
                        if self.inner.get_page_state(previous_page).await?.is_closed() {
                            Ok(first_open_page)
                        } else {
                            // Storage is empty; any start page will do.
                            self.inner.cache.unmark_dirty();
                            Ok(0)
                        }
                    } else {
                        // No page open at all: invalid on-flash state.
                        Err(Error::Corrupted {
                            #[cfg(feature = "_test")]
                            backtrace: std::backtrace::Backtrace::capture(),
                        })
                    }
                }
            }
        },
        repair = self.try_repair(data_buffer).await?
    )?;
    // Skip the page-state word at each end of the page.
    let start_address =
        calculate_page_address::<S>(self.flash_range(), first_page) + S::WORD_SIZE as u32;
    let end_address =
        calculate_page_end_address::<S>(self.flash_range(), first_page) - S::WORD_SIZE as u32;
    Ok(MapItemIter {
        storage: self,
        first_page,
        current_page_index: first_page,
        current_iter: ItemIter::new(start_address, end_address),
        _key: PhantomData,
    })
}
/// Copies every still-current item from `source_page` into `target_page`,
/// then erases (opens) the source page.
async fn migrate_items(
    &mut self,
    data_buffer: &mut [u8],
    source_page: usize,
    target_page: usize,
) -> Result<(), Error<S::Error>> {
    let mut next_page_write_address =
        calculate_page_address::<S>(self.flash_range(), target_page) + S::WORD_SIZE as u32;
    let mut it = ItemIter::new(
        calculate_page_address::<S>(self.flash_range(), source_page) + S::WORD_SIZE as u32,
        calculate_page_end_address::<S>(self.flash_range(), source_page) - S::WORD_SIZE as u32,
    );
    while let Some((item, item_address)) = it.next(&mut self.inner.flash, data_buffer).await? {
        let (key, _) = K::deserialize_from(item.data())?;
        // Clear the dirty marker before running the nested fetch below.
        self.inner.cache.unmark_dirty();
        // Find the newest version of this key anywhere in the map.
        let Some((found_item, found_address, _)) =
            self.fetch_item_with_location(data_buffer, &key).await?
        else {
            return Err(Error::Corrupted {
                #[cfg(feature = "_test")]
                backtrace: std::backtrace::Backtrace::capture(),
            });
        };
        let found_item = found_item
            .reborrow(data_buffer)
            .ok_or_else(|| Error::LogicBug {
                #[cfg(feature = "_test")]
                backtrace: std::backtrace::Backtrace::capture(),
            })?;
        // Only migrate when this source item IS the newest version of the
        // key; otherwise a newer item elsewhere already supersedes it.
        if found_address == item_address {
            self.inner
                .cache
                .notice_key_location(&key, next_page_write_address, true);
            found_item
                .write(
                    &mut self.inner.flash,
                    self.inner.flash_range.clone(),
                    &mut self.inner.cache,
                    next_page_write_address,
                )
                .await?;
            next_page_write_address = found_item
                .header
                .next_item_address::<S>(next_page_write_address);
        }
    }
    // All live data has been copied out; erase the source page.
    self.inner.open_page(source_page).await?;
    Ok(())
}
/// Attempts to bring the storage back to a consistent state after
/// corruption was detected, so the failed operation can be retried.
async fn try_repair(&mut self, data_buffer: &mut [u8]) -> Result<(), Error<S::Error>> {
    // The cache may reflect the corrupted state; throw it away.
    self.inner.cache.invalidate_cache_state();
    self.inner.try_general_repair().await?;
    // If the page after the active page is not open, a migration was
    // interrupted: erase and re-activate the partial page, then redo the
    // migration of the buffer page's items into it.
    if let Some(partial_open_page) = self
        .inner
        .find_first_page(0, PageState::PartialOpen)
        .await?
    {
        let buffer_page = self.inner.next_page(partial_open_page);
        if !self.inner.get_page_state(buffer_page).await?.is_open() {
            self.inner.open_page(partial_open_page).await?;
            self.inner.partial_close_page(partial_open_page).await?;
            self.migrate_items(data_buffer, buffer_page, partial_open_page)
                .await?;
        }
    }
    Ok(())
}
/// Erases the entire flash range, removing all stored items.
pub fn erase_all(&mut self) -> impl Future<Output = Result<(), Error<S::Error>>> {
    self.inner.erase_all()
}
/// The number of overhead bytes each stored item occupies on top of its
/// key + value payload.
#[must_use]
pub const fn item_overhead_size() -> u32 {
    GenericStorage::<S, C>::item_overhead_size()
}
/// Consumes the storage, returning the flash driver and cache it was built from.
pub fn destroy(self) -> (S, C) {
    self.inner.destroy()
}
/// Gives mutable access to the underlying flash driver.
pub const fn flash(&mut self) -> &mut S {
    self.inner.flash()
}
/// The flash address range this map operates on.
pub const fn flash_range(&self) -> Range<u32> {
    self.inner.flash_range()
}
/// Renders a human-readable dump of all items (test/std builds only).
#[cfg(any(test, feature = "std"))]
pub fn print_items(&mut self) -> impl Future<Output = String> {
    self.inner.print_items()
}
}
/// Iterator over all items in a [`MapStorage`], created by
/// [`MapStorage::fetch_all_items`].
pub struct MapItemIter<'s, K: Key, S: NorFlash, C: KeyCacheImpl<K>> {
    storage: &'s mut MapStorage<K, S, C>,
    // Page the iteration started on; reaching it again ends the iteration.
    first_page: usize,
    current_page_index: usize,
    // Item iterator within the current page.
    pub(crate) current_iter: ItemIter,
    _key: PhantomData<K>,
}
impl<K: Key, S: NorFlash, C: KeyCacheImpl<K>> MapItemIter<'_, K, S, C> {
    /// Returns the next stored (key, value) pair, or `None` once every page
    /// has been visited.
    pub async fn next<'a, V: Value<'a>>(
        &mut self,
        data_buffer: &'a mut [u8],
    ) -> Result<Option<(K, V)>, Error<S::Error>> {
        let item = loop {
            // Yield the next item of the current page if there is one.
            if let Some((item, _address)) = self
                .current_iter
                .next(&mut self.storage.inner.flash, data_buffer)
                .await?
            {
                break item;
            }
            // Page exhausted: advance to the next page that holds data.
            loop {
                self.current_page_index = self.storage.inner.next_page(self.current_page_index);
                // Wrapped around to the start page: iteration is done.
                if self.current_page_index == self.first_page {
                    return Ok(None);
                }
                match self
                    .storage
                    .inner
                    .get_page_state(self.current_page_index)
                    .await
                {
                    Ok(PageState::Closed | PageState::PartialOpen) => {
                        self.current_iter = ItemIter::new(
                            calculate_page_address::<S>(
                                self.storage.inner.flash_range.clone(),
                                self.current_page_index,
                            ) + S::WORD_SIZE as u32,
                            calculate_page_end_address::<S>(
                                self.storage.inner.flash_range.clone(),
                                self.current_page_index,
                            ) - S::WORD_SIZE as u32,
                        );
                        break;
                    }
                    // Open pages hold no items. NOTE(review): this arm also
                    // silently skips pages whose state read returned `Err` —
                    // confirm that swallowing the error here is intended.
                    _ => continue,
                }
            }
        };
        // An item is the serialized key directly followed by the value.
        let data_len = item.header.length as usize;
        let (key, key_len) = K::deserialize_from(item.data())?;
        let (value, _value_len) = V::deserialize_from(&data_buffer[..data_len][key_len..])
            .map_err(Error::SerializationError)?;
        Ok(Some((key, value)))
    }
}
/// A key for the map storage.
///
/// Keys must round-trip through raw bytes and be comparable for equality so
/// stored items can be looked up.
pub trait Key: Eq + Clone + Sized {
    /// Serializes this key into the front of `buffer`, returning the number
    /// of bytes written.
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError>;
    /// Deserializes a key from the front of `buffer`, returning the key and
    /// the number of bytes it occupied.
    fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError>;
    /// Returns the serialized length of the key at the front of `buffer`
    /// without constructing it for the caller.
    fn get_len(buffer: &[u8]) -> Result<usize, SerializationError> {
        let (_, len) = Self::deserialize_from(buffer)?;
        Ok(len)
    }
}
/// Implements [`Key`] for a little-endian, fixed-width integer type.
macro_rules! impl_key_num {
    ($int:ty) => {
        impl Key for $int {
            fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
                let bytes = self.to_le_bytes();
                let target = buffer
                    .get_mut(..bytes.len())
                    .ok_or(SerializationError::BufferTooSmall)?;
                target.copy_from_slice(&bytes);
                Ok(bytes.len())
            }
            fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
                let raw = buffer
                    .get(..size_of::<Self>())
                    .ok_or(SerializationError::BufferTooSmall)?;
                // `raw` is exactly `size_of::<Self>()` bytes, so the array
                // conversion cannot fail.
                let value = Self::from_le_bytes(raw.try_into().unwrap());
                Ok((value, size_of::<Self>()))
            }
            fn get_len(_buffer: &[u8]) -> Result<usize, SerializationError> {
                // Integer keys always have a fixed width.
                Ok(size_of::<Self>())
            }
        }
    };
}
// Little-endian integer keys of every standard width.
impl_key_num!(u8);
impl_key_num!(u16);
impl_key_num!(u32);
impl_key_num!(u64);
impl_key_num!(u128);
impl_key_num!(i8);
impl_key_num!(i16);
impl_key_num!(i32);
impl_key_num!(i64);
impl_key_num!(i128);
/// Fixed-size byte arrays are stored verbatim and always occupy `N` bytes.
impl<const N: usize> Key for [u8; N] {
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
        let target = buffer
            .get_mut(..N)
            .ok_or(SerializationError::BufferTooSmall)?;
        target.copy_from_slice(self);
        Ok(N)
    }
    fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
        let raw = buffer.get(..N).ok_or(SerializationError::BufferTooSmall)?;
        // `raw` is exactly `N` bytes, so the array conversion cannot fail.
        Ok((raw.try_into().unwrap(), N))
    }
    fn get_len(_buffer: &[u8]) -> Result<usize, SerializationError> {
        // Byte-array keys always have a fixed width.
        Ok(N)
    }
}
/// The unit key occupies zero bytes; useful for maps that store one item.
impl Key for () {
    fn serialize_into(&self, _buffer: &mut [u8]) -> Result<usize, SerializationError> {
        Ok(0)
    }
    fn deserialize_from(_buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
        Ok(((), 0))
    }
}
/// A value that can be stored in the map.
///
/// The lifetime allows zero-copy values (e.g. `&'a [u8]`) that borrow from
/// the caller's data buffer.
pub trait Value<'a> {
    /// Serializes the value into the front of `buffer`, returning the number
    /// of bytes written.
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError>;
    /// Deserializes a value from `buffer`, returning it together with the
    /// number of bytes consumed.
    fn deserialize_from(buffer: &'a [u8]) -> Result<(Self, usize), SerializationError>
    where
        Self: Sized;
}
/// A bool is stored as one byte: `0` is `false`, any other value is `true`.
impl<'a> Value<'a> for bool {
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
        let byte = u8::from(*self);
        <u8 as Value>::serialize_into(&byte, buffer)
    }
    fn deserialize_from(buffer: &'a [u8]) -> Result<(Self, usize), SerializationError>
    where
        Self: Sized,
    {
        let (raw, consumed) = <u8 as Value>::deserialize_from(buffer)?;
        Ok((raw != 0, consumed))
    }
}
/// An `Option` is stored as a one-byte bool tag, followed by the inner
/// value's bytes when the tag is `true`.
impl<'a, T: Value<'a>> Value<'a> for Option<T> {
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
        match self {
            None => <bool as Value>::serialize_into(&false, buffer),
            Some(inner) => {
                let tag_len = <bool as Value>::serialize_into(&true, buffer)?;
                let payload = buffer
                    .get_mut(tag_len..)
                    .ok_or(SerializationError::BufferTooSmall)?;
                let value_len = <T as Value>::serialize_into(inner, payload)?;
                Ok(tag_len + value_len)
            }
        }
    }
    fn deserialize_from(buffer: &'a [u8]) -> Result<(Self, usize), SerializationError>
    where
        Self: Sized,
    {
        let (is_some, tag_size) = <bool as Value>::deserialize_from(buffer)?;
        if !is_some {
            return Ok((None, tag_size));
        }
        let payload = buffer
            .get(tag_size..)
            .ok_or(SerializationError::BufferTooSmall)?;
        let (value, value_size) = <T as Value>::deserialize_from(payload)?;
        Ok((Some(value), tag_size + value_size))
    }
}
/// Byte slices are stored verbatim; on deserialization the value borrows the
/// whole buffer.
impl<'a> Value<'a> for &'a [u8] {
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
        let len = self.len();
        let target = buffer
            .get_mut(..len)
            .ok_or(SerializationError::BufferTooSmall)?;
        target.copy_from_slice(self);
        Ok(len)
    }
    fn deserialize_from(buffer: &'a [u8]) -> Result<(Self, usize), SerializationError>
    where
        Self: Sized,
    {
        Ok((buffer, buffer.len()))
    }
}
/// Marker trait: opts a serde type into being stored via postcard encoding.
#[cfg(feature = "postcard")]
pub trait PostcardValue<'a>: Serialize + Deserialize<'a> {}
/// Blanket [`Value`] impl for all [`PostcardValue`] marker types.
#[cfg(feature = "postcard")]
impl<'a, T> Value<'a> for T
where
    T: PostcardValue<'a>,
{
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
        Ok(postcard::to_slice(self, buffer).map(|s| s.len())?)
    }
    fn deserialize_from(buffer: &'a [u8]) -> Result<(Self, usize), SerializationError>
    where
        Self: Sized,
    {
        // NOTE(review): the reported consumed size is the whole buffer, not
        // the number of bytes postcard actually read. Within this crate a
        // value is the tail of an item so the two coincide — confirm before
        // relying on the size elsewhere.
        Ok((postcard::from_bytes(buffer)?, buffer.len()))
    }
}
#[cfg(feature = "postcard")]
impl From<postcard::Error> for SerializationError {
    /// Maps postcard's error variants onto the storage-agnostic
    /// [`SerializationError`] variants.
    fn from(error: postcard::Error) -> SerializationError {
        use postcard::Error::*;
        match error {
            SerializeBufferFull => SerializationError::BufferTooSmall,
            SerializeSeqLengthUnknown => SerializationError::InvalidData,
            DeserializeUnexpectedEnd
            | DeserializeBadVarint
            | DeserializeBadBool
            | DeserializeBadChar
            | DeserializeBadUtf8
            | DeserializeBadOption
            | DeserializeBadEnum
            | DeserializeBadEncoding
            | DeserializeBadCrc => SerializationError::InvalidFormat,
            // Any other variant keeps its raw discriminant value.
            _ => SerializationError::Custom(error as i32),
        }
    }
}
/// Implements [`Value`] for a little-endian number type and for fixed-size
/// arrays of it.
macro_rules! impl_map_item_num {
    ($num:ty) => {
        impl<'a> Value<'a> for $num {
            fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
                let self_bytes = self.to_le_bytes();
                match buffer.get_mut(..self_bytes.len()) {
                    Some(buffer) => {
                        buffer.copy_from_slice(&self_bytes);
                        Ok(buffer.len())
                    }
                    None => Err(SerializationError::BufferTooSmall),
                }
            }
            fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
                let value = Self::from_le_bytes(
                    // The slice is exactly `size_of::<Self>()` bytes, so the
                    // array conversion cannot fail.
                    buffer
                        .get(..size_of::<Self>())
                        .ok_or(SerializationError::BufferTooSmall)?
                        .try_into()
                        .unwrap(),
                );
                Ok((value, size_of::<Self>()))
            }
        }
        impl<'a, const N: usize> Value<'a> for [$num; N] {
            fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
                let elem_size = size_of::<$num>();
                let buffer = buffer
                    .get_mut(0..elem_size * N)
                    .ok_or(SerializationError::BufferTooSmall)?;
                // Write each element little-endian, back to back.
                for (chunk, number) in buffer.chunks_exact_mut(elem_size).zip(self.iter()) {
                    chunk.copy_from_slice(&number.to_le_bytes())
                }
                Ok(elem_size * N)
            }
            fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
                let elem_size = size_of::<$num>();
                if buffer.len() < elem_size * N {
                    return Err(SerializationError::BufferTooSmall);
                }
                let mut array = [0 as $num; N];
                // `zip` with the array stops after N chunks even when the
                // buffer is longer than the array needs.
                for (chunk, number) in buffer.chunks_exact(elem_size).zip(array.iter_mut()) {
                    *number = <$num>::from_le_bytes(chunk.try_into().unwrap());
                }
                Ok((array, elem_size * N))
            }
        }
    };
}
// Little-endian number values (and arrays thereof) for all standard widths.
impl_map_item_num!(u8);
impl_map_item_num!(u16);
impl_map_item_num!(u32);
impl_map_item_num!(u64);
impl_map_item_num!(u128);
impl_map_item_num!(i8);
impl_map_item_num!(i16);
impl_map_item_num!(i32);
impl_map_item_num!(i64);
impl_map_item_num!(i128);
impl_map_item_num!(f32);
impl_map_item_num!(f64);
/// Errors produced while (de)serializing keys and values.
#[non_exhaustive]
#[derive(Debug, PartialEq, Eq, Clone)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum SerializationError {
    /// The provided buffer was too small for the serialized data.
    BufferTooSmall,
    /// The data to (de)serialize was invalid.
    InvalidData,
    /// The stored bytes could not be decoded.
    InvalidFormat,
    /// Implementation-specific error code (e.g. a raw postcard error).
    Custom(i32),
}
impl core::fmt::Display for SerializationError {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
match self {
SerializationError::BufferTooSmall => write!(f, "Buffer too small"),
SerializationError::InvalidData => write!(f, "Invalid data"),
SerializationError::InvalidFormat => write!(f, "Invalid format"),
SerializationError::Custom(val) => write!(f, "Custom error: {val}"),
}
}
}
#[cfg(test)]
mod tests {
    use crate::{AlignedBuf, cache::NoCache, mock_flash};
    use super::*;
    use futures_test::test;
    // Two mock flash geometries used below. NOTE(review): the meaning of the
    // three const parameters is defined by `mock_flash::MockFlashBase` —
    // confirm there before changing them.
    type MockFlashBig = mock_flash::MockFlashBase<4, 4, 256>;
    type MockFlashTiny = mock_flash::MockFlashBase<2, 1, 32>;
    // Round-trips values through the map, overwriting keys repeatedly, and
    // verifies the newest value always wins.
    #[test]
    async fn store_and_fetch() {
        let mut storage = MapStorage::<u8, _, _>::new(
            MockFlashBig::default(),
            MapConfig::new(0x000..0x1000),
            cache::NoCache::new(),
        );
        let mut data_buffer = AlignedBuf([0; 128]);
        let start_snapshot = storage.flash().stats_snapshot();
        // Unknown keys must report `None`.
        let item = storage
            .fetch_item::<&[u8]>(&mut data_buffer, &0)
            .await
            .unwrap();
        assert_eq!(item, None);
        let item = storage
            .fetch_item::<&[u8]>(&mut data_buffer, &60)
            .await
            .unwrap();
        assert_eq!(item, None);
        let item = storage
            .fetch_item::<&[u8]>(&mut data_buffer, &0xFF)
            .await
            .unwrap();
        assert_eq!(item, None);
        // Storing the same key twice: the second value shadows the first.
        storage
            .store_item(&mut data_buffer, &0u8, &[5u8])
            .await
            .unwrap();
        storage
            .store_item(&mut data_buffer, &0u8, &[5u8, 6])
            .await
            .unwrap();
        let item = storage
            .fetch_item::<&[u8]>(&mut data_buffer, &0)
            .await
            .unwrap()
            .unwrap();
        assert_eq!(item, &[5, 6]);
        // Storing a different key does not disturb existing entries.
        storage
            .store_item(&mut data_buffer, &1u8, &[2u8, 2, 2, 2, 2, 2])
            .await
            .unwrap();
        let item = storage
            .fetch_item::<&[u8]>(&mut data_buffer, &0)
            .await
            .unwrap()
            .unwrap();
        assert_eq!(item, &[5, 6]);
        let item = storage
            .fetch_item::<&[u8]>(&mut data_buffer, &1)
            .await
            .unwrap()
            .unwrap();
        assert_eq!(item, &[2, 2, 2, 2, 2, 2]);
        // Many writes across 10 keys force page closes and migrations.
        for index in 0..4000 {
            storage
                .store_item(
                    &mut data_buffer,
                    &((index % 10) as u8),
                    &vec![(index % 10) as u8 * 2; index % 10].as_slice(),
                )
                .await
                .unwrap();
        }
        for i in 0..10 {
            let item = storage
                .fetch_item::<&[u8]>(&mut data_buffer, &i)
                .await
                .unwrap()
                .unwrap();
            assert_eq!(item, &vec![(i % 10) * 2; (i % 10) as usize]);
        }
        // Hammering one key must not corrupt the others.
        for _ in 0..4000 {
            storage
                .store_item(&mut data_buffer, &11u8, &[0; 10])
                .await
                .unwrap();
        }
        for i in 0..10 {
            let item = storage
                .fetch_item::<&[u8]>(&mut data_buffer, &i)
                .await
                .unwrap()
                .unwrap();
            assert_eq!(item, &vec![(i % 10) * 2; (i % 10) as usize]);
        }
        println!(
            "{:?}",
            start_snapshot.compare_to(storage.flash().stats_snapshot()),
        );
    }
    // On the tiny flash, only a few items fit; the next store must fail with
    // `FullStorage` while keeping the existing items readable.
    #[test]
    async fn store_too_many_items() {
        const UPPER_BOUND: u8 = 3;
        let mut storage = MapStorage::new(
            MockFlashTiny::default(),
            const { MapConfig::new(0x00..0x40) },
            NoCache::new(),
        );
        let mut data_buffer = AlignedBuf([0; 128]);
        for i in 0..UPPER_BOUND {
            println!("Storing {i:?}");
            storage
                .store_item(&mut data_buffer, &i, &vec![i; i as usize].as_slice())
                .await
                .unwrap();
        }
        assert_eq!(
            storage
                .store_item(
                    &mut data_buffer,
                    &UPPER_BOUND,
                    &vec![0; UPPER_BOUND as usize].as_slice(),
                )
                .await,
            Err(Error::FullStorage)
        );
        for i in 0..UPPER_BOUND {
            let item = storage
                .fetch_item::<&[u8]>(&mut data_buffer, &i)
                .await
                .unwrap()
                .unwrap();
            println!("Fetched {item:?}");
            assert_eq!(item, vec![i; i as usize]);
        }
    }
    // Same overflow scenario as above, on the bigger flash geometry.
    #[test]
    async fn store_too_many_items_big() {
        const UPPER_BOUND: u8 = 68;
        let mut storage = MapStorage::new(
            MockFlashBig::default(),
            const { MapConfig::new(0x0000..0x1000) },
            NoCache::new(),
        );
        let mut data_buffer = AlignedBuf([0; 128]);
        for i in 0..UPPER_BOUND {
            println!("Storing {i:?}");
            storage
                .store_item(&mut data_buffer, &i, &vec![i; i as usize].as_slice())
                .await
                .unwrap();
        }
        assert_eq!(
            storage
                .store_item(
                    &mut data_buffer,
                    &UPPER_BOUND,
                    &vec![0; UPPER_BOUND as usize].as_slice(),
                )
                .await,
            Err(Error::FullStorage)
        );
        for i in 0..UPPER_BOUND {
            let item = storage
                .fetch_item::<&[u8]>(&mut data_buffer, &i)
                .await
                .unwrap()
                .unwrap();
            println!("Fetched {item:?}");
            assert_eq!(item, vec![i; i as usize]);
        }
    }
    // Repeated overwrites with varied value lengths; all keys must keep
    // their last stored value.
    #[test]
    async fn store_many_items_big() {
        let mut storage = MapStorage::new(
            mock_flash::MockFlashBase::<4, 1, 4096>::default(),
            const { MapConfig::new(0x0000..0x4000) },
            NoCache::new(),
        );
        let mut data_buffer = AlignedBuf([0; 128]);
        const LENGHT_PER_KEY: [usize; 24] = [
            11, 13, 6, 13, 13, 10, 2, 3, 5, 36, 1, 65, 4, 6, 1, 15, 10, 7, 3, 15, 9, 3, 4, 5,
        ];
        for _ in 0..100 {
            #[allow(clippy::needless_range_loop)]
            for i in 0..24 {
                storage
                    .store_item(
                        &mut data_buffer,
                        &(i as u16),
                        &vec![i as u8; LENGHT_PER_KEY[i]].as_slice(),
                    )
                    .await
                    .unwrap();
            }
        }
        #[allow(clippy::needless_range_loop)]
        for i in 0..24 {
            let item = storage
                .fetch_item::<&[u8]>(&mut data_buffer, &(i as u16))
                .await
                .unwrap()
                .unwrap();
            println!("Fetched {item:?}");
            assert_eq!(item, vec![i as u8; LENGHT_PER_KEY[i]]);
        }
    }
    // Removes keys one by one (highest first) and checks that only the
    // removed key disappears each time.
    #[test]
    async fn remove_items() {
        let mut storage = MapStorage::new(
            mock_flash::MockFlashBase::<4, 1, 4096>::new(
                mock_flash::WriteCountCheck::Twice,
                None,
                true,
            ),
            const { MapConfig::new(0x0000..0x4000) },
            NoCache::new(),
        );
        let mut data_buffer = AlignedBuf([0; 128]);
        // Add some data to flash
        for j in 0..10 {
            for i in 0..24 {
                storage
                    .store_item(
                        &mut data_buffer,
                        &(i as u8),
                        &vec![i as u8; j + 2].as_slice(),
                    )
                    .await
                    .unwrap();
            }
        }
        for j in (0..24).rev() {
            // Are all things still in flash that we expect?
            for i in 0..=j {
                assert!(
                    storage
                        .fetch_item::<&[u8]>(&mut data_buffer, &i)
                        .await
                        .unwrap()
                        .is_some()
                );
            }
            // Remove the item
            storage.remove_item(&mut data_buffer, &j).await.unwrap();
            // Are all things still in flash that we expect?
            for i in 0..j {
                assert!(
                    storage
                        .fetch_item::<&[u8]>(&mut data_buffer, &i)
                        .await
                        .unwrap()
                        .is_some()
                );
            }
            assert!(
                storage
                    .fetch_item::<&[u8]>(&mut data_buffer, &j)
                    .await
                    .unwrap()
                    .is_none()
            );
        }
    }
    // `remove_all_items` must erase every key at once.
    #[test]
    async fn remove_all() {
        let mut storage = MapStorage::new(
            mock_flash::MockFlashBase::<4, 1, 4096>::new(
                mock_flash::WriteCountCheck::Twice,
                None,
                true,
            ),
            const { MapConfig::new(0x0000..0x4000) },
            NoCache::new(),
        );
        let mut data_buffer = AlignedBuf([0; 128]);
        // Add some data to flash
        for value in 0..10 {
            for key in 0..24u8 {
                storage
                    .store_item(&mut data_buffer, &key, &vec![key; value + 2].as_slice())
                    .await
                    .unwrap();
            }
        }
        // Sanity check that all keys are present before the removal.
        for key in 0..24u8 {
            assert!(
                storage
                    .fetch_item::<&[u8]>(&mut data_buffer, &key)
                    .await
                    .unwrap()
                    .is_some()
            );
        }
        // Remove all the items
        storage.remove_all_items(&mut data_buffer).await.unwrap();
        // Verify that none of the keys are present in flash.
        for key in 0..24 {
            assert!(
                storage
                    .fetch_item::<&[u8]>(&mut data_buffer, &key)
                    .await
                    .unwrap()
                    .is_none()
            );
        }
    }
    // The largest item that fits a page stores fine; one byte more must be
    // rejected with `ItemTooBig`.
    #[test]
    async fn store_too_big_item() {
        let mut storage = MapStorage::new(
            MockFlashBig::new(mock_flash::WriteCountCheck::Twice, None, true),
            const { MapConfig::new(0x000..0x1000) },
            NoCache::new(),
        );
        storage
            .store_item(&mut [0; 1024], &0u8, &[0u8; 1024 - 4 * 2 - 8 - 1])
            .await
            .unwrap();
        assert_eq!(
            storage
                .store_item(&mut [0; 1024], &0u8, &[0u8; 1024 - 4 * 2 - 8 - 1 + 1],)
                .await,
            Err(Error::ItemTooBig)
        );
    }
    // `fetch_all_items` yields every surviving write; a key written several
    // times appears several times, with the last occurrence being current.
    #[test]
    async fn item_iterator() {
        const UPPER_BOUND: u8 = 64;
        let mut storage = MapStorage::new(
            MockFlashBig::default(),
            const { MapConfig::new(0x000..0x1000) },
            NoCache::new(),
        );
        let mut data_buffer = AlignedBuf([0; 128]);
        for i in 0..UPPER_BOUND {
            storage
                .store_item(&mut data_buffer, &i, &vec![i; i as usize].as_slice())
                .await
                .unwrap();
        }
        // Overwrite key 1 multiple times.
        for i in 0..10 {
            storage
                .store_item(&mut data_buffer, &1u8, &vec![i; i as usize].as_slice())
                .await
                .unwrap();
        }
        let mut map_iter = storage.fetch_all_items(&mut data_buffer).await.unwrap();
        let mut count = 0;
        let mut last_value_buffer = [0u8; 64];
        let mut last_value_length = 0;
        while let Ok(Some((key, value))) = map_iter.next::<&[u8]>(&mut data_buffer).await {
            if key == 1 {
                // This is the key we stored multiple times, record the last value.
                last_value_length = value.len();
                last_value_buffer[..value.len()].copy_from_slice(value);
            } else {
                assert_eq!(value, vec![key; key as usize]);
                count += 1;
            }
        }
        assert_eq!(last_value_length, 9);
        assert_eq!(
            &last_value_buffer[..last_value_length],
            vec![9u8; 9].as_slice()
        );
        // Every key except key 1 was counted exactly once.
        assert_eq!(count + 1, UPPER_BOUND);
    }
    // The zero-byte unit key works like any other key.
    #[test]
    async fn store_unit_key() {
        let mut storage = MapStorage::new(
            MockFlashBig::default(),
            const { MapConfig::new(0x000..0x1000) },
            NoCache::new(),
        );
        let mut data_buffer = AlignedBuf([0; 128]);
        let item = storage
            .fetch_item::<&[u8]>(&mut data_buffer, &())
            .await
            .unwrap();
        assert_eq!(item, None);
        storage
            .store_item(&mut data_buffer, &(), &[5u8])
            .await
            .unwrap();
        storage
            .store_item(&mut data_buffer, &(), &[5u8, 6])
            .await
            .unwrap();
        let item = storage
            .fetch_item::<&[u8]>(&mut data_buffer, &())
            .await
            .unwrap()
            .unwrap();
        assert_eq!(item, &[5, 6]);
    }
    // Option<T> round-trips: Some is a true-tag plus payload, None is a
    // single false-tag byte.
    #[test]
    async fn option_value() {
        let mut buffer = [0; 2];
        assert_eq!(Some(42u8).serialize_into(&mut buffer), Ok(2));
        assert_eq!(Option::<u8>::deserialize_from(&buffer), Ok((Some(42u8), 2)));
        assert_eq!(buffer, [1, 42]);
        let mut buffer = [0; 1];
        assert_eq!(Option::<u8>::None.serialize_into(&mut buffer), Ok(1));
        assert_eq!(Option::<u8>::deserialize_from(&buffer), Ok((None, 1)));
        assert_eq!(buffer, [0]);
    }
    // Number arrays round-trip as packed little-endian elements.
    #[test]
    async fn array_value() {
        let mut buffer = [0; 3];
        assert_eq!(Value::serialize_into(&[1u8, 2, 3], &mut buffer), Ok(3));
        assert_eq!(buffer, [1, 2, 3]);
        assert_eq!(
            <[u8; 3] as Value>::deserialize_from(&buffer),
            Ok(([1, 2, 3], 3))
        );
        let mut buffer = [0; 4];
        assert_eq!(
            Value::serialize_into(&[0x1234u16, 0x5678], &mut buffer),
            Ok(4)
        );
        assert_eq!(buffer, [0x34, 0x12, 0x78, 0x56]);
        assert_eq!(
            <[u16; 2]>::deserialize_from(&buffer),
            Ok(([0x1234, 0x5678], 4))
        );
    }
    // A serde type opted in via `PostcardValue` round-trips through postcard.
    #[cfg(feature = "postcard")]
    #[test]
    async fn postcard_value() {
        #[derive(PartialEq, Debug, serde::Serialize, serde::Deserialize)]
        struct Foo(u32);
        impl crate::map::PostcardValue<'_> for Foo {}
        let mut buffer = [0; 8];
        assert_eq!(Value::serialize_into(&Foo(123), &mut buffer), Ok(1));
        assert_eq!(
            <Foo as Value>::deserialize_from(&buffer[..1]),
            Ok((Foo(123), 1))
        );
    }
}