use core::{marker::PhantomData, mem::size_of};

use embedded_storage_async::nor_flash::MultiwriteNorFlash;

#[cfg(feature = "postcard")]
use serde::{Deserialize, Serialize};

use crate::item::{Item, ItemHeader, ItemIter};

use self::{
    cache::KeyCacheImpl,
    item::{ItemHeaderIter, ItemUnborrowed},
};

use super::{
    Debug, Error, GenericStorage, MAX_WORD_SIZE, NorFlash, NorFlashExt, PageState, Range, cache,
    calculate_page_address, calculate_page_end_address, calculate_page_index, calculate_page_size,
    item, run_with_auto_repair,
};
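/// Configuration for a [`MapStorage`]: the flash address range the map may
/// use, validated against the geometry of the flash type `S`.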
pub struct MapConfig<S> {
    flash_range: Range<u32>,
    _phantom: PhantomData<S>,
}

impl<S: NorFlash> MapConfig<S> {
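    /// Creates a new map config.
    ///
    /// # Panics
    ///
    /// Panics if the flash range is not valid for this flash type. Use
    /// [`Self::try_new`] for a non-panicking variant.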
    #[must_use]
    pub const fn new(flash_range: Range<u32>) -> Self {
        Self::try_new(flash_range).expect("Map config must be correct")
    }

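    /// Creates a new map config.
    ///
    /// Returns `None` if the flash range is not aligned to the erase size, is
    /// smaller than two pages, or if the flash's word and erase sizes fall
    /// outside the supported bounds.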
    #[must_use]
    pub const fn try_new(flash_range: Range<u32>) -> Option<Self> {
        if !flash_range.start.is_multiple_of(S::ERASE_SIZE as u32) {
            return None;
        }
        if !flash_range.end.is_multiple_of(S::ERASE_SIZE as u32) {
            return None;
        }
        if flash_range.end - flash_range.start < S::ERASE_SIZE as u32 * 2 {
            return None;
        }

        if S::ERASE_SIZE < S::WORD_SIZE * 3 {
            return None;
        }
        if S::WORD_SIZE > MAX_WORD_SIZE {
            return None;
        }

        Some(Self {
            flash_range,
            _phantom: PhantomData,
        })
    }
}

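/// A key-value map stored on NOR flash.
///
/// Storing a key again shadows the old value: fetching always returns the most
/// recently stored value for a key.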
pub struct MapStorage<K: Key, S: NorFlash, C: KeyCacheImpl<K>> {
    inner: GenericStorage<S, C>,
    _phantom: PhantomData<K>,
}

impl<S: NorFlash, C: KeyCacheImpl<K>, K: Key> MapStorage<K, S, C> {
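    /// Creates a new map storage instance from a flash, a validated config and
    /// a key cache.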
    pub const fn new(storage: S, config: MapConfig<S>, cache: C) -> Self {
        Self {
            inner: GenericStorage {
                flash: storage,
                flash_range: config.flash_range,
                cache,
            },
            _phantom: PhantomData,
        }
    }

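    /// Fetches the last stored value for the given key, deserialized as `V`.
    ///
    /// Returns `Ok(None)` if no value is stored for the key. The returned
    /// value may borrow from `data_buffer`, which must be able to hold the
    /// largest serialized key + value pair.
    ///
    /// A minimal usage sketch, assuming the `MockFlashBig` and `AlignedBuf`
    /// helpers used by this module's tests (any [`NorFlash`] implementation
    /// works the same way):
    ///
    /// ```rust,ignore
    /// let mut storage = MapStorage::<u8, _, _>::new(
    ///     MockFlashBig::default(),
    ///     MapConfig::new(0x000..0x1000),
    ///     cache::NoCache::new(),
    /// );
    /// let mut data_buffer = AlignedBuf([0; 128]);
    ///
    /// storage.store_item(&mut data_buffer, &42u8, &[1u8, 2, 3]).await?;
    /// let value = storage.fetch_item::<&[u8]>(&mut data_buffer, &42u8).await?;
    /// assert_eq!(value, Some(&[1u8, 2, 3][..]));
    /// ```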
    pub async fn fetch_item<'d, V: Value<'d>>(
        &mut self,
        data_buffer: &'d mut [u8],
        search_key: &K,
    ) -> Result<Option<V>, Error<S::Error>> {
        let result = run_with_auto_repair!(
            function = self.fetch_item_with_location(data_buffer, search_key).await,
            repair = self.try_repair(data_buffer).await?
        );

        let Some((item, _, item_key_len)) = result? else {
            return Ok(None);
        };

        let data_len = item.header.length as usize;
        let item_key_len = match item_key_len {
            Some(item_key_len) => item_key_len,
            None => K::get_len(&data_buffer[..data_len])?,
        };

        let (value, _size) =
            V::deserialize_from(&data_buffer[item_key_len..][..data_len - item_key_len])
                .map_err(Error::SerializationError)?;
        Ok(Some(value))
    }

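    /// Fetches the newest item for `search_key`, returning the (unborrowed)
    /// item, its flash address, and the key length when it is already known
    /// (`None` on a cache hit, where the key was never deserialized).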
    #[allow(clippy::type_complexity)]
    async fn fetch_item_with_location(
        &mut self,
        data_buffer: &mut [u8],
        search_key: &K,
    ) -> Result<Option<(ItemUnborrowed, u32, Option<usize>)>, Error<S::Error>> {
        if self.inner.cache.is_dirty() {
            self.inner.cache.invalidate_cache_state();
        }

        // Fast path: if the cache knows where the key lives, try to read the
        // item directly from that address.
        'cache: {
            if let Some(cached_location) = self.inner.cache.key_location(search_key) {
                let page_index = calculate_page_index::<S>(self.flash_range(), cached_location);
                let page_data_end_address =
                    calculate_page_end_address::<S>(self.flash_range(), page_index)
                        - S::WORD_SIZE as u32;

                let Some(header) = ItemHeader::read_new(
                    &mut self.inner.flash,
                    cached_location,
                    page_data_end_address,
                )
                .await?
                else {
                    #[expect(clippy::assertions_on_constants, reason = "Clippy is wrong here")]
                    {
                        assert!(
                            !cfg!(feature = "_test"),
                            "Wrong cache value. Addr: {cached_location}"
                        );
                    }
                    self.inner.cache.invalidate_cache_state();
                    break 'cache;
                };

                let item = header
                    .read_item(
                        &mut self.inner.flash,
                        data_buffer,
                        cached_location,
                        page_data_end_address,
                    )
                    .await?;

                match item {
                    item::MaybeItem::Corrupted(_, _) | item::MaybeItem::Erased(_, _) => {
                        #[expect(clippy::assertions_on_constants, reason = "Clippy is wrong here")]
                        {
                            assert!(
                                !cfg!(feature = "_test"),
                                "Wrong cache value. Addr: {cached_location}"
                            );
                        }

                        self.inner.cache.invalidate_cache_state();
                        break 'cache;
                    }
                    item::MaybeItem::Present(item) => {
                        return Ok(Some((item.unborrow(), cached_location, None)));
                    }
                }
            }
        }

        // Slow path: locate the last used page, then scan through the closed
        // pages for the newest occurrence of the key.
        let mut last_used_page = self
            .inner
            .find_first_page(0, PageState::PartialOpen)
            .await?;

        if last_used_page.is_none() {
            if let Some(first_open_page) = self.inner.find_first_page(0, PageState::Open).await? {
                let previous_page = self.inner.previous_page(first_open_page);
                if self.inner.get_page_state(previous_page).await?.is_closed() {
                    last_used_page = Some(previous_page);
                } else {
                    // All pages are open, so the map is empty.
                    self.inner.cache.unmark_dirty();
                    return Ok(None);
                }
            } else {
                return Err(Error::Corrupted {
                    #[cfg(feature = "_test")]
                    backtrace: std::backtrace::Backtrace::capture(),
                });
            }
        }

        let mut current_page_to_check = last_used_page.unwrap();
        let mut newest_found_item_data = None;

        loop {
            let page_data_start_address =
                calculate_page_address::<S>(self.flash_range(), current_page_to_check)
                    + S::WORD_SIZE as u32;
            let page_data_end_address =
                calculate_page_end_address::<S>(self.flash_range(), current_page_to_check)
                    - S::WORD_SIZE as u32;

            let mut it = ItemIter::new(page_data_start_address, page_data_end_address);
            while let Some((item, address)) = it.next(&mut self.inner.flash, data_buffer).await? {
                let (found_key, found_key_len) = K::deserialize_from(item.data())?;
                if found_key == *search_key {
                    newest_found_item_data = Some((address, found_key_len));
                }
            }

            if let Some((newest_found_item_address, _)) = newest_found_item_data.as_ref() {
                self.inner
                    .cache
                    .notice_key_location(search_key, *newest_found_item_address, false);

                break;
            }

            let previous_page = self.inner.previous_page(current_page_to_check);

            if self.inner.get_page_state(previous_page).await? != PageState::Closed {
                self.inner.cache.unmark_dirty();
                return Ok(None);
            }

            current_page_to_check = previous_page;
        }

        self.inner.cache.unmark_dirty();

        if let Some((newest_found_item_address, newest_found_item_key_len)) = newest_found_item_data
        {
            // Read the item again: the scan above kept only its address, and the
            // data buffer may since have been overwritten by other items.
            let item =
                ItemHeader::read_new(&mut self.inner.flash, newest_found_item_address, u32::MAX)
                    .await?
                    .ok_or_else(|| Error::Corrupted {
                        #[cfg(feature = "_test")]
                        backtrace: std::backtrace::Backtrace::capture(),
                    })?
                    .read_item(
                        &mut self.inner.flash,
                        data_buffer,
                        newest_found_item_address,
                        u32::MAX,
                    )
                    .await?;

            Ok(Some((
                item.unwrap()?.unborrow(),
                newest_found_item_address,
                Some(newest_found_item_key_len),
            )))
        } else {
            Ok(None)
        }
    }

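    /// Stores a key-value pair in flash.
    ///
    /// The new value shadows any older value stored for the same key.
    /// `data_buffer` must be able to hold the serialized key + value pair.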
    pub async fn store_item<'d, V: Value<'d>>(
        &mut self,
        data_buffer: &mut [u8],
        key: &K,
        item: &V,
    ) -> Result<(), Error<S::Error>> {
        run_with_auto_repair!(
            function = self.store_item_inner(data_buffer, key, item).await,
            repair = self.try_repair(data_buffer).await?
        )
    }

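    /// The inner implementation of [`Self::store_item`].
    ///
    /// Takes the value as `&dyn Value` so the bulk of the store logic is not
    /// monomorphized for every value type.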
    async fn store_item_inner(
        &mut self,
        data_buffer: &mut [u8],
        key: &K,
        item: &dyn Value<'_>,
    ) -> Result<(), Error<S::Error>> {
        if self.inner.cache.is_dirty() {
            self.inner.cache.invalidate_cache_state();
        }

        let mut recursion_level = 0;
        loop {
            // If we've cycled through every page without finding room, the map is full.
            if recursion_level == self.inner.get_pages(0).count() {
                self.inner.cache.unmark_dirty();
                return Err(Error::FullStorage);
            }

            let next_page_to_use = if let Some(partial_open_page) = self
                .inner
                .find_first_page(0, PageState::PartialOpen)
                .await?
            {
                // We have a partial-open page to write to. The page after it
                // must be open, otherwise the state is corrupted.
                if !self
                    .inner
                    .get_page_state(self.inner.next_page(partial_open_page))
                    .await?
                    .is_open()
                {
                    return Err(Error::Corrupted {
                        #[cfg(feature = "_test")]
                        backtrace: std::backtrace::Backtrace::capture(),
                    });
                }

                let page_data_start_address =
                    calculate_page_address::<S>(self.flash_range(), partial_open_page)
                        + S::WORD_SIZE as u32;
                let page_data_end_address =
                    calculate_page_end_address::<S>(self.flash_range(), partial_open_page)
                        - S::WORD_SIZE as u32;

                let key_len = key.serialize_into(data_buffer)?;
                let item_data_length = key_len
                    + item
                        .serialize_into(&mut data_buffer[key_len..])
                        .map_err(Error::SerializationError)?;

                if item_data_length > u16::MAX as usize
                    || item_data_length
                        > calculate_page_size::<S>()
                            .saturating_sub(ItemHeader::data_address::<S>(0) as usize)
                {
                    self.inner.cache.unmark_dirty();
                    return Err(Error::ItemTooBig);
                }

                let free_spot_address = self
                    .inner
                    .find_next_free_item_spot(
                        page_data_start_address,
                        page_data_end_address,
                        item_data_length as u32,
                    )
                    .await?;

                if let Some(free_spot_address) = free_spot_address {
                    self.inner
                        .cache
                        .notice_key_location(key, free_spot_address, true);
                    Item::write_new(
                        &mut self.inner.flash,
                        self.inner.flash_range.clone(),
                        &mut self.inner.cache,
                        free_spot_address,
                        &data_buffer[..item_data_length],
                    )
                    .await?;

                    self.inner.cache.unmark_dirty();
                    return Ok(());
                }

                // The item doesn't fit on this page, so close it and move on.
                self.inner.close_page(partial_open_page).await?;
                Some(self.inner.next_page(partial_open_page))
            } else {
                None
            };

            if let Some(next_page_to_use) = next_page_to_use {
                let next_page_state = self.inner.get_page_state(next_page_to_use).await?;

                if !next_page_state.is_open() {
                    return Err(Error::Corrupted {
                        #[cfg(feature = "_test")]
                        backtrace: std::backtrace::Backtrace::capture(),
                    });
                }

                self.inner.partial_close_page(next_page_to_use).await?;

                // If the page after the new one isn't open, migrate its live
                // items into the page we just opened so it can be erased.
                let next_buffer_page = self.inner.next_page(next_page_to_use);
                let next_buffer_page_state = self.inner.get_page_state(next_buffer_page).await?;

                if !next_buffer_page_state.is_open() {
                    self.migrate_items(data_buffer, next_buffer_page, next_page_to_use)
                        .await?;
                }
            } else {
                // There is no partial-open page, so claim the first open one.
                let Some(first_open_page) = self.inner.find_first_page(0, PageState::Open).await?
                else {
                    return Err(Error::Corrupted {
                        #[cfg(feature = "_test")]
                        backtrace: std::backtrace::Backtrace::capture(),
                    });
                };

                self.inner.partial_close_page(first_open_page).await?;
            }

            recursion_level += 1;
        }
    }

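    /// Fully removes an item, so that a subsequent fetch of `search_key`
    /// returns `None`.
    ///
    /// Removal overwrites the stored item in place, which is why this requires
    /// a [`MultiwriteNorFlash`] implementation.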
    pub async fn remove_item(
        &mut self,
        data_buffer: &mut [u8],
        search_key: &K,
    ) -> Result<(), Error<S::Error>>
    where
        S: MultiwriteNorFlash,
    {
        run_with_auto_repair!(
            function = self.remove_item_inner(data_buffer, Some(search_key)).await,
            repair = self.try_repair(data_buffer).await?
        )
    }

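    /// Fully removes all stored items, so that any subsequent fetch returns
    /// `None`.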
    pub async fn remove_all_items(&mut self, data_buffer: &mut [u8]) -> Result<(), Error<S::Error>>
    where
        S: MultiwriteNorFlash,
    {
        run_with_auto_repair!(
            function = self.remove_item_inner(data_buffer, None).await,
            repair = self.try_repair(data_buffer).await?
        )
    }

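    /// Erases every item matching `search_key`, or every item in the map when
    /// `search_key` is `None`.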
    async fn remove_item_inner(
        &mut self,
        data_buffer: &mut [u8],
        search_key: Option<&K>,
    ) -> Result<(), Error<S::Error>>
    where
        S: MultiwriteNorFlash,
    {
        if let Some(key) = &search_key {
            self.inner.cache.notice_key_erased(key);
        } else {
            self.inner.cache.invalidate_cache_state();
        }

        // Start at the page after the last used one and visit every page once.
        let last_used_page = self
            .inner
            .find_first_page(0, PageState::PartialOpen)
            .await?
            .unwrap_or_default();

        for page_index in self.inner.get_pages(self.inner.next_page(last_used_page)) {
            if self.inner.get_page_state(page_index).await?.is_open() {
                // An open page is still erased, so it holds no items.
                continue;
            }

            let page_data_start_address =
                calculate_page_address::<S>(self.flash_range(), page_index) + S::WORD_SIZE as u32;
            let page_data_end_address =
                calculate_page_end_address::<S>(self.flash_range(), page_index)
                    - S::WORD_SIZE as u32;

            let mut item_headers =
                ItemHeaderIter::new(page_data_start_address, page_data_end_address);

            while let (Some(item_header), item_address) =
                item_headers.next(&mut self.inner.flash).await?
            {
                let item = item_header
                    .read_item(
                        &mut self.inner.flash,
                        data_buffer,
                        item_address,
                        page_data_end_address,
                    )
                    .await?;

                match item {
                    item::MaybeItem::Corrupted(_, _) | item::MaybeItem::Erased(_, _) => continue,
                    item::MaybeItem::Present(item) => {
                        let item_match = match search_key {
                            Some(search_key) => K::deserialize_from(item.data())?.0 == *search_key,
                            _ => true,
                        };
                        if item_match {
                            // This item matches, so erase its data.
                            item.header
                                .erase_data(
                                    &mut self.inner.flash,
                                    self.inner.flash_range.clone(),
                                    &mut self.inner.cache,
                                    item_address,
                                )
                                .await?;
                        }
                    }
                }
            }
        }

        self.inner.cache.unmark_dirty();

        Ok(())
    }

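    /// Returns an iterator over all live items stored in the map.
    ///
    /// Every stored entry is yielded, so a key that was overwritten appears
    /// once per stored version; the last occurrence for a key is its current
    /// value.
    ///
    /// A minimal sketch of draining the iterator (the key type `u8` and value
    /// type `&[u8]` are illustrative):
    ///
    /// ```rust,ignore
    /// let mut iter = storage.fetch_all_items(&mut data_buffer).await?;
    /// while let Some((key, value)) = iter.next::<&[u8]>(&mut data_buffer).await? {
    ///     // `value` borrows from `data_buffer` until the next call.
    ///     let _: (u8, &[u8]) = (key, value);
    /// }
    /// ```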
    pub async fn fetch_all_items(
        &mut self,
        data_buffer: &mut [u8],
    ) -> Result<MapItemIter<'_, K, S, C>, Error<S::Error>> {
        // Find the first page we have to iterate from.
        let first_page = run_with_auto_repair!(
            function = {
                match self
                    .inner
                    .find_first_page(0, PageState::PartialOpen)
                    .await?
                {
                    Some(last_used_page) => {
                        // The page after the partial-open one holds the oldest data.
                        Ok(self.inner.next_page(last_used_page))
                    }
                    None => {
                        if let Some(first_open_page) =
                            self.inner.find_first_page(0, PageState::Open).await?
                        {
                            let previous_page = self.inner.previous_page(first_open_page);
                            if self.inner.get_page_state(previous_page).await?.is_closed() {
                                // No partial-open page; start at the first open one.
                                Ok(first_open_page)
                            } else {
                                // The map is empty, so any starting page works.
                                self.inner.cache.unmark_dirty();
                                Ok(0)
                            }
                        } else {
                            Err(Error::Corrupted {
                                #[cfg(feature = "_test")]
                                backtrace: std::backtrace::Backtrace::capture(),
                            })
                        }
                    }
                }
            },
            repair = self.try_repair(data_buffer).await?
        )?;

        let start_address =
            calculate_page_address::<S>(self.flash_range(), first_page) + S::WORD_SIZE as u32;
        let end_address =
            calculate_page_end_address::<S>(self.flash_range(), first_page) - S::WORD_SIZE as u32;

        Ok(MapItemIter {
            storage: self,
            first_page,
            current_page_index: first_page,
            current_iter: ItemIter::new(start_address, end_address),
            _key: PhantomData,
        })
    }

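    /// Moves every item in `source_page` that is still the newest version of
    /// its key over to `target_page`, then erases the source page.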
    async fn migrate_items(
        &mut self,
        data_buffer: &mut [u8],
        source_page: usize,
        target_page: usize,
    ) -> Result<(), Error<S::Error>> {
        let mut next_page_write_address =
            calculate_page_address::<S>(self.flash_range(), target_page) + S::WORD_SIZE as u32;

        let mut it = ItemIter::new(
            calculate_page_address::<S>(self.flash_range(), source_page) + S::WORD_SIZE as u32,
            calculate_page_end_address::<S>(self.flash_range(), source_page) - S::WORD_SIZE as u32,
        );
        while let Some((item, item_address)) = it.next(&mut self.inner.flash, data_buffer).await? {
            let (key, _) = K::deserialize_from(item.data())?;

            self.inner.cache.unmark_dirty();

            // Search for the newest item with this key.
            let Some((found_item, found_address, _)) =
                self.fetch_item_with_location(data_buffer, &key).await?
            else {
                // We just read an item with this key, so not finding one now
                // means the state is corrupted.
                return Err(Error::Corrupted {
                    #[cfg(feature = "_test")]
                    backtrace: std::backtrace::Backtrace::capture(),
                });
            };

            let found_item = found_item
                .reborrow(data_buffer)
                .ok_or_else(|| Error::LogicBug {
                    #[cfg(feature = "_test")]
                    backtrace: std::backtrace::Backtrace::capture(),
                })?;

            if found_address == item_address {
                // This item is the newest version of its key, so migrate it.
                self.inner
                    .cache
                    .notice_key_location(&key, next_page_write_address, true);
                found_item
                    .write(
                        &mut self.inner.flash,
                        self.inner.flash_range.clone(),
                        &mut self.inner.cache,
                        next_page_write_address,
                    )
                    .await?;
                next_page_write_address = found_item
                    .header
                    .next_item_address::<S>(next_page_write_address);
            }
        }

        self.inner.open_page(source_page).await?;

        Ok(())
    }

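    /// Tries to repair the map state after an interrupted operation.
    ///
    /// Runs automatically through `run_with_auto_repair!` when an operation
    /// reports corruption: it restores the page-state invariants and replays
    /// any half-finished item migration.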
    async fn try_repair(&mut self, data_buffer: &mut [u8]) -> Result<(), Error<S::Error>> {
        self.inner.cache.invalidate_cache_state();

        self.inner.try_general_repair().await?;

        // Check if the partial-open page is followed by an open buffer page.
        if let Some(partial_open_page) = self
            .inner
            .find_first_page(0, PageState::PartialOpen)
            .await?
        {
            let buffer_page = self.inner.next_page(partial_open_page);
            if !self.inner.get_page_state(buffer_page).await?.is_open() {
                // An item migration was interrupted. The partial-open page may
                // contain duplicates of the buffer page, so erase it and redo
                // the migration.
                self.inner.open_page(partial_open_page).await?;

                self.inner.partial_close_page(partial_open_page).await?;

                self.migrate_items(data_buffer, buffer_page, partial_open_page)
                    .await?;
            }
        }

        Ok(())
    }

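    /// Resets the map to an empty state by erasing the entire assigned flash
    /// range.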
    pub fn erase_all(&mut self) -> impl Future<Output = Result<(), Error<S::Error>>> {
        self.inner.erase_all()
    }

    /// The flash overhead per stored item, in bytes. Useful when calculating
    /// how many items fit in a page.
    #[must_use]
    pub const fn item_overhead_size() -> u32 {
        GenericStorage::<S, C>::item_overhead_size()
    }

    /// Destroys the storage, returning the flash and cache it was created with.
    pub fn destroy(self) -> (S, C) {
        self.inner.destroy()
    }

    /// Gives mutable access to the underlying flash.
    pub const fn flash(&mut self) -> &mut S {
        self.inner.flash()
    }

    /// The flash range this storage was configured with.
    pub const fn flash_range(&self) -> Range<u32> {
        self.inner.flash_range()
    }

    /// Prints all items in the map in a human-readable way, for debugging.
    #[cfg(any(test, feature = "std"))]
    pub fn print_items(&mut self) -> impl Future<Output = String> {
        self.inner.print_items()
    }
}

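/// Iterator over all items in a map, created by [`MapStorage::fetch_all_items`].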
pub struct MapItemIter<'s, K: Key, S: NorFlash, C: KeyCacheImpl<K>> {
    storage: &'s mut MapStorage<K, S, C>,
    first_page: usize,
    current_page_index: usize,
    pub(crate) current_iter: ItemIter,
    _key: PhantomData<K>,
}

impl<K: Key, S: NorFlash, C: KeyCacheImpl<K>> MapItemIter<'_, K, S, C> {
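    /// Gets the next key-value pair, deserializing the value as `V`.
    ///
    /// Returns `Ok(None)` once every page has been visited.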
    pub async fn next<'a, V: Value<'a>>(
        &mut self,
        data_buffer: &'a mut [u8],
    ) -> Result<Option<(K, V)>, Error<S::Error>> {
        // Find the next item, crossing page boundaries as needed.
        let item = loop {
            if let Some((item, _address)) = self
                .current_iter
                .next(&mut self.storage.inner.flash, data_buffer)
                .await?
            {
                // We've found the next item in the current page.
                break item;
            }

            // The current page is exhausted; find the next page with items.
            loop {
                self.current_page_index = self.storage.inner.next_page(self.current_page_index);

                // We've wrapped around to where we started, so we're done.
                if self.current_page_index == self.first_page {
                    return Ok(None);
                }

                match self
                    .storage
                    .inner
                    .get_page_state(self.current_page_index)
                    .await
                {
                    Ok(PageState::Closed | PageState::PartialOpen) => {
                        self.current_iter = ItemIter::new(
                            calculate_page_address::<S>(
                                self.storage.inner.flash_range.clone(),
                                self.current_page_index,
                            ) + S::WORD_SIZE as u32,
                            calculate_page_end_address::<S>(
                                self.storage.inner.flash_range.clone(),
                                self.current_page_index,
                            ) - S::WORD_SIZE as u32,
                        );
                        break;
                    }
                    // Skip open pages and pages whose state can't be read.
                    _ => continue,
                }
            }
        };

        let data_len = item.header.length as usize;
        let (key, key_len) = K::deserialize_from(item.data())?;
        let (value, _value_len) = V::deserialize_from(&data_buffer[..data_len][key_len..])
            .map_err(Error::SerializationError)?;

        Ok(Some((key, value)))
    }
}

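/// A key in the map.
///
/// A key must serialize to a self-describing byte form: `deserialize_from` has
/// to be able to tell where the key ends, because the value is stored directly
/// after it.
///
/// A sketch of a custom key type (a hypothetical 2-byte device id, delegating
/// to the existing `u16` implementation):
///
/// ```rust,ignore
/// #[derive(Clone, PartialEq, Eq)]
/// struct DeviceId(u16);
///
/// impl Key for DeviceId {
///     fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
///         Key::serialize_into(&self.0, buffer)
///     }
///
///     fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
///         let (id, len) = <u16 as Key>::deserialize_from(buffer)?;
///         Ok((Self(id), len))
///     }
/// }
/// ```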
pub trait Key: Eq + Clone + Sized {
    /// Serializes the key into the buffer, returning the serialized length.
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError>;
    /// Deserializes a key from the start of the buffer, returning the key and
    /// the number of bytes it occupied.
    fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError>;
    /// Gets the length of the key in the buffer, without fully deserializing it.
    fn get_len(buffer: &[u8]) -> Result<usize, SerializationError> {
        Self::deserialize_from(buffer).map(|(_, len)| len)
    }
}

macro_rules! impl_key_num {
    ($int:ty) => {
        impl Key for $int {
            fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
                let self_bytes = self.to_le_bytes();

                match buffer.get_mut(..self_bytes.len()) {
                    Some(buffer) => {
                        buffer.copy_from_slice(&self_bytes);
                        Ok(buffer.len())
                    }
                    None => Err(SerializationError::BufferTooSmall),
                }
            }

            fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
                let value = Self::from_le_bytes(
                    buffer
                        .get(..size_of::<Self>())
                        .ok_or(SerializationError::BufferTooSmall)?
                        .try_into()
                        .unwrap(),
                );
                Ok((value, size_of::<Self>()))
            }

            fn get_len(_buffer: &[u8]) -> Result<usize, SerializationError> {
                Ok(size_of::<Self>())
            }
        }
    };
}

impl_key_num!(u8);
impl_key_num!(u16);
impl_key_num!(u32);
impl_key_num!(u64);
impl_key_num!(u128);
impl_key_num!(i8);
impl_key_num!(i16);
impl_key_num!(i32);
impl_key_num!(i64);
impl_key_num!(i128);

impl<const N: usize> Key for [u8; N] {
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
        buffer
            .get_mut(..N)
            .ok_or(SerializationError::BufferTooSmall)?
            .copy_from_slice(self);
        Ok(N)
    }

    fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
        Ok((
            buffer
                .get(..N)
                .ok_or(SerializationError::BufferTooSmall)?
                .try_into()
                .unwrap(),
            N,
        ))
    }

    fn get_len(_buffer: &[u8]) -> Result<usize, SerializationError> {
        Ok(N)
    }
}

impl Key for () {
    fn serialize_into(&self, _buffer: &mut [u8]) -> Result<usize, SerializationError> {
        Ok(0)
    }

    fn deserialize_from(_buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
        Ok(((), 0))
    }
}

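/// A value in the map.
///
/// Unlike [`Key`], a value may borrow from the buffer it was deserialized from
/// (see the `&[u8]` implementation); that is what the `'a` lifetime is for.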
pub trait Value<'a> {
    /// Serializes the value into the buffer, returning the serialized length.
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError>;
    /// Deserializes a value from the buffer, returning the value and the
    /// number of bytes that were read.
    fn deserialize_from(buffer: &'a [u8]) -> Result<(Self, usize), SerializationError>
    where
        Self: Sized;
}

impl<'a> Value<'a> for bool {
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
        <u8 as Value>::serialize_into(&u8::from(*self), buffer)
    }

    fn deserialize_from(buffer: &'a [u8]) -> Result<(Self, usize), SerializationError>
    where
        Self: Sized,
    {
        let (value, size) = <u8 as Value>::deserialize_from(buffer)?;
        Ok((value != 0, size))
    }
}

impl<'a, T: Value<'a>> Value<'a> for Option<T> {
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
        if let Some(val) = self {
            let mut size = 0;
            size += <bool as Value>::serialize_into(&true, buffer)?;
            size += <T as Value>::serialize_into(
                val,
                buffer
                    .get_mut(size..)
                    .ok_or(SerializationError::BufferTooSmall)?,
            )?;
            Ok(size)
        } else {
            <bool as Value>::serialize_into(&false, buffer)
        }
    }

    fn deserialize_from(buffer: &'a [u8]) -> Result<(Self, usize), SerializationError>
    where
        Self: Sized,
    {
        let (is_some, tag_size) = <bool as Value>::deserialize_from(buffer)?;
        if is_some {
            let (value, value_size) = <T as Value>::deserialize_from(
                buffer
                    .get(tag_size..)
                    .ok_or(SerializationError::BufferTooSmall)?,
            )?;
            Ok((Some(value), tag_size + value_size))
        } else {
            Ok((None, tag_size))
        }
    }
}

impl<'a> Value<'a> for &'a [u8] {
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
        buffer
            .get_mut(..self.len())
            .ok_or(SerializationError::BufferTooSmall)?
            .copy_from_slice(self);
        Ok(self.len())
    }

    fn deserialize_from(buffer: &'a [u8]) -> Result<(Self, usize), SerializationError>
    where
        Self: Sized,
    {
        // A byte slice is not self-terminating, so it consumes the whole buffer.
        Ok((buffer, buffer.len()))
    }
}

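/// Marker trait that opts a `serde` type into the postcard-backed [`Value`]
/// implementation below.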
#[cfg(feature = "postcard")]
pub trait PostcardValue<'a>: Serialize + Deserialize<'a> {}

#[cfg(feature = "postcard")]
impl<'a, T> Value<'a> for T
where
    T: PostcardValue<'a>,
{
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
        Ok(postcard::to_slice(self, buffer).map(|s| s.len())?)
    }

    fn deserialize_from(buffer: &'a [u8]) -> Result<(Self, usize), SerializationError>
    where
        Self: Sized,
    {
        Ok((postcard::from_bytes(buffer)?, buffer.len()))
    }
}

#[cfg(feature = "postcard")]
impl From<postcard::Error> for SerializationError {
    fn from(error: postcard::Error) -> SerializationError {
        use postcard::Error::*;
        match error {
            SerializeBufferFull => SerializationError::BufferTooSmall,
            SerializeSeqLengthUnknown => SerializationError::InvalidData,
            DeserializeUnexpectedEnd
            | DeserializeBadVarint
            | DeserializeBadBool
            | DeserializeBadChar
            | DeserializeBadUtf8
            | DeserializeBadOption
            | DeserializeBadEnum
            | DeserializeBadEncoding
            | DeserializeBadCrc => SerializationError::InvalidFormat,
            _ => SerializationError::Custom(error as i32),
        }
    }
}

macro_rules! impl_map_item_num {
    ($num:ty) => {
        impl<'a> Value<'a> for $num {
            fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
                let self_bytes = self.to_le_bytes();

                match buffer.get_mut(..self_bytes.len()) {
                    Some(buffer) => {
                        buffer.copy_from_slice(&self_bytes);
                        Ok(buffer.len())
                    }
                    None => Err(SerializationError::BufferTooSmall),
                }
            }

            fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
                let value = Self::from_le_bytes(
                    buffer
                        .get(..size_of::<Self>())
                        .ok_or(SerializationError::BufferTooSmall)?
                        .try_into()
                        .unwrap(),
                );
                Ok((value, size_of::<Self>()))
            }
        }

        impl<'a, const N: usize> Value<'a> for [$num; N] {
            fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
                let elem_size = size_of::<$num>();

                let buffer = buffer
                    .get_mut(0..elem_size * N)
                    .ok_or(SerializationError::BufferTooSmall)?;

                for (chunk, number) in buffer.chunks_exact_mut(elem_size).zip(self.iter()) {
                    chunk.copy_from_slice(&number.to_le_bytes());
                }
                Ok(elem_size * N)
            }

            fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
                let elem_size = size_of::<$num>();
                if buffer.len() < elem_size * N {
                    return Err(SerializationError::BufferTooSmall);
                }

                let mut array = [0 as $num; N];
                for (chunk, number) in buffer.chunks_exact(elem_size).zip(array.iter_mut()) {
                    *number = <$num>::from_le_bytes(chunk.try_into().unwrap());
                }
                Ok((array, elem_size * N))
            }
        }
    };
}

impl_map_item_num!(u8);
impl_map_item_num!(u16);
impl_map_item_num!(u32);
impl_map_item_num!(u64);
impl_map_item_num!(u128);
impl_map_item_num!(i8);
impl_map_item_num!(i16);
impl_map_item_num!(i32);
impl_map_item_num!(i64);
impl_map_item_num!(i128);
impl_map_item_num!(f32);
impl_map_item_num!(f64);

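/// The (de)serialization error for [`Key`]s and [`Value`]s.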
#[non_exhaustive]
#[derive(Debug, PartialEq, Eq, Clone)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum SerializationError {
    /// The provided buffer was too small to fit the data.
    BufferTooSmall,
    /// The value could not be serialized.
    InvalidData,
    /// The stored bytes are not a valid encoding of the type.
    InvalidFormat,
    /// A custom error code from a user (de)serializer.
    Custom(i32),
}
1364
1365impl core::fmt::Display for SerializationError {
1366 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
1367 match self {
1368 SerializationError::BufferTooSmall => write!(f, "Buffer too small"),
1369 SerializationError::InvalidData => write!(f, "Invalid data"),
1370 SerializationError::InvalidFormat => write!(f, "Invalid format"),
1371 SerializationError::Custom(val) => write!(f, "Custom error: {val}"),
1372 }
1373 }
1374}
1375
#[cfg(test)]
mod tests {
    use crate::{AlignedBuf, cache::NoCache, mock_flash};

    use super::*;
    use futures_test::test;

    type MockFlashBig = mock_flash::MockFlashBase<4, 4, 256>;
    type MockFlashTiny = mock_flash::MockFlashBase<2, 1, 32>;

    #[test]
    async fn store_and_fetch() {
        let mut storage = MapStorage::<u8, _, _>::new(
            MockFlashBig::default(),
            MapConfig::new(0x000..0x1000),
            cache::NoCache::new(),
        );

        let mut data_buffer = AlignedBuf([0; 128]);

        let start_snapshot = storage.flash().stats_snapshot();

        let item = storage
            .fetch_item::<&[u8]>(&mut data_buffer, &0)
            .await
            .unwrap();
        assert_eq!(item, None);

        let item = storage
            .fetch_item::<&[u8]>(&mut data_buffer, &60)
            .await
            .unwrap();
        assert_eq!(item, None);

        let item = storage
            .fetch_item::<&[u8]>(&mut data_buffer, &0xFF)
            .await
            .unwrap();
        assert_eq!(item, None);

        storage
            .store_item(&mut data_buffer, &0u8, &[5u8])
            .await
            .unwrap();
        storage
            .store_item(&mut data_buffer, &0u8, &[5u8, 6])
            .await
            .unwrap();

        let item = storage
            .fetch_item::<&[u8]>(&mut data_buffer, &0)
            .await
            .unwrap()
            .unwrap();
        assert_eq!(item, &[5, 6]);

        storage
            .store_item(&mut data_buffer, &1u8, &[2u8, 2, 2, 2, 2, 2])
            .await
            .unwrap();

        let item = storage
            .fetch_item::<&[u8]>(&mut data_buffer, &0)
            .await
            .unwrap()
            .unwrap();
        assert_eq!(item, &[5, 6]);

        let item = storage
            .fetch_item::<&[u8]>(&mut data_buffer, &1)
            .await
            .unwrap()
            .unwrap();
        assert_eq!(item, &[2, 2, 2, 2, 2, 2]);

        for index in 0..4000 {
            storage
                .store_item(
                    &mut data_buffer,
                    &((index % 10) as u8),
                    &vec![(index % 10) as u8 * 2; index % 10].as_slice(),
                )
                .await
                .unwrap();
        }

        for i in 0..10 {
            let item = storage
                .fetch_item::<&[u8]>(&mut data_buffer, &i)
                .await
                .unwrap()
                .unwrap();
            assert_eq!(item, &vec![(i % 10) * 2; (i % 10) as usize]);
        }

        for _ in 0..4000 {
            storage
                .store_item(&mut data_buffer, &11u8, &[0; 10])
                .await
                .unwrap();
        }

        for i in 0..10 {
            let item = storage
                .fetch_item::<&[u8]>(&mut data_buffer, &i)
                .await
                .unwrap()
                .unwrap();
            assert_eq!(item, &vec![(i % 10) * 2; (i % 10) as usize]);
        }

        println!(
            "{:?}",
            start_snapshot.compare_to(storage.flash().stats_snapshot()),
        );
    }

    #[test]
    async fn store_too_many_items() {
        const UPPER_BOUND: u8 = 3;

        let mut storage = MapStorage::new(
            MockFlashTiny::default(),
            const { MapConfig::new(0x00..0x40) },
            NoCache::new(),
        );
        let mut data_buffer = AlignedBuf([0; 128]);

        for i in 0..UPPER_BOUND {
            println!("Storing {i:?}");

            storage
                .store_item(&mut data_buffer, &i, &vec![i; i as usize].as_slice())
                .await
                .unwrap();
        }

        assert_eq!(
            storage
                .store_item(
                    &mut data_buffer,
                    &UPPER_BOUND,
                    &vec![0; UPPER_BOUND as usize].as_slice(),
                )
                .await,
            Err(Error::FullStorage)
        );

        for i in 0..UPPER_BOUND {
            let item = storage
                .fetch_item::<&[u8]>(&mut data_buffer, &i)
                .await
                .unwrap()
                .unwrap();

            println!("Fetched {item:?}");

            assert_eq!(item, vec![i; i as usize]);
        }
    }

    #[test]
    async fn store_too_many_items_big() {
        const UPPER_BOUND: u8 = 68;

        let mut storage = MapStorage::new(
            MockFlashBig::default(),
            const { MapConfig::new(0x0000..0x1000) },
            NoCache::new(),
        );
        let mut data_buffer = AlignedBuf([0; 128]);

        for i in 0..UPPER_BOUND {
            println!("Storing {i:?}");

            storage
                .store_item(&mut data_buffer, &i, &vec![i; i as usize].as_slice())
                .await
                .unwrap();
        }

        assert_eq!(
            storage
                .store_item(
                    &mut data_buffer,
                    &UPPER_BOUND,
                    &vec![0; UPPER_BOUND as usize].as_slice(),
                )
                .await,
            Err(Error::FullStorage)
        );

        for i in 0..UPPER_BOUND {
            let item = storage
                .fetch_item::<&[u8]>(&mut data_buffer, &i)
                .await
                .unwrap()
                .unwrap();

            println!("Fetched {item:?}");

            assert_eq!(item, vec![i; i as usize]);
        }
    }

    #[test]
    async fn store_many_items_big() {
        let mut storage = MapStorage::new(
            mock_flash::MockFlashBase::<4, 1, 4096>::default(),
            const { MapConfig::new(0x0000..0x4000) },
            NoCache::new(),
        );
        let mut data_buffer = AlignedBuf([0; 128]);

        const LENGTH_PER_KEY: [usize; 24] = [
            11, 13, 6, 13, 13, 10, 2, 3, 5, 36, 1, 65, 4, 6, 1, 15, 10, 7, 3, 15, 9, 3, 4, 5,
        ];

        for _ in 0..100 {
            #[allow(clippy::needless_range_loop)]
            for i in 0..24 {
                storage
                    .store_item(
                        &mut data_buffer,
                        &(i as u16),
                        &vec![i as u8; LENGTH_PER_KEY[i]].as_slice(),
                    )
                    .await
                    .unwrap();
            }
        }

        #[allow(clippy::needless_range_loop)]
        for i in 0..24 {
            let item = storage
                .fetch_item::<&[u8]>(&mut data_buffer, &(i as u16))
                .await
                .unwrap()
                .unwrap();

            println!("Fetched {item:?}");

            assert_eq!(item, vec![i as u8; LENGTH_PER_KEY[i]]);
        }
    }

    #[test]
    async fn remove_items() {
        let mut storage = MapStorage::new(
            mock_flash::MockFlashBase::<4, 1, 4096>::new(
                mock_flash::WriteCountCheck::Twice,
                None,
                true,
            ),
            const { MapConfig::new(0x0000..0x4000) },
            NoCache::new(),
        );
        let mut data_buffer = AlignedBuf([0; 128]);

        // Add some data to flash.
        for j in 0..10 {
            for i in 0..24 {
                storage
                    .store_item(
                        &mut data_buffer,
                        &(i as u8),
                        &vec![i as u8; j + 2].as_slice(),
                    )
                    .await
                    .unwrap();
            }
        }

        for j in (0..24).rev() {
            // Are all the expected keys still in flash?
            for i in 0..=j {
                assert!(
                    storage
                        .fetch_item::<&[u8]>(&mut data_buffer, &i)
                        .await
                        .unwrap()
                        .is_some()
                );
            }

            // Remove the item with key `j`.
            storage.remove_item(&mut data_buffer, &j).await.unwrap();

            // The lower keys must still be present...
            for i in 0..j {
                assert!(
                    storage
                        .fetch_item::<&[u8]>(&mut data_buffer, &i)
                        .await
                        .unwrap()
                        .is_some()
                );
            }

            // ...while the removed key must be gone.
            assert!(
                storage
                    .fetch_item::<&[u8]>(&mut data_buffer, &j)
                    .await
                    .unwrap()
                    .is_none()
            );
        }
    }

    #[test]
    async fn remove_all() {
        let mut storage = MapStorage::new(
            mock_flash::MockFlashBase::<4, 1, 4096>::new(
                mock_flash::WriteCountCheck::Twice,
                None,
                true,
            ),
            const { MapConfig::new(0x0000..0x4000) },
            NoCache::new(),
        );
        let mut data_buffer = AlignedBuf([0; 128]);

        // Add some data to flash.
        for value in 0..10 {
            for key in 0..24u8 {
                storage
                    .store_item(&mut data_buffer, &key, &vec![key; value + 2].as_slice())
                    .await
                    .unwrap();
            }
        }

        // Sanity check: all keys can be found.
        for key in 0..24u8 {
            assert!(
                storage
                    .fetch_item::<&[u8]>(&mut data_buffer, &key)
                    .await
                    .unwrap()
                    .is_some()
            );
        }

        // Remove all the items.
        storage.remove_all_items(&mut data_buffer).await.unwrap();

        // Make sure none of the keys are present in flash anymore.
        for key in 0..24 {
            assert!(
                storage
                    .fetch_item::<&[u8]>(&mut data_buffer, &key)
                    .await
                    .unwrap()
                    .is_none()
            );
        }
    }

    #[test]
    async fn store_too_big_item() {
        let mut storage = MapStorage::new(
            MockFlashBig::new(mock_flash::WriteCountCheck::Twice, None, true),
            const { MapConfig::new(0x000..0x1000) },
            NoCache::new(),
        );

        storage
            .store_item(&mut [0; 1024], &0u8, &[0u8; 1024 - 4 * 2 - 8 - 1])
            .await
            .unwrap();

        assert_eq!(
            storage
                .store_item(&mut [0; 1024], &0u8, &[0u8; 1024 - 4 * 2 - 8 - 1 + 1])
                .await,
            Err(Error::ItemTooBig)
        );
    }

    #[test]
    async fn item_iterator() {
        const UPPER_BOUND: u8 = 64;
        let mut storage = MapStorage::new(
            MockFlashBig::default(),
            const { MapConfig::new(0x000..0x1000) },
            NoCache::new(),
        );

        let mut data_buffer = AlignedBuf([0; 128]);

        for i in 0..UPPER_BOUND {
            storage
                .store_item(&mut data_buffer, &i, &vec![i; i as usize].as_slice())
                .await
                .unwrap();
        }

        // Store 10 more times for key 1, so the iterator also yields its stale versions.
        for i in 0..10 {
            storage
                .store_item(&mut data_buffer, &1u8, &vec![i; i as usize].as_slice())
                .await
                .unwrap();
        }

        let mut map_iter = storage.fetch_all_items(&mut data_buffer).await.unwrap();

        let mut count = 0;
        let mut last_value_buffer = [0u8; 64];
        let mut last_value_length = 0;
        while let Ok(Some((key, value))) = map_iter.next::<&[u8]>(&mut data_buffer).await {
            if key == 1 {
                // Keep a copy of the latest value yielded for key 1.
                last_value_length = value.len();
                last_value_buffer[..value.len()].copy_from_slice(value);
            } else {
                assert_eq!(value, vec![key; key as usize]);
                count += 1;
            }
        }

        assert_eq!(last_value_length, 9);
        assert_eq!(
            &last_value_buffer[..last_value_length],
            vec![9u8; 9].as_slice()
        );

        // Every key except key 1 was counted exactly once.
        assert_eq!(count + 1, UPPER_BOUND);
    }

    #[test]
    async fn store_unit_key() {
        let mut storage = MapStorage::new(
            MockFlashBig::default(),
            const { MapConfig::new(0x000..0x1000) },
            NoCache::new(),
        );

        let mut data_buffer = AlignedBuf([0; 128]);

        let item = storage
            .fetch_item::<&[u8]>(&mut data_buffer, &())
            .await
            .unwrap();
        assert_eq!(item, None);

        storage
            .store_item(&mut data_buffer, &(), &[5u8])
            .await
            .unwrap();
        storage
            .store_item(&mut data_buffer, &(), &[5u8, 6])
            .await
            .unwrap();

        let item = storage
            .fetch_item::<&[u8]>(&mut data_buffer, &())
            .await
            .unwrap()
            .unwrap();
        assert_eq!(item, &[5, 6]);
    }

    #[test]
    async fn option_value() {
        let mut buffer = [0; 2];

        assert_eq!(Some(42u8).serialize_into(&mut buffer), Ok(2));
        assert_eq!(Option::<u8>::deserialize_from(&buffer), Ok((Some(42u8), 2)));
        assert_eq!(buffer, [1, 42]);

        let mut buffer = [0; 1];

        assert_eq!(Option::<u8>::None.serialize_into(&mut buffer), Ok(1));
        assert_eq!(Option::<u8>::deserialize_from(&buffer), Ok((None, 1)));
        assert_eq!(buffer, [0]);
    }

    #[test]
    async fn array_value() {
        let mut buffer = [0; 3];
        assert_eq!(Value::serialize_into(&[1u8, 2, 3], &mut buffer), Ok(3));
        assert_eq!(buffer, [1, 2, 3]);
        assert_eq!(
            <[u8; 3] as Value>::deserialize_from(&buffer),
            Ok(([1, 2, 3], 3))
        );

        let mut buffer = [0; 4];
        assert_eq!(
            Value::serialize_into(&[0x1234u16, 0x5678], &mut buffer),
            Ok(4)
        );
        assert_eq!(buffer, [0x34, 0x12, 0x78, 0x56]);
        assert_eq!(
            <[u16; 2]>::deserialize_from(&buffer),
            Ok(([0x1234, 0x5678], 4))
        );
    }

    #[cfg(feature = "postcard")]
    #[test]
    async fn postcard_value() {
        #[derive(PartialEq, Debug, serde::Serialize, serde::Deserialize)]
        struct Foo(u32);
        impl crate::map::PostcardValue<'_> for Foo {}

        let mut buffer = [0; 8];
        assert_eq!(Value::serialize_into(&Foo(123), &mut buffer), Ok(1));
        assert_eq!(
            <Foo as Value>::deserialize_from(&buffer[..1]),
            Ok((Foo(123), 1))
        );
    }
}