1use core::{marker::PhantomData, mem::size_of};
4use embedded_storage_async::nor_flash::MultiwriteNorFlash;
5
6use crate::item::{Item, ItemHeader, ItemIter};
7
8use self::{
9 cache::KeyCacheImpl,
10 item::{ItemHeaderIter, ItemUnborrowed},
11};
12
13use super::{
14 Debug, Error, GenericStorage, MAX_WORD_SIZE, NorFlash, NorFlashExt, PageState, Range, cache,
15 calculate_page_address, calculate_page_end_address, calculate_page_index, calculate_page_size,
16 item, run_with_auto_repair,
17};
18
/// Configuration for a [`MapStorage`]: the region of flash the map may use.
pub struct MapConfig<S> {
    // Byte range in flash reserved for the map; validated in `try_new`.
    flash_range: Range<u32>,
    // Ties the config to one flash type `S` without storing a flash instance.
    _phantom: PhantomData<S>,
}
24
25impl<S: NorFlash> MapConfig<S> {
26 #[must_use]
29 pub const fn new(flash_range: Range<u32>) -> Self {
30 Self::try_new(flash_range).expect("Map config must be correct")
31 }
32
33 #[must_use]
35 pub const fn try_new(flash_range: Range<u32>) -> Option<Self> {
36 if !flash_range.start.is_multiple_of(S::ERASE_SIZE as u32) {
37 return None;
38 }
39 if !flash_range.end.is_multiple_of(S::ERASE_SIZE as u32) {
40 return None;
41 }
42 if flash_range.end - flash_range.start < S::ERASE_SIZE as u32 * 2 {
44 return None;
45 }
46
47 if S::ERASE_SIZE < S::WORD_SIZE * 3 {
48 return None;
49 }
50 if S::WORD_SIZE > MAX_WORD_SIZE {
51 return None;
52 }
53
54 Some(Self {
55 flash_range,
56 _phantom: PhantomData,
57 })
58 }
59}
60
/// A key-value map stored in NOR flash.
pub struct MapStorage<K: Key, S: NorFlash, C: KeyCacheImpl<K>> {
    // Shared flash/page bookkeeping (flash driver, flash range, cache).
    inner: GenericStorage<S, C>,
    // The key type only appears in method signatures; nothing of it is stored.
    _phantom: PhantomData<K>,
}
138
139impl<S: NorFlash, C: KeyCacheImpl<K>, K: Key> MapStorage<K, S, C> {
    /// Create a new map over `storage`, restricted to the flash range from
    /// `config` and using `cache` to speed up key lookups.
    pub const fn new(storage: S, config: MapConfig<S>, cache: C) -> Self {
        Self {
            inner: GenericStorage {
                flash: storage,
                flash_range: config.flash_range,
                cache,
            },
            _phantom: PhantomData,
        }
    }
155
    /// Fetch the newest value stored under `search_key`.
    ///
    /// `data_buffer` must be large enough for the largest serialized key + value;
    /// the returned value may borrow from it. Returns `Ok(None)` when the key is
    /// not present. On detected corruption a repair is run and the fetch retried
    /// (via `run_with_auto_repair!`).
    pub async fn fetch_item<'d, V: Value<'d>>(
        &mut self,
        data_buffer: &'d mut [u8],
        search_key: &K,
    ) -> Result<Option<V>, Error<S::Error>> {
        let result = run_with_auto_repair!(
            function = self.fetch_item_with_location(data_buffer, search_key).await,
            repair = self.try_repair(data_buffer).await?
        );

        let Some((item, _, item_key_len)) = result? else {
            return Ok(None);
        };

        let data_len = item.header.length as usize;
        // The cache fast path does not know the key length; derive it from the data.
        let item_key_len = match item_key_len {
            Some(item_key_len) => item_key_len,
            None => K::get_len(&data_buffer[..data_len])?,
        };

        // The value bytes sit directly after the serialized key.
        let (value, _size) =
            V::deserialize_from(&data_buffer[item_key_len..][..data_len - item_key_len])
                .map_err(Error::SerializationError)?;
        Ok(Some(value))
    }
189
    /// Fetch the item for `search_key` together with its flash address.
    ///
    /// Returns `None` when the key is not present. On success the tuple holds the
    /// unborrowed item (its bytes are in `data_buffer`), the item's address, and
    /// the serialized key length when known (`None` on the cache fast path, which
    /// does not record key lengths).
    #[allow(clippy::type_complexity)]
    async fn fetch_item_with_location(
        &mut self,
        data_buffer: &mut [u8],
        search_key: &K,
    ) -> Result<Option<(ItemUnborrowed, u32, Option<usize>)>, Error<S::Error>> {
        // A dirty cache may hold state from an interrupted mutation; drop it.
        if self.inner.cache.is_dirty() {
            self.inner.cache.invalidate_cache_state();
        }

        // Fast path: try the cached location for this key before scanning pages.
        'cache: {
            if let Some(cached_location) = self.inner.cache.key_location(search_key) {
                let page_index = calculate_page_index::<S>(self.flash_range(), cached_location);
                // The last word of a page is reserved for the page-state marker.
                let page_data_end_address =
                    calculate_page_end_address::<S>(self.flash_range(), page_index)
                        - S::WORD_SIZE as u32;

                let Some(header) = ItemHeader::read_new(
                    &mut self.inner.flash,
                    cached_location,
                    page_data_end_address,
                )
                .await?
                else {
                    // No header at the cached address: the cache is wrong. Hard
                    // failure in test builds; otherwise fall back to a full scan.
                    #[expect(clippy::assertions_on_constants, reason = "Clippy is wrong here")]
                    {
                        assert!(
                            !cfg!(feature = "_test"),
                            "Wrong cache value. Addr: {cached_location}"
                        );
                    }
                    self.inner.cache.invalidate_cache_state();
                    break 'cache;
                };

                let item = header
                    .read_item(
                        &mut self.inner.flash,
                        data_buffer,
                        cached_location,
                        page_data_end_address,
                    )
                    .await?;

                match item {
                    item::MaybeItem::Corrupted(_, _) | item::MaybeItem::Erased(_, _) => {
                        // Cached address points at a dead item; distrust the cache.
                        #[expect(clippy::assertions_on_constants, reason = "Clippy is wrong here")]
                        {
                            assert!(
                                !cfg!(feature = "_test"),
                                "Wrong cache value. Addr: {cached_location}"
                            );
                        }

                        self.inner.cache.invalidate_cache_state();
                        break 'cache;
                    }
                    item::MaybeItem::Present(item) => {
                        // Cache hit; key length is unknown on this path.
                        return Ok(Some((item.unborrow(), cached_location, None)));
                    }
                }
            }
        }

        // Slow path: find the newest used page, then walk pages backwards.
        let mut last_used_page = self
            .inner
            .find_first_page(0, PageState::PartialOpen)
            .await?;

        if last_used_page.is_none() {
            // No partial-open page: the newest data, if any, is on the closed page
            // just before the first open page.
            if let Some(first_open_page) = self.inner.find_first_page(0, PageState::Open).await? {
                let previous_page = self.inner.previous_page(first_open_page);
                if self.inner.get_page_state(previous_page).await?.is_closed() {
                    last_used_page = Some(previous_page);
                } else {
                    // Storage holds no data at all; the key cannot exist.
                    self.inner.cache.unmark_dirty();
                    return Ok(None);
                }
            } else {
                // Neither a partial-open nor an open page exists: corrupted flash.
                return Err(Error::Corrupted {
                    #[cfg(feature = "_test")]
                    backtrace: std::backtrace::Backtrace::capture(),
                });
            }
        }

        let mut current_page_to_check = last_used_page.unwrap();
        let mut newest_found_item_data = None;

        loop {
            // Skip the page-state words at both ends of the page.
            let page_data_start_address =
                calculate_page_address::<S>(self.flash_range(), current_page_to_check)
                    + S::WORD_SIZE as u32;
            let page_data_end_address =
                calculate_page_end_address::<S>(self.flash_range(), current_page_to_check)
                    - S::WORD_SIZE as u32;

            // Scan the whole page; later matches overwrite earlier ones, so what
            // remains is the newest version of the key on this page.
            let mut it = ItemIter::new(page_data_start_address, page_data_end_address);
            while let Some((item, address)) = it.next(&mut self.inner.flash, data_buffer).await? {
                let (found_key, found_key_len) = K::deserialize_from(item.data())?;
                if found_key == *search_key {
                    newest_found_item_data = Some((address, found_key_len));
                }
            }

            if let Some((newest_found_item_address, _)) = newest_found_item_data.as_ref() {
                // Found on the newest page holding the key; remember the location.
                self.inner
                    .cache
                    .notice_key_location(search_key, *newest_found_item_address, false);

                break;
            }

            let previous_page = self.inner.previous_page(current_page_to_check);

            if self.inner.get_page_state(previous_page).await? != PageState::Closed {
                // No more closed pages to search: the key is not stored.
                self.inner.cache.unmark_dirty();
                return Ok(None);
            }

            current_page_to_check = previous_page;
        }

        self.inner.cache.unmark_dirty();

        if let Some((newest_found_item_address, newest_found_item_key_len)) = newest_found_item_data
        {
            // Re-read the winning item so `data_buffer` holds its bytes again (the
            // page scan may have overwritten them with a later non-matching item).
            let item =
                ItemHeader::read_new(&mut self.inner.flash, newest_found_item_address, u32::MAX)
                    .await?
                    .ok_or_else(|| {
                        // The header was just read successfully; its absence now is
                        // corruption.
                        Error::Corrupted {
                            #[cfg(feature = "_test")]
                            backtrace: std::backtrace::Backtrace::capture(),
                        }
                    })?
                    .read_item(
                        &mut self.inner.flash,
                        data_buffer,
                        newest_found_item_address,
                        u32::MAX,
                    )
                    .await?;

            Ok(Some((
                item.unwrap()?.unborrow(),
                newest_found_item_address,
                Some(newest_found_item_key_len),
            )))
        } else {
            Ok(None)
        }
    }
363
    /// Store `item` under `key`, logically replacing any previous value.
    ///
    /// `data_buffer` must be large enough to hold the serialized key + value.
    /// On detected corruption a repair is run and the store retried
    /// (via `run_with_auto_repair!`).
    pub async fn store_item<'d, V: Value<'d>>(
        &mut self,
        data_buffer: &mut [u8],
        key: &K,
        item: &V,
    ) -> Result<(), Error<S::Error>> {
        run_with_auto_repair!(
            function = self.store_item_inner(data_buffer, key, item).await,
            repair = self.try_repair(data_buffer).await?
        )
    }
380
    /// Implementation of [`Self::store_item`] without the auto-repair wrapper.
    async fn store_item_inner(
        &mut self,
        data_buffer: &mut [u8],
        key: &K,
        item: &dyn Value<'_>,
    ) -> Result<(), Error<S::Error>> {
        if self.inner.cache.is_dirty() {
            self.inner.cache.invalidate_cache_state();
        }

        // Each pass either writes the item or prepares a fresh page; needing more
        // passes than there are pages means nothing can free up room.
        let mut recursion_level = 0;
        loop {
            if recursion_level == self.inner.get_pages(0).count() {
                self.inner.cache.unmark_dirty();
                return Err(Error::FullStorage);
            }

            let next_page_to_use = if let Some(partial_open_page) = self
                .inner
                .find_first_page(0, PageState::PartialOpen)
                .await?
            {
                // Invariant: the page after the partial-open one must be open.
                // Anything else means the storage is corrupted.
                if !self
                    .inner
                    .get_page_state(self.inner.next_page(partial_open_page))
                    .await?
                    .is_open()
                {
                    return Err(Error::Corrupted {
                        #[cfg(feature = "_test")]
                        backtrace: std::backtrace::Backtrace::capture(),
                    });
                }

                // Skip the page-state words at both ends of the page.
                let page_data_start_address =
                    calculate_page_address::<S>(self.flash_range(), partial_open_page)
                        + S::WORD_SIZE as u32;
                let page_data_end_address =
                    calculate_page_end_address::<S>(self.flash_range(), partial_open_page)
                        - S::WORD_SIZE as u32;

                // Serialize key followed by value into the scratch buffer.
                let key_len = key.serialize_into(data_buffer)?;
                let item_data_length = key_len
                    + item
                        .serialize_into(&mut data_buffer[key_len..])
                        .map_err(Error::SerializationError)?;

                // The item must fit the u16 length field and a single page's
                // data area (header included).
                if item_data_length > u16::MAX as usize
                    || item_data_length
                        > calculate_page_size::<S>()
                            .saturating_sub(ItemHeader::data_address::<S>(0) as usize)
                {
                    self.inner.cache.unmark_dirty();
                    return Err(Error::ItemTooBig);
                }

                let free_spot_address = self
                    .inner
                    .find_next_free_item_spot(
                        page_data_start_address,
                        page_data_end_address,
                        item_data_length as u32,
                    )
                    .await?;

                if let Some(free_spot_address) = free_spot_address {
                    // Record the location (marked dirty) before writing so an
                    // interrupted write leaves the cache invalidated, not wrong.
                    self.inner
                        .cache
                        .notice_key_location(key, free_spot_address, true);
                    Item::write_new(
                        &mut self.inner.flash,
                        self.inner.flash_range.clone(),
                        &mut self.inner.cache,
                        free_spot_address,
                        &data_buffer[..item_data_length],
                    )
                    .await?;

                    self.inner.cache.unmark_dirty();
                    return Ok(());
                }

                // Page is full: close it and continue on the following page.
                self.inner.close_page(partial_open_page).await?;
                Some(self.inner.next_page(partial_open_page))
            } else {
                None
            };

            if let Some(next_page_to_use) = next_page_to_use {
                let next_page_state = self.inner.get_page_state(next_page_to_use).await?;

                if !next_page_state.is_open() {
                    // The designated next page must still be open; otherwise the
                    // storage is corrupted.
                    return Err(Error::Corrupted {
                        #[cfg(feature = "_test")]
                        backtrace: std::backtrace::Backtrace::capture(),
                    });
                }

                self.inner.partial_close_page(next_page_to_use).await?;

                // Restore the "page after the active page is open" invariant by
                // migrating the following page's live items if it is not open.
                let next_buffer_page = self.inner.next_page(next_page_to_use);
                let next_buffer_page_state = self.inner.get_page_state(next_buffer_page).await?;

                if !next_buffer_page_state.is_open() {
                    self.migrate_items(data_buffer, next_buffer_page, next_page_to_use)
                        .await?;
                }
            } else {
                // No partial-open page exists: start on the first open page.
                let Some(first_open_page) = self.inner.find_first_page(0, PageState::Open).await?
                else {
                    return Err(Error::Corrupted {
                        #[cfg(feature = "_test")]
                        backtrace: std::backtrace::Backtrace::capture(),
                    });
                };

                self.inner.partial_close_page(first_open_page).await?;
            }

            // Retry the store on the freshly prepared page.
            recursion_level += 1;
        }
    }
527
    /// Fully remove `search_key` from storage, erasing every stored version.
    ///
    /// Requires [`MultiwriteNorFlash`], i.e. flash that tolerates writing the
    /// same word more than once, since items are erased in place.
    /// On detected corruption a repair is run and the removal retried.
    pub async fn remove_item(
        &mut self,
        data_buffer: &mut [u8],
        search_key: &K,
    ) -> Result<(), Error<S::Error>>
    where
        S: MultiwriteNorFlash,
    {
        run_with_auto_repair!(
            function = self.remove_item_inner(data_buffer, Some(search_key)).await,
            repair = self.try_repair(data_buffer).await?
        )
    }
553
    /// Remove every item from storage (the "erase everything" variant of
    /// [`Self::remove_item`]). Requires [`MultiwriteNorFlash`] for in-place erase.
    pub async fn remove_all_items(&mut self, data_buffer: &mut [u8]) -> Result<(), Error<S::Error>>
    where
        S: MultiwriteNorFlash,
    {
        run_with_auto_repair!(
            function = self.remove_item_inner(data_buffer, None).await,
            repair = self.try_repair(data_buffer).await?
        )
    }
573
    /// Shared implementation of [`Self::remove_item`] and
    /// [`Self::remove_all_items`]: erases every present item matching
    /// `search_key` on all non-open pages. `None` matches every item.
    async fn remove_item_inner(
        &mut self,
        data_buffer: &mut [u8],
        search_key: Option<&K>,
    ) -> Result<(), Error<S::Error>>
    where
        S: MultiwriteNorFlash,
    {
        // Keep the cache coherent with the upcoming erase.
        if let Some(key) = &search_key {
            self.inner.cache.notice_key_erased(key);
        } else {
            self.inner.cache.invalidate_cache_state();
        }

        // Iterate the full page cycle starting after the newest page; fall back
        // to page 0 when no partial-open page exists.
        let last_used_page = self
            .inner
            .find_first_page(0, PageState::PartialOpen)
            .await?
            .unwrap_or_default();

        for page_index in self.inner.get_pages(self.inner.next_page(last_used_page)) {
            // Open pages contain no items.
            if self.inner.get_page_state(page_index).await?.is_open() {
                continue;
            }

            // Skip the page-state words at both ends of the page.
            let page_data_start_address =
                calculate_page_address::<S>(self.flash_range(), page_index) + S::WORD_SIZE as u32;
            let page_data_end_address =
                calculate_page_end_address::<S>(self.flash_range(), page_index)
                    - S::WORD_SIZE as u32;

            let mut item_headers =
                ItemHeaderIter::new(page_data_start_address, page_data_end_address);

            while let (Some(item_header), item_address) =
                item_headers.next(&mut self.inner.flash).await?
            {
                let item = item_header
                    .read_item(
                        &mut self.inner.flash,
                        data_buffer,
                        item_address,
                        page_data_end_address,
                    )
                    .await?;

                match item {
                    // Already erased or unreadable: nothing left to do.
                    item::MaybeItem::Corrupted(_, _) | item::MaybeItem::Erased(_, _) => continue,
                    item::MaybeItem::Present(item) => {
                        // With no search key, every present item matches.
                        let item_match = match search_key {
                            Some(search_key) => K::deserialize_from(item.data())?.0 == *search_key,
                            _ => true,
                        };
                        if item_match {
                            item.header
                                .erase_data(
                                    &mut self.inner.flash,
                                    self.inner.flash_range.clone(),
                                    &mut self.inner.cache,
                                    item_address,
                                )
                                .await?;
                        }
                    }
                }
            }
        }

        self.inner.cache.unmark_dirty();

        Ok(())
    }
656
    /// Create an iterator over every item currently readable in the map.
    ///
    /// Iteration starts on the page after the newest one, so for keys stored
    /// multiple times, later yields are newer versions. `data_buffer` is only
    /// used here by the automatic repair path; the buffer for reading items is
    /// passed to [`MapItemIter::next`] instead.
    pub async fn fetch_all_items(
        &mut self,
        data_buffer: &mut [u8],
    ) -> Result<MapItemIter<'_, K, S, C>, Error<S::Error>> {
        // Determine the page where iteration should start.
        let first_page = run_with_auto_repair!(
            function = {
                match self
                    .inner
                    .find_first_page(0, PageState::PartialOpen)
                    .await?
                {
                    Some(last_used_page) => {
                        // Oldest data sits on the page after the newest one.
                        Ok(self.inner.next_page(last_used_page))
                    }
                    None => {
                        // No partial-open page; look for a closed page just
                        // before the first open one.
                        if let Some(first_open_page) =
                            self.inner.find_first_page(0, PageState::Open).await?
                        {
                            let previous_page = self.inner.previous_page(first_open_page);
                            if self.inner.get_page_state(previous_page).await?.is_closed() {
                                Ok(first_open_page)
                            } else {
                                // Map is empty; any start page yields nothing.
                                self.inner.cache.unmark_dirty();
                                Ok(0)
                            }
                        } else {
                            // Neither partial-open nor open pages: corrupted.
                            Err(Error::Corrupted {
                                #[cfg(feature = "_test")]
                                backtrace: std::backtrace::Backtrace::capture(),
                            })
                        }
                    }
                }
            },
            repair = self.try_repair(data_buffer).await?
        )?;

        // Skip the page-state words at both ends of the first page.
        let start_address =
            calculate_page_address::<S>(self.flash_range(), first_page) + S::WORD_SIZE as u32;
        let end_address =
            calculate_page_end_address::<S>(self.flash_range(), first_page) - S::WORD_SIZE as u32;

        Ok(MapItemIter {
            storage: self,
            first_page,
            current_page_index: first_page,
            current_iter: ItemIter::new(start_address, end_address),
            _key: PhantomData,
        })
    }
778
    /// Copy every item whose newest version lives on `source_page` over to
    /// `target_page`, then erase (re-open) `source_page`.
    ///
    /// Items superseded by a newer version elsewhere are skipped; dropping them
    /// here is what reclaims the space they occupied.
    async fn migrate_items(
        &mut self,
        data_buffer: &mut [u8],
        source_page: usize,
        target_page: usize,
    ) -> Result<(), Error<S::Error>> {
        // Next free write address on the target page (after its state word).
        let mut next_page_write_address =
            calculate_page_address::<S>(self.flash_range(), target_page) + S::WORD_SIZE as u32;

        let mut it = ItemIter::new(
            calculate_page_address::<S>(self.flash_range(), source_page) + S::WORD_SIZE as u32,
            calculate_page_end_address::<S>(self.flash_range(), source_page) - S::WORD_SIZE as u32,
        );
        while let Some((item, item_address)) = it.next(&mut self.inner.flash, data_buffer).await? {
            let (key, _) = K::deserialize_from(item.data())?;

            // The fetch below must start from a clean cache state.
            self.inner.cache.unmark_dirty();

            // Find the newest version of this key. It must exist, because we
            // just read one version of it from flash.
            let Some((found_item, found_address, _)) =
                self.fetch_item_with_location(data_buffer, &key).await?
            else {
                return Err(Error::Corrupted {
                    #[cfg(feature = "_test")]
                    backtrace: std::backtrace::Backtrace::capture(),
                });
            };

            // Re-attach the unborrowed item to the buffer the fetch filled.
            let found_item = found_item
                .reborrow(data_buffer)
                .ok_or_else(|| Error::LogicBug {
                    #[cfg(feature = "_test")]
                    backtrace: std::backtrace::Backtrace::capture(),
                })?;

            // Only migrate when the source-page copy IS the newest version.
            if found_address == item_address {
                self.inner
                    .cache
                    .notice_key_location(&key, next_page_write_address, true);
                found_item
                    .write(
                        &mut self.inner.flash,
                        self.inner.flash_range.clone(),
                        &mut self.inner.cache,
                        next_page_write_address,
                    )
                    .await?;
                next_page_write_address = found_item
                    .header
                    .next_item_address::<S>(next_page_write_address);
            }
        }

        // All live data is now on the target page; recycle the source page.
        self.inner.open_page(source_page).await?;

        Ok(())
    }
841
    /// Attempt to restore storage invariants after corruption was detected.
    ///
    /// Runs the generic page-state repair, then finishes a possibly interrupted
    /// page migration: if the buffer page after the partial-open page is not
    /// open, the previous migration was cut short and is redone from scratch.
    async fn try_repair(&mut self, data_buffer: &mut [u8]) -> Result<(), Error<S::Error>> {
        // Cached state cannot be trusted after corruption.
        self.inner.cache.invalidate_cache_state();

        self.inner.try_general_repair().await?;

        if let Some(partial_open_page) = self
            .inner
            .find_first_page(0, PageState::PartialOpen)
            .await?
        {
            let buffer_page = self.inner.next_page(partial_open_page);
            if !self.inner.get_page_state(buffer_page).await?.is_open() {
                // Wipe the partially migrated target, then redo the whole
                // migration from the buffer page.
                self.inner.open_page(partial_open_page).await?;

                self.inner.partial_close_page(partial_open_page).await?;

                self.migrate_items(data_buffer, buffer_page, partial_open_page)
                    .await?;
            }
        }

        Ok(())
    }
879
    /// Reset the map by erasing its full flash range. All stored items are lost.
    pub fn erase_all(&mut self) -> impl Future<Output = Result<(), Error<S::Error>>> {
        self.inner.erase_all()
    }
886
    /// The per-item flash overhead in bytes, as reported by the underlying
    /// generic storage. Useful for dimensioning the flash range.
    #[must_use]
    pub const fn item_overhead_size() -> u32 {
        GenericStorage::<S, C>::item_overhead_size()
    }
895
    /// Consume the storage, returning the flash driver and the cache for reuse.
    pub fn destroy(self) -> (S, C) {
        self.inner.destroy()
    }
902
    /// Mutable access to the underlying flash driver.
    pub const fn flash(&mut self) -> &mut S {
        self.inner.flash()
    }
907
    /// The flash byte range this map operates on.
    pub const fn flash_range(&self) -> Range<u32> {
        self.inner.flash_range()
    }
912
    /// Render a human-readable dump of all items (available in tests/std only).
    #[cfg(any(test, feature = "std"))]
    pub fn print_items(&mut self) -> impl Future<Output = String> {
        self.inner.print_items()
    }
920}
921
/// Iterator over every item of a [`MapStorage`], created by
/// [`MapStorage::fetch_all_items`].
pub struct MapItemIter<'s, K: Key, S: NorFlash, C: KeyCacheImpl<K>> {
    storage: &'s mut MapStorage<K, S, C>,
    // Page where iteration started; reaching it again means we are done.
    first_page: usize,
    current_page_index: usize,
    // Item iterator for the page currently being walked.
    pub(crate) current_iter: ItemIter,
    _key: PhantomData<K>,
}
977
impl<K: Key, S: NorFlash, C: KeyCacheImpl<K>> MapItemIter<'_, K, S, C> {
    /// Get the next key-value pair, or `None` once every page has been visited.
    ///
    /// The returned value may borrow from `data_buffer`.
    pub async fn next<'a, V: Value<'a>>(
        &mut self,
        data_buffer: &'a mut [u8],
    ) -> Result<Option<(K, V)>, Error<S::Error>> {
        // Find the next item, crossing page boundaries as needed.
        let item = loop {
            if let Some((item, _address)) = self
                .current_iter
                .next(&mut self.storage.inner.flash, data_buffer)
                .await?
            {
                break item;
            }

            // Current page exhausted: advance to the next page holding data.
            loop {
                self.current_page_index = self.storage.inner.next_page(self.current_page_index);

                // Wrapped around to the start page: iteration is finished.
                if self.current_page_index == self.first_page {
                    return Ok(None);
                }

                match self
                    .storage
                    .inner
                    .get_page_state(self.current_page_index)
                    .await
                {
                    Ok(PageState::Closed | PageState::PartialOpen) => {
                        // Skip the page-state words at both ends of the page.
                        self.current_iter = ItemIter::new(
                            calculate_page_address::<S>(
                                self.storage.inner.flash_range.clone(),
                                self.current_page_index,
                            ) + S::WORD_SIZE as u32,
                            calculate_page_end_address::<S>(
                                self.storage.inner.flash_range.clone(),
                                self.current_page_index,
                            ) - S::WORD_SIZE as u32,
                        );
                        break;
                    }
                    // Open pages (or pages whose state cannot be read) are skipped.
                    _ => continue,
                }
            }
        };

        // An item stores the serialized key immediately followed by the value.
        let data_len = item.header.length as usize;
        let (key, key_len) = K::deserialize_from(item.data())?;
        let (value, _value_len) = V::deserialize_from(&data_buffer[..data_len][key_len..])
            .map_err(Error::SerializationError)?;

        Ok(Some((key, value)))
    }
}
1037
/// A map key: serializable, comparable and cheap to clone.
pub trait Key: Eq + Clone + Sized {
    /// Serialize the key into `buffer`, returning the number of bytes written.
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError>;
    /// Deserialize a key from the start of `buffer`, returning the key and its
    /// serialized length.
    fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError>;
    /// Return only the serialized length of the key at the start of `buffer`.
    /// Override when this can be answered without a full deserialize.
    fn get_len(buffer: &[u8]) -> Result<usize, SerializationError> {
        Self::deserialize_from(buffer).map(|(_, len)| len)
    }
}
1059
/// Implements [`Key`] for a primitive integer type using its little-endian
/// byte representation.
macro_rules! impl_key_num {
    ($int:ty) => {
        impl Key for $int {
            fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
                let bytes = self.to_le_bytes();
                let Some(dest) = buffer.get_mut(..bytes.len()) else {
                    return Err(SerializationError::BufferTooSmall);
                };
                dest.copy_from_slice(&bytes);
                Ok(bytes.len())
            }

            fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
                let raw = buffer
                    .get(..size_of::<Self>())
                    .ok_or(SerializationError::BufferTooSmall)?;
                // Infallible: `raw` is exactly `size_of::<Self>()` bytes long.
                let value = Self::from_le_bytes(raw.try_into().unwrap());
                Ok((value, size_of::<Self>()))
            }

            fn get_len(_buffer: &[u8]) -> Result<usize, SerializationError> {
                // Fixed-width type: the length is known without reading.
                Ok(size_of::<Self>())
            }
        }
    };
}
1092
// Keys for all built-in integer types, stored as little-endian bytes.
impl_key_num!(u8);
impl_key_num!(u16);
impl_key_num!(u32);
impl_key_num!(u64);
impl_key_num!(u128);
impl_key_num!(i8);
impl_key_num!(i16);
impl_key_num!(i32);
impl_key_num!(i64);
impl_key_num!(i128);
1103
1104impl<const N: usize> Key for [u8; N] {
1105 fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
1106 buffer
1107 .get_mut(..N)
1108 .ok_or(SerializationError::BufferTooSmall)?
1109 .copy_from_slice(self);
1110 Ok(N)
1111 }
1112
1113 fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
1114 Ok((
1115 buffer
1116 .get(..N)
1117 .ok_or(SerializationError::BufferTooSmall)?
1118 .try_into()
1119 .unwrap(),
1120 N,
1121 ))
1122 }
1123
1124 fn get_len(_buffer: &[u8]) -> Result<usize, SerializationError> {
1125 Ok(N)
1126 }
1127}
1128
/// The unit key occupies zero bytes; useful when the map holds a single value.
impl Key for () {
    fn serialize_into(&self, _buffer: &mut [u8]) -> Result<usize, SerializationError> {
        Ok(0)
    }

    fn deserialize_from(_buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
        Ok(((), 0))
    }
}
1138
/// A map value. The lifetime parameter allows zero-copy values (for example
/// `&'a [u8]`) that borrow from the read buffer.
pub trait Value<'a> {
    /// Serialize the value into `buffer`, returning the number of bytes written.
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError>;
    /// Deserialize a value from `buffer`, returning it and the number of bytes
    /// consumed.
    fn deserialize_from(buffer: &'a [u8]) -> Result<(Self, usize), SerializationError>
    where
        Self: Sized;
}
1159
1160impl<'a> Value<'a> for bool {
1161 fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
1162 <u8 as Value>::serialize_into(&u8::from(*self), buffer)
1163 }
1164
1165 fn deserialize_from(buffer: &'a [u8]) -> Result<(Self, usize), SerializationError>
1166 where
1167 Self: Sized,
1168 {
1169 let (value, size) = <u8 as Value>::deserialize_from(buffer)?;
1170 Ok((value != 0, size))
1171 }
1172}
1173
1174impl<'a, T: Value<'a>> Value<'a> for Option<T> {
1175 fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
1176 if let Some(val) = self {
1177 let mut size = 0;
1178 size += <bool as Value>::serialize_into(&true, buffer)?;
1179 size += <T as Value>::serialize_into(
1180 val,
1181 buffer
1182 .get_mut(size..)
1183 .ok_or(SerializationError::BufferTooSmall)?,
1184 )?;
1185 Ok(size)
1186 } else {
1187 <bool as Value>::serialize_into(&false, buffer)
1188 }
1189 }
1190
1191 fn deserialize_from(buffer: &'a [u8]) -> Result<(Self, usize), SerializationError>
1192 where
1193 Self: Sized,
1194 {
1195 let (is_some, tag_size) = <bool as Value>::deserialize_from(buffer)?;
1196 if is_some {
1197 let (value, value_size) = <T as Value>::deserialize_from(
1198 buffer
1199 .get(tag_size..)
1200 .ok_or(SerializationError::BufferTooSmall)?,
1201 )?;
1202 Ok((Some(value), tag_size + value_size))
1203 } else {
1204 Ok((None, tag_size))
1205 }
1206 }
1207}
1208
1209impl<'a> Value<'a> for &'a [u8] {
1210 fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
1211 buffer
1212 .get_mut(..self.len())
1213 .ok_or(SerializationError::BufferTooSmall)?
1214 .copy_from_slice(self);
1215 Ok(self.len())
1216 }
1217
1218 fn deserialize_from(buffer: &'a [u8]) -> Result<(Self, usize), SerializationError>
1219 where
1220 Self: Sized,
1221 {
1222 Ok((buffer, buffer.len()))
1223 }
1224}
1225
/// Implements [`Value`] for a primitive number type — and for fixed-size arrays
/// of it — using the little-endian byte representation.
macro_rules! impl_map_item_num {
    ($num:ty) => {
        impl<'a> Value<'a> for $num {
            fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
                let bytes = self.to_le_bytes();
                let Some(dest) = buffer.get_mut(..bytes.len()) else {
                    return Err(SerializationError::BufferTooSmall);
                };
                dest.copy_from_slice(&bytes);
                Ok(bytes.len())
            }

            fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
                let raw = buffer
                    .get(..size_of::<Self>())
                    .ok_or(SerializationError::BufferTooSmall)?;
                // Infallible: `raw` is exactly `size_of::<Self>()` bytes long.
                let value = Self::from_le_bytes(raw.try_into().unwrap());
                Ok((value, size_of::<Self>()))
            }
        }

        impl<'a, const N: usize> Value<'a> for [$num; N] {
            fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
                let elem_size = size_of::<$num>();
                let total = elem_size * N;

                let dest = buffer
                    .get_mut(..total)
                    .ok_or(SerializationError::BufferTooSmall)?;

                // Write each element as its little-endian bytes, back to back.
                for (slot, value) in dest.chunks_exact_mut(elem_size).zip(self) {
                    slot.copy_from_slice(&value.to_le_bytes());
                }
                Ok(total)
            }

            fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
                let elem_size = size_of::<$num>();
                let total = elem_size * N;
                if buffer.len() < total {
                    return Err(SerializationError::BufferTooSmall);
                }

                let mut out = [0 as $num; N];
                for (value, raw) in out.iter_mut().zip(buffer.chunks_exact(elem_size)) {
                    *value = <$num>::from_le_bytes(raw.try_into().unwrap());
                }
                Ok((out, total))
            }
        }
    };
}
1283
// Values for all built-in number types (and fixed-size arrays of them),
// stored as little-endian bytes.
impl_map_item_num!(u8);
impl_map_item_num!(u16);
impl_map_item_num!(u32);
impl_map_item_num!(u64);
impl_map_item_num!(u128);
impl_map_item_num!(i8);
impl_map_item_num!(i16);
impl_map_item_num!(i32);
impl_map_item_num!(i64);
impl_map_item_num!(i128);
impl_map_item_num!(f32);
impl_map_item_num!(f64);
1296
/// Errors that can occur while (de)serializing keys and values.
#[non_exhaustive]
#[derive(Debug, PartialEq, Eq, Clone)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum SerializationError {
    /// The provided buffer is too small for the serialized representation.
    BufferTooSmall,
    /// The bytes are readable but encode an invalid value.
    InvalidData,
    /// The bytes do not follow the expected format.
    InvalidFormat,
    /// Implementation-defined error code for custom `Key`/`Value` impls.
    Custom(i32),
}
1314
1315impl core::fmt::Display for SerializationError {
1316 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
1317 match self {
1318 SerializationError::BufferTooSmall => write!(f, "Buffer too small"),
1319 SerializationError::InvalidData => write!(f, "Invalid data"),
1320 SerializationError::InvalidFormat => write!(f, "Invalid format"),
1321 SerializationError::Custom(val) => write!(f, "Custom error: {val}"),
1322 }
1323 }
1324}
1325
1326#[cfg(test)]
1327mod tests {
1328 use crate::{AlignedBuf, cache::NoCache, mock_flash};
1329
1330 use super::*;
1331 use futures_test::test;
1332
1333 type MockFlashBig = mock_flash::MockFlashBase<4, 4, 256>;
1334 type MockFlashTiny = mock_flash::MockFlashBase<2, 1, 32>;
1335
1336 #[test]
1337 async fn store_and_fetch() {
1338 let mut storage = MapStorage::<u8, _, _>::new(
1339 MockFlashBig::default(),
1340 MapConfig::new(0x000..0x1000),
1341 cache::NoCache::new(),
1342 );
1343
1344 let mut data_buffer = AlignedBuf([0; 128]);
1345
1346 let start_snapshot = storage.flash().stats_snapshot();
1347
1348 let item = storage
1349 .fetch_item::<&[u8]>(&mut data_buffer, &0)
1350 .await
1351 .unwrap();
1352 assert_eq!(item, None);
1353
1354 let item = storage
1355 .fetch_item::<&[u8]>(&mut data_buffer, &60)
1356 .await
1357 .unwrap();
1358 assert_eq!(item, None);
1359
1360 let item = storage
1361 .fetch_item::<&[u8]>(&mut data_buffer, &0xFF)
1362 .await
1363 .unwrap();
1364 assert_eq!(item, None);
1365
1366 storage
1367 .store_item(&mut data_buffer, &0u8, &[5u8])
1368 .await
1369 .unwrap();
1370 storage
1371 .store_item(&mut data_buffer, &0u8, &[5u8, 6])
1372 .await
1373 .unwrap();
1374
1375 let item = storage
1376 .fetch_item::<&[u8]>(&mut data_buffer, &0)
1377 .await
1378 .unwrap()
1379 .unwrap();
1380 assert_eq!(item, &[5, 6]);
1381
1382 storage
1383 .store_item(&mut data_buffer, &1u8, &[2u8, 2, 2, 2, 2, 2])
1384 .await
1385 .unwrap();
1386
1387 let item = storage
1388 .fetch_item::<&[u8]>(&mut data_buffer, &0)
1389 .await
1390 .unwrap()
1391 .unwrap();
1392 assert_eq!(item, &[5, 6]);
1393
1394 let item = storage
1395 .fetch_item::<&[u8]>(&mut data_buffer, &1)
1396 .await
1397 .unwrap()
1398 .unwrap();
1399 assert_eq!(item, &[2, 2, 2, 2, 2, 2]);
1400
1401 for index in 0..4000 {
1402 storage
1403 .store_item(
1404 &mut data_buffer,
1405 &((index % 10) as u8),
1406 &vec![(index % 10) as u8 * 2; index % 10].as_slice(),
1407 )
1408 .await
1409 .unwrap();
1410 }
1411
1412 for i in 0..10 {
1413 let item = storage
1414 .fetch_item::<&[u8]>(&mut data_buffer, &i)
1415 .await
1416 .unwrap()
1417 .unwrap();
1418 assert_eq!(item, &vec![(i % 10) * 2; (i % 10) as usize]);
1419 }
1420
1421 for _ in 0..4000 {
1422 storage
1423 .store_item(&mut data_buffer, &11u8, &[0; 10])
1424 .await
1425 .unwrap();
1426 }
1427
1428 for i in 0..10 {
1429 let item = storage
1430 .fetch_item::<&[u8]>(&mut data_buffer, &i)
1431 .await
1432 .unwrap()
1433 .unwrap();
1434 assert_eq!(item, &vec![(i % 10) * 2; (i % 10) as usize]);
1435 }
1436
1437 println!(
1438 "{:?}",
1439 start_snapshot.compare_to(storage.flash().stats_snapshot()),
1440 );
1441 }
1442
1443 #[test]
1444 async fn store_too_many_items() {
1445 const UPPER_BOUND: u8 = 3;
1446
1447 let mut storage = MapStorage::new(
1448 MockFlashTiny::default(),
1449 const { MapConfig::new(0x00..0x40) },
1450 NoCache::new(),
1451 );
1452 let mut data_buffer = AlignedBuf([0; 128]);
1453
1454 for i in 0..UPPER_BOUND {
1455 println!("Storing {i:?}");
1456
1457 storage
1458 .store_item(&mut data_buffer, &i, &vec![i; i as usize].as_slice())
1459 .await
1460 .unwrap();
1461 }
1462
1463 assert_eq!(
1464 storage
1465 .store_item(
1466 &mut data_buffer,
1467 &UPPER_BOUND,
1468 &vec![0; UPPER_BOUND as usize].as_slice(),
1469 )
1470 .await,
1471 Err(Error::FullStorage)
1472 );
1473
1474 for i in 0..UPPER_BOUND {
1475 let item = storage
1476 .fetch_item::<&[u8]>(&mut data_buffer, &i)
1477 .await
1478 .unwrap()
1479 .unwrap();
1480
1481 println!("Fetched {item:?}");
1482
1483 assert_eq!(item, vec![i; i as usize]);
1484 }
1485 }
1486
1487 #[test]
1488 async fn store_too_many_items_big() {
1489 const UPPER_BOUND: u8 = 68;
1490
1491 let mut storage = MapStorage::new(
1492 MockFlashBig::default(),
1493 const { MapConfig::new(0x0000..0x1000) },
1494 NoCache::new(),
1495 );
1496 let mut data_buffer = AlignedBuf([0; 128]);
1497
1498 for i in 0..UPPER_BOUND {
1499 println!("Storing {i:?}");
1500
1501 storage
1502 .store_item(&mut data_buffer, &i, &vec![i; i as usize].as_slice())
1503 .await
1504 .unwrap();
1505 }
1506
1507 assert_eq!(
1508 storage
1509 .store_item(
1510 &mut data_buffer,
1511 &UPPER_BOUND,
1512 &vec![0; UPPER_BOUND as usize].as_slice(),
1513 )
1514 .await,
1515 Err(Error::FullStorage)
1516 );
1517
1518 for i in 0..UPPER_BOUND {
1519 let item = storage
1520 .fetch_item::<&[u8]>(&mut data_buffer, &i)
1521 .await
1522 .unwrap()
1523 .unwrap();
1524
1525 println!("Fetched {item:?}");
1526
1527 assert_eq!(item, vec![i; i as usize]);
1528 }
1529 }
1530
1531 #[test]
1532 async fn store_many_items_big() {
1533 let mut storage = MapStorage::new(
1534 mock_flash::MockFlashBase::<4, 1, 4096>::default(),
1535 const { MapConfig::new(0x0000..0x4000) },
1536 NoCache::new(),
1537 );
1538 let mut data_buffer = AlignedBuf([0; 128]);
1539
1540 const LENGHT_PER_KEY: [usize; 24] = [
1541 11, 13, 6, 13, 13, 10, 2, 3, 5, 36, 1, 65, 4, 6, 1, 15, 10, 7, 3, 15, 9, 3, 4, 5,
1542 ];
1543
1544 for _ in 0..100 {
1545 #[allow(clippy::needless_range_loop)]
1546 for i in 0..24 {
1547 storage
1548 .store_item(
1549 &mut data_buffer,
1550 &(i as u16),
1551 &vec![i as u8; LENGHT_PER_KEY[i]].as_slice(),
1552 )
1553 .await
1554 .unwrap();
1555 }
1556 }
1557
1558 #[allow(clippy::needless_range_loop)]
1559 for i in 0..24 {
1560 let item = storage
1561 .fetch_item::<&[u8]>(&mut data_buffer, &(i as u16))
1562 .await
1563 .unwrap()
1564 .unwrap();
1565
1566 println!("Fetched {item:?}");
1567
1568 assert_eq!(item, vec![i as u8; LENGHT_PER_KEY[i]]);
1569 }
1570 }
1571
1572 #[test]
1573 async fn remove_items() {
1574 let mut storage = MapStorage::new(
1575 mock_flash::MockFlashBase::<4, 1, 4096>::new(
1576 mock_flash::WriteCountCheck::Twice,
1577 None,
1578 true,
1579 ),
1580 const { MapConfig::new(0x0000..0x4000) },
1581 NoCache::new(),
1582 );
1583 let mut data_buffer = AlignedBuf([0; 128]);
1584
1585 for j in 0..10 {
1587 for i in 0..24 {
1588 storage
1589 .store_item(
1590 &mut data_buffer,
1591 &(i as u8),
1592 &vec![i as u8; j + 2].as_slice(),
1593 )
1594 .await
1595 .unwrap();
1596 }
1597 }
1598
1599 for j in (0..24).rev() {
1600 for i in 0..=j {
1602 assert!(
1603 storage
1604 .fetch_item::<&[u8]>(&mut data_buffer, &i)
1605 .await
1606 .unwrap()
1607 .is_some()
1608 );
1609 }
1610
1611 storage.remove_item(&mut data_buffer, &j).await.unwrap();
1613
1614 for i in 0..j {
1616 assert!(
1617 storage
1618 .fetch_item::<&[u8]>(&mut data_buffer, &i)
1619 .await
1620 .unwrap()
1621 .is_some()
1622 );
1623 }
1624
1625 assert!(
1626 storage
1627 .fetch_item::<&[u8]>(&mut data_buffer, &j)
1628 .await
1629 .unwrap()
1630 .is_none()
1631 );
1632 }
1633 }
1634
1635 #[test]
1636 async fn remove_all() {
1637 let mut storage = MapStorage::new(
1638 mock_flash::MockFlashBase::<4, 1, 4096>::new(
1639 mock_flash::WriteCountCheck::Twice,
1640 None,
1641 true,
1642 ),
1643 const { MapConfig::new(0x0000..0x4000) },
1644 NoCache::new(),
1645 );
1646 let mut data_buffer = AlignedBuf([0; 128]);
1647
1648 for value in 0..10 {
1650 for key in 0..24u8 {
1651 storage
1652 .store_item(&mut data_buffer, &key, &vec![key; value + 2].as_slice())
1653 .await
1654 .unwrap();
1655 }
1656 }
1657
1658 for key in 0..24u8 {
1660 assert!(
1661 storage
1662 .fetch_item::<&[u8]>(&mut data_buffer, &key)
1663 .await
1664 .unwrap()
1665 .is_some()
1666 );
1667 }
1668
1669 storage.remove_all_items(&mut data_buffer).await.unwrap();
1671
1672 for key in 0..24 {
1674 assert!(
1675 storage
1676 .fetch_item::<&[u8]>(&mut data_buffer, &key)
1677 .await
1678 .unwrap()
1679 .is_none()
1680 );
1681 }
1682 }
1683
1684 #[test]
1685 async fn store_too_big_item() {
1686 let mut storage = MapStorage::new(
1687 MockFlashBig::new(mock_flash::WriteCountCheck::Twice, None, true),
1688 const { MapConfig::new(0x000..0x1000) },
1689 NoCache::new(),
1690 );
1691
1692 storage
1693 .store_item(&mut [0; 1024], &0u8, &[0u8; 1024 - 4 * 2 - 8 - 1])
1694 .await
1695 .unwrap();
1696
1697 assert_eq!(
1698 storage
1699 .store_item(&mut [0; 1024], &0u8, &[0u8; 1024 - 4 * 2 - 8 - 1 + 1],)
1700 .await,
1701 Err(Error::ItemTooBig)
1702 );
1703 }
1704
1705 #[test]
1706 async fn item_iterator() {
1707 const UPPER_BOUND: u8 = 64;
1708 let mut storage = MapStorage::new(
1709 MockFlashBig::default(),
1710 const { MapConfig::new(0x000..0x1000) },
1711 NoCache::new(),
1712 );
1713
1714 let mut data_buffer = AlignedBuf([0; 128]);
1715
1716 for i in 0..UPPER_BOUND {
1717 storage
1718 .store_item(&mut data_buffer, &i, &vec![i; i as usize].as_slice())
1719 .await
1720 .unwrap();
1721 }
1722
1723 for i in 0..10 {
1725 storage
1726 .store_item(&mut data_buffer, &1u8, &vec![i; i as usize].as_slice())
1727 .await
1728 .unwrap();
1729 }
1730
1731 let mut map_iter = storage.fetch_all_items(&mut data_buffer).await.unwrap();
1732
1733 let mut count = 0;
1734 let mut last_value_buffer = [0u8; 64];
1735 let mut last_value_length = 0;
1736 while let Ok(Some((key, value))) = map_iter.next::<&[u8]>(&mut data_buffer).await {
1737 if key == 1 {
1738 last_value_length = value.len();
1740 last_value_buffer[..value.len()].copy_from_slice(value);
1741 } else {
1742 assert_eq!(value, vec![key; key as usize]);
1743 count += 1;
1744 }
1745 }
1746
1747 assert_eq!(last_value_length, 9);
1748 assert_eq!(
1749 &last_value_buffer[..last_value_length],
1750 vec![9u8; 9].as_slice()
1751 );
1752
1753 assert_eq!(count + 1, UPPER_BOUND);
1755 }
1756
1757 #[test]
1758 async fn store_unit_key() {
1759 let mut storage = MapStorage::new(
1760 MockFlashBig::default(),
1761 const { MapConfig::new(0x000..0x1000) },
1762 NoCache::new(),
1763 );
1764
1765 let mut data_buffer = AlignedBuf([0; 128]);
1766
1767 let item = storage
1768 .fetch_item::<&[u8]>(&mut data_buffer, &())
1769 .await
1770 .unwrap();
1771 assert_eq!(item, None);
1772
1773 storage
1774 .store_item(&mut data_buffer, &(), &[5u8])
1775 .await
1776 .unwrap();
1777 storage
1778 .store_item(&mut data_buffer, &(), &[5u8, 6])
1779 .await
1780 .unwrap();
1781
1782 let item = storage
1783 .fetch_item::<&[u8]>(&mut data_buffer, &())
1784 .await
1785 .unwrap()
1786 .unwrap();
1787 assert_eq!(item, &[5, 6]);
1788 }
1789
1790 #[test]
1791 async fn option_value() {
1792 let mut buffer = [0; 2];
1793
1794 assert_eq!(Some(42u8).serialize_into(&mut buffer), Ok(2));
1795 assert_eq!(Option::<u8>::deserialize_from(&buffer), Ok((Some(42u8), 2)));
1796 assert_eq!(buffer, [1, 42]);
1797
1798 let mut buffer = [0; 1];
1799
1800 assert_eq!(Option::<u8>::None.serialize_into(&mut buffer), Ok(1));
1801 assert_eq!(Option::<u8>::deserialize_from(&buffer), Ok((None, 1)));
1802 assert_eq!(buffer, [0]);
1803 }
1804
1805 #[test]
1806 async fn array_value() {
1807 let mut buffer = [0; 3];
1808 assert_eq!(Value::serialize_into(&[1u8, 2, 3], &mut buffer), Ok(3));
1809 assert_eq!(buffer, [1, 2, 3]);
1810 assert_eq!(
1811 <[u8; 3] as Value>::deserialize_from(&buffer),
1812 Ok(([1, 2, 3], 3))
1813 );
1814
1815 let mut buffer = [0; 4];
1816 assert_eq!(
1817 Value::serialize_into(&[0x1234u16, 0x5678], &mut buffer),
1818 Ok(4)
1819 );
1820 assert_eq!(buffer, [0x34, 0x12, 0x78, 0x56]);
1821 assert_eq!(
1822 <[u16; 2]>::deserialize_from(&buffer),
1823 Ok(([0x1234, 0x5678], 4))
1824 );
1825 }
1826}