1use core::{
102 marker::PhantomData,
103 mem::{MaybeUninit, size_of},
104};
105
106use cache::CacheImpl;
107use embedded_storage_async::nor_flash::MultiwriteNorFlash;
108
109use crate::item::{Item, ItemHeader, ItemIter, find_next_free_item_spot};
110
111use self::{
112 cache::{KeyCacheImpl, PrivateKeyCacheImpl},
113 item::{ItemHeaderIter, ItemUnborrowed},
114};
115
116use super::*;
117
/// Fetch the latest value stored for `search_key` from the map region of flash.
///
/// The search runs inside `run_with_auto_repair!`, so a corruption error
/// triggers a repair pass (`try_repair`) and one retry.
///
/// Returns `Ok(None)` when the key is not present. The returned `V` borrows
/// from `data_buffer`, hence the `'d` lifetime.
pub async fn fetch_item<'d, K: Key, V: Value<'d>, S: NorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl KeyCacheImpl<K>,
    data_buffer: &'d mut [u8],
    search_key: &K,
) -> Result<Option<V>, Error<S::Error>> {
    let result = run_with_auto_repair!(
        function = {
            fetch_item_with_location(flash, flash_range.clone(), cache, data_buffer, search_key)
                .await
        },
        repair = try_repair::<K, _>(flash, flash_range.clone(), cache, data_buffer).await?
    );

    let Some((item, _, item_key_len)) = result? else {
        return Ok(None);
    };

    let data_len = item.header.length as usize;
    // `item_key_len` is `Some` when the item was found by scanning (the key was
    // parsed along the way) and `None` when the location came from the cache;
    // in the latter case re-derive the key length from the stored bytes.
    let item_key_len = match item_key_len {
        Some(item_key_len) => item_key_len,
        None => K::get_len(&data_buffer[..data_len])?,
    };

    // The value bytes follow the serialized key inside the item payload.
    Ok(Some(
        V::deserialize_from(&data_buffer[item_key_len..][..data_len - item_key_len])
            .map_err(Error::SerializationError)?,
    ))
}
162
#[allow(clippy::type_complexity)]
/// Locate the newest stored item for `search_key`.
///
/// Fast path: if the key cache knows a location, read the item there directly.
/// Slow path: scan pages backwards from the last used page until the key is
/// found or a non-closed page ends the search.
///
/// Returns the (unborrowed) item, its address and — when found by scanning —
/// the serialized key length (`None` when served from the cache).
async fn fetch_item_with_location<K: Key, S: NorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl PrivateKeyCacheImpl<K>,
    data_buffer: &mut [u8],
    search_key: &K,
) -> Result<Option<(ItemUnborrowed, u32, Option<usize>)>, Error<S::Error>> {
    // The flash range must be erase-aligned and span at least two pages.
    assert_eq!(flash_range.start % S::ERASE_SIZE as u32, 0);
    assert_eq!(flash_range.end % S::ERASE_SIZE as u32, 0);
    assert!(flash_range.end - flash_range.start >= S::ERASE_SIZE as u32 * 2);

    assert!(S::ERASE_SIZE >= S::WORD_SIZE * 3);
    assert!(S::WORD_SIZE <= MAX_WORD_SIZE);

    if cache.is_dirty() {
        cache.invalidate_cache_state();
    }

    // Try to answer from the key-location cache; on any inconsistency fall
    // through to the full scan below.
    'cache: {
        if let Some(cached_location) = cache.key_location(search_key) {
            let page_index = calculate_page_index::<S>(flash_range.clone(), cached_location);
            let page_data_end_address =
                calculate_page_end_address::<S>(flash_range.clone(), page_index)
                    - S::WORD_SIZE as u32;

            let Some(header) =
                ItemHeader::read_new(flash, cached_location, page_data_end_address).await?
            else {
                // No header at the cached address: the cache is stale.
                if cfg!(feature = "_test") {
                    panic!("Wrong cache value. Addr: {cached_location}");
                }
                cache.invalidate_cache_state();
                break 'cache;
            };

            let item = header
                .read_item(flash, data_buffer, cached_location, page_data_end_address)
                .await?;

            match item {
                item::MaybeItem::Corrupted(_, _) | item::MaybeItem::Erased(_, _) => {
                    // Cached address no longer holds a live item.
                    if cfg!(feature = "_test") {
                        panic!("Wrong cache value. Addr: {cached_location}");
                    }

                    cache.invalidate_cache_state();
                    break 'cache;
                }
                item::MaybeItem::Present(item) => {
                    // Cache hit; key length was not parsed, hence `None`.
                    return Ok(Some((item.unborrow(), cached_location, None)));
                }
            }
        }
    }

    // Find the page currently being written to (partial-open).
    let mut last_used_page =
        find_first_page(flash, flash_range.clone(), cache, 0, PageState::PartialOpen).await?;

    if last_used_page.is_none() {
        // No partial-open page; look for a closed page just before an open one.
        if let Some(first_open_page) =
            find_first_page(flash, flash_range.clone(), cache, 0, PageState::Open).await?
        {
            let previous_page = previous_page::<S>(flash_range.clone(), first_open_page);
            if get_page_state(flash, flash_range.clone(), cache, previous_page)
                .await?
                .is_closed()
            {
                last_used_page = Some(previous_page);
            } else {
                // Every page is open: nothing has ever been stored.
                cache.unmark_dirty();
                return Ok(None);
            }
        } else {
            // Neither a partial-open nor an open page exists: invalid state.
            return Err(Error::Corrupted {
                #[cfg(feature = "_test")]
                backtrace: std::backtrace::Backtrace::capture(),
            });
        }
    }

    let mut current_page_to_check = last_used_page.unwrap();
    let mut newest_found_item_data = None;

    // Scan pages backwards, newest page first.
    loop {
        // Usable data area of the page (first/last word are reserved).
        let page_data_start_address =
            calculate_page_address::<S>(flash_range.clone(), current_page_to_check)
                + S::WORD_SIZE as u32;
        let page_data_end_address =
            calculate_page_end_address::<S>(flash_range.clone(), current_page_to_check)
                - S::WORD_SIZE as u32;

        let mut it = ItemIter::new(page_data_start_address, page_data_end_address);
        while let Some((item, address)) = it.next(flash, data_buffer).await? {
            let (found_key, found_key_len) = K::deserialize_from(item.data())?;
            if found_key == *search_key {
                // Keep the last match on the page.
                newest_found_item_data = Some((address, found_key_len));
            }
        }

        // A match on this page beats anything on older pages: stop scanning.
        if let Some((newest_found_item_address, _)) = newest_found_item_data.as_ref() {
            cache.notice_key_location(search_key, *newest_found_item_address, false);

            break;
        }

        // Continue backwards only while pages remain closed.
        let previous_page = previous_page::<S>(flash_range.clone(), current_page_to_check);

        if get_page_state(flash, flash_range.clone(), cache, previous_page).await?
            != PageState::Closed
        {
            cache.unmark_dirty();
            return Ok(None);
        }

        current_page_to_check = previous_page;
    }

    cache.unmark_dirty();

    // Re-read the chosen item: the scan reused `data_buffer` for other items.
    if let Some((newest_found_item_address, newest_found_item_key_len)) = newest_found_item_data {
        let item = ItemHeader::read_new(flash, newest_found_item_address, u32::MAX)
            .await?
            .ok_or_else(|| {
                // The header was readable moments ago; vanishing now is corruption.
                Error::Corrupted {
                    #[cfg(feature = "_test")]
                    backtrace: std::backtrace::Backtrace::capture(),
                }
            })?
            .read_item(flash, data_buffer, newest_found_item_address, u32::MAX)
            .await?;

        Ok(Some((
            item.unwrap()?.unborrow(),
            newest_found_item_address,
            Some(newest_found_item_key_len),
        )))
    } else {
        Ok(None)
    }
}
324
/// Store a key-value pair in the map region of flash.
///
/// A later `store_item` for the same key shadows earlier values (the fetch
/// path returns the newest occurrence). Runs under `run_with_auto_repair!`,
/// so a corruption error triggers `try_repair` and one retry.
pub async fn store_item<'d, K: Key, V: Value<'d>, S: NorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl KeyCacheImpl<K>,
    data_buffer: &mut [u8],
    key: &K,
    item: &V,
) -> Result<(), Error<S::Error>> {
    run_with_auto_repair!(
        function =
            store_item_inner(flash, flash_range.clone(), cache, data_buffer, key, item).await,
        repair = try_repair::<K, _>(flash, flash_range.clone(), cache, data_buffer).await?
    )
}
354
/// Implementation of [`store_item`]; runs inside the auto-repair wrapper.
///
/// Tries to append the item to the current partial-open page. When that page
/// is full it is closed, the following page becomes the new partial-open page
/// (migrating still-current items out of the page after it), and the write is
/// retried. After as many retries as there are pages, storage is full.
async fn store_item_inner<K: Key, S: NorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl KeyCacheImpl<K>,
    data_buffer: &mut [u8],
    key: &K,
    item: &dyn Value<'_>,
) -> Result<(), Error<S::Error>> {
    // The flash range must be erase-aligned and span at least two pages.
    assert_eq!(flash_range.start % S::ERASE_SIZE as u32, 0);
    assert_eq!(flash_range.end % S::ERASE_SIZE as u32, 0);

    assert!(flash_range.end - flash_range.start >= S::ERASE_SIZE as u32 * 2);

    assert!(S::ERASE_SIZE >= S::WORD_SIZE * 3);
    assert!(S::WORD_SIZE <= MAX_WORD_SIZE);

    if cache.is_dirty() {
        cache.invalidate_cache_state();
    }

    // Each loop pass either writes the item and returns, or advances to a new
    // page; exceeding the page count means the storage is truly full.
    let mut recursion_level = 0;
    loop {
        if recursion_level == get_pages::<S>(flash_range.clone(), 0).count() {
            cache.unmark_dirty();
            return Err(Error::FullStorage);
        }

        let next_page_to_use = if let Some(partial_open_page) =
            find_first_page(flash, flash_range.clone(), cache, 0, PageState::PartialOpen).await?
        {
            // Invariant: the page after the partial-open page must be open.
            if !get_page_state(
                flash,
                flash_range.clone(),
                cache,
                next_page::<S>(flash_range.clone(), partial_open_page),
            )
            .await?
            .is_open()
            {
                return Err(Error::Corrupted {
                    #[cfg(feature = "_test")]
                    backtrace: std::backtrace::Backtrace::capture(),
                });
            }

            // Usable data area of the page (first/last word are reserved).
            let page_data_start_address =
                calculate_page_address::<S>(flash_range.clone(), partial_open_page)
                    + S::WORD_SIZE as u32;
            let page_data_end_address =
                calculate_page_end_address::<S>(flash_range.clone(), partial_open_page)
                    - S::WORD_SIZE as u32;

            // Serialize key followed by value into the scratch buffer.
            let key_len = key.serialize_into(data_buffer)?;
            let item_data_length = key_len
                + item
                    .serialize_into(&mut data_buffer[key_len..])
                    .map_err(Error::SerializationError)?;

            // Payload must fit the u16 length field and a single page.
            if item_data_length > u16::MAX as usize
                || item_data_length
                    > calculate_page_size::<S>()
                        .saturating_sub(ItemHeader::data_address::<S>(0) as usize)
            {
                cache.unmark_dirty();
                return Err(Error::ItemTooBig);
            }

            let free_spot_address = find_next_free_item_spot(
                flash,
                flash_range.clone(),
                cache,
                page_data_start_address,
                page_data_end_address,
                item_data_length as u32,
            )
            .await?;

            match free_spot_address {
                Some(free_spot_address) => {
                    // Enough room on this page: write and finish.
                    cache.notice_key_location(key, free_spot_address, true);
                    Item::write_new(
                        flash,
                        flash_range.clone(),
                        cache,
                        free_spot_address,
                        &data_buffer[..item_data_length],
                    )
                    .await?;

                    cache.unmark_dirty();
                    return Ok(());
                }
                None => {
                    // Page full: close it and move on to the next page.
                    close_page(flash, flash_range.clone(), cache, partial_open_page).await?;
                    Some(next_page::<S>(flash_range.clone(), partial_open_page))
                }
            }
        } else {
            None
        };

        match next_page_to_use {
            Some(next_page_to_use) => {
                let next_page_state =
                    get_page_state(flash, flash_range.clone(), cache, next_page_to_use).await?;

                if !next_page_state.is_open() {
                    return Err(Error::Corrupted {
                        #[cfg(feature = "_test")]
                        backtrace: std::backtrace::Backtrace::capture(),
                    });
                }

                // Mark the fresh page as the one now being written.
                partial_close_page(flash, flash_range.clone(), cache, next_page_to_use).await?;

                // If the page after the new one still holds data, migrate its
                // still-current items into the new page first.
                let next_buffer_page = next_page::<S>(flash_range.clone(), next_page_to_use);
                let next_buffer_page_state =
                    get_page_state(flash, flash_range.clone(), cache, next_buffer_page).await?;

                if !next_buffer_page_state.is_open() {
                    migrate_items::<K, _>(
                        flash,
                        flash_range.clone(),
                        cache,
                        data_buffer,
                        next_buffer_page,
                        next_page_to_use,
                    )
                    .await?;
                }
            }
            None => {
                // No partial-open page exists yet: start on the first open page.
                let first_open_page =
                    match find_first_page(flash, flash_range.clone(), cache, 0, PageState::Open)
                        .await?
                    {
                        Some(first_open_page) => first_open_page,
                        None => {
                            // No open page at all: invalid state.
                            return Err(Error::Corrupted {
                                #[cfg(feature = "_test")]
                                backtrace: std::backtrace::Backtrace::capture(),
                            });
                        }
                    };

                partial_close_page(flash, flash_range.clone(), cache, first_open_page).await?;
            }
        }

        recursion_level += 1;
    }
}
531
/// Erase the stored value for `search_key` from the map.
///
/// Every stored occurrence of the key is erased. Runs under the auto-repair
/// wrapper so a corruption error triggers `try_repair` and one retry.
/// NOTE(review): the `MultiwriteNorFlash` bound presumably exists because
/// `erase_data` rewrites words that already contain item data — confirm
/// against the `item` module.
pub async fn remove_item<K: Key, S: MultiwriteNorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl KeyCacheImpl<K>,
    data_buffer: &mut [u8],
    search_key: &K,
) -> Result<(), Error<S::Error>> {
    run_with_auto_repair!(
        function = remove_item_inner::<K, _>(
            flash,
            flash_range.clone(),
            cache,
            data_buffer,
            Some(search_key)
        )
        .await,
        repair = try_repair::<K, _>(flash, flash_range.clone(), cache, data_buffer).await?
    )
}
572
/// Erase every item stored in the map.
///
/// Same machinery as [`remove_item`], but with no search key every present
/// item matches and is erased.
pub async fn remove_all_items<K: Key, S: MultiwriteNorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl KeyCacheImpl<K>,
    data_buffer: &mut [u8],
) -> Result<(), Error<S::Error>> {
    run_with_auto_repair!(
        function =
            remove_item_inner::<K, _>(flash, flash_range.clone(), cache, data_buffer, None).await,
        repair = try_repair::<K, _>(flash, flash_range.clone(), cache, data_buffer).await?
    )
}
602
/// Shared implementation of [`remove_item`] (`Some(key)`) and
/// [`remove_all_items`] (`None`): walk all non-open pages and erase every
/// matching present item.
async fn remove_item_inner<K: Key, S: MultiwriteNorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl KeyCacheImpl<K>,
    data_buffer: &mut [u8],
    search_key: Option<&K>,
) -> Result<(), Error<S::Error>> {
    // Update the cache up front: one key can be marked erased; a full wipe
    // invalidates everything.
    if let Some(key) = &search_key {
        cache.notice_key_erased(key);
    } else {
        cache.invalidate_cache_state();
    }

    // Start the page walk just after the partial-open page (or page 0 when
    // none exists) so wrapping around visits every page once.
    let last_used_page =
        find_first_page(flash, flash_range.clone(), cache, 0, PageState::PartialOpen)
            .await?
            .unwrap_or_default();

    for page_index in get_pages::<S>(
        flash_range.clone(),
        next_page::<S>(flash_range.clone(), last_used_page),
    ) {
        // Open pages hold no items.
        if get_page_state(flash, flash_range.clone(), cache, page_index)
            .await?
            .is_open()
        {
            continue;
        }

        // Usable data area of the page (first/last word are reserved).
        let page_data_start_address =
            calculate_page_address::<S>(flash_range.clone(), page_index) + S::WORD_SIZE as u32;
        let page_data_end_address =
            calculate_page_end_address::<S>(flash_range.clone(), page_index) - S::WORD_SIZE as u32;

        let mut item_headers = ItemHeaderIter::new(page_data_start_address, page_data_end_address);

        while let (Some(item_header), item_address) = item_headers.next(flash).await? {
            let item = item_header
                .read_item(flash, data_buffer, item_address, page_data_end_address)
                .await?;

            match item {
                // Corrupted and already-erased items need no action.
                item::MaybeItem::Corrupted(_, _) => continue,
                item::MaybeItem::Erased(_, _) => continue,
                item::MaybeItem::Present(item) => {
                    // With no search key (remove-all), every item matches.
                    let item_match = match search_key {
                        Some(search_key) => K::deserialize_from(item.data())?.0 == *search_key,
                        _ => true,
                    };
                    if item_match {
                        item.header
                            .erase_data(flash, flash_range.clone(), cache, item_address)
                            .await?;
                    }
                }
            }
        }
    }

    cache.unmark_dirty();

    Ok(())
}
676
/// Iterator over the items stored in the map, created by [`fetch_all_items`].
///
/// NOTE(review): `next()` yields every present item without de-duplication, so
/// a key written multiple times may be observed more than once — confirm the
/// intended contract against the crate documentation.
pub struct MapItemIter<'d, 'c, K: Key, S: NorFlash, CI: CacheImpl> {
    flash: &'d mut S,
    flash_range: Range<u32>,
    // Page where iteration started; reaching it again ends the iteration.
    first_page: usize,
    cache: &'c mut CI,
    current_page_index: usize,
    pub(crate) current_iter: ItemIter,
    _key: PhantomData<K>,
}
737
impl<K: Key, S: NorFlash, CI: CacheImpl> MapItemIter<'_, '_, K, S, CI> {
    /// Fetch the next key/value pair from flash.
    ///
    /// Returns `Ok(None)` once the page walk wraps back around to the page the
    /// iteration started on. The returned `V` borrows from `data_buffer`.
    pub async fn next<'a, V: Value<'a>>(
        &mut self,
        data_buffer: &'a mut [u8],
    ) -> Result<Option<(K, V)>, Error<S::Error>> {
        // Pull the next item, crossing page boundaries as needed.
        let item = loop {
            if let Some((item, _address)) = self.current_iter.next(self.flash, data_buffer).await? {
                break item;
            }

            // Current page exhausted: advance to the next page holding data.
            loop {
                self.current_page_index =
                    next_page::<S>(self.flash_range.clone(), self.current_page_index);

                // Wrapped all the way around: iteration is complete.
                if self.current_page_index == self.first_page {
                    return Ok(None);
                }

                match get_page_state::<S>(
                    self.flash,
                    self.flash_range.clone(),
                    self.cache,
                    self.current_page_index,
                )
                .await
                {
                    // Only closed/partial-open pages contain items; iterate the
                    // page's data area (first/last word are reserved).
                    Ok(PageState::Closed) | Ok(PageState::PartialOpen) => {
                        self.current_iter = ItemIter::new(
                            calculate_page_address::<S>(
                                self.flash_range.clone(),
                                self.current_page_index,
                            ) + S::WORD_SIZE as u32,
                            calculate_page_end_address::<S>(
                                self.flash_range.clone(),
                                self.current_page_index,
                            ) - S::WORD_SIZE as u32,
                        );
                        break;
                    }
                    _ => continue,
                }
            }
        };

        let data_len = item.header.length as usize;
        // The payload is the serialized key followed by the value bytes.
        let (key, key_len) = K::deserialize_from(item.data())?;

        Ok(Some((
            key,
            V::deserialize_from(&data_buffer[key_len..][..data_len - key_len])
                .map_err(Error::SerializationError)?,
        )))
    }
}
798
/// Build a [`MapItemIter`] that walks every item currently stored in the map.
///
/// Determines a starting page under the auto-repair wrapper (the page after
/// the partial-open page, or the open page right after a closed one), then
/// seeds the iterator at that page.
pub async fn fetch_all_items<'d, 'c, K: Key, S: NorFlash, CI: KeyCacheImpl<K>>(
    flash: &'d mut S,
    flash_range: Range<u32>,
    cache: &'c mut CI,
    data_buffer: &mut [u8],
) -> Result<MapItemIter<'d, 'c, K, S, CI>, Error<S::Error>> {
    let first_page = run_with_auto_repair!(
        function = {
            match find_first_page(flash, flash_range.clone(), cache, 0, PageState::PartialOpen)
                .await?
            {
                Some(last_used_page) => {
                    // Start right after the page currently being written.
                    Ok(next_page::<S>(flash_range.clone(), last_used_page))
                }
                None => {
                    // No partial-open page: look for a closed→open boundary.
                    if let Some(first_open_page) =
                        find_first_page(flash, flash_range.clone(), cache, 0, PageState::Open)
                            .await?
                    {
                        let previous_page =
                            previous_page::<S>(flash_range.clone(), first_open_page);
                        if get_page_state(flash, flash_range.clone(), cache, previous_page)
                            .await?
                            .is_closed()
                        {
                            Ok(first_open_page)
                        } else {
                            // All pages open: the map is empty, any start works.
                            cache.unmark_dirty();
                            Ok(0)
                        }
                    } else {
                        // Neither partial-open nor open pages exist: invalid state.
                        Err(Error::Corrupted {
                            #[cfg(feature = "_test")]
                            backtrace: std::backtrace::Backtrace::capture(),
                        })
                    }
                }
            }
        },
        repair = try_repair::<K, _>(flash, flash_range.clone(), cache, data_buffer).await?
    )?;

    Ok(MapItemIter {
        flash,
        flash_range: flash_range.clone(),
        first_page,
        cache,
        current_page_index: first_page,
        current_iter: ItemIter::new(
            calculate_page_address::<S>(flash_range.clone(), first_page) + S::WORD_SIZE as u32,
            calculate_page_end_address::<S>(flash_range.clone(), first_page) - S::WORD_SIZE as u32,
        ),
        _key: PhantomData,
    })
}
929
/// Trait implemented by anything usable as a map key.
///
/// Keys are serialized in front of the value within each stored item, so the
/// encoding must be self-delimiting: `deserialize_from` reports how many
/// bytes the key occupied.
pub trait Key: Eq + Clone + Sized {
    /// Serialize the key into `buffer`, returning the number of bytes written.
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError>;
    /// Deserialize a key from the start of `buffer`, returning the key and its
    /// serialized length.
    fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError>;
    /// Return the serialized length of the key at the start of `buffer`.
    /// Override when the length is known without a full deserialization.
    fn get_len(buffer: &[u8]) -> Result<usize, SerializationError> {
        Self::deserialize_from(buffer).map(|(_, len)| len)
    }
}
951
/// Implements [`Key`] for a primitive integer using a fixed-width
/// little-endian encoding.
macro_rules! impl_key_num {
    ($int:ty) => {
        impl Key for $int {
            fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
                let bytes = self.to_le_bytes();
                match buffer.get_mut(..bytes.len()) {
                    Some(target) => {
                        target.copy_from_slice(&bytes);
                        Ok(bytes.len())
                    }
                    None => Err(SerializationError::BufferTooSmall),
                }
            }

            fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
                let len = size_of::<Self>();
                match buffer.get(..len) {
                    Some(bytes) => Ok((Self::from_le_bytes(bytes.try_into().unwrap()), len)),
                    None => Err(SerializationError::BufferTooSmall),
                }
            }

            fn get_len(_buffer: &[u8]) -> Result<usize, SerializationError> {
                // Integer keys always occupy their full fixed width.
                Ok(size_of::<Self>())
            }
        }
    };
}
982
// Fixed-width little-endian `Key` implementations for the primitive integers.
impl_key_num!(u8);
impl_key_num!(u16);
impl_key_num!(u32);
impl_key_num!(u64);
impl_key_num!(u128);
impl_key_num!(i8);
impl_key_num!(i16);
impl_key_num!(i32);
impl_key_num!(i64);
impl_key_num!(i128);
993
994impl<const N: usize> Key for [u8; N] {
995 fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
996 if buffer.len() < N {
997 return Err(SerializationError::BufferTooSmall);
998 }
999 buffer[..N].copy_from_slice(self);
1000 Ok(N)
1001 }
1002
1003 fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
1004 if buffer.len() < N {
1005 return Err(SerializationError::BufferTooSmall);
1006 }
1007
1008 Ok((buffer[..N].try_into().unwrap(), N))
1009 }
1010
1011 fn get_len(_buffer: &[u8]) -> Result<usize, SerializationError> {
1012 Ok(N)
1013 }
1014}
1015
1016impl Key for () {
1017 fn serialize_into(&self, _buffer: &mut [u8]) -> Result<usize, SerializationError> {
1018 Ok(0)
1019 }
1020
1021 fn deserialize_from(_buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
1022 Ok(((), 0))
1023 }
1024}
1025
/// Trait implemented by anything storable as a map value.
///
/// The `'a` lifetime allows zero-copy values (e.g. `&'a [u8]`) that borrow
/// from the buffer they were deserialized from.
pub trait Value<'a> {
    /// Serialize the value into `buffer`, returning the number of bytes written.
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError>;
    /// Deserialize a value from `buffer`; the result may borrow from it.
    fn deserialize_from(buffer: &'a [u8]) -> Result<Self, SerializationError>
    where
        Self: Sized;
}
1044
1045impl<'a> Value<'a> for bool {
1046 fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
1047 <u8 as Value>::serialize_into(&(*self as u8), buffer)
1048 }
1049
1050 fn deserialize_from(buffer: &'a [u8]) -> Result<Self, SerializationError>
1051 where
1052 Self: Sized,
1053 {
1054 Ok(<u8 as Value>::deserialize_from(buffer)? != 0)
1055 }
1056}
1057
1058impl<'a, T: Value<'a>> Value<'a> for Option<T> {
1059 fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
1060 if let Some(val) = self {
1061 <bool as Value>::serialize_into(&true, buffer)?;
1062 <T as Value>::serialize_into(val, &mut buffer[1..]).map(|len| len + 1)
1063 } else {
1064 <bool as Value>::serialize_into(&false, buffer)
1065 }
1066 }
1067
1068 fn deserialize_from(buffer: &'a [u8]) -> Result<Self, SerializationError>
1069 where
1070 Self: Sized,
1071 {
1072 if <bool as Value>::deserialize_from(buffer)? {
1073 Ok(Some(<T as Value>::deserialize_from(&buffer[1..])?))
1074 } else {
1075 Ok(None)
1076 }
1077 }
1078}
1079
1080impl<'a, T: Value<'a>, const N: usize> Value<'a> for [T; N] {
1081 fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
1082 if buffer.len() < size_of::<T>() * N {
1083 return Err(SerializationError::BufferTooSmall);
1084 }
1085
1086 let mut size = 0;
1087 for v in self {
1088 size += <T as Value>::serialize_into(v, &mut buffer[size..])?;
1089 }
1090
1091 Ok(size)
1092 }
1093
1094 fn deserialize_from(buffer: &'a [u8]) -> Result<Self, SerializationError>
1095 where
1096 Self: Sized,
1097 {
1098 let mut array = MaybeUninit::<[T; N]>::uninit();
1099
1100 if N == 0 {
1101 return Ok(unsafe { array.assume_init() });
1103 }
1104
1105 let ptr = array.as_mut_ptr() as *mut T;
1106
1107 unsafe {
1112 for i in 0..N {
1113 *ptr.add(i) = <T as Value>::deserialize_from(&buffer[i * size_of::<T>()..])?;
1114 }
1115
1116 Ok(array.assume_init())
1117 }
1118 }
1119}
1120
1121impl<'a> Value<'a> for &'a [u8] {
1122 fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
1123 if buffer.len() < self.len() {
1124 return Err(SerializationError::BufferTooSmall);
1125 }
1126
1127 buffer[..self.len()].copy_from_slice(self);
1128 Ok(self.len())
1129 }
1130
1131 fn deserialize_from(buffer: &'a [u8]) -> Result<Self, SerializationError>
1132 where
1133 Self: Sized,
1134 {
1135 Ok(buffer)
1136 }
1137}
1138
/// Implements [`Value`] for a primitive number using a fixed-width
/// little-endian encoding.
macro_rules! impl_map_item_num {
    ($int:ty) => {
        impl<'a> Value<'a> for $int {
            fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
                let len = core::mem::size_of::<Self>();
                // Bug fix: a too-small buffer previously caused an indexing
                // panic; report `BufferTooSmall` like the `Key` impls instead.
                if buffer.len() < len {
                    return Err(SerializationError::BufferTooSmall);
                }
                buffer[..len].copy_from_slice(&self.to_le_bytes());
                Ok(len)
            }

            fn deserialize_from(buffer: &[u8]) -> Result<Self, SerializationError> {
                // Bug fix: use `get` instead of indexing so that a short
                // buffer yields `BufferTooSmall` rather than a panic.
                let bytes = buffer
                    .get(..core::mem::size_of::<Self>())
                    .ok_or(SerializationError::BufferTooSmall)?;
                Ok(Self::from_le_bytes(
                    bytes
                        .try_into()
                        .map_err(|_| SerializationError::BufferTooSmall)?,
                ))
            }
        }
    };
}
1157
// Fixed-width little-endian `Value` implementations for primitive numbers.
impl_map_item_num!(u8);
impl_map_item_num!(u16);
impl_map_item_num!(u32);
impl_map_item_num!(u64);
impl_map_item_num!(u128);
impl_map_item_num!(i8);
impl_map_item_num!(i16);
impl_map_item_num!(i32);
impl_map_item_num!(i64);
impl_map_item_num!(i128);
impl_map_item_num!(f32);
impl_map_item_num!(f64);
1170
/// Errors that can occur while (de)serializing keys and values.
#[non_exhaustive]
#[derive(Debug, PartialEq, Eq, Clone)]
#[cfg_attr(feature = "defmt-03", derive(defmt::Format))]
pub enum SerializationError {
    /// The provided buffer was too small for the operation.
    BufferTooSmall,
    /// The bytes could not be interpreted as a valid value.
    InvalidData,
    /// The bytes do not follow the expected serialization format.
    InvalidFormat,
    /// An implementation-defined error code.
    Custom(i32),
}
1188
1189impl core::fmt::Display for SerializationError {
1190 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
1191 match self {
1192 SerializationError::BufferTooSmall => write!(f, "Buffer too small"),
1193 SerializationError::InvalidData => write!(f, "Invalid data"),
1194 SerializationError::InvalidFormat => write!(f, "Invalid format"),
1195 SerializationError::Custom(val) => write!(f, "Custom error: {val}"),
1196 }
1197 }
1198}
1199
/// Move all still-current items from `source_page` into `target_page`, then
/// erase (re-open) the source page.
///
/// Only items whose newest occurrence lives on the source page are copied;
/// superseded values are dropped by the migration.
async fn migrate_items<K: Key, S: NorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl PrivateKeyCacheImpl<K>,
    data_buffer: &mut [u8],
    source_page: usize,
    target_page: usize,
) -> Result<(), Error<S::Error>> {
    // First writable spot on the target page (skipping the reserved word).
    let mut next_page_write_address =
        calculate_page_address::<S>(flash_range.clone(), target_page) + S::WORD_SIZE as u32;

    let mut it = ItemIter::new(
        calculate_page_address::<S>(flash_range.clone(), source_page) + S::WORD_SIZE as u32,
        calculate_page_end_address::<S>(flash_range.clone(), source_page) - S::WORD_SIZE as u32,
    );
    while let Some((item, item_address)) = it.next(flash, data_buffer).await? {
        let (key, _) = K::deserialize_from(item.data())?;
        // Reclaim the buffer from the item so the fetch below can reuse it.
        let (_, data_buffer) = item.destruct();

        cache.unmark_dirty();

        // Look up the newest occurrence of this key anywhere in the map.
        let Some((found_item, found_address, _)) =
            fetch_item_with_location::<K, S>(flash, flash_range.clone(), cache, data_buffer, &key)
                .await?
        else {
            // The key was just read from flash; not finding it is corruption.
            return Err(Error::Corrupted {
                #[cfg(feature = "_test")]
                backtrace: std::backtrace::Backtrace::capture(),
            });
        };

        let found_item = found_item.reborrow(data_buffer);

        // Copy only if the source-page item IS the newest version of the key.
        if found_address == item_address {
            cache.notice_key_location(&key, next_page_write_address, true);
            found_item
                .write(flash, flash_range.clone(), cache, next_page_write_address)
                .await?;
            next_page_write_address = found_item
                .header
                .next_item_address::<S>(next_page_write_address);
        }
    }

    open_page(flash, flash_range.clone(), cache, source_page).await?;

    Ok(())
}
1254
/// Attempt to repair map state after an operation reported corruption.
///
/// Invalidates the cache, runs the crate-level general repair, and — when the
/// page after the partial-open page is unexpectedly not open — redoes the
/// interrupted migration: the partial-open page is erased, marked
/// partial-open again, and the buffer page's items are migrated into it.
async fn try_repair<K: Key, S: NorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl KeyCacheImpl<K>,
    data_buffer: &mut [u8],
) -> Result<(), Error<S::Error>> {
    // The cache may reflect the corrupted state; start from scratch.
    cache.invalidate_cache_state();

    crate::try_general_repair(flash, flash_range.clone(), cache).await?;

    if let Some(partial_open_page) =
        find_first_page(flash, flash_range.clone(), cache, 0, PageState::PartialOpen).await?
    {
        let buffer_page = next_page::<S>(flash_range.clone(), partial_open_page);
        if !get_page_state(flash, flash_range.clone(), cache, buffer_page)
            .await?
            .is_open()
        {
            // A migration was in progress; restart it from a clean target page.
            open_page(flash, flash_range.clone(), cache, partial_open_page).await?;

            partial_close_page(flash, flash_range.clone(), cache, partial_open_page).await?;

            migrate_items::<K, _>(
                flash,
                flash_range.clone(),
                cache,
                data_buffer,
                buffer_page,
                partial_open_page,
            )
            .await?;
        }
    }

    Ok(())
}
1305
1306#[cfg(test)]
1307mod tests {
1308 use super::*;
1309 use futures_test::test;
1310
1311 type MockFlashBig = mock_flash::MockFlashBase<4, 4, 256>;
1312 type MockFlashTiny = mock_flash::MockFlashBase<2, 1, 32>;
1313
    #[test]
    async fn store_and_fetch() {
        // End-to-end exercise of `store_item`/`fetch_item` on a 4-page mock
        // flash: missing keys, overwrite semantics, many-key churn with
        // wrap-around, and single-key hammering.
        let mut flash = MockFlashBig::default();
        let flash_range = 0x000..0x1000;

        let mut data_buffer = AlignedBuf([0; 128]);

        let start_snapshot = flash.stats_snapshot();

        // Fetching from empty flash yields nothing, for any key.
        let item = fetch_item::<u8, &[u8], _>(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &0,
        )
        .await
        .unwrap();
        assert_eq!(item, None);

        let item = fetch_item::<u8, &[u8], _>(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &60,
        )
        .await
        .unwrap();
        assert_eq!(item, None);

        let item = fetch_item::<u8, &[u8], _>(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &0xFF,
        )
        .await
        .unwrap();
        assert_eq!(item, None);

        // Store twice under the same key; the second value must win.
        store_item(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &0u8,
            &[5u8],
        )
        .await
        .unwrap();
        store_item(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &0u8,
            &[5u8, 6],
        )
        .await
        .unwrap();

        let item = fetch_item::<u8, &[u8], _>(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &0,
        )
        .await
        .unwrap()
        .unwrap();
        assert_eq!(item, &[5, 6]);

        // A different key must not disturb the first one.
        store_item(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &1u8,
            &[2u8, 2, 2, 2, 2, 2],
        )
        .await
        .unwrap();

        let item = fetch_item::<u8, &[u8], _>(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &0,
        )
        .await
        .unwrap()
        .unwrap();
        assert_eq!(item, &[5, 6]);

        let item = fetch_item::<u8, &[u8], _>(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &1,
        )
        .await
        .unwrap()
        .unwrap();
        assert_eq!(item, &[2, 2, 2, 2, 2, 2]);

        // Churn 10 keys through many page wrap-arounds.
        for index in 0..4000 {
            store_item(
                &mut flash,
                flash_range.clone(),
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &((index % 10) as u8),
                &vec![(index % 10) as u8 * 2; index % 10].as_slice(),
            )
            .await
            .unwrap();
        }

        for i in 0..10 {
            let item = fetch_item::<u8, &[u8], _>(
                &mut flash,
                flash_range.clone(),
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &i,
            )
            .await
            .unwrap()
            .unwrap();
            assert_eq!(item, &vec![(i % 10) * 2; (i % 10) as usize]);
        }

        // Hammer one extra key; the earlier keys must survive the churn.
        for _ in 0..4000 {
            store_item(
                &mut flash,
                flash_range.clone(),
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &11u8,
                &[0; 10],
            )
            .await
            .unwrap();
        }

        for i in 0..10 {
            let item = fetch_item::<u8, &[u8], _>(
                &mut flash,
                flash_range.clone(),
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &i,
            )
            .await
            .unwrap()
            .unwrap();
            assert_eq!(item, &vec![(i % 10) * 2; (i % 10) as usize]);
        }

        println!("{:?}", start_snapshot.compare_to(flash.stats_snapshot()),);
    }
1480
    #[test]
    async fn store_too_many_items() {
        // On a tiny 2-page flash, storing beyond capacity must return
        // `Error::FullStorage` while keeping all previously stored items intact.
        const UPPER_BOUND: u8 = 3;

        let mut tiny_flash = MockFlashTiny::default();
        let mut data_buffer = AlignedBuf([0; 128]);

        for i in 0..UPPER_BOUND {
            println!("Storing {i:?}");

            store_item(
                &mut tiny_flash,
                0x00..0x40,
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &i,
                &vec![i; i as usize].as_slice(),
            )
            .await
            .unwrap();
        }

        // One more store does not fit anymore.
        assert_eq!(
            store_item(
                &mut tiny_flash,
                0x00..0x40,
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &UPPER_BOUND,
                &vec![0; UPPER_BOUND as usize].as_slice(),
            )
            .await,
            Err(Error::FullStorage)
        );

        // All earlier items are still readable.
        for i in 0..UPPER_BOUND {
            let item = fetch_item::<u8, &[u8], _>(
                &mut tiny_flash,
                0x00..0x40,
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &i,
            )
            .await
            .unwrap()
            .unwrap();

            println!("Fetched {item:?}");

            assert_eq!(item, vec![i; i as usize]);
        }
    }
1533
    #[test]
    async fn store_too_many_items_big() {
        // Same capacity-exhaustion check as `store_too_many_items`, but on the
        // larger 4-page mock flash with more distinct keys.
        const UPPER_BOUND: u8 = 68;

        let mut big_flash = MockFlashBig::default();
        let mut data_buffer = AlignedBuf([0; 128]);

        for i in 0..UPPER_BOUND {
            println!("Storing {i:?}");

            store_item(
                &mut big_flash,
                0x0000..0x1000,
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &i,
                &vec![i; i as usize].as_slice(),
            )
            .await
            .unwrap();
        }

        // The next store must fail with `FullStorage`.
        assert_eq!(
            store_item(
                &mut big_flash,
                0x0000..0x1000,
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &UPPER_BOUND,
                &vec![0; UPPER_BOUND as usize].as_slice(),
            )
            .await,
            Err(Error::FullStorage)
        );

        // All earlier items survived.
        for i in 0..UPPER_BOUND {
            let item = fetch_item::<u8, &[u8], _>(
                &mut big_flash,
                0x0000..0x1000,
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &i,
            )
            .await
            .unwrap()
            .unwrap();

            println!("Fetched {item:?}");

            assert_eq!(item, vec![i; i as usize]);
        }
    }
1586
1587 #[test]
1588 async fn store_many_items_big() {
1589 let mut flash = mock_flash::MockFlashBase::<4, 1, 4096>::default();
1590 let mut data_buffer = AlignedBuf([0; 128]);
1591
1592 const LENGHT_PER_KEY: [usize; 24] = [
1593 11, 13, 6, 13, 13, 10, 2, 3, 5, 36, 1, 65, 4, 6, 1, 15, 10, 7, 3, 15, 9, 3, 4, 5,
1594 ];
1595
1596 for _ in 0..100 {
1597 #[allow(clippy::needless_range_loop)]
1598 for i in 0..24 {
1599 store_item(
1600 &mut flash,
1601 0x0000..0x4000,
1602 &mut cache::NoCache::new(),
1603 &mut data_buffer,
1604 &(i as u16),
1605 &vec![i as u8; LENGHT_PER_KEY[i]].as_slice(),
1606 )
1607 .await
1608 .unwrap();
1609 }
1610 }
1611
1612 #[allow(clippy::needless_range_loop)]
1613 for i in 0..24 {
1614 let item = fetch_item::<u16, &[u8], _>(
1615 &mut flash,
1616 0x0000..0x4000,
1617 &mut cache::NoCache::new(),
1618 &mut data_buffer,
1619 &(i as u16),
1620 )
1621 .await
1622 .unwrap()
1623 .unwrap();
1624
1625 println!("Fetched {item:?}");
1626
1627 assert_eq!(item, vec![i as u8; LENGHT_PER_KEY[i]]);
1628 }
1629 }
1630
1631 #[test]
1632 async fn remove_items() {
1633 let mut flash = mock_flash::MockFlashBase::<4, 1, 4096>::new(
1634 mock_flash::WriteCountCheck::Twice,
1635 None,
1636 true,
1637 );
1638 let mut data_buffer = AlignedBuf([0; 128]);
1639 const FLASH_RANGE: Range<u32> = 0x0000..0x4000;
1640
1641 for j in 0..10 {
1643 for i in 0..24 {
1644 store_item(
1645 &mut flash,
1646 FLASH_RANGE,
1647 &mut cache::NoCache::new(),
1648 &mut data_buffer,
1649 &(i as u8),
1650 &vec![i as u8; j + 2].as_slice(),
1651 )
1652 .await
1653 .unwrap();
1654 }
1655 }
1656
1657 for j in (0..24).rev() {
1658 for i in 0..=j {
1660 assert!(
1661 fetch_item::<u8, &[u8], _>(
1662 &mut flash,
1663 FLASH_RANGE,
1664 &mut cache::NoCache::new(),
1665 &mut data_buffer,
1666 &i
1667 )
1668 .await
1669 .unwrap()
1670 .is_some()
1671 );
1672 }
1673
1674 remove_item::<u8, _>(
1676 &mut flash,
1677 FLASH_RANGE,
1678 &mut cache::NoCache::new(),
1679 &mut data_buffer,
1680 &j,
1681 )
1682 .await
1683 .unwrap();
1684
1685 for i in 0..j {
1687 assert!(
1688 fetch_item::<u8, &[u8], _>(
1689 &mut flash,
1690 FLASH_RANGE,
1691 &mut cache::NoCache::new(),
1692 &mut data_buffer,
1693 &i
1694 )
1695 .await
1696 .unwrap()
1697 .is_some()
1698 );
1699 }
1700
1701 assert!(
1702 fetch_item::<u8, &[u8], _>(
1703 &mut flash,
1704 FLASH_RANGE,
1705 &mut cache::NoCache::new(),
1706 &mut data_buffer,
1707 &j
1708 )
1709 .await
1710 .unwrap()
1711 .is_none()
1712 );
1713 }
1714 }
1715
1716 #[test]
1717 async fn remove_all() {
1718 let mut flash = mock_flash::MockFlashBase::<4, 1, 4096>::new(
1719 mock_flash::WriteCountCheck::Twice,
1720 None,
1721 true,
1722 );
1723 let mut data_buffer = AlignedBuf([0; 128]);
1724 const FLASH_RANGE: Range<u32> = 0x0000..0x4000;
1725
1726 for value in 0..10 {
1728 for key in 0..24u8 {
1729 store_item(
1730 &mut flash,
1731 FLASH_RANGE,
1732 &mut cache::NoCache::new(),
1733 &mut data_buffer,
1734 &key,
1735 &vec![key; value + 2].as_slice(),
1736 )
1737 .await
1738 .unwrap();
1739 }
1740 }
1741
1742 for key in 0..24u8 {
1744 assert!(
1745 fetch_item::<u8, &[u8], _>(
1746 &mut flash,
1747 FLASH_RANGE,
1748 &mut cache::NoCache::new(),
1749 &mut data_buffer,
1750 &key
1751 )
1752 .await
1753 .unwrap()
1754 .is_some()
1755 );
1756 }
1757
1758 remove_all_items::<u8, _>(
1760 &mut flash,
1761 FLASH_RANGE,
1762 &mut cache::NoCache::new(),
1763 &mut data_buffer,
1764 )
1765 .await
1766 .unwrap();
1767
1768 for key in 0..24 {
1770 assert!(
1771 fetch_item::<u8, &[u8], _>(
1772 &mut flash,
1773 FLASH_RANGE,
1774 &mut cache::NoCache::new(),
1775 &mut data_buffer,
1776 &key
1777 )
1778 .await
1779 .unwrap()
1780 .is_none()
1781 );
1782 }
1783 }
1784
1785 #[test]
1786 async fn store_too_big_item() {
1787 let mut flash = MockFlashBig::new(mock_flash::WriteCountCheck::Twice, None, true);
1788 const FLASH_RANGE: Range<u32> = 0x000..0x1000;
1789
1790 store_item(
1791 &mut flash,
1792 FLASH_RANGE,
1793 &mut cache::NoCache::new(),
1794 &mut [0; 1024],
1795 &0u8,
1796 &[0u8; 1024 - 4 * 2 - 8 - 1],
1797 )
1798 .await
1799 .unwrap();
1800
1801 assert_eq!(
1802 store_item(
1803 &mut flash,
1804 FLASH_RANGE,
1805 &mut cache::NoCache::new(),
1806 &mut [0; 1024],
1807 &0u8,
1808 &[0u8; 1024 - 4 * 2 - 8 - 1 + 1],
1809 )
1810 .await,
1811 Err(Error::ItemTooBig)
1812 );
1813 }
1814
1815 #[test]
1816 async fn item_iterator() {
1817 const UPPER_BOUND: u8 = 64;
1818 let mut flash = MockFlashBig::default();
1819 let flash_range = 0x000..0x1000;
1820 let mut cache = cache::NoCache::new();
1821
1822 let mut data_buffer = AlignedBuf([0; 128]);
1823
1824 for i in 0..UPPER_BOUND {
1825 store_item(
1826 &mut flash,
1827 flash_range.clone(),
1828 &mut cache,
1829 &mut data_buffer,
1830 &i,
1831 &vec![i; i as usize].as_slice(),
1832 )
1833 .await
1834 .unwrap();
1835 }
1836
1837 for i in 0..10 {
1839 store_item(
1840 &mut flash,
1841 flash_range.clone(),
1842 &mut cache,
1843 &mut data_buffer,
1844 &1u8,
1845 &vec![i; i as usize].as_slice(),
1846 )
1847 .await
1848 .unwrap();
1849 }
1850
1851 let mut map_iter = fetch_all_items::<u8, _, _>(
1852 &mut flash,
1853 flash_range.clone(),
1854 &mut cache,
1855 &mut data_buffer,
1856 )
1857 .await
1858 .unwrap();
1859
1860 let mut count = 0;
1861 let mut last_value_buffer = [0u8; 64];
1862 let mut last_value_length = 0;
1863 while let Ok(Some((key, value))) = map_iter.next::<&[u8]>(&mut data_buffer).await {
1864 if key == 1 {
1865 last_value_length = value.len();
1867 last_value_buffer[..value.len()].copy_from_slice(value);
1868 } else {
1869 assert_eq!(value, vec![key; key as usize]);
1870 count += 1;
1871 }
1872 }
1873
1874 assert_eq!(last_value_length, 9);
1875 assert_eq!(
1876 &last_value_buffer[..last_value_length],
1877 vec![9u8; 9].as_slice()
1878 );
1879
1880 assert_eq!(count + 1, UPPER_BOUND);
1882 }
1883
1884 #[test]
1885 async fn store_unit_key() {
1886 let mut flash = MockFlashBig::default();
1887 let flash_range = 0x000..0x1000;
1888
1889 let mut data_buffer = AlignedBuf([0; 128]);
1890
1891 let item = fetch_item::<(), &[u8], _>(
1892 &mut flash,
1893 flash_range.clone(),
1894 &mut cache::NoCache::new(),
1895 &mut data_buffer,
1896 &(),
1897 )
1898 .await
1899 .unwrap();
1900 assert_eq!(item, None);
1901
1902 store_item(
1903 &mut flash,
1904 flash_range.clone(),
1905 &mut cache::NoCache::new(),
1906 &mut data_buffer,
1907 &(),
1908 &[5u8],
1909 )
1910 .await
1911 .unwrap();
1912 store_item(
1913 &mut flash,
1914 flash_range.clone(),
1915 &mut cache::NoCache::new(),
1916 &mut data_buffer,
1917 &(),
1918 &[5u8, 6],
1919 )
1920 .await
1921 .unwrap();
1922
1923 let item = fetch_item::<(), &[u8], _>(
1924 &mut flash,
1925 flash_range.clone(),
1926 &mut cache::NoCache::new(),
1927 &mut data_buffer,
1928 &(),
1929 )
1930 .await
1931 .unwrap()
1932 .unwrap();
1933 assert_eq!(item, &[5, 6]);
1934 }
1935
1936 #[test]
1937 async fn option_value() {
1938 let mut buffer = [0; 2];
1939
1940 assert_eq!(Some(42u8).serialize_into(&mut buffer), Ok(2));
1941 assert_eq!(Option::<u8>::deserialize_from(&buffer), Ok(Some(42u8)));
1942 assert_eq!(buffer, [1, 42]);
1943
1944 let mut buffer = [0; 1];
1945
1946 assert_eq!(Option::<u8>::None.serialize_into(&mut buffer), Ok(1));
1947 assert_eq!(Option::<u8>::deserialize_from(&buffer), Ok(None));
1948 assert_eq!(buffer, [0]);
1949 }
1950}