use core::mem::{MaybeUninit, size_of};

use cache::CacheImpl;
use embedded_storage_async::nor_flash::MultiwriteNorFlash;

use crate::item::{Item, ItemHeader, ItemIter, find_next_free_item_spot};

use self::{
    cache::{KeyCacheImpl, PrivateKeyCacheImpl},
    item::{ItemHeaderIter, ItemUnborrowed},
};

use super::*;

pub async fn fetch_item<'d, K: Key, V: Value<'d>, S: NorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl KeyCacheImpl<K>,
    data_buffer: &'d mut [u8],
    search_key: &K,
) -> Result<Option<V>, Error<S::Error>> {
    let result = run_with_auto_repair!(
        function = {
            fetch_item_with_location(flash, flash_range.clone(), cache, data_buffer, search_key)
                .await
        },
        repair = try_repair::<K, _>(flash, flash_range.clone(), cache, data_buffer).await?
    );

    let Some((item, _, item_key_len)) = result? else {
        return Ok(None);
    };

    let data_len = item.header.length as usize;
    let item_key_len = match item_key_len {
        Some(item_key_len) => item_key_len,
        None => K::get_len(&data_buffer[..data_len])?,
    };

    Ok(Some(
        V::deserialize_from(&data_buffer[item_key_len..][..data_len - item_key_len])
            .map_err(Error::SerializationError)?,
    ))
}
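
// Usage sketch (illustrative, not doctested): fetching the current value stored under a
// `u8` key. The flash driver, the `0x1000..0x3000` range and the buffer size are
// assumptions for the example; any `NorFlash` driver, an erase-aligned range of at least
// two pages and a buffer that fits the largest stored item (word-aligned if the driver
// requires it) work the same way. Must run in an async context whose error type can
// absorb `Error<S::Error>`.
//
//     let mut data_buffer = [0u8; 128];
//     let value: Option<u32> = fetch_item(
//         &mut flash,
//         0x1000..0x3000,
//         &mut cache::NoCache::new(),
//         &mut data_buffer,
//         &42u8,
//     )
//     .await?;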

#[allow(clippy::type_complexity)]
async fn fetch_item_with_location<K: Key, S: NorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl PrivateKeyCacheImpl<K>,
    data_buffer: &mut [u8],
    search_key: &K,
) -> Result<Option<(ItemUnborrowed, u32, Option<usize>)>, Error<S::Error>> {
    assert_eq!(flash_range.start % S::ERASE_SIZE as u32, 0);
    assert_eq!(flash_range.end % S::ERASE_SIZE as u32, 0);
    assert!(flash_range.end - flash_range.start >= S::ERASE_SIZE as u32 * 2);

    assert!(S::ERASE_SIZE >= S::WORD_SIZE * 3);
    assert!(S::WORD_SIZE <= MAX_WORD_SIZE);

    if cache.is_dirty() {
        cache.invalidate_cache_state();
    }

    'cache: {
        if let Some(cached_location) = cache.key_location(search_key) {
            let page_index = calculate_page_index::<S>(flash_range.clone(), cached_location);
            let page_data_end_address =
                calculate_page_end_address::<S>(flash_range.clone(), page_index)
                    - S::WORD_SIZE as u32;

            let Some(header) =
                ItemHeader::read_new(flash, cached_location, page_data_end_address).await?
            else {
                if cfg!(feature = "_test") {
                    panic!("Wrong cache value. Addr: {cached_location}");
                }
                cache.invalidate_cache_state();
                break 'cache;
            };

            let item = header
                .read_item(flash, data_buffer, cached_location, page_data_end_address)
                .await?;

            match item {
                item::MaybeItem::Corrupted(_, _) | item::MaybeItem::Erased(_, _) => {
                    if cfg!(feature = "_test") {
                        panic!("Wrong cache value. Addr: {cached_location}");
                    }

                    cache.invalidate_cache_state();
                    break 'cache;
                }
                item::MaybeItem::Present(item) => {
                    return Ok(Some((item.unborrow(), cached_location, None)));
                }
            }
        }
    }

    let mut last_used_page =
        find_first_page(flash, flash_range.clone(), cache, 0, PageState::PartialOpen).await?;

    if last_used_page.is_none() {
        if let Some(first_open_page) =
            find_first_page(flash, flash_range.clone(), cache, 0, PageState::Open).await?
        {
            let previous_page = previous_page::<S>(flash_range.clone(), first_open_page);
            if get_page_state(flash, flash_range.clone(), cache, previous_page)
                .await?
                .is_closed()
            {
                last_used_page = Some(previous_page);
            } else {
                cache.unmark_dirty();
                return Ok(None);
            }
        } else {
            return Err(Error::Corrupted {
                #[cfg(feature = "_test")]
                backtrace: std::backtrace::Backtrace::capture(),
            });
        }
    }

    let mut current_page_to_check = last_used_page.unwrap();
    let mut newest_found_item_data = None;

    loop {
        let page_data_start_address =
            calculate_page_address::<S>(flash_range.clone(), current_page_to_check)
                + S::WORD_SIZE as u32;
        let page_data_end_address =
            calculate_page_end_address::<S>(flash_range.clone(), current_page_to_check)
                - S::WORD_SIZE as u32;

        let mut it = ItemIter::new(page_data_start_address, page_data_end_address);
        while let Some((item, address)) = it.next(flash, data_buffer).await? {
            let (found_key, found_key_len) = K::deserialize_from(item.data())?;
            if found_key == *search_key {
                newest_found_item_data = Some((address, found_key_len));
            }
        }

        if let Some((newest_found_item_address, _)) = newest_found_item_data.as_ref() {
            cache.notice_key_location(search_key, *newest_found_item_address, false);

            break;
        }

        let previous_page = previous_page::<S>(flash_range.clone(), current_page_to_check);

        if get_page_state(flash, flash_range.clone(), cache, previous_page).await?
            != PageState::Closed
        {
            cache.unmark_dirty();
            return Ok(None);
        }

        current_page_to_check = previous_page;
    }

    cache.unmark_dirty();

    if let Some((newest_found_item_address, newest_found_item_key_len)) = newest_found_item_data {
        let item = ItemHeader::read_new(flash, newest_found_item_address, u32::MAX)
            .await?
            .ok_or_else(|| {
                Error::Corrupted {
                    #[cfg(feature = "_test")]
                    backtrace: std::backtrace::Backtrace::capture(),
                }
            })?
            .read_item(flash, data_buffer, newest_found_item_address, u32::MAX)
            .await?;

        Ok(Some((
            item.unwrap()?.unborrow(),
            newest_found_item_address,
            Some(newest_found_item_key_len),
        )))
    } else {
        Ok(None)
    }
}

pub async fn store_item<'d, K: Key, V: Value<'d>, S: NorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl KeyCacheImpl<K>,
    data_buffer: &mut [u8],
    key: &K,
    item: &V,
) -> Result<(), Error<S::Error>> {
    run_with_auto_repair!(
        function =
            store_item_inner(flash, flash_range.clone(), cache, data_buffer, key, item).await,
        repair = try_repair::<K, _>(flash, flash_range.clone(), cache, data_buffer).await?
    )
}
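
// Usage sketch (illustrative, mirroring the tests at the bottom of this file): storing a
// small byte array under key `1u8`. The driver and range are stand-ins; `data_buffer`
// must be large enough for the serialized key plus value and, depending on the driver,
// may need to be word-aligned.
//
//     store_item(
//         &mut flash,
//         0x1000..0x3000,
//         &mut cache::NoCache::new(),
//         &mut data_buffer,
//         &1u8,
//         &[1u8, 2, 3],
//     )
//     .await?;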

async fn store_item_inner<K: Key, S: NorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl KeyCacheImpl<K>,
    data_buffer: &mut [u8],
    key: &K,
    item: &dyn Value<'_>,
) -> Result<(), Error<S::Error>> {
    assert_eq!(flash_range.start % S::ERASE_SIZE as u32, 0);
    assert_eq!(flash_range.end % S::ERASE_SIZE as u32, 0);

    assert!(flash_range.end - flash_range.start >= S::ERASE_SIZE as u32 * 2);

    assert!(S::ERASE_SIZE >= S::WORD_SIZE * 3);
    assert!(S::WORD_SIZE <= MAX_WORD_SIZE);

    if cache.is_dirty() {
        cache.invalidate_cache_state();
    }

    let mut recursion_level = 0;
    loop {
        if recursion_level == get_pages::<S>(flash_range.clone(), 0).count() {
            cache.unmark_dirty();
            return Err(Error::FullStorage);
        }

        let next_page_to_use = if let Some(partial_open_page) =
            find_first_page(flash, flash_range.clone(), cache, 0, PageState::PartialOpen).await?
        {
            if !get_page_state(
                flash,
                flash_range.clone(),
                cache,
                next_page::<S>(flash_range.clone(), partial_open_page),
            )
            .await?
            .is_open()
            {
                return Err(Error::Corrupted {
                    #[cfg(feature = "_test")]
                    backtrace: std::backtrace::Backtrace::capture(),
                });
            }

            let page_data_start_address =
                calculate_page_address::<S>(flash_range.clone(), partial_open_page)
                    + S::WORD_SIZE as u32;
            let page_data_end_address =
                calculate_page_end_address::<S>(flash_range.clone(), partial_open_page)
                    - S::WORD_SIZE as u32;

            let key_len = key.serialize_into(data_buffer)?;
            let item_data_length = key_len
                + item
                    .serialize_into(&mut data_buffer[key_len..])
                    .map_err(Error::SerializationError)?;

            if item_data_length > u16::MAX as usize
                || item_data_length
                    > calculate_page_size::<S>()
                        .saturating_sub(ItemHeader::data_address::<S>(0) as usize)
            {
                cache.unmark_dirty();
                return Err(Error::ItemTooBig);
            }

            let free_spot_address = find_next_free_item_spot(
                flash,
                flash_range.clone(),
                cache,
                page_data_start_address,
                page_data_end_address,
                item_data_length as u32,
            )
            .await?;

            match free_spot_address {
                Some(free_spot_address) => {
                    cache.notice_key_location(key, free_spot_address, true);
                    Item::write_new(
                        flash,
                        flash_range.clone(),
                        cache,
                        free_spot_address,
                        &data_buffer[..item_data_length],
                    )
                    .await?;

                    cache.unmark_dirty();
                    return Ok(());
                }
                None => {
                    close_page(flash, flash_range.clone(), cache, partial_open_page).await?;
                    Some(next_page::<S>(flash_range.clone(), partial_open_page))
                }
            }
        } else {
            None
        };

        match next_page_to_use {
            Some(next_page_to_use) => {
                let next_page_state =
                    get_page_state(flash, flash_range.clone(), cache, next_page_to_use).await?;

                if !next_page_state.is_open() {
                    return Err(Error::Corrupted {
                        #[cfg(feature = "_test")]
                        backtrace: std::backtrace::Backtrace::capture(),
                    });
                }

                partial_close_page(flash, flash_range.clone(), cache, next_page_to_use).await?;

                let next_buffer_page = next_page::<S>(flash_range.clone(), next_page_to_use);
                let next_buffer_page_state =
                    get_page_state(flash, flash_range.clone(), cache, next_buffer_page).await?;

                if !next_buffer_page_state.is_open() {
                    migrate_items::<K, _>(
                        flash,
                        flash_range.clone(),
                        cache,
                        data_buffer,
                        next_buffer_page,
                        next_page_to_use,
                    )
                    .await?;
                }
            }
            None => {
                let first_open_page =
                    match find_first_page(flash, flash_range.clone(), cache, 0, PageState::Open)
                        .await?
                    {
                        Some(first_open_page) => first_open_page,
                        None => {
                            return Err(Error::Corrupted {
                                #[cfg(feature = "_test")]
                                backtrace: std::backtrace::Backtrace::capture(),
                            });
                        }
                    };

                partial_close_page(flash, flash_range.clone(), cache, first_open_page).await?;
            }
        }

        recursion_level += 1;
    }
}

pub async fn remove_item<K: Key, S: MultiwriteNorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl KeyCacheImpl<K>,
    data_buffer: &mut [u8],
    search_key: &K,
) -> Result<(), Error<S::Error>> {
    run_with_auto_repair!(
        function = remove_item_inner::<K, _>(
            flash,
            flash_range.clone(),
            cache,
            data_buffer,
            Some(search_key)
        )
        .await,
        repair = try_repair::<K, _>(flash, flash_range.clone(), cache, data_buffer).await?
    )
}
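
// Usage sketch (illustrative): removing whatever is stored under key `1u8`. This needs a
// `MultiwriteNorFlash` driver because the stored item is marked erased in place; the
// driver and range below are assumptions.
//
//     remove_item::<u8, _>(
//         &mut flash,
//         0x1000..0x3000,
//         &mut cache::NoCache::new(),
//         &mut data_buffer,
//         &1u8,
//     )
//     .await?;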

pub async fn remove_all_items<K: Key, S: MultiwriteNorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl KeyCacheImpl<K>,
    data_buffer: &mut [u8],
) -> Result<(), Error<S::Error>> {
    run_with_auto_repair!(
        function =
            remove_item_inner::<K, _>(flash, flash_range.clone(), cache, data_buffer, None).await,
        repair = try_repair::<K, _>(flash, flash_range.clone(), cache, data_buffer).await?
    )
}
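
// Usage sketch (illustrative): wiping every key in this map region. The key type still
// has to be named so stored keys can be deserialized while scanning; the driver and
// range are assumptions as above.
//
//     remove_all_items::<u8, _>(
//         &mut flash,
//         0x1000..0x3000,
//         &mut cache::NoCache::new(),
//         &mut data_buffer,
//     )
//     .await?;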

async fn remove_item_inner<K: Key, S: MultiwriteNorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl KeyCacheImpl<K>,
    data_buffer: &mut [u8],
    search_key: Option<&K>,
) -> Result<(), Error<S::Error>> {
    if let Some(key) = &search_key {
        cache.notice_key_erased(key);
    } else {
        cache.invalidate_cache_state();
    }

    let last_used_page =
        find_first_page(flash, flash_range.clone(), cache, 0, PageState::PartialOpen)
            .await?
            .unwrap_or_default();

    for page_index in get_pages::<S>(
        flash_range.clone(),
        next_page::<S>(flash_range.clone(), last_used_page),
    ) {
        if get_page_state(flash, flash_range.clone(), cache, page_index)
            .await?
            .is_open()
        {
            continue;
        }

        let page_data_start_address =
            calculate_page_address::<S>(flash_range.clone(), page_index) + S::WORD_SIZE as u32;
        let page_data_end_address =
            calculate_page_end_address::<S>(flash_range.clone(), page_index) - S::WORD_SIZE as u32;

        let mut item_headers = ItemHeaderIter::new(page_data_start_address, page_data_end_address);

        while let (Some(item_header), item_address) = item_headers.next(flash).await? {
            let item = item_header
                .read_item(flash, data_buffer, item_address, page_data_end_address)
                .await?;

            match item {
                item::MaybeItem::Corrupted(_, _) => continue,
                item::MaybeItem::Erased(_, _) => continue,
                item::MaybeItem::Present(item) => {
                    let item_match = match search_key {
                        Some(search_key) => K::deserialize_from(item.data())?.0 == *search_key,
                        _ => true,
                    };
                    if item_match {
                        item.header
                            .erase_data(flash, flash_range.clone(), cache, item_address)
                            .await?;
                    }
                }
            }
        }
    }

    cache.unmark_dirty();

    Ok(())
}

pub struct MapItemIter<'d, 'c, S: NorFlash, CI: CacheImpl> {
    flash: &'d mut S,
    flash_range: Range<u32>,
    first_page: usize,
    cache: &'c mut CI,
    current_page_index: usize,
    pub(crate) current_iter: ItemIter,
}

impl<S: NorFlash, CI: CacheImpl> MapItemIter<'_, '_, S, CI> {
    pub async fn next<'a, K: Key, V: Value<'a>>(
        &mut self,
        data_buffer: &'a mut [u8],
    ) -> Result<Option<(K, V)>, Error<S::Error>> {
        let item = loop {
            if let Some((item, _address)) = self.current_iter.next(self.flash, data_buffer).await? {
                break item;
            }

            loop {
                self.current_page_index =
                    next_page::<S>(self.flash_range.clone(), self.current_page_index);

                if self.current_page_index == self.first_page {
                    return Ok(None);
                }

                match get_page_state::<S>(
                    self.flash,
                    self.flash_range.clone(),
                    self.cache,
                    self.current_page_index,
                )
                .await
                {
                    Ok(PageState::Closed) | Ok(PageState::PartialOpen) => {
                        self.current_iter = ItemIter::new(
                            calculate_page_address::<S>(
                                self.flash_range.clone(),
                                self.current_page_index,
                            ) + S::WORD_SIZE as u32,
                            calculate_page_end_address::<S>(
                                self.flash_range.clone(),
                                self.current_page_index,
                            ) - S::WORD_SIZE as u32,
                        );
                        break;
                    }
                    _ => continue,
                }
            }
        };

        let data_len = item.header.length as usize;
        let (key, key_len) = K::deserialize_from(item.data())?;

        Ok(Some((
            key,
            V::deserialize_from(&data_buffer[key_len..][..data_len - key_len])
                .map_err(Error::SerializationError)?,
        )))
    }
}

pub async fn fetch_all_items<'d, 'c, K: Key, S: NorFlash, CI: KeyCacheImpl<K>>(
    flash: &'d mut S,
    flash_range: Range<u32>,
    cache: &'c mut CI,
    data_buffer: &mut [u8],
) -> Result<MapItemIter<'d, 'c, S, CI>, Error<S::Error>> {
    let first_page = run_with_auto_repair!(
        function = {
            match find_first_page(flash, flash_range.clone(), cache, 0, PageState::PartialOpen)
                .await?
            {
                Some(last_used_page) => Ok(next_page::<S>(flash_range.clone(), last_used_page)),
                None => {
                    if let Some(first_open_page) =
                        find_first_page(flash, flash_range.clone(), cache, 0, PageState::Open)
                            .await?
                    {
                        let previous_page =
                            previous_page::<S>(flash_range.clone(), first_open_page);
                        if get_page_state(flash, flash_range.clone(), cache, previous_page)
                            .await?
                            .is_closed()
                        {
                            Ok(first_open_page)
                        } else {
                            cache.unmark_dirty();
                            Ok(0)
                        }
                    } else {
                        Err(Error::Corrupted {
                            #[cfg(feature = "_test")]
                            backtrace: std::backtrace::Backtrace::capture(),
                        })
                    }
                }
            }
        },
        repair = try_repair::<K, _>(flash, flash_range.clone(), cache, data_buffer).await?
    )?;

    Ok(MapItemIter {
        flash,
        flash_range: flash_range.clone(),
        first_page,
        cache,
        current_page_index: first_page,
        current_iter: ItemIter::new(
            calculate_page_address::<S>(flash_range.clone(), first_page) + S::WORD_SIZE as u32,
            calculate_page_end_address::<S>(flash_range.clone(), first_page) - S::WORD_SIZE as u32,
        ),
    })
}
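
// Usage sketch (illustrative): draining the whole map. As the `item_iterator` test below
// exercises, items come back in storage order rather than key order, and a key that was
// overwritten can appear more than once, so the caller keeps whichever occurrence it
// considers newest (the last one seen).
//
//     let mut iter = fetch_all_items::<u8, _, _>(
//         &mut flash,
//         0x1000..0x3000,
//         &mut cache::NoCache::new(),
//         &mut data_buffer,
//     )
//     .await?;
//
//     while let Some((key, value)) = iter.next::<u8, &[u8]>(&mut data_buffer).await? {
//         // Use `key` and `value` here; the next call reuses `data_buffer`.
//     }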

/// A key for the map.
///
/// The key is serialized to flash together with the value, so its encoding must be
/// self-delimiting: `deserialize_from` reads back exactly the bytes `serialize_into`
/// wrote and reports how many bytes the key occupied.
pub trait Key: Eq + Clone + Sized {
    /// Serialize the key into the buffer, returning the number of bytes written.
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError>;
    /// Deserialize the key from the start of the buffer, returning the key and its serialized length.
    fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError>;
    /// Return the serialized length of the key at the start of the buffer.
    ///
    /// The default implementation deserializes the key and discards it; fixed-size keys
    /// can override this more cheaply.
    fn get_len(buffer: &[u8]) -> Result<usize, SerializationError> {
        Self::deserialize_from(buffer).map(|(_, len)| len)
    }
}
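
// Sketch of a custom key (the `DeviceId` type is hypothetical, not part of this crate):
// any `Eq + Clone` type with a stable encoding can be a key, here by delegating to one of
// the integer impls below.
//
//     #[derive(Clone, PartialEq, Eq)]
//     struct DeviceId(u16);
//
//     impl Key for DeviceId {
//         fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
//             <u16 as Key>::serialize_into(&self.0, buffer)
//         }
//
//         fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
//             let (id, len) = <u16 as Key>::deserialize_from(buffer)?;
//             Ok((DeviceId(id), len))
//         }
//     }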

macro_rules! impl_key_num {
    ($int:ty) => {
        impl Key for $int {
            fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
                let len = size_of::<Self>();
                if buffer.len() < len {
                    return Err(SerializationError::BufferTooSmall);
                }
                buffer[..len].copy_from_slice(&self.to_le_bytes());
                Ok(len)
            }

            fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
                let len = size_of::<Self>();
                if buffer.len() < len {
                    return Err(SerializationError::BufferTooSmall);
                }

                Ok((
                    Self::from_le_bytes(buffer[..len].try_into().unwrap()),
                    size_of::<Self>(),
                ))
            }

            fn get_len(_buffer: &[u8]) -> Result<usize, SerializationError> {
                Ok(size_of::<Self>())
            }
        }
    };
}

impl_key_num!(u8);
impl_key_num!(u16);
impl_key_num!(u32);
impl_key_num!(u64);
impl_key_num!(u128);
impl_key_num!(i8);
impl_key_num!(i16);
impl_key_num!(i32);
impl_key_num!(i64);
impl_key_num!(i128);

impl<const N: usize> Key for [u8; N] {
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
        if buffer.len() < N {
            return Err(SerializationError::BufferTooSmall);
        }
        buffer[..N].copy_from_slice(self);
        Ok(N)
    }

    fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
        if buffer.len() < N {
            return Err(SerializationError::BufferTooSmall);
        }

        Ok((buffer[..N].try_into().unwrap(), N))
    }

    fn get_len(_buffer: &[u8]) -> Result<usize, SerializationError> {
        Ok(N)
    }
}

/// A value that can be stored in the map.
///
/// The `'a` lifetime ties a deserialized value to the data buffer it was read from,
/// which allows borrowed values such as `&'a [u8]`.
pub trait Value<'a> {
    /// Serialize the value into the buffer, returning the number of bytes written.
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError>;
    /// Deserialize the value from the buffer.
    fn deserialize_from(buffer: &'a [u8]) -> Result<Self, SerializationError>
    where
        Self: Sized;
}
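
// Sketch of a custom value (the `Config` type is hypothetical): a fixed five-byte
// little-endian encoding. Any type that can round-trip through a byte buffer can
// implement `Value`; borrowed types can tie themselves to the `'a` buffer lifetime
// like the `&'a [u8]` impl further down.
//
//     struct Config {
//         interval_ms: u32,
//         retries: u8,
//     }
//
//     impl<'a> Value<'a> for Config {
//         fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
//             if buffer.len() < 5 {
//                 return Err(SerializationError::BufferTooSmall);
//             }
//             buffer[..4].copy_from_slice(&self.interval_ms.to_le_bytes());
//             buffer[4] = self.retries;
//             Ok(5)
//         }
//
//         fn deserialize_from(buffer: &'a [u8]) -> Result<Self, SerializationError> {
//             if buffer.len() < 5 {
//                 return Err(SerializationError::BufferTooSmall);
//             }
//             Ok(Config {
//                 interval_ms: u32::from_le_bytes(buffer[..4].try_into().unwrap()),
//                 retries: buffer[4],
//             })
//         }
//     }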

impl<'a> Value<'a> for bool {
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
        <u8 as Value>::serialize_into(&(*self as u8), buffer)
    }

    fn deserialize_from(buffer: &'a [u8]) -> Result<Self, SerializationError>
    where
        Self: Sized,
    {
        Ok(<u8 as Value>::deserialize_from(buffer)? != 0)
    }
}

impl<'a, T: Value<'a>> Value<'a> for Option<T> {
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
        // A `Some` value is stored as a flag byte followed by the inner value; `None`
        // is just the flag byte.
        if let Some(val) = self {
            <bool as Value>::serialize_into(&true, buffer)?;
            Ok(1 + <T as Value>::serialize_into(val, &mut buffer[1..])?)
        } else {
            <bool as Value>::serialize_into(&false, buffer)
        }
    }

    fn deserialize_from(buffer: &'a [u8]) -> Result<Self, SerializationError>
    where
        Self: Sized,
    {
        if <bool as Value>::deserialize_from(buffer)? {
            Ok(Some(<T as Value>::deserialize_from(&buffer[1..])?))
        } else {
            Ok(None)
        }
    }
}

impl<'a, T: Value<'a>, const N: usize> Value<'a> for [T; N] {
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
        if buffer.len() < size_of::<T>() * N {
            return Err(SerializationError::BufferTooSmall);
        }

        let mut size = 0;
        for v in self {
            size += <T as Value>::serialize_into(v, &mut buffer[size..])?;
        }

        Ok(size)
    }

    fn deserialize_from(buffer: &'a [u8]) -> Result<Self, SerializationError>
    where
        Self: Sized,
    {
        let mut array = MaybeUninit::<[T; N]>::uninit();

        if N == 0 {
            return Ok(unsafe { array.assume_init() });
        }

        let ptr = array.as_mut_ptr() as *mut T;

        unsafe {
            // Write each element in place so the uninitialized memory is never read or dropped.
            for i in 0..N {
                ptr.add(i)
                    .write(<T as Value>::deserialize_from(&buffer[i * size_of::<T>()..])?);
            }

            Ok(array.assume_init())
        }
    }
}

impl<'a> Value<'a> for &'a [u8] {
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
        if buffer.len() < self.len() {
            return Err(SerializationError::BufferTooSmall);
        }

        buffer[..self.len()].copy_from_slice(self);
        Ok(self.len())
    }

    fn deserialize_from(buffer: &'a [u8]) -> Result<Self, SerializationError>
    where
        Self: Sized,
    {
        Ok(buffer)
    }
}

macro_rules! impl_map_item_num {
    ($int:ty) => {
        impl<'a> Value<'a> for $int {
            fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
                buffer[..core::mem::size_of::<Self>()].copy_from_slice(&self.to_le_bytes());
                Ok(core::mem::size_of::<Self>())
            }

            fn deserialize_from(buffer: &[u8]) -> Result<Self, SerializationError> {
                Ok(Self::from_le_bytes(
                    buffer[..core::mem::size_of::<Self>()]
                        .try_into()
                        .map_err(|_| SerializationError::BufferTooSmall)?,
                ))
            }
        }
    };
}

impl_map_item_num!(u8);
impl_map_item_num!(u16);
impl_map_item_num!(u32);
impl_map_item_num!(u64);
impl_map_item_num!(u128);
impl_map_item_num!(i8);
impl_map_item_num!(i16);
impl_map_item_num!(i32);
impl_map_item_num!(i64);
impl_map_item_num!(i128);
impl_map_item_num!(f32);
impl_map_item_num!(f64);

/// The serialization error type returned by [`Key`] and [`Value`] implementations.
#[non_exhaustive]
#[derive(Debug, PartialEq, Eq, Clone)]
#[cfg_attr(feature = "defmt-03", derive(defmt::Format))]
pub enum SerializationError {
    /// The provided buffer was too small to serialize into or deserialize from.
    BufferTooSmall,
    /// The bytes do not represent a valid value.
    InvalidData,
    /// The bytes are not laid out in the expected format.
    InvalidFormat,
    /// A custom error code for user implementations.
    Custom(i32),
}

impl core::fmt::Display for SerializationError {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        match self {
            SerializationError::BufferTooSmall => write!(f, "Buffer too small"),
            SerializationError::InvalidData => write!(f, "Invalid data"),
            SerializationError::InvalidFormat => write!(f, "Invalid format"),
            SerializationError::Custom(val) => write!(f, "Custom error: {val}"),
        }
    }
}

async fn migrate_items<K: Key, S: NorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl PrivateKeyCacheImpl<K>,
    data_buffer: &mut [u8],
    source_page: usize,
    target_page: usize,
) -> Result<(), Error<S::Error>> {
    let mut next_page_write_address =
        calculate_page_address::<S>(flash_range.clone(), target_page) + S::WORD_SIZE as u32;

    let mut it = ItemIter::new(
        calculate_page_address::<S>(flash_range.clone(), source_page) + S::WORD_SIZE as u32,
        calculate_page_end_address::<S>(flash_range.clone(), source_page) - S::WORD_SIZE as u32,
    );
    while let Some((item, item_address)) = it.next(flash, data_buffer).await? {
        let (key, _) = K::deserialize_from(item.data())?;
        let (_, data_buffer) = item.destruct();

        cache.unmark_dirty();

        let Some((found_item, found_address, _)) =
            fetch_item_with_location::<K, S>(flash, flash_range.clone(), cache, data_buffer, &key)
                .await?
        else {
            return Err(Error::Corrupted {
                #[cfg(feature = "_test")]
                backtrace: std::backtrace::Backtrace::capture(),
            });
        };

        let found_item = found_item.reborrow(data_buffer);

        if found_address == item_address {
            cache.notice_key_location(&key, next_page_write_address, true);
            found_item
                .write(flash, flash_range.clone(), cache, next_page_write_address)
                .await?;
            next_page_write_address = found_item
                .header
                .next_item_address::<S>(next_page_write_address);
        }
    }

    open_page(flash, flash_range.clone(), cache, source_page).await?;

    Ok(())
}

async fn try_repair<K: Key, S: NorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl KeyCacheImpl<K>,
    data_buffer: &mut [u8],
) -> Result<(), Error<S::Error>> {
    cache.invalidate_cache_state();

    crate::try_general_repair(flash, flash_range.clone(), cache).await?;

    if let Some(partial_open_page) =
        find_first_page(flash, flash_range.clone(), cache, 0, PageState::PartialOpen).await?
    {
        let buffer_page = next_page::<S>(flash_range.clone(), partial_open_page);
        if !get_page_state(flash, flash_range.clone(), cache, buffer_page)
            .await?
            .is_open()
        {
            open_page(flash, flash_range.clone(), cache, partial_open_page).await?;

            partial_close_page(flash, flash_range.clone(), cache, partial_open_page).await?;

            migrate_items::<K, _>(
                flash,
                flash_range.clone(),
                cache,
                data_buffer,
                buffer_page,
                partial_open_page,
            )
            .await?;
        }
    }

    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;
    use futures_test::test;

    type MockFlashBig = mock_flash::MockFlashBase<4, 4, 256>;
    type MockFlashTiny = mock_flash::MockFlashBase<2, 1, 32>;

    #[test]
    async fn store_and_fetch() {
        let mut flash = MockFlashBig::default();
        let flash_range = 0x000..0x1000;

        let mut data_buffer = AlignedBuf([0; 128]);

        let start_snapshot = flash.stats_snapshot();

        let item = fetch_item::<u8, &[u8], _>(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &0,
        )
        .await
        .unwrap();
        assert_eq!(item, None);

        let item = fetch_item::<u8, &[u8], _>(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &60,
        )
        .await
        .unwrap();
        assert_eq!(item, None);

        let item = fetch_item::<u8, &[u8], _>(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &0xFF,
        )
        .await
        .unwrap();
        assert_eq!(item, None);

        store_item(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &0u8,
            &[5u8],
        )
        .await
        .unwrap();
        store_item(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &0u8,
            &[5u8, 6],
        )
        .await
        .unwrap();

        let item = fetch_item::<u8, &[u8], _>(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &0,
        )
        .await
        .unwrap()
        .unwrap();
        assert_eq!(item, &[5, 6]);

        store_item(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &1u8,
            &[2u8, 2, 2, 2, 2, 2],
        )
        .await
        .unwrap();

        let item = fetch_item::<u8, &[u8], _>(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &0,
        )
        .await
        .unwrap()
        .unwrap();
        assert_eq!(item, &[5, 6]);

        let item = fetch_item::<u8, &[u8], _>(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &1,
        )
        .await
        .unwrap()
        .unwrap();
        assert_eq!(item, &[2, 2, 2, 2, 2, 2]);

        for index in 0..4000 {
            store_item(
                &mut flash,
                flash_range.clone(),
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &((index % 10) as u8),
                &vec![(index % 10) as u8 * 2; index % 10].as_slice(),
            )
            .await
            .unwrap();
        }

        for i in 0..10 {
            let item = fetch_item::<u8, &[u8], _>(
                &mut flash,
                flash_range.clone(),
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &i,
            )
            .await
            .unwrap()
            .unwrap();
            assert_eq!(item, &vec![(i % 10) * 2; (i % 10) as usize]);
        }

        for _ in 0..4000 {
            store_item(
                &mut flash,
                flash_range.clone(),
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &11u8,
                &[0; 10],
            )
            .await
            .unwrap();
        }

        for i in 0..10 {
            let item = fetch_item::<u8, &[u8], _>(
                &mut flash,
                flash_range.clone(),
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &i,
            )
            .await
            .unwrap()
            .unwrap();
            assert_eq!(item, &vec![(i % 10) * 2; (i % 10) as usize]);
        }

        println!("{:?}", start_snapshot.compare_to(flash.stats_snapshot()));
    }

    #[test]
    async fn store_too_many_items() {
        const UPPER_BOUND: u8 = 3;

        let mut tiny_flash = MockFlashTiny::default();
        let mut data_buffer = AlignedBuf([0; 128]);

        for i in 0..UPPER_BOUND {
            println!("Storing {i:?}");

            store_item(
                &mut tiny_flash,
                0x00..0x40,
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &i,
                &vec![i; i as usize].as_slice(),
            )
            .await
            .unwrap();
        }

        assert_eq!(
            store_item(
                &mut tiny_flash,
                0x00..0x40,
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &UPPER_BOUND,
                &vec![0; UPPER_BOUND as usize].as_slice(),
            )
            .await,
            Err(Error::FullStorage)
        );

        for i in 0..UPPER_BOUND {
            let item = fetch_item::<u8, &[u8], _>(
                &mut tiny_flash,
                0x00..0x40,
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &i,
            )
            .await
            .unwrap()
            .unwrap();

            println!("Fetched {item:?}");

            assert_eq!(item, vec![i; i as usize]);
        }
    }

    #[test]
    async fn store_too_many_items_big() {
        const UPPER_BOUND: u8 = 68;

        let mut big_flash = MockFlashBig::default();
        let mut data_buffer = AlignedBuf([0; 128]);

        for i in 0..UPPER_BOUND {
            println!("Storing {i:?}");

            store_item(
                &mut big_flash,
                0x0000..0x1000,
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &i,
                &vec![i; i as usize].as_slice(),
            )
            .await
            .unwrap();
        }

        assert_eq!(
            store_item(
                &mut big_flash,
                0x0000..0x1000,
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &UPPER_BOUND,
                &vec![0; UPPER_BOUND as usize].as_slice(),
            )
            .await,
            Err(Error::FullStorage)
        );

        for i in 0..UPPER_BOUND {
            let item = fetch_item::<u8, &[u8], _>(
                &mut big_flash,
                0x0000..0x1000,
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &i,
            )
            .await
            .unwrap()
            .unwrap();

            println!("Fetched {item:?}");

            assert_eq!(item, vec![i; i as usize]);
        }
    }

    #[test]
    async fn store_many_items_big() {
        let mut flash = mock_flash::MockFlashBase::<4, 1, 4096>::default();
        let mut data_buffer = AlignedBuf([0; 128]);

        const LENGTH_PER_KEY: [usize; 24] = [
            11, 13, 6, 13, 13, 10, 2, 3, 5, 36, 1, 65, 4, 6, 1, 15, 10, 7, 3, 15, 9, 3, 4, 5,
        ];

        for _ in 0..100 {
            #[allow(clippy::needless_range_loop)]
            for i in 0..24 {
                store_item(
                    &mut flash,
                    0x0000..0x4000,
                    &mut cache::NoCache::new(),
                    &mut data_buffer,
                    &(i as u16),
                    &vec![i as u8; LENGTH_PER_KEY[i]].as_slice(),
                )
                .await
                .unwrap();
            }
        }

        #[allow(clippy::needless_range_loop)]
        for i in 0..24 {
            let item = fetch_item::<u16, &[u8], _>(
                &mut flash,
                0x0000..0x4000,
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &(i as u16),
            )
            .await
            .unwrap()
            .unwrap();

            println!("Fetched {item:?}");

            assert_eq!(item, vec![i as u8; LENGTH_PER_KEY[i]]);
        }
    }

    #[test]
    async fn remove_items() {
        let mut flash = mock_flash::MockFlashBase::<4, 1, 4096>::new(
            mock_flash::WriteCountCheck::Twice,
            None,
            true,
        );
        let mut data_buffer = AlignedBuf([0; 128]);
        const FLASH_RANGE: Range<u32> = 0x0000..0x4000;

        for j in 0..10 {
            for i in 0..24 {
                store_item(
                    &mut flash,
                    FLASH_RANGE,
                    &mut cache::NoCache::new(),
                    &mut data_buffer,
                    &(i as u8),
                    &vec![i as u8; j + 2].as_slice(),
                )
                .await
                .unwrap();
            }
        }

        for j in (0..24).rev() {
            for i in 0..=j {
                assert!(
                    fetch_item::<u8, &[u8], _>(
                        &mut flash,
                        FLASH_RANGE,
                        &mut cache::NoCache::new(),
                        &mut data_buffer,
                        &i
                    )
                    .await
                    .unwrap()
                    .is_some()
                );
            }

            remove_item::<u8, _>(
                &mut flash,
                FLASH_RANGE,
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &j,
            )
            .await
            .unwrap();

            for i in 0..j {
                assert!(
                    fetch_item::<u8, &[u8], _>(
                        &mut flash,
                        FLASH_RANGE,
                        &mut cache::NoCache::new(),
                        &mut data_buffer,
                        &i
                    )
                    .await
                    .unwrap()
                    .is_some()
                );
            }

            assert!(
                fetch_item::<u8, &[u8], _>(
                    &mut flash,
                    FLASH_RANGE,
                    &mut cache::NoCache::new(),
                    &mut data_buffer,
                    &j
                )
                .await
                .unwrap()
                .is_none()
            );
        }
    }

    #[test]
    async fn remove_all() {
        let mut flash = mock_flash::MockFlashBase::<4, 1, 4096>::new(
            mock_flash::WriteCountCheck::Twice,
            None,
            true,
        );
        let mut data_buffer = AlignedBuf([0; 128]);
        const FLASH_RANGE: Range<u32> = 0x0000..0x4000;

        for value in 0..10 {
            for key in 0..24u8 {
                store_item(
                    &mut flash,
                    FLASH_RANGE,
                    &mut cache::NoCache::new(),
                    &mut data_buffer,
                    &key,
                    &vec![key; value + 2].as_slice(),
                )
                .await
                .unwrap();
            }
        }

        for key in 0..24u8 {
            assert!(
                fetch_item::<u8, &[u8], _>(
                    &mut flash,
                    FLASH_RANGE,
                    &mut cache::NoCache::new(),
                    &mut data_buffer,
                    &key
                )
                .await
                .unwrap()
                .is_some()
            );
        }

        remove_all_items::<u8, _>(
            &mut flash,
            FLASH_RANGE,
            &mut cache::NoCache::new(),
            &mut data_buffer,
        )
        .await
        .unwrap();

        for key in 0..24 {
            assert!(
                fetch_item::<u8, &[u8], _>(
                    &mut flash,
                    FLASH_RANGE,
                    &mut cache::NoCache::new(),
                    &mut data_buffer,
                    &key
                )
                .await
                .unwrap()
                .is_none()
            );
        }
    }

    #[test]
    async fn store_too_big_item() {
        let mut flash = MockFlashBig::new(mock_flash::WriteCountCheck::Twice, None, true);
        const FLASH_RANGE: Range<u32> = 0x000..0x1000;

        store_item(
            &mut flash,
            FLASH_RANGE,
            &mut cache::NoCache::new(),
            &mut [0; 1024],
            &0u8,
            &[0u8; 1024 - 4 * 2 - 8 - 1],
        )
        .await
        .unwrap();

        assert_eq!(
            store_item(
                &mut flash,
                FLASH_RANGE,
                &mut cache::NoCache::new(),
                &mut [0; 1024],
                &0u8,
                &[0u8; 1024 - 4 * 2 - 8 - 1 + 1],
            )
            .await,
            Err(Error::ItemTooBig)
        );
    }

    #[test]
    async fn item_iterator() {
        const UPPER_BOUND: u8 = 64;
        let mut flash = MockFlashBig::default();
        let flash_range = 0x000..0x1000;
        let mut cache = cache::NoCache::new();

        let mut data_buffer = AlignedBuf([0; 128]);

        for i in 0..UPPER_BOUND {
            store_item(
                &mut flash,
                flash_range.clone(),
                &mut cache,
                &mut data_buffer,
                &i,
                &vec![i; i as usize].as_slice(),
            )
            .await
            .unwrap();
        }

        for i in 0..10 {
            store_item(
                &mut flash,
                flash_range.clone(),
                &mut cache,
                &mut data_buffer,
                &1u8,
                &vec![i; i as usize].as_slice(),
            )
            .await
            .unwrap();
        }

        let mut map_iter = fetch_all_items::<u8, _, _>(
            &mut flash,
            flash_range.clone(),
            &mut cache,
            &mut data_buffer,
        )
        .await
        .unwrap();

        let mut count = 0;
        let mut last_value_buffer = [0u8; 64];
        let mut last_value_length = 0;
        while let Ok(Some((key, value))) = map_iter.next::<u8, &[u8]>(&mut data_buffer).await {
            if key == 1 {
                last_value_length = value.len();
                last_value_buffer[..value.len()].copy_from_slice(value);
            } else {
                assert_eq!(value, vec![key; key as usize]);
                count += 1;
            }
        }

        assert_eq!(last_value_length, 9);
        assert_eq!(
            &last_value_buffer[..last_value_length],
            vec![9u8; 9].as_slice()
        );

        assert_eq!(count + 1, UPPER_BOUND);
    }
}