use core::{marker::PhantomData, mem::size_of};

use cache::CacheImpl;
use embedded_storage_async::nor_flash::MultiwriteNorFlash;

use crate::item::{Item, ItemHeader, ItemIter, find_next_free_item_spot};

use self::{
    cache::{KeyCacheImpl, PrivateKeyCacheImpl},
    item::{ItemHeaderIter, ItemUnborrowed},
};

use super::*;

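/// Get the last stored value for the given key, or `Ok(None)` if the key is not present.
///
/// `data_buffer` is used as scratch space for (de)serialization; the returned value may
/// borrow from it, which is why the buffer must outlive the returned `V`.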
pub async fn fetch_item<'d, K: Key, V: Value<'d>, S: NorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl KeyCacheImpl<K>,
    data_buffer: &'d mut [u8],
    search_key: &K,
) -> Result<Option<V>, Error<S::Error>> {
    let result = run_with_auto_repair!(
        function = {
            fetch_item_with_location(flash, flash_range.clone(), cache, data_buffer, search_key)
                .await
        },
        repair = try_repair::<K, _>(flash, flash_range.clone(), cache, data_buffer).await?
    );

    let Some((item, _, item_key_len)) = result? else {
        return Ok(None);
    };

    let data_len = item.header.length as usize;
    let item_key_len = match item_key_len {
        Some(item_key_len) => item_key_len,
        None => K::get_len(&data_buffer[..data_len])?,
    };

    let (value, _size) =
        V::deserialize_from(&data_buffer[item_key_len..][..data_len - item_key_len])
            .map_err(Error::SerializationError)?;
    Ok(Some(value))
}

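/// Fetch the item for `search_key` together with its flash address and, when it had to be
/// deserialized during the page scan, the length of the serialized key.
///
/// Consults the key cache first; otherwise searches from the most recently used page
/// backwards through the closed pages.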
#[allow(clippy::type_complexity)]
async fn fetch_item_with_location<K: Key, S: NorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl PrivateKeyCacheImpl<K>,
    data_buffer: &mut [u8],
    search_key: &K,
) -> Result<Option<(ItemUnborrowed, u32, Option<usize>)>, Error<S::Error>> {
    assert_eq!(flash_range.start % S::ERASE_SIZE as u32, 0);
    assert_eq!(flash_range.end % S::ERASE_SIZE as u32, 0);
    assert!(flash_range.end - flash_range.start >= S::ERASE_SIZE as u32 * 2);

    assert!(S::ERASE_SIZE >= S::WORD_SIZE * 3);
    assert!(S::WORD_SIZE <= MAX_WORD_SIZE);

    if cache.is_dirty() {
        cache.invalidate_cache_state();
    }

    'cache: {
        if let Some(cached_location) = cache.key_location(search_key) {
            let page_index = calculate_page_index::<S>(flash_range.clone(), cached_location);
            let page_data_end_address =
                calculate_page_end_address::<S>(flash_range.clone(), page_index)
                    - S::WORD_SIZE as u32;

            let Some(header) =
                ItemHeader::read_new(flash, cached_location, page_data_end_address).await?
            else {
                if cfg!(feature = "_test") {
                    panic!("Wrong cache value. Addr: {cached_location}");
                }
                cache.invalidate_cache_state();
                break 'cache;
            };

            let item = header
                .read_item(flash, data_buffer, cached_location, page_data_end_address)
                .await?;

            match item {
                item::MaybeItem::Corrupted(_, _) | item::MaybeItem::Erased(_, _) => {
                    if cfg!(feature = "_test") {
                        panic!("Wrong cache value. Addr: {cached_location}");
                    }

                    cache.invalidate_cache_state();
                    break 'cache;
                }
                item::MaybeItem::Present(item) => {
                    return Ok(Some((item.unborrow(), cached_location, None)));
                }
            }
        }
    }

    let mut last_used_page =
        find_first_page(flash, flash_range.clone(), cache, 0, PageState::PartialOpen).await?;

    if last_used_page.is_none() {
        if let Some(first_open_page) =
            find_first_page(flash, flash_range.clone(), cache, 0, PageState::Open).await?
        {
            let previous_page = previous_page::<S>(flash_range.clone(), first_open_page);
            if get_page_state(flash, flash_range.clone(), cache, previous_page)
                .await?
                .is_closed()
            {
                last_used_page = Some(previous_page);
            } else {
                cache.unmark_dirty();
                return Ok(None);
            }
        } else {
            return Err(Error::Corrupted {
                #[cfg(feature = "_test")]
                backtrace: std::backtrace::Backtrace::capture(),
            });
        }
    }

    let mut current_page_to_check = last_used_page.unwrap();
    let mut newest_found_item_data = None;

    loop {
        let page_data_start_address =
            calculate_page_address::<S>(flash_range.clone(), current_page_to_check)
                + S::WORD_SIZE as u32;
        let page_data_end_address =
            calculate_page_end_address::<S>(flash_range.clone(), current_page_to_check)
                - S::WORD_SIZE as u32;

        let mut it = ItemIter::new(page_data_start_address, page_data_end_address);
        while let Some((item, address)) = it.next(flash, data_buffer).await? {
            let (found_key, found_key_len) = K::deserialize_from(item.data())?;
            if found_key == *search_key {
                newest_found_item_data = Some((address, found_key_len));
            }
        }

        if let Some((newest_found_item_address, _)) = newest_found_item_data.as_ref() {
            cache.notice_key_location(search_key, *newest_found_item_address, false);

            break;
        }

        let previous_page = previous_page::<S>(flash_range.clone(), current_page_to_check);

        if get_page_state(flash, flash_range.clone(), cache, previous_page).await?
            != PageState::Closed
        {
            cache.unmark_dirty();
            return Ok(None);
        }

        current_page_to_check = previous_page;
    }

    cache.unmark_dirty();

    if let Some((newest_found_item_address, newest_found_item_key_len)) = newest_found_item_data {
        let item = ItemHeader::read_new(flash, newest_found_item_address, u32::MAX)
            .await?
            .ok_or_else(|| {
                Error::Corrupted {
                    #[cfg(feature = "_test")]
                    backtrace: std::backtrace::Backtrace::capture(),
                }
            })?
            .read_item(flash, data_buffer, newest_found_item_address, u32::MAX)
            .await?;

        Ok(Some((
            item.unwrap()?.unborrow(),
            newest_found_item_address,
            Some(newest_found_item_key_len),
        )))
    } else {
        Ok(None)
    }
}

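/// Store a key-value pair in flash. Storing an existing key again appends a newer entry;
/// [`fetch_item`] always returns the most recent one.
///
/// Returns [`Error::ItemTooBig`] if the serialized key + value cannot fit in a single page
/// and [`Error::FullStorage`] if no free spot could be made available.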
pub async fn store_item<'d, K: Key, V: Value<'d>, S: NorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl KeyCacheImpl<K>,
    data_buffer: &mut [u8],
    key: &K,
    item: &V,
) -> Result<(), Error<S::Error>> {
    run_with_auto_repair!(
        function =
            store_item_inner(flash, flash_range.clone(), cache, data_buffer, key, item).await,
        repair = try_repair::<K, _>(flash, flash_range.clone(), cache, data_buffer).await?
    )
}

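/// Implementation of [`store_item`] without the auto-repair wrapper.
///
/// Tries to write into the partially open page; when that page is full it is closed, the
/// next page is partially opened and still-live items are migrated out of the page after
/// that (the new buffer page) before retrying.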
async fn store_item_inner<K: Key, S: NorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl KeyCacheImpl<K>,
    data_buffer: &mut [u8],
    key: &K,
    item: &dyn Value<'_>,
) -> Result<(), Error<S::Error>> {
    assert_eq!(flash_range.start % S::ERASE_SIZE as u32, 0);
    assert_eq!(flash_range.end % S::ERASE_SIZE as u32, 0);

    assert!(flash_range.end - flash_range.start >= S::ERASE_SIZE as u32 * 2);

    assert!(S::ERASE_SIZE >= S::WORD_SIZE * 3);
    assert!(S::WORD_SIZE <= MAX_WORD_SIZE);

    if cache.is_dirty() {
        cache.invalidate_cache_state();
    }

    let mut recursion_level = 0;
    loop {
        if recursion_level == get_pages::<S>(flash_range.clone(), 0).count() {
            cache.unmark_dirty();
            return Err(Error::FullStorage);
        }

        let next_page_to_use = if let Some(partial_open_page) =
            find_first_page(flash, flash_range.clone(), cache, 0, PageState::PartialOpen).await?
        {
            if !get_page_state(
                flash,
                flash_range.clone(),
                cache,
                next_page::<S>(flash_range.clone(), partial_open_page),
            )
            .await?
            .is_open()
            {
                return Err(Error::Corrupted {
                    #[cfg(feature = "_test")]
                    backtrace: std::backtrace::Backtrace::capture(),
                });
            }

            let page_data_start_address =
                calculate_page_address::<S>(flash_range.clone(), partial_open_page)
                    + S::WORD_SIZE as u32;
            let page_data_end_address =
                calculate_page_end_address::<S>(flash_range.clone(), partial_open_page)
                    - S::WORD_SIZE as u32;

            let key_len = key.serialize_into(data_buffer)?;
            let item_data_length = key_len
                + item
                    .serialize_into(&mut data_buffer[key_len..])
                    .map_err(Error::SerializationError)?;

            if item_data_length > u16::MAX as usize
                || item_data_length
                    > calculate_page_size::<S>()
                        .saturating_sub(ItemHeader::data_address::<S>(0) as usize)
            {
                cache.unmark_dirty();
                return Err(Error::ItemTooBig);
            }

            let free_spot_address = find_next_free_item_spot(
                flash,
                flash_range.clone(),
                cache,
                page_data_start_address,
                page_data_end_address,
                item_data_length as u32,
            )
            .await?;

            match free_spot_address {
                Some(free_spot_address) => {
                    cache.notice_key_location(key, free_spot_address, true);
                    Item::write_new(
                        flash,
                        flash_range.clone(),
                        cache,
                        free_spot_address,
                        &data_buffer[..item_data_length],
                    )
                    .await?;

                    cache.unmark_dirty();
                    return Ok(());
                }
                None => {
                    close_page(flash, flash_range.clone(), cache, partial_open_page).await?;
                    Some(next_page::<S>(flash_range.clone(), partial_open_page))
                }
            }
        } else {
            None
        };

        match next_page_to_use {
            Some(next_page_to_use) => {
                let next_page_state =
                    get_page_state(flash, flash_range.clone(), cache, next_page_to_use).await?;

                if !next_page_state.is_open() {
                    return Err(Error::Corrupted {
                        #[cfg(feature = "_test")]
                        backtrace: std::backtrace::Backtrace::capture(),
                    });
                }

                partial_close_page(flash, flash_range.clone(), cache, next_page_to_use).await?;

                let next_buffer_page = next_page::<S>(flash_range.clone(), next_page_to_use);
                let next_buffer_page_state =
                    get_page_state(flash, flash_range.clone(), cache, next_buffer_page).await?;

                if !next_buffer_page_state.is_open() {
                    migrate_items::<K, _>(
                        flash,
                        flash_range.clone(),
                        cache,
                        data_buffer,
                        next_buffer_page,
                        next_page_to_use,
                    )
                    .await?;
                }
            }
            None => {
                let first_open_page =
                    match find_first_page(flash, flash_range.clone(), cache, 0, PageState::Open)
                        .await?
                    {
                        Some(first_open_page) => first_open_page,
                        None => {
                            return Err(Error::Corrupted {
                                #[cfg(feature = "_test")]
                                backtrace: std::backtrace::Backtrace::capture(),
                            });
                        }
                    };

                partial_close_page(flash, flash_range.clone(), cache, first_open_page).await?;
            }
        }

        recursion_level += 1;
    }
}

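/// Fully remove an item from flash. This requires a [`MultiwriteNorFlash`] because the
/// item data is erased in place.
///
/// Removing a key that is not present is not an error.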
pub async fn remove_item<K: Key, S: MultiwriteNorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl KeyCacheImpl<K>,
    data_buffer: &mut [u8],
    search_key: &K,
) -> Result<(), Error<S::Error>> {
    run_with_auto_repair!(
        function = remove_item_inner::<K, _>(
            flash,
            flash_range.clone(),
            cache,
            data_buffer,
            Some(search_key)
        )
        .await,
        repair = try_repair::<K, _>(flash, flash_range.clone(), cache, data_buffer).await?
    )
}

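/// Remove all items from flash. Like [`remove_item`], this requires a
/// [`MultiwriteNorFlash`] because the item data is erased in place.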
pub async fn remove_all_items<K: Key, S: MultiwriteNorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl KeyCacheImpl<K>,
    data_buffer: &mut [u8],
) -> Result<(), Error<S::Error>> {
    run_with_auto_repair!(
        function =
            remove_item_inner::<K, _>(flash, flash_range.clone(), cache, data_buffer, None).await,
        repair = try_repair::<K, _>(flash, flash_range.clone(), cache, data_buffer).await?
    )
}

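/// Shared implementation of [`remove_item`] (`search_key` is `Some`) and
/// [`remove_all_items`] (`search_key` is `None`).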
async fn remove_item_inner<K: Key, S: MultiwriteNorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl KeyCacheImpl<K>,
    data_buffer: &mut [u8],
    search_key: Option<&K>,
) -> Result<(), Error<S::Error>> {
    if let Some(key) = &search_key {
        cache.notice_key_erased(key);
    } else {
        cache.invalidate_cache_state();
    }

    let last_used_page =
        find_first_page(flash, flash_range.clone(), cache, 0, PageState::PartialOpen)
            .await?
            .unwrap_or_default();

    for page_index in get_pages::<S>(
        flash_range.clone(),
        next_page::<S>(flash_range.clone(), last_used_page),
    ) {
        if get_page_state(flash, flash_range.clone(), cache, page_index)
            .await?
            .is_open()
        {
            continue;
        }

        let page_data_start_address =
            calculate_page_address::<S>(flash_range.clone(), page_index) + S::WORD_SIZE as u32;
        let page_data_end_address =
            calculate_page_end_address::<S>(flash_range.clone(), page_index) - S::WORD_SIZE as u32;

        let mut item_headers = ItemHeaderIter::new(page_data_start_address, page_data_end_address);

        while let (Some(item_header), item_address) = item_headers.next(flash).await? {
            let item = item_header
                .read_item(flash, data_buffer, item_address, page_data_end_address)
                .await?;

            match item {
                item::MaybeItem::Corrupted(_, _) => continue,
                item::MaybeItem::Erased(_, _) => continue,
                item::MaybeItem::Present(item) => {
                    let item_match = match search_key {
                        Some(search_key) => K::deserialize_from(item.data())?.0 == *search_key,
                        _ => true,
                    };
                    if item_match {
                        item.header
                            .erase_data(flash, flash_range.clone(), cache, item_address)
                            .await?;
                    }
                }
            }
        }
    }

    cache.unmark_dirty();

    Ok(())
}

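/// Iterator over all items stored in the map. Construct it with [`fetch_all_items`].
///
/// Older entries of a key that was stored multiple times are still yielded, so the same
/// key can appear more than once; the entry yielded last is the current value.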
pub struct MapItemIter<'d, 'c, K: Key, S: NorFlash, CI: CacheImpl> {
    flash: &'d mut S,
    flash_range: Range<u32>,
    first_page: usize,
    cache: &'c mut CI,
    current_page_index: usize,
    pub(crate) current_iter: ItemIter,
    _key: PhantomData<K>,
}

impl<K: Key, S: NorFlash, CI: CacheImpl> MapItemIter<'_, '_, K, S, CI> {
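    /// Get the next key-value pair, deserializing the value from `data_buffer`.
    /// Returns `Ok(None)` once all pages have been visited.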
    pub async fn next<'a, V: Value<'a>>(
        &mut self,
        data_buffer: &'a mut [u8],
    ) -> Result<Option<(K, V)>, Error<S::Error>> {
        let item = loop {
            if let Some((item, _address)) = self.current_iter.next(self.flash, data_buffer).await? {
                break item;
            }

            loop {
                self.current_page_index =
                    next_page::<S>(self.flash_range.clone(), self.current_page_index);

                if self.current_page_index == self.first_page {
                    return Ok(None);
                }

                match get_page_state::<S>(
                    self.flash,
                    self.flash_range.clone(),
                    self.cache,
                    self.current_page_index,
                )
                .await
                {
                    Ok(PageState::Closed) | Ok(PageState::PartialOpen) => {
                        self.current_iter = ItemIter::new(
                            calculate_page_address::<S>(
                                self.flash_range.clone(),
                                self.current_page_index,
                            ) + S::WORD_SIZE as u32,
                            calculate_page_end_address::<S>(
                                self.flash_range.clone(),
                                self.current_page_index,
                            ) - S::WORD_SIZE as u32,
                        );
                        break;
                    }
                    _ => continue,
                }
            }
        };

        let data_len = item.header.length as usize;
        let (key, key_len) = K::deserialize_from(item.data())?;
        let (value, _value_len) = V::deserialize_from(&data_buffer[..data_len][key_len..])
            .map_err(Error::SerializationError)?;

        Ok(Some((key, value)))
    }
}

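/// Fetch all items stored in the map as a [`MapItemIter`].
///
/// Iteration starts at the oldest page, so for keys that were stored multiple times the
/// newest value is the one yielded last.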
pub async fn fetch_all_items<'d, 'c, K: Key, S: NorFlash, CI: KeyCacheImpl<K>>(
    flash: &'d mut S,
    flash_range: Range<u32>,
    cache: &'c mut CI,
    data_buffer: &mut [u8],
) -> Result<MapItemIter<'d, 'c, K, S, CI>, Error<S::Error>> {
    let first_page = run_with_auto_repair!(
        function = {
            match find_first_page(flash, flash_range.clone(), cache, 0, PageState::PartialOpen)
                .await?
            {
                Some(last_used_page) => {
                    Ok(next_page::<S>(flash_range.clone(), last_used_page))
                }
                None => {
                    if let Some(first_open_page) =
                        find_first_page(flash, flash_range.clone(), cache, 0, PageState::Open)
                            .await?
                    {
                        let previous_page =
                            previous_page::<S>(flash_range.clone(), first_open_page);
                        if get_page_state(flash, flash_range.clone(), cache, previous_page)
                            .await?
                            .is_closed()
                        {
                            Ok(first_open_page)
                        } else {
                            cache.unmark_dirty();
                            Ok(0)
                        }
                    } else {
                        Err(Error::Corrupted {
                            #[cfg(feature = "_test")]
                            backtrace: std::backtrace::Backtrace::capture(),
                        })
                    }
                }
            }
        },
        repair = try_repair::<K, _>(flash, flash_range.clone(), cache, data_buffer).await?
    )?;

    Ok(MapItemIter {
        flash,
        flash_range: flash_range.clone(),
        first_page,
        cache,
        current_page_index: first_page,
        current_iter: ItemIter::new(
            calculate_page_address::<S>(flash_range.clone(), first_page) + S::WORD_SIZE as u32,
            calculate_page_end_address::<S>(flash_range.clone(), first_page) - S::WORD_SIZE as u32,
        ),
        _key: PhantomData,
    })
}

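/// Trait for anything that can be used as a map key.
///
/// The key is stored in flash together with the value, so its encoding must be
/// self-describing: [`Key::get_len`] (by default via `deserialize_from`) must be able to
/// tell where the key ends and the value begins.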
pub trait Key: Eq + Clone + Sized {
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError>;
    fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError>;
    fn get_len(buffer: &[u8]) -> Result<usize, SerializationError> {
        Self::deserialize_from(buffer).map(|(_, len)| len)
    }
}

macro_rules! impl_key_num {
    ($int:ty) => {
        impl Key for $int {
            fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
                let len = size_of::<Self>();
                if buffer.len() < len {
                    return Err(SerializationError::BufferTooSmall);
                }
                buffer[..len].copy_from_slice(&self.to_le_bytes());
                Ok(len)
            }

            fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
                let len = size_of::<Self>();
                if buffer.len() < len {
                    return Err(SerializationError::BufferTooSmall);
                }

                Ok((
                    Self::from_le_bytes(buffer[..len].try_into().unwrap()),
                    size_of::<Self>(),
                ))
            }

            fn get_len(_buffer: &[u8]) -> Result<usize, SerializationError> {
                Ok(size_of::<Self>())
            }
        }
    };
}

impl_key_num!(u8);
impl_key_num!(u16);
impl_key_num!(u32);
impl_key_num!(u64);
impl_key_num!(u128);
impl_key_num!(i8);
impl_key_num!(i16);
impl_key_num!(i32);
impl_key_num!(i64);
impl_key_num!(i128);

impl<const N: usize> Key for [u8; N] {
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
        if buffer.len() < N {
            return Err(SerializationError::BufferTooSmall);
        }
        buffer[..N].copy_from_slice(self);
        Ok(N)
    }

    fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
        if buffer.len() < N {
            return Err(SerializationError::BufferTooSmall);
        }

        Ok((buffer[..N].try_into().unwrap(), N))
    }

    fn get_len(_buffer: &[u8]) -> Result<usize, SerializationError> {
        Ok(N)
    }
}

impl Key for () {
    fn serialize_into(&self, _buffer: &mut [u8]) -> Result<usize, SerializationError> {
        Ok(0)
    }

    fn deserialize_from(_buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
        Ok(((), 0))
    }
}

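/// Trait for anything that can be stored as a map value.
///
/// The `'a` lifetime allows zero-copy values such as `&'a [u8]` that borrow directly from
/// the buffer they were deserialized from.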
pub trait Value<'a> {
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError>;
    fn deserialize_from(buffer: &'a [u8]) -> Result<(Self, usize), SerializationError>
    where
        Self: Sized;
}

impl<'a> Value<'a> for bool {
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
        <u8 as Value>::serialize_into(&(*self as u8), buffer)
    }

    fn deserialize_from(buffer: &'a [u8]) -> Result<(Self, usize), SerializationError>
    where
        Self: Sized,
    {
        let (value, size) = <u8 as Value>::deserialize_from(buffer)?;
        Ok((value != 0, size))
    }
}

impl<'a, T: Value<'a>> Value<'a> for Option<T> {
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
        if let Some(val) = self {
            let mut size = 0;
            size += <bool as Value>::serialize_into(&true, buffer)?;
            size += <T as Value>::serialize_into(val, &mut buffer[size..])?;
            Ok(size)
        } else {
            <bool as Value>::serialize_into(&false, buffer)
        }
    }

    fn deserialize_from(buffer: &'a [u8]) -> Result<(Self, usize), SerializationError>
    where
        Self: Sized,
    {
        let (is_some, tag_size) = <bool as Value>::deserialize_from(buffer)?;
        if is_some {
            let (value, value_size) = <T as Value>::deserialize_from(&buffer[tag_size..])?;
            Ok((Some(value), tag_size + value_size))
        } else {
            Ok((None, tag_size))
        }
    }
}

impl<'a> Value<'a> for &'a [u8] {
    fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
        if buffer.len() < self.len() {
            return Err(SerializationError::BufferTooSmall);
        }

        buffer[..self.len()].copy_from_slice(self);
        Ok(self.len())
    }

    fn deserialize_from(buffer: &'a [u8]) -> Result<(Self, usize), SerializationError>
    where
        Self: Sized,
    {
        Ok((buffer, buffer.len()))
    }
}

macro_rules! impl_map_item_num {
    ($num:ty) => {
        impl<'a> Value<'a> for $num {
            fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
                let size = size_of::<Self>();
                if buffer.len() < size {
                    Err(SerializationError::BufferTooSmall)
                } else {
                    buffer[..size].copy_from_slice(&self.to_le_bytes());
                    Ok(size)
                }
            }

            fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
                let size = size_of::<Self>();
                // Check the length up front so a short buffer returns an error instead of
                // panicking on the slice index below.
                if buffer.len() < size {
                    return Err(SerializationError::BufferTooSmall);
                }
                let value = Self::from_le_bytes(buffer[..size].try_into().unwrap());
                Ok((value, size))
            }
        }

        impl<'a, const N: usize> Value<'a> for [$num; N] {
            fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, SerializationError> {
                let elem_size = size_of::<$num>();
                if buffer.len() < elem_size * N {
                    return Err(SerializationError::BufferTooSmall);
                }
                for i in 0..N {
                    buffer[i * elem_size..][..elem_size].copy_from_slice(&self[i].to_le_bytes())
                }
                Ok(elem_size * N)
            }

            fn deserialize_from(buffer: &[u8]) -> Result<(Self, usize), SerializationError> {
                let elem_size = size_of::<$num>();
                if buffer.len() < elem_size * N {
                    return Err(SerializationError::BufferTooSmall);
                }

                let mut array = [0 as $num; N];
                for i in 0..N {
                    array[i] = <$num>::from_le_bytes(
                        buffer[i * elem_size..][..elem_size].try_into().unwrap(),
                    )
                }
                Ok((array, elem_size * N))
            }
        }
    };
}

impl_map_item_num!(u8);
impl_map_item_num!(u16);
impl_map_item_num!(u32);
impl_map_item_num!(u64);
impl_map_item_num!(u128);
impl_map_item_num!(i8);
impl_map_item_num!(i16);
impl_map_item_num!(i32);
impl_map_item_num!(i64);
impl_map_item_num!(i128);
impl_map_item_num!(f32);
impl_map_item_num!(f64);

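/// Errors that can be returned by [`Key`] and [`Value`] (de)serialization.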
#[non_exhaustive]
#[derive(Debug, PartialEq, Eq, Clone)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum SerializationError {
    BufferTooSmall,
    InvalidData,
    InvalidFormat,
    Custom(i32),
}

impl core::fmt::Display for SerializationError {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        match self {
            SerializationError::BufferTooSmall => write!(f, "Buffer too small"),
            SerializationError::InvalidData => write!(f, "Invalid data"),
            SerializationError::InvalidFormat => write!(f, "Invalid format"),
            SerializationError::Custom(val) => write!(f, "Custom error: {val}"),
        }
    }
}

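/// Copy every item from `source_page` that is still the newest entry for its key to
/// `target_page`, then erase (open) the source page.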
async fn migrate_items<K: Key, S: NorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl PrivateKeyCacheImpl<K>,
    data_buffer: &mut [u8],
    source_page: usize,
    target_page: usize,
) -> Result<(), Error<S::Error>> {
    let mut next_page_write_address =
        calculate_page_address::<S>(flash_range.clone(), target_page) + S::WORD_SIZE as u32;

    let mut it = ItemIter::new(
        calculate_page_address::<S>(flash_range.clone(), source_page) + S::WORD_SIZE as u32,
        calculate_page_end_address::<S>(flash_range.clone(), source_page) - S::WORD_SIZE as u32,
    );
    while let Some((item, item_address)) = it.next(flash, data_buffer).await? {
        let (key, _) = K::deserialize_from(item.data())?;
        let (_, data_buffer) = item.destruct();

        cache.unmark_dirty();

        let Some((found_item, found_address, _)) =
            fetch_item_with_location::<K, S>(flash, flash_range.clone(), cache, data_buffer, &key)
                .await?
        else {
            return Err(Error::Corrupted {
                #[cfg(feature = "_test")]
                backtrace: std::backtrace::Backtrace::capture(),
            });
        };

        let found_item = found_item.reborrow(data_buffer);

        if found_address == item_address {
            cache.notice_key_location(&key, next_page_write_address, true);
            found_item
                .write(flash, flash_range.clone(), cache, next_page_write_address)
                .await?;
            next_page_write_address = found_item
                .header
                .next_item_address::<S>(next_page_write_address);
        }
    }

    open_page(flash, flash_range.clone(), cache, source_page).await?;

    Ok(())
}

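/// Attempt to repair the map state after an interrupted operation was detected.
///
/// Runs the general page-state repair and then, if the page after the partially open page
/// is not open (a sign of an interrupted migration), re-erases the partially open page and
/// redoes the migration from that buffer page into it.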
async fn try_repair<K: Key, S: NorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    cache: &mut impl KeyCacheImpl<K>,
    data_buffer: &mut [u8],
) -> Result<(), Error<S::Error>> {
    cache.invalidate_cache_state();

    crate::try_general_repair(flash, flash_range.clone(), cache).await?;

    if let Some(partial_open_page) =
        find_first_page(flash, flash_range.clone(), cache, 0, PageState::PartialOpen).await?
    {
        let buffer_page = next_page::<S>(flash_range.clone(), partial_open_page);
        if !get_page_state(flash, flash_range.clone(), cache, buffer_page)
            .await?
            .is_open()
        {
            open_page(flash, flash_range.clone(), cache, partial_open_page).await?;

            partial_close_page(flash, flash_range.clone(), cache, partial_open_page).await?;

            migrate_items::<K, _>(
                flash,
                flash_range.clone(),
                cache,
                data_buffer,
                buffer_page,
                partial_open_page,
            )
            .await?;
        }
    }

    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;
    use futures_test::test;

    type MockFlashBig = mock_flash::MockFlashBase<4, 4, 256>;
    type MockFlashTiny = mock_flash::MockFlashBase<2, 1, 32>;

    #[test]
    async fn store_and_fetch() {
        let mut flash = MockFlashBig::default();
        let flash_range = 0x000..0x1000;

        let mut data_buffer = AlignedBuf([0; 128]);

        let start_snapshot = flash.stats_snapshot();

        let item = fetch_item::<u8, &[u8], _>(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &0,
        )
        .await
        .unwrap();
        assert_eq!(item, None);

        let item = fetch_item::<u8, &[u8], _>(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &60,
        )
        .await
        .unwrap();
        assert_eq!(item, None);

        let item = fetch_item::<u8, &[u8], _>(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &0xFF,
        )
        .await
        .unwrap();
        assert_eq!(item, None);

        store_item(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &0u8,
            &[5u8],
        )
        .await
        .unwrap();
        store_item(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &0u8,
            &[5u8, 6],
        )
        .await
        .unwrap();

        let item = fetch_item::<u8, &[u8], _>(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &0,
        )
        .await
        .unwrap()
        .unwrap();
        assert_eq!(item, &[5, 6]);

        store_item(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &1u8,
            &[2u8, 2, 2, 2, 2, 2],
        )
        .await
        .unwrap();

        let item = fetch_item::<u8, &[u8], _>(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &0,
        )
        .await
        .unwrap()
        .unwrap();
        assert_eq!(item, &[5, 6]);

        let item = fetch_item::<u8, &[u8], _>(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &1,
        )
        .await
        .unwrap()
        .unwrap();
        assert_eq!(item, &[2, 2, 2, 2, 2, 2]);

        for index in 0..4000 {
            store_item(
                &mut flash,
                flash_range.clone(),
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &((index % 10) as u8),
                &vec![(index % 10) as u8 * 2; index % 10].as_slice(),
            )
            .await
            .unwrap();
        }

        for i in 0..10 {
            let item = fetch_item::<u8, &[u8], _>(
                &mut flash,
                flash_range.clone(),
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &i,
            )
            .await
            .unwrap()
            .unwrap();
            assert_eq!(item, &vec![(i % 10) * 2; (i % 10) as usize]);
        }

        for _ in 0..4000 {
            store_item(
                &mut flash,
                flash_range.clone(),
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &11u8,
                &[0; 10],
            )
            .await
            .unwrap();
        }

        for i in 0..10 {
            let item = fetch_item::<u8, &[u8], _>(
                &mut flash,
                flash_range.clone(),
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &i,
            )
            .await
            .unwrap()
            .unwrap();
            assert_eq!(item, &vec![(i % 10) * 2; (i % 10) as usize]);
        }

        println!("{:?}", start_snapshot.compare_to(flash.stats_snapshot()));
    }

    #[test]
    async fn store_too_many_items() {
        const UPPER_BOUND: u8 = 3;

        let mut tiny_flash = MockFlashTiny::default();
        let mut data_buffer = AlignedBuf([0; 128]);

        for i in 0..UPPER_BOUND {
            println!("Storing {i:?}");

            store_item(
                &mut tiny_flash,
                0x00..0x40,
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &i,
                &vec![i; i as usize].as_slice(),
            )
            .await
            .unwrap();
        }

        assert_eq!(
            store_item(
                &mut tiny_flash,
                0x00..0x40,
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &UPPER_BOUND,
                &vec![0; UPPER_BOUND as usize].as_slice(),
            )
            .await,
            Err(Error::FullStorage)
        );

        for i in 0..UPPER_BOUND {
            let item = fetch_item::<u8, &[u8], _>(
                &mut tiny_flash,
                0x00..0x40,
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &i,
            )
            .await
            .unwrap()
            .unwrap();

            println!("Fetched {item:?}");

            assert_eq!(item, vec![i; i as usize]);
        }
    }

    #[test]
    async fn store_too_many_items_big() {
        const UPPER_BOUND: u8 = 68;

        let mut big_flash = MockFlashBig::default();
        let mut data_buffer = AlignedBuf([0; 128]);

        for i in 0..UPPER_BOUND {
            println!("Storing {i:?}");

            store_item(
                &mut big_flash,
                0x0000..0x1000,
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &i,
                &vec![i; i as usize].as_slice(),
            )
            .await
            .unwrap();
        }

        assert_eq!(
            store_item(
                &mut big_flash,
                0x0000..0x1000,
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &UPPER_BOUND,
                &vec![0; UPPER_BOUND as usize].as_slice(),
            )
            .await,
            Err(Error::FullStorage)
        );

        for i in 0..UPPER_BOUND {
            let item = fetch_item::<u8, &[u8], _>(
                &mut big_flash,
                0x0000..0x1000,
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &i,
            )
            .await
            .unwrap()
            .unwrap();

            println!("Fetched {item:?}");

            assert_eq!(item, vec![i; i as usize]);
        }
    }

    #[test]
    async fn store_many_items_big() {
        let mut flash = mock_flash::MockFlashBase::<4, 1, 4096>::default();
        let mut data_buffer = AlignedBuf([0; 128]);

        const LENGTH_PER_KEY: [usize; 24] = [
            11, 13, 6, 13, 13, 10, 2, 3, 5, 36, 1, 65, 4, 6, 1, 15, 10, 7, 3, 15, 9, 3, 4, 5,
        ];

        for _ in 0..100 {
            #[allow(clippy::needless_range_loop)]
            for i in 0..24 {
                store_item(
                    &mut flash,
                    0x0000..0x4000,
                    &mut cache::NoCache::new(),
                    &mut data_buffer,
                    &(i as u16),
                    &vec![i as u8; LENGTH_PER_KEY[i]].as_slice(),
                )
                .await
                .unwrap();
            }
        }

        #[allow(clippy::needless_range_loop)]
        for i in 0..24 {
            let item = fetch_item::<u16, &[u8], _>(
                &mut flash,
                0x0000..0x4000,
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &(i as u16),
            )
            .await
            .unwrap()
            .unwrap();

            println!("Fetched {item:?}");

            assert_eq!(item, vec![i as u8; LENGTH_PER_KEY[i]]);
        }
    }

    #[test]
    async fn remove_items() {
        let mut flash = mock_flash::MockFlashBase::<4, 1, 4096>::new(
            mock_flash::WriteCountCheck::Twice,
            None,
            true,
        );
        let mut data_buffer = AlignedBuf([0; 128]);
        const FLASH_RANGE: Range<u32> = 0x0000..0x4000;

        for j in 0..10 {
            for i in 0..24 {
                store_item(
                    &mut flash,
                    FLASH_RANGE,
                    &mut cache::NoCache::new(),
                    &mut data_buffer,
                    &(i as u8),
                    &vec![i as u8; j + 2].as_slice(),
                )
                .await
                .unwrap();
            }
        }

        for j in (0..24).rev() {
            for i in 0..=j {
                assert!(
                    fetch_item::<u8, &[u8], _>(
                        &mut flash,
                        FLASH_RANGE,
                        &mut cache::NoCache::new(),
                        &mut data_buffer,
                        &i
                    )
                    .await
                    .unwrap()
                    .is_some()
                );
            }

            remove_item::<u8, _>(
                &mut flash,
                FLASH_RANGE,
                &mut cache::NoCache::new(),
                &mut data_buffer,
                &j,
            )
            .await
            .unwrap();

            for i in 0..j {
                assert!(
                    fetch_item::<u8, &[u8], _>(
                        &mut flash,
                        FLASH_RANGE,
                        &mut cache::NoCache::new(),
                        &mut data_buffer,
                        &i
                    )
                    .await
                    .unwrap()
                    .is_some()
                );
            }

            assert!(
                fetch_item::<u8, &[u8], _>(
                    &mut flash,
                    FLASH_RANGE,
                    &mut cache::NoCache::new(),
                    &mut data_buffer,
                    &j
                )
                .await
                .unwrap()
                .is_none()
            );
        }
    }

    #[test]
    async fn remove_all() {
        let mut flash = mock_flash::MockFlashBase::<4, 1, 4096>::new(
            mock_flash::WriteCountCheck::Twice,
            None,
            true,
        );
        let mut data_buffer = AlignedBuf([0; 128]);
        const FLASH_RANGE: Range<u32> = 0x0000..0x4000;

        for value in 0..10 {
            for key in 0..24u8 {
                store_item(
                    &mut flash,
                    FLASH_RANGE,
                    &mut cache::NoCache::new(),
                    &mut data_buffer,
                    &key,
                    &vec![key; value + 2].as_slice(),
                )
                .await
                .unwrap();
            }
        }

        for key in 0..24u8 {
            assert!(
                fetch_item::<u8, &[u8], _>(
                    &mut flash,
                    FLASH_RANGE,
                    &mut cache::NoCache::new(),
                    &mut data_buffer,
                    &key
                )
                .await
                .unwrap()
                .is_some()
            );
        }

        remove_all_items::<u8, _>(
            &mut flash,
            FLASH_RANGE,
            &mut cache::NoCache::new(),
            &mut data_buffer,
        )
        .await
        .unwrap();

        for key in 0..24 {
            assert!(
                fetch_item::<u8, &[u8], _>(
                    &mut flash,
                    FLASH_RANGE,
                    &mut cache::NoCache::new(),
                    &mut data_buffer,
                    &key
                )
                .await
                .unwrap()
                .is_none()
            );
        }
    }

    #[test]
    async fn store_too_big_item() {
        let mut flash = MockFlashBig::new(mock_flash::WriteCountCheck::Twice, None, true);
        const FLASH_RANGE: Range<u32> = 0x000..0x1000;

        store_item(
            &mut flash,
            FLASH_RANGE,
            &mut cache::NoCache::new(),
            &mut [0; 1024],
            &0u8,
            &[0u8; 1024 - 4 * 2 - 8 - 1],
        )
        .await
        .unwrap();

        assert_eq!(
            store_item(
                &mut flash,
                FLASH_RANGE,
                &mut cache::NoCache::new(),
                &mut [0; 1024],
                &0u8,
                &[0u8; 1024 - 4 * 2 - 8 - 1 + 1],
            )
            .await,
            Err(Error::ItemTooBig)
        );
    }

    #[test]
    async fn item_iterator() {
        const UPPER_BOUND: u8 = 64;
        let mut flash = MockFlashBig::default();
        let flash_range = 0x000..0x1000;
        let mut cache = cache::NoCache::new();

        let mut data_buffer = AlignedBuf([0; 128]);

        for i in 0..UPPER_BOUND {
            store_item(
                &mut flash,
                flash_range.clone(),
                &mut cache,
                &mut data_buffer,
                &i,
                &vec![i; i as usize].as_slice(),
            )
            .await
            .unwrap();
        }

        for i in 0..10 {
            store_item(
                &mut flash,
                flash_range.clone(),
                &mut cache,
                &mut data_buffer,
                &1u8,
                &vec![i; i as usize].as_slice(),
            )
            .await
            .unwrap();
        }

        let mut map_iter = fetch_all_items::<u8, _, _>(
            &mut flash,
            flash_range.clone(),
            &mut cache,
            &mut data_buffer,
        )
        .await
        .unwrap();

        let mut count = 0;
        let mut last_value_buffer = [0u8; 64];
        let mut last_value_length = 0;
        while let Ok(Some((key, value))) = map_iter.next::<&[u8]>(&mut data_buffer).await {
            if key == 1 {
                last_value_length = value.len();
                last_value_buffer[..value.len()].copy_from_slice(value);
            } else {
                assert_eq!(value, vec![key; key as usize]);
                count += 1;
            }
        }

        assert_eq!(last_value_length, 9);
        assert_eq!(
            &last_value_buffer[..last_value_length],
            vec![9u8; 9].as_slice()
        );

        assert_eq!(count + 1, UPPER_BOUND);
    }

    #[test]
    async fn store_unit_key() {
        let mut flash = MockFlashBig::default();
        let flash_range = 0x000..0x1000;

        let mut data_buffer = AlignedBuf([0; 128]);

        let item = fetch_item::<(), &[u8], _>(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &(),
        )
        .await
        .unwrap();
        assert_eq!(item, None);

        store_item(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &(),
            &[5u8],
        )
        .await
        .unwrap();
        store_item(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &(),
            &[5u8, 6],
        )
        .await
        .unwrap();

        let item = fetch_item::<(), &[u8], _>(
            &mut flash,
            flash_range.clone(),
            &mut cache::NoCache::new(),
            &mut data_buffer,
            &(),
        )
        .await
        .unwrap()
        .unwrap();
        assert_eq!(item, &[5, 6]);
    }

    #[test]
    async fn option_value() {
        let mut buffer = [0; 2];

        assert_eq!(Some(42u8).serialize_into(&mut buffer), Ok(2));
        assert_eq!(Option::<u8>::deserialize_from(&buffer), Ok((Some(42u8), 2)));
        assert_eq!(buffer, [1, 42]);

        let mut buffer = [0; 1];

        assert_eq!(Option::<u8>::None.serialize_into(&mut buffer), Ok(1));
        assert_eq!(Option::<u8>::deserialize_from(&buffer), Ok((None, 1)));
        assert_eq!(buffer, [0]);
    }

    #[test]
    async fn array_value() {
        let mut buffer = [0; 3];
        assert_eq!(Value::serialize_into(&[1u8, 2, 3], &mut buffer), Ok(3));
        assert_eq!(buffer, [1, 2, 3]);
        assert_eq!(
            <[u8; 3] as Value>::deserialize_from(&buffer),
            Ok(([1, 2, 3], 3))
        );

        let mut buffer = [0; 4];
        assert_eq!(
            Value::serialize_into(&[0x1234u16, 0x5678], &mut buffer),
            Ok(4)
        );
        assert_eq!(buffer, [0x34, 0x12, 0x78, 0x56]);
        assert_eq!(
            <[u16; 2]>::deserialize_from(&buffer),
            Ok(([0x1234, 0x5678], 4))
        );
    }
}