recasting/lib.rs
//! Note that there are "alloc" and "std" feature flags that can be turned off.
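//!
//! As a minimal sketch of the two traits working together (this assumes the
//! `std` feature is enabled, which provides the `Recaster` impl for
//! `HashMap<K, K>`):
//!
//! ```
//! use std::collections::HashMap;
//!
//! use recasting::Recast;
//!
//! // a recaster mapping old `u64` "addresses" to new ones
//! let mut map: HashMap<u64, u64> = HashMap::new();
//! map.insert(7, 0);
//! map.insert(42, 1);
//!
//! // `u64` implements `Recast<u64>`, so it can be recast in place
//! let mut x = 42u64;
//! x.recast(&map).unwrap();
//! assert_eq!(x, 1);
//!
//! // items unknown to the recaster are returned as errors
//! assert_eq!(9000u64.recast(&map), Err(9000));
//! ```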

#![cfg_attr(not(feature = "std"), no_std)]

use core::{
    marker::PhantomData,
    num::{
        NonZeroI128, NonZeroI16, NonZeroI32, NonZeroI64, NonZeroI8, NonZeroIsize, NonZeroU128,
        NonZeroU16, NonZeroU32, NonZeroU64, NonZeroU8, NonZeroUsize,
    },
};

#[cfg(feature = "alloc")]
extern crate alloc;

/// A trait implemented for structures that have `Item`s that should be
/// `Recast`ed by a `Recaster`.
///
/// `Recast` was designed for structures with their own virtual address spaces,
/// with the `Item`s being virtual addresses. For example, arenas from the
/// `triple_arena` crate have entries that can be referenced by `Ptr`s. The
/// `Ptr`s are indexes that must remain stable, otherwise copies of them will
/// become invalid. The arenas from `triple_arena` have a problem where they
/// should be compressed before being serialized, or else unused interior
/// capacity will be forced upon the deserialized arenas in order to keep
/// stable `Ptr`s. If the plain `compress_and_shrink` function is used, for
/// example, each `Ptr` index is translated to a new value, but all external
/// `Ptr`s and the `Ptr`s inside the entries remain untranslated and no longer
/// agree with the new mapping.
///
/// ```
/// use triple_arena::{ptr_struct, Arena, Ptr};
/// // the `P0` `Ptr` type
/// ptr_struct!(P0);
///
/// let mut a: Arena<P0, (u64, Option<P0>)> = Arena::new();
/// let p_1 = a.insert((1, None));
/// let p_2 = a.insert((2, None));
/// let p_7 = a.insert((7, None));
/// let p_42 = a.insert((42, Some(p_7)));
/// // create some unallocated internal entries
/// a.remove(p_1).unwrap();
/// a.remove(p_2).unwrap();
///
/// assert_eq!(a.get(p_7), Some(&(7, None)));
/// assert_eq!(a.get(p_42), Some(&(42, Some(p_7))));
///
/// // translate all the entries and their `Ptr` keys to new locations
/// a.compress_and_shrink();
///
/// // the external copies `p_7` and `p_42` no longer work
/// assert_eq!(a.get(p_7), None);
/// assert_eq!(a.get(p_42), None);
///
/// // manually get the entry where 42 ends up
/// let entry42: (u64, Option<P0>) = *a.vals().nth(1).unwrap();
/// // the copy of `p_7` was also not changed even though it was inside
/// // the arena; only the `Ptr`s by which entries are referenced have
/// // changed, and anything inside the values is left unchanged
/// assert_eq!(a.get(entry42.1.unwrap()), None);
/// ```
///
/// We could use `compress_and_shrink_with` and correctly recast all `Ptr`s
/// inside and outside the arena, but this quickly gets untenable as the
/// complexity of the data structure increases. Here is a version with `Recast`:
///
/// ```
/// use triple_arena::{ptr_struct, Arena, Ptr, Recast, Recaster};
/// ptr_struct!(P0);
///
/// // the structs from `ptr_struct` automatically have `Recast<Self>`
/// // implemented, which looks like:
/// //
/// //impl Recast<Self> for P0 {
/// //    #[inline]
/// //    fn recast<R: Recaster<Item = P0>>(&mut self, recaster: &R)
/// //    -> Result<(), <R as Recaster>::Item> {
/// //        recaster.recast_item(self)
/// //    }
/// //}
///
/// impl Recast<P0> for (u64, Option<P0>) {
///     fn recast<R: Recaster<Item = P0>>(
///         &mut self,
///         recaster: &R,
///     ) -> Result<(), <R as Recaster>::Item> {
///         // this calls the impl of `Recast` for `Option<T>`, which calls
///         // `Recast<Self> for P0`
///         self.1.recast(recaster)?;
///         // `self.0` does not have any `P0`s inside of it, but any
///         // fields that do contain `P0`s should also have `recast`
///         // called on them
///         Ok(())
///     }
/// }
///
/// let mut a: Arena<P0, (u64, Option<P0>)> = Arena::new();
/// let p_1 = a.insert((1, None));
/// let p_2 = a.insert((2, None));
/// let mut p_7 = a.insert((7, None));
/// let mut p_42 = a.insert((42, Some(p_7)));
/// // create some unallocated internal entries
/// a.remove(p_1).unwrap();
/// a.remove(p_2).unwrap();
///
/// assert_eq!(a.get(p_7), Some(&(7, None)));
/// assert_eq!(a.get(p_42), Some(&(42, Some(p_7))));
///
/// let old_p_7 = p_7;
///
/// // translate all the entries and their `Ptr` keys to new locations,
/// // this time with `compress_and_shrink_recaster`, which returns an
/// // `Arena<P0, P0>` that implements `Recaster`. This recaster maps
/// // old `Ptr`s to their new indexes.
/// let recaster = a.compress_and_shrink_recaster();
///
/// p_7.recast(&recaster).unwrap();
/// p_42.recast(&recaster).unwrap();
///
/// // now `p_7` and `p_42` work again; note, however, that we forgot to
/// // recast the arena itself, which is very important to remember whenever
/// // the entries in the arena (or whatever map we are using) have
/// // `Item`s in them
/// assert_eq!(a.get(p_7), Some(&(7, None)));
/// assert_eq!(a.get(p_42), Some(&(42, Some(old_p_7))));
///
/// a.recast(&recaster).unwrap();
///
/// assert_eq!(a.get(p_42), Some(&(42, Some(p_7))));
/// ```
///
/// Now for a more advanced example with multiple `Item` types:
///
/// ```
/// use triple_arena::{Arena, Ptr, ptr_struct, Recaster, Recast};
///
/// // Multiple `Ptr` types. Usually, there needs to be one `Item`
/// // type per address space that can be recast.
/// ptr_struct!(P0; Q1);
///
/// // An example user structure. Preferably, these things should be
/// // designed so that all the `Ptr`s are self-contained, which
/// // means that only one top level recast call needs to be made
/// // to map them all.
/// #[derive(Debug, PartialEq, Eq)]
/// struct Entry(u64, Vec<P0>, Q1);
/// struct Structure {
///     p0_arena: Arena<P0, Entry>,
///     // some `P0`s that are stored externally to the arena
///     external: Vec<P0>,
///     // An arena that is keyed by `Q1` instead of `P0`. If there are
///     // multiple arenas, `HashMap`s, `BTreeMap`s, or any kind of
///     // virtual-address-space capable types within the same structure,
///     // they should each have their own wrapper `Item`s implementing
///     // `Recast<Self>`. If for some reason there is a need for address
///     // space duplication, e.g. a `HashSet<P0>` companion to an
///     // `Arena<P0, ...>`, then the hash set needs to be emptied into a
///     // new one, with each `P0` recast with the recaster for the
///     // principal address space.
///     q1_arena: Arena<Q1, i32>,
/// }
///
/// impl Recast<P0> for Entry {
///     fn recast<R: Recaster<Item = P0>>(&mut self, recaster: &R)
///     -> Result<(), <R as Recaster>::Item> {
///         // we can do this because of the `impl` of `Recast` for `Vec`,
///         // which recasts all the entries
///         self.1.recast(recaster)?;
///         // the `.0` and `.2` fields are left untouched, since they
///         // do not have `P0` values that would need to be recast
///         Ok(())
///     }
/// }
///
/// // Because we used an external associated type for the `Recast` trait,
/// // we can `impl` multiple times to be able to recast the same struct
/// // with recasters of different `Item`s.
/// impl Recast<Q1> for Entry {
///     fn recast<R: Recaster<Item = Q1>>(&mut self, recaster: &R)
///     -> Result<(), <R as Recaster>::Item> {
///         // recast the `Q1`
///         self.2.recast(recaster)?;
///         Ok(())
///     }
/// }
///
/// impl Recast<P0> for Structure {
///     fn recast<R: Recaster<Item = P0>>(&mut self, recaster: &R)
///     -> Result<(), <R as Recaster>::Item> {
///         self.p0_arena.recast(recaster)?;
///         // `external` has some `P0`s we need to recast so that they
///         // agree with elsewhere in the structure.
///         self.external.recast(recaster)?;
///         Ok(())
///     }
/// }
///
/// impl Recast<Q1> for Structure {
///     fn recast<R: Recaster<Item = Q1>>(&mut self, recaster: &R)
///     -> Result<(), <R as Recaster>::Item> {
///         // the `Entry`s have `Q1`s in them; this will delegate to the
///         // `Recast<Q1> for Entry` impl
///         self.p0_arena.recast(recaster)?;
///         Ok(())
///     }
/// }
///
/// impl Structure {
///     fn recast_q1(&mut self) -> Result<Arena<Q1, Q1>, Q1> {
///         let recaster = self.q1_arena.compress_and_shrink_recaster();
///         // Immediately afterwards, we need to recast all the values.
///         self.recast(&recaster)?;
///         // it is encouraged to have all the relevant `Item`s
///         // self-contained within `Structure` so that only one call is
///         // needed at the user level, but if needed we could also
///         // return the `recaster` for recasting things external to
///         // the structure.
///         Ok(recaster)
///     }
///
///     fn recast_p0(&mut self) -> Result<Arena<P0, P0>, P0> {
///         // Note that an `Err` gets returned by `recast` if it encounters
///         // a `P0` that was not contained as a key in the `p0_arena`
///         // upon the call to acquire a recaster. If `Structure` is
///         // using some kind of convention where invalid items could
///         // be kept, they need to be purged by some routine here before
///         // the `compress_and_shrink_recaster` call, or a custom
///         // `Recaster` could be defined to be a no-op on unknown keys.
///         let recaster = self.p0_arena.compress_and_shrink_recaster();
///         self.recast(&recaster)?;
///         Ok(recaster)
///     }
///
///     // for preparation before serialization or whatever purpose
///     // the recasting has
///     fn compress_and_shrink_all(&mut self) {
///         self.recast_q1().unwrap();
///         self.recast_p0().unwrap();
///     }
/// }
///
/// let mut structure = Structure {
///     p0_arena: Arena::new(),
///     external: vec![],
///     q1_arena: Arena::new()
/// };
/// // do some random inserts and removals
/// let q1_0 = structure.q1_arena.insert(-1);
/// let mut q1_1 = structure.q1_arena.insert(-2);
/// structure.q1_arena.remove(q1_0).unwrap();
/// let p0_0 = structure.p0_arena.insert(Entry(1, vec![], q1_1));
/// let mut p0_1 = structure.p0_arena.insert(Entry(2, vec![], q1_1));
/// let mut p0_2 = structure.p0_arena.insert(Entry(42, vec![p0_1], q1_1));
/// structure.p0_arena.remove(p0_0).unwrap();
///
/// let p0_recaster = structure.recast_p0().unwrap();
/// let q1_recaster = structure.recast_q1().unwrap();
///
/// // now everything within the `Structure` is recast, but note
/// // that the items we kept outside are now invalid and would
/// // need to be recast before they can be used with the new mapping
///
/// let old_q1_1 = q1_1;
/// q1_1.recast(&q1_recaster).unwrap();
///
/// assert!(structure.q1_arena.get(old_q1_1).is_none());
/// assert_eq!(structure.q1_arena.get(q1_1), Some(-2).as_ref());
///
/// let old_p0_1 = p0_1;
/// let old_p0_2 = p0_2;
/// p0_1.recast(&p0_recaster).unwrap();
/// p0_2.recast(&p0_recaster).unwrap();
///
/// // the complicated structure relations and `Ptr` validities
/// // are preserved despite the `Ptr`s all changing
/// assert_eq!(
///     *structure.p0_arena.get(p0_2).unwrap(),
///     Entry(42, vec![p0_1], q1_1)
/// );
/// assert_ne!(old_p0_1, p0_1);
/// assert_ne!(old_p0_2, p0_2);
/// assert_ne!(old_q1_1, q1_1);
/// ```
pub trait Recast<Item> {
    /// Rewrites all the `<R as Recaster>::Item`s of `self` according to the
    /// mapping of the `recaster`.
    ///
    /// # Errors
    ///
    /// If the recaster encounters an item it does not recognize, it returns
    /// that item. Note that the state of `self` from before the `recast` may
    /// be unrecoverable (the structure can remain partially recast), and
    /// errors should only be encountered if these traits were used
    /// incorrectly.
    fn recast<R: Recaster<Item = Item>>(
        &mut self,
        recaster: &R,
    ) -> Result<(), <R as Recaster>::Item>;
}

// makes working with `Result<(), E>` and similar types easier
impl<I> Recast<I> for () {
    #[inline]
    fn recast<R: Recaster<Item = I>>(
        &mut self,
        _recaster: &R,
    ) -> Result<(), <R as Recaster>::Item> {
        Ok(())
    }
}

impl<I, T> Recast<I> for PhantomData<T> {
    #[inline]
    fn recast<R: Recaster<Item = I>>(
        &mut self,
        _recaster: &R,
    ) -> Result<(), <R as Recaster>::Item> {
        Ok(())
    }
}

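// implements `Recast<T> for T` for these primitive and `NonZero*` types by
// delegating directly to `Recaster::recast_item`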
macro_rules! impl_self_recast {
    ($($t:ident)*) => {
        $(
            impl Recast<$t> for $t {
                #[inline]
                fn recast<R: Recaster<Item = $t>>(&mut self, recaster: &R)
                -> Result<(), <R as Recaster>::Item> {
                    recaster.recast_item(self)
                }
            }
        )*
    };
}

impl_self_recast!(
    usize u8 u16 u32 u64 u128 NonZeroUsize NonZeroU8 NonZeroU16 NonZeroU32 NonZeroU64 NonZeroU128
    isize i8 i16 i32 i64 i128 NonZeroIsize NonZeroI8 NonZeroI16 NonZeroI32 NonZeroI64 NonZeroI128
    bool char f32 f64
);

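// impls that simply forward `recast` to the inner or referenced value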
impl<I, T: Recast<I>> Recast<I> for &mut T {
    #[inline]
    fn recast<R: Recaster<Item = I>>(&mut self, recaster: &R) -> Result<(), <R as Recaster>::Item> {
        Recast::recast(*self, recaster)?;
        Ok(())
    }
}

impl<I, T: Recast<I>> Recast<I> for core::cell::Cell<T> {
    #[inline]
    fn recast<R: Recaster<Item = I>>(&mut self, recaster: &R) -> Result<(), <R as Recaster>::Item> {
        Recast::recast(self.get_mut(), recaster)?;
        Ok(())
    }
}

impl<I, T: Recast<I>> Recast<I> for core::cell::RefCell<T> {
    #[inline]
    fn recast<R: Recaster<Item = I>>(&mut self, recaster: &R) -> Result<(), <R as Recaster>::Item> {
        Recast::recast(self.get_mut(), recaster)?;
        Ok(())
    }
}

#[cfg(feature = "alloc")]
impl<I, T: Recast<I>> Recast<I> for alloc::boxed::Box<T> {
    #[inline]
    fn recast<R: Recaster<Item = I>>(&mut self, recaster: &R) -> Result<(), <R as Recaster>::Item> {
        self.as_mut().recast(recaster)?;
        Ok(())
    }
}

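// NOTE: if the `Rc` (or the `Arc` below) is shared so that `get_mut` returns
// `None`, the inner value is silently left unrecast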
#[cfg(feature = "alloc")]
impl<I, T: Recast<I>> Recast<I> for alloc::rc::Rc<T> {
    #[inline]
    fn recast<R: Recaster<Item = I>>(&mut self, recaster: &R) -> Result<(), <R as Recaster>::Item> {
        Recast::recast(&mut alloc::rc::Rc::get_mut(self), recaster)?;
        Ok(())
    }
}

// `Arc` does exist under `alloc`, but not on all platforms
#[cfg(feature = "std")]
impl<I, T: Recast<I>> Recast<I> for std::sync::Arc<T> {
    #[inline]
    fn recast<R: Recaster<Item = I>>(&mut self, recaster: &R) -> Result<(), <R as Recaster>::Item> {
        Recast::recast(&mut std::sync::Arc::get_mut(self), recaster)?;
        Ok(())
    }
}

impl<I, T: Recast<I>> Recast<I> for Option<T> {
    #[inline]
    fn recast<R: Recaster<Item = I>>(&mut self, recaster: &R) -> Result<(), <R as Recaster>::Item> {
        if let Some(t) = self {
            t.recast(recaster)?;
        }
        Ok(())
    }
}

impl<I, T: Recast<I>, E: Recast<I>> Recast<I> for Result<T, E> {
    fn recast<R: Recaster<Item = I>>(&mut self, recaster: &R) -> Result<(), <R as Recaster>::Item> {
        match self {
            Ok(o) => o.recast(recaster)?,
            Err(e) => e.recast(recaster)?,
        }
        Ok(())
    }
}

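// implements `Recast` for tuples of up to 12 elements by recasting each
// element in order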
macro_rules! tuple_recast {
    ($($i:tt $t:tt),+) => {
        impl<Z, $($t: Recast<Z>,)+> Recast<Z> for ($($t,)+) {
            #[inline]
            fn recast<R: Recaster<Item = Z>>(&mut self, recaster: &R)
            -> Result<(), <R as Recaster>::Item> {
                $(
                    self.$i.recast(recaster)?;
                )+
                Ok(())
            }
        }
    };
}

tuple_recast!(0 A);
tuple_recast!(0 A, 1 B);
tuple_recast!(0 A, 1 B, 2 C);
tuple_recast!(0 A, 1 B, 2 C, 3 D);
tuple_recast!(0 A, 1 B, 2 C, 3 D, 4 E);
tuple_recast!(0 A, 1 B, 2 C, 3 D, 4 E, 5 F);
tuple_recast!(0 A, 1 B, 2 C, 3 D, 4 E, 5 F, 6 G);
tuple_recast!(0 A, 1 B, 2 C, 3 D, 4 E, 5 F, 6 G, 7 H);
tuple_recast!(0 A, 1 B, 2 C, 3 D, 4 E, 5 F, 6 G, 7 H, 8 I);
tuple_recast!(0 A, 1 B, 2 C, 3 D, 4 E, 5 F, 6 G, 7 H, 8 I, 9 J);
tuple_recast!(0 A, 1 B, 2 C, 3 D, 4 E, 5 F, 6 G, 7 H, 8 I, 9 J, 10 K);
tuple_recast!(0 A, 1 B, 2 C, 3 D, 4 E, 5 F, 6 G, 7 H, 8 I, 9 J, 10 K, 11 L);

impl<I, T: Recast<I>, const N: usize> Recast<I> for [T; N] {
    fn recast<R: Recaster<Item = I>>(&mut self, recaster: &R) -> Result<(), <R as Recaster>::Item> {
        for t in self {
            t.recast(recaster)?;
        }
        Ok(())
    }
}

impl<I, T: Recast<I>> Recast<I> for [T] {
    fn recast<R: Recaster<Item = I>>(&mut self, recaster: &R) -> Result<(), <R as Recaster>::Item> {
        for t in self {
            t.recast(recaster)?;
        }
        Ok(())
    }
}

#[cfg(feature = "alloc")]
impl<I, T: Recast<I>> Recast<I> for alloc::vec::Vec<T> {
    fn recast<R: Recaster<Item = I>>(&mut self, recaster: &R) -> Result<(), <R as Recaster>::Item> {
        for t in self {
            t.recast(recaster)?;
        }
        Ok(())
    }
}

#[cfg(feature = "alloc")]
impl<I, T: Recast<I>> Recast<I> for alloc::collections::VecDeque<T> {
    fn recast<R: Recaster<Item = I>>(&mut self, recaster: &R) -> Result<(), <R as Recaster>::Item> {
        for t in self {
            t.recast(recaster)?;
        }
        Ok(())
    }
}

#[cfg(feature = "alloc")]
impl<I, T: Recast<I>> Recast<I> for alloc::collections::LinkedList<T> {
    fn recast<R: Recaster<Item = I>>(&mut self, recaster: &R) -> Result<(), <R as Recaster>::Item> {
        for t in self {
            t.recast(recaster)?;
        }
        Ok(())
    }
}

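// for the map impls, only the values are recast; remapping the keys themselves
// would require rebuilding the map (see the `Recast` docs on address space
// duplication)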
#[cfg(feature = "std")]
impl<I, K, V: Recast<I>> Recast<I> for std::collections::HashMap<K, V> {
    fn recast<R: Recaster<Item = I>>(&mut self, recaster: &R) -> Result<(), <R as Recaster>::Item> {
        for t in self.values_mut() {
            t.recast(recaster)?;
        }
        Ok(())
    }
}

#[cfg(feature = "std")]
impl<I, K, V: Recast<I>> Recast<I> for std::collections::BTreeMap<K, V> {
    fn recast<R: Recaster<Item = I>>(&mut self, recaster: &R) -> Result<(), <R as Recaster>::Item> {
        for t in self.values_mut() {
            t.recast(recaster)?;
        }
        Ok(())
    }
}

/// A trait implemented for structures that can act as a `Recaster` in
/// `Recast`ing `Item`s.
///
/// Most end users will get a `Recaster` from some method on a collection or
/// some free function; see the `Recast` documentation for that. Here is
/// an example of actually implementing a `Recaster`.
///
/// ```
/// use std::collections::HashMap;
///
/// use recasting::{Recast, Recaster};
///
/// // there is an existing impl for `HashMap`, so we will use a wrapper
/// struct MyRecaster(HashMap<i64, i64>);
///
/// impl Recaster for MyRecaster {
///     type Item = i64;
///
///     fn recast_item(&self, item: &mut Self::Item) -> Result<(), Self::Item> {
///         if let Some(res) = self.0.get(item) {
///             *item = *res;
///             Ok(())
///         } else {
///             // alternatively, we could make this a no-op or use
///             // other behavior depending on our intentions
///             Err(*item)
///         }
///     }
/// }
///
/// struct Structure {
///     keyed_map: HashMap<i64, String>,
/// }
///
/// impl Structure {
///     fn insert(&mut self, i: i64, s: &str) {
///         self.keyed_map.insert(i, s.to_owned());
///     }
///
///     fn get(&self, i: i64) -> Option<&str> {
///         self.keyed_map.get(&i).map(|s| s.as_str())
///     }
///
///     // some arbitrary key remapping we are choosing for an example
///     fn sub42<F: FnMut(i64, i64)>(&mut self, mut map: F) {
///         let mut new = HashMap::new();
///         for (key, val) in self.keyed_map.drain() {
///             new.insert(key - 42, val);
///             // closure through which we can
///             // see the old and new keys
///             map(key, key - 42);
///         }
///         self.keyed_map = new;
///     }
///
///     fn sub42_recaster(&mut self) -> MyRecaster {
///         let mut map = HashMap::new();
///         self.sub42(|old, new| {
///             map.insert(old, new);
///         });
///         MyRecaster(map)
///     }
/// }
///
/// let mut structure = Structure {
///     keyed_map: HashMap::new(),
/// };
///
/// let mut keys = vec![0, 1337, -10];
/// structure.insert(keys[0], "test");
/// structure.insert(keys[1], "hello");
/// structure.insert(keys[2], "world");
///
/// let recaster = structure.sub42_recaster();
/// // this goes through the `Recast` impl for `Vec` and calls
/// // `Recast<i64>` with `<MyRecaster as Recaster>::recast_item`
/// keys.recast(&recaster).unwrap();
///
/// assert_eq!(&keys, &[-42, 1295, -52]);
/// assert_eq!(structure.get(keys[0]).unwrap(), "test");
/// assert_eq!(structure.get(keys[1]).unwrap(), "hello");
/// assert_eq!(structure.get(keys[2]).unwrap(), "world");
/// ```
pub trait Recaster {
    type Item;

    /// Recasts an `item` based on `self`. Returns an `Err` with `item` if
    /// it could not be handled.
    fn recast_item(&self, item: &mut Self::Item) -> Result<(), Self::Item>;
}

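// `HashMap<K, K>` and `BTreeMap<K, K>` can be used directly as `Recaster`s
// that map old keys to new keys, returning an error on unknown keys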
#[cfg(feature = "std")]
impl<K: PartialEq + Eq + core::hash::Hash + Clone> Recaster for std::collections::HashMap<K, K> {
    type Item = K;

    fn recast_item(&self, item: &mut Self::Item) -> Result<(), Self::Item> {
        if let Some(res) = self.get(item) {
            *item = res.clone();
            Ok(())
        } else {
            Err(item.clone())
        }
    }
}

#[cfg(feature = "std")]
impl<K: std::cmp::Ord + Clone> Recaster for std::collections::BTreeMap<K, K> {
    type Item = K;

    fn recast_item(&self, item: &mut Self::Item) -> Result<(), Self::Item> {
        if let Some(res) = self.get(item) {
            *item = res.clone();
            Ok(())
        } else {
            Err(item.clone())
        }
    }
}