1use std::any::TypeId;
2use std::cell::{Cell, UnsafeCell};
3use std::fmt;
4use std::hash::{BuildHasher, Hash, Hasher};
5use std::marker::PhantomData;
6use std::num::NonZeroUsize;
7use std::path::{Path, PathBuf};
8
9use crossbeam_utils::CachePadded;
10use intrusive_collections::{intrusive_adapter, LinkedList, LinkedListLink, UnsafeRef};
11use rustc_hash::FxBuildHasher;
12
13use crate::durability::Durability;
14use crate::function::{VerifyCycleHeads, VerifyResult};
15use crate::hash::{FxHashSet, FxIndexSet};
16use crate::id::{AsId, FromId};
17use crate::ingredient::Ingredient;
18use crate::plumbing::{self, Jar, ZalsaLocal};
19use crate::revision::AtomicRevision;
20use crate::sync::{Arc, Mutex, OnceLock};
21use crate::table::memo::{MemoTable, MemoTableTypes, MemoTableWithTypesMut};
22use crate::table::Slot;
23use crate::zalsa::{IngredientIndex, JarKind, Zalsa};
24use crate::zalsa_local::QueryEdge;
25use crate::{DatabaseKeyIndex, Event, EventKind, Id, Revision};
26
/// Compile-time configuration for one interned struct; presumably implemented
/// by generated code for each `#[salsa::interned]`-style type — TODO confirm.
pub trait Configuration: Sized + 'static {
    /// Source location of the struct definition, for diagnostics.
    const LOCATION: crate::ingredient::Location;

    /// Name of the interned struct, used in debug output and events.
    const DEBUG_NAME: &'static str;

    /// Whether values of this ingredient participate in persistence.
    const PERSIST: bool;

    /// Size of the revision window used to decide when a value is stale
    /// enough for its id to be reused (see `RevisionQueue`). Tests use a
    /// small window to exercise the reuse path.
    #[cfg(test)]
    const REVISIONS: NonZeroUsize = NonZeroUsize::new(3).unwrap();

    #[cfg(not(test))]
    const REVISIONS: NonZeroUsize = NonZeroUsize::new(1).unwrap();

    /// The stored fields of the interned struct. The `'db` lifetime is
    /// erased to `'static` internally (see `to_internal_data`).
    type Fields<'db>: InternedData;

    /// The user-facing struct, convertible to and from an `Id`.
    type Struct<'db>: Copy + FromId + AsId;

    /// Optional deep (heap) size of the fields for memory-usage reports;
    /// `None` means "unknown". Default reports nothing.
    fn heap_size(_value: &Self::Fields<'_>) -> Option<usize> {
        None
    }

    /// Serializes the fields (persistence support).
    fn serialize<S>(value: &Self::Fields<'_>, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: plumbing::serde::Serializer;

    /// Deserializes the fields (persistence support).
    fn deserialize<'de, D>(deserializer: D) -> Result<Self::Fields<'static>, D::Error>
    where
        D: plumbing::serde::Deserializer<'de>;
}
70
/// Marker trait for data that can be interned; blanket-implemented for any
/// hashable, comparable, cloneable, thread-safe type.
pub trait InternedData: Sized + Eq + Hash + Clone + Sync + Send {}
impl<T: Eq + Hash + Clone + Sync + Send> InternedData for T {}
73
/// The [`Jar`] for an interned struct; stateless, carries only the
/// configuration type.
pub struct JarImpl<C: Configuration> {
    phantom: PhantomData<C>,
}
77
/// The interned-struct ingredient: a sharded hash table mapping field values
/// to `Id`s, plus bookkeeping for reusing the ids of stale values.
pub struct IngredientImpl<C: Configuration> {
    /// Index of this ingredient within the database.
    ingredient_index: IngredientIndex,

    /// Hash builder for key lookups (keys are the interned fields).
    hasher: FxBuildHasher,

    /// Right-shift applied to a key hash to pick a shard (see `Self::shard`).
    shift: u32,

    /// Per-shard state; cache-padded to avoid false sharing between locks.
    shards: Box<[CachePadded<Mutex<IngredientShard<C>>>]>,

    /// Sliding window of recently recorded revisions, used to decide when a
    /// value is stale enough for id reuse.
    revision_queue: RevisionQueue<C>,

    /// Memo types shared by all values of this ingredient.
    memo_table_types: Arc<MemoTableTypes>,

    _marker: PhantomData<fn() -> C>,
}
102
/// One shard of the ingredient's state, guarded by the shard's mutex.
struct IngredientShard<C: Configuration> {
    /// Maps the hash of interned fields to their `Id`; entries are rehashed
    /// and compared through the value table (`value_hash` / `value_eq`).
    key_map: hashbrown::HashTable<Id>,

    /// Values eligible for id reuse, most recently used first. Only values
    /// for which `ValueShared::is_reusable` holds are linked here.
    lru: LinkedList<ValueAdapter<C>>,
}
113
114impl<C: Configuration> Default for IngredientShard<C> {
115 fn default() -> Self {
116 Self {
117 lru: LinkedList::default(),
118 key_map: hashbrown::HashTable::new(),
119 }
120 }
121}
122
// SAFETY(review): `Value` contains `UnsafeCell`s, but mutation appears to
// happen only under the owning shard's lock (or with exclusive access) —
// confirm that discipline holds for all accessors before relying on it.
unsafe impl<C: Configuration> Sync for Value<C> {}

// Adapter letting `Value`s be threaded onto a shard's intrusive LRU list
// through their embedded `link` field.
intrusive_adapter!(ValueAdapter<C> = UnsafeRef<Value<C>>: Value<C> { link: LinkedListLink } where C: Configuration);
128
/// A single interned value, stored in a slot of the database table.
pub struct Value<C>
where
    C: Configuration,
{
    /// Index of the shard whose key map references this value.
    shard: u16,

    /// Intrusive link for the shard's LRU list (unlinked when not reusable).
    link: LinkedListLink,

    /// The interned fields, with `'db` erased to `'static`.
    fields: UnsafeCell<C::Fields<'static>>,

    /// Memos attached to this value; cleared when the slot's id is reused.
    memos: UnsafeCell<MemoTable>,

    /// Mutable metadata (id/generation, durability, last-interned revision).
    shared: UnsafeCell<ValueShared>,
}
156
/// Mutable metadata of an interned value. `packed` keeps the struct small;
/// fields are copied out (note the `{ ... }` blocks at use sites) before use.
#[repr(Rust, packed)]
#[derive(Clone, Copy)]
struct ValueShared {
    /// Id (including generation) currently assigned to this slot; the
    /// generation is bumped when the slot is reused for a new key.
    id: Id,

    /// Revision in which this value was last interned or validated.
    last_interned_at: Revision,

    /// Maximum durability over all queries that interned this value.
    durability: Durability,
}
185
186impl ValueShared {
187 fn is_reusable<C: Configuration>(&self) -> bool {
189 if C::REVISIONS == IMMORTAL {
191 return false;
192 }
193
194 self.durability == Durability::LOW
201 }
202}
203
impl<C> Value<C>
where
    C: Configuration,
{
    /// Returns the interned fields (lifetime-erased).
    #[cfg(feature = "salsa_unstable")]
    pub fn fields(&self) -> &C::Fields<'static> {
        // SAFETY(review): assumes no writer is active; mutation of `fields`
        // happens only during id reuse under the shard lock — confirm.
        unsafe { &*self.fields.get() }
    }

    /// Reports the memory used by this value and its memos.
    ///
    /// # Safety
    /// `memo_table_types` must be the types registered for this ingredient,
    /// and the caller must exclude concurrent mutation (shard lock held).
    #[cfg(all(not(feature = "shuttle"), feature = "salsa_unstable"))]
    unsafe fn memory_usage(&self, memo_table_types: &MemoTableTypes) -> crate::database::SlotInfo {
        let heap_size = C::heap_size(self.fields());
        // SAFETY: caller guarantees no concurrent mutation of the memo table.
        let memos = unsafe { &*self.memos.get() };
        let memos = unsafe { memo_table_types.attach_memos(memos) };

        crate::database::SlotInfo {
            debug_name: C::DEBUG_NAME,
            // Metadata is everything in the slot except the fields themselves.
            size_of_metadata: std::mem::size_of::<Self>() - std::mem::size_of::<C::Fields<'_>>(),
            size_of_fields: std::mem::size_of::<C::Fields<'_>>(),
            heap_size_of_fields: heap_size,
            memos: memos.memory_usage(),
        }
    }
}
241
242impl<C: Configuration> Default for JarImpl<C> {
243 fn default() -> Self {
244 Self {
245 phantom: PhantomData,
246 }
247 }
248}
249
250impl<C: Configuration> Jar for JarImpl<C> {
251 fn create_ingredients(
252 _zalsa: &mut Zalsa,
253 first_index: IngredientIndex,
254 ) -> Vec<Box<dyn Ingredient>> {
255 vec![Box::new(IngredientImpl::<C>::new(first_index)) as _]
256 }
257
258 fn id_struct_type_id() -> TypeId {
259 TypeId::of::<C::Struct<'static>>()
260 }
261}
262
263impl<C> IngredientImpl<C>
264where
265 C: Configuration,
266{
    /// Creates the ingredient, sizing the shard count to the machine.
    pub fn new(ingredient_index: IngredientIndex) -> Self {
        // Compute the shard count once per process: 4x the CPU count,
        // rounded up to a power of two so shard selection reduces to shifts.
        static SHARDS: OnceLock<usize> = OnceLock::new();
        let shards = *SHARDS.get_or_init(|| {
            let num_cpus = std::thread::available_parallelism()
                .map(usize::from)
                .unwrap_or(1);

            (num_cpus * 4).next_power_of_two()
        });

        Self {
            ingredient_index,
            hasher: FxBuildHasher,
            memo_table_types: Arc::new(MemoTableTypes::default()),
            revision_queue: RevisionQueue::default(),
            // `shards` is a power of two; this shift maps high hash bits
            // onto `0..shards` (see `Self::shard`).
            shift: usize::BITS - shards.trailing_zeros(),
            shards: (0..shards).map(|_| Default::default()).collect(),
            _marker: PhantomData,
        }
    }
287
    /// Selects the shard for a key hash.
    #[inline]
    fn shard(&self, hash: u64) -> usize {
        // Discards the top 7 bits before shifting — presumably because the
        // hashbrown table uses the top bits of the hash for control bytes,
        // so shard choice stays decorrelated from in-table placement; TODO confirm.
        ((hash as usize) << 7) >> self.shift
    }
296
    /// Erases the `'db` lifetime of interned fields for storage.
    ///
    /// # Safety
    /// The caller must not let the `'static` view escape; stored fields are
    /// only handed back out re-bounded via `from_internal_data`.
    unsafe fn to_internal_data<'db>(&'db self, data: C::Fields<'db>) -> C::Fields<'static> {
        // SAFETY: only the lifetime parameter changes; the caller upholds
        // that the data does not actually outlive the database.
        unsafe { std::mem::transmute(data) }
    }

    /// Re-attaches the `'db` lifetime to stored fields.
    fn from_internal_data<'db>(data: &'db C::Fields<'static>) -> &'db C::Fields<'db> {
        // SAFETY: shortening the erased `'static` lifetime back to `'db`.
        unsafe { std::mem::transmute(data) }
    }
311
312 pub fn intern<'db, Key>(
323 &'db self,
324 zalsa: &'db Zalsa,
325 zalsa_local: &'db ZalsaLocal,
326 key: Key,
327 assemble: impl FnOnce(Id, Key) -> C::Fields<'db>,
328 ) -> C::Struct<'db>
329 where
330 Key: Hash,
331 C::Fields<'db>: HashEqLike<Key>,
332 {
333 FromId::from_id(self.intern_id(zalsa, zalsa_local, key, assemble))
334 }
335
    /// Interns `key` and returns its `Id`: an existing id on the fast path,
    /// a reclaimed id from a stale value, or a freshly allocated one.
    pub fn intern_id<'db, Key>(
        &'db self,
        zalsa: &'db Zalsa,
        zalsa_local: &'db ZalsaLocal,
        key: Key,
        assemble: impl FnOnce(Id, Key) -> C::Fields<'db>,
    ) -> crate::Id
    where
        Key: Hash,
        C::Fields<'db>: HashEqLike<Key>,
    {
        let current_revision = zalsa.current_revision();
        // Feed the staleness window used for id reuse below.
        self.revision_queue.record(current_revision);

        // The key's hash must agree with the stored fields' hash
        // (checked by the debug assertion in `insert_id`).
        let hash = self.hasher.hash_one(&key);

        let shard_index = self.shard(hash);
        // SAFETY: `shard` always returns an index within `self.shards`.
        let shard = unsafe { &mut *self.shards.get_unchecked(shard_index).lock() };

        // `value_eq` stashes the matching value here so we avoid a second
        // table lookup after `find` succeeds.
        let found_value = Cell::new(None);
        // SAFETY: we hold this shard's lock for the duration of the lookup.
        let eq = |id: &_| unsafe { Self::value_eq(*id, &key, zalsa, &found_value) };

        // Fast path: the key is already interned.
        if let Some(&id) = shard.key_map.find(hash, eq) {
            let value = found_value
                .get()
                .expect("found the interned value, so `found_value` should be set");

            let index = self.database_key_index(id);

            // SAFETY: `shared` is only mutated under the shard lock, held here.
            let value_shared = unsafe { &mut *value.shared.get() };

            // First use in this revision: validate and refresh LRU position.
            // (Braces copy the field out of the packed struct.)
            if { value_shared.last_interned_at } < current_revision {
                value_shared.last_interned_at = current_revision;

                zalsa.event(&|| {
                    Event::new(EventKind::DidValidateInternedValue {
                        key: index,
                        revision: current_revision,
                    })
                });

                if value_shared.is_reusable::<C>() {
                    // Move the value to the front of the LRU list.
                    // SAFETY: reusable values are always linked in this
                    // shard's list, and `value` outlives the list.
                    unsafe { shard.lru.cursor_mut_from_ptr(value).remove() };

                    unsafe { shard.lru.push_front(UnsafeRef::from_raw(value)) };
                }
            }

            if let Some((_, stamp)) = zalsa_local.active_query() {
                let was_reusable = value_shared.is_reusable::<C>();

                // Durability only ever grows (max over all interning queries).
                value_shared.durability = std::cmp::max(value_shared.durability, stamp.durability);

                // Promotion above LOW durability disqualifies the value from
                // reuse; unlink it from the LRU list.
                if was_reusable && !value_shared.is_reusable::<C>() {
                    // SAFETY: the value was linked while it was reusable.
                    unsafe { shard.lru.cursor_mut_from_ptr(value).remove() };
                }
            }

            // Record the read so this query is invalidated if the id is
            // ever reclaimed.
            zalsa_local.report_tracked_read_simple(
                index,
                value_shared.durability,
                current_revision,
            );

            return value_shared.id;
        }

        // Until the revision window is primed we cannot judge staleness,
        // so skip the reuse scan and allocate fresh slots.
        if !self.revision_queue.is_primed() {
            return self.intern_id_cold(
                key,
                zalsa,
                zalsa_local,
                assemble,
                shard,
                shard_index,
                hash,
            );
        }

        // Scan from the least recently used end for a stale value whose id
        // can be reclaimed for this key.
        let mut cursor = shard.lru.back_mut();

        while let Some(value) = cursor.get() {
            // SAFETY: `shared` is only mutated under the shard lock, held here.
            let value_shared = unsafe { &mut *value.shared.get() };

            // The list is LRU-ordered: once we hit a non-stale value there
            // are no stale ones closer to the front.
            if !self.revision_queue.is_stale(value_shared.last_interned_at) {
                break;
            }

            debug_assert!({ value_shared.last_interned_at } < current_revision);

            // Outside a query, pin maximum durability/revision so the value
            // is never considered for reuse or revalidation shortcuts.
            let (durability, last_interned_at) = zalsa_local
                .active_query()
                .map(|(_, stamp)| (stamp.durability, current_revision))
                .unwrap_or((Durability::MAX, Revision::max()));

            let old_id = value_shared.id;

            // Bumping the generation is what lets `maybe_changed_after`
            // detect reuse; a slot that exhausted its generations is retired
            // from the LRU list (but stays interned).
            let Some(new_id) = value_shared.id.next_generation() else {
                cursor.remove().unwrap();

                cursor = shard.lru.back_mut();

                continue;
            };

            *value_shared = ValueShared {
                id: new_id,
                durability,
                last_interned_at,
            };

            let index = self.database_key_index(value_shared.id);

            zalsa_local.report_tracked_read_simple(
                index,
                value_shared.durability,
                current_revision,
            );

            zalsa.event(&|| {
                Event::new(EventKind::DidReuseInternedValue {
                    key: index,
                    revision: current_revision,
                })
            });

            // Unlink the value; we re-link it below if still reusable.
            // SAFETY: the value is owned by the database table and outlives
            // this reference.
            let value = unsafe { &*UnsafeRef::into_raw(cursor.remove().unwrap()) };

            // SAFETY: shard lock held; no other reference to `fields` is live.
            let old_fields = unsafe { &mut *value.fields.get() };

            // Remove the old key's map entry before overwriting the fields
            // (the entry is found via the OLD fields' hash).
            let old_hash = self.hasher.hash_one(&*old_fields);
            shard
                .key_map
                .find_entry(old_hash, |found_id: &Id| *found_id == old_id)
                .expect("interned value in LRU so must be in key_map")
                .remove();

            // SAFETY: lifetime erasure per `to_internal_data`'s contract.
            *old_fields = unsafe { self.to_internal_data(assemble(new_id, key)) };

            let hasher = |id: &_| unsafe { self.value_hash(*id, zalsa) };

            shard.key_map.insert_unique(hash, new_id, hasher);

            // SAFETY: shard lock held; exclusive access to the memo table.
            let memo_table = unsafe { &mut *value.memos.get() };

            // Memos belonged to the old key; discard them.
            unsafe { self.clear_memos(zalsa, memo_table, new_id) };

            if value_shared.is_reusable::<C>() {
                // SAFETY: `value` lives in the table for the list's lifetime.
                shard.lru.push_front(unsafe { UnsafeRef::from_raw(value) });
            }

            return new_id;
        }

        // No reusable slot found: allocate a new one.
        self.intern_id_cold(key, zalsa, zalsa_local, assemble, shard, shard_index, hash)
    }
581
    /// Slow path of [`Self::intern_id`]: allocates a brand-new slot for `key`
    /// and records it in the given (already locked) shard.
    #[allow(clippy::too_many_arguments)]
    fn intern_id_cold<'db, Key>(
        &'db self,
        key: Key,
        zalsa: &Zalsa,
        zalsa_local: &ZalsaLocal,
        assemble: impl FnOnce(Id, Key) -> C::Fields<'db>,
        shard: &mut IngredientShard<C>,
        shard_index: usize,
        hash: u64,
    ) -> crate::Id
    where
        Key: Hash,
        C::Fields<'db>: HashEqLike<Key>,
    {
        let current_revision = zalsa.current_revision();

        // Outside a query, pin maximum durability/revision so the value is
        // effectively never reclaimed.
        let (durability, last_interned_at) = zalsa_local
            .active_query()
            .map(|(_, stamp)| (stamp.durability, current_revision))
            .unwrap_or((Durability::MAX, Revision::max()));

        let (id, value) = zalsa_local.allocate(zalsa, self.ingredient_index, |id| Value::<C> {
            shard: shard_index as u16,
            link: LinkedListLink::new(),
            // SAFETY(review): fresh memo table built from this ingredient's
            // registered types — confirm `MemoTable::new` contract.
            memos: UnsafeCell::new(unsafe { MemoTable::new(self.memo_table_types()) }),
            // SAFETY: lifetime erasure per `to_internal_data`'s contract.
            fields: UnsafeCell::new(unsafe { self.to_internal_data(assemble(id, key)) }),
            shared: UnsafeCell::new(ValueShared {
                id,
                durability,
                last_interned_at,
            }),
        });

        // Publish the new value in the shard's key map (and LRU if reusable).
        self.insert_id(id, zalsa, shard, hash, value);

        let index = self.database_key_index(id);

        zalsa_local.report_tracked_read_simple(index, durability, current_revision);

        zalsa.event(&|| {
            Event::new(EventKind::DidInternValue {
                key: index,
                revision: current_revision,
            })
        });

        id
    }
650
    /// Links `value` into `shard`: onto the LRU list if its id may be reused
    /// later, and into the key map under `hash`.
    fn insert_id(
        &self,
        id: Id,
        zalsa: &Zalsa,
        shard: &mut IngredientShard<C>,
        hash: u64,
        value: &Value<C>,
    ) {
        // SAFETY(review): assumes the shard lock (or exclusive access)
        // protects `shared` — confirm for every caller.
        let value_shared = unsafe { &mut *value.shared.get() };

        if value_shared.is_reusable::<C>() {
            // SAFETY: `value` is stored in the database table and outlives
            // the intrusive list.
            shard.lru.push_front(unsafe { UnsafeRef::from_raw(value) });
        }

        let hasher = |id: &_| unsafe { self.value_hash(*id, zalsa) };

        shard.key_map.insert_unique(hash, id, hasher);

        // The caller-supplied hash must equal the stored fields' hash, or
        // future lookups would never find this entry.
        debug_assert_eq!(hash, {
            let value = zalsa.table().get::<Value<C>>(id);

            unsafe { self.hasher.hash_one(&*value.fields.get()) }
        });
    }
684
    /// Discards all memos attached to a value whose slot is being reused,
    /// reporting each discard and removing the memos' outputs.
    ///
    /// # Safety
    /// The caller must have exclusive access to `memo_table`.
    pub(crate) unsafe fn clear_memos(&self, zalsa: &Zalsa, memo_table: &mut MemoTable, id: Id) {
        // SAFETY: `self.memo_table_types` are the types registered for this
        // ingredient's memos.
        let table = unsafe { self.memo_table_types.attach_memos_mut(memo_table) };

        // Drop guard: if `remove_outputs` panics below, the remaining memos
        // are still dropped instead of leaking.
        struct TableDropGuard<'a>(MemoTableWithTypesMut<'a>);

        impl Drop for TableDropGuard<'_> {
            fn drop(&mut self) {
                // SAFETY(review): the table is not accessed again after the
                // guard fires — confirm `MemoTableWithTypesMut::drop` contract.
                unsafe { self.0.drop() };
            }
        }

        let mut table_guard = TableDropGuard(table);

        // SAFETY(review): same exclusivity argument as above.
        unsafe {
            table_guard.0.take_memos(|memo_ingredient_index, memo| {
                let ingredient_index =
                    zalsa.ingredient_index_for_memo(self.ingredient_index, memo_ingredient_index);

                let executor = DatabaseKeyIndex::new(ingredient_index, id);

                zalsa.event(&|| Event::new(EventKind::DidDiscard { key: executor }));

                // Clean up anything the discarded memo created.
                memo.remove_outputs(zalsa, executor);
            })
        };

        // All memos were taken above; the guard has nothing left to drop.
        std::mem::forget(table_guard);

        memo_table.reset();
    }
728
    /// Hashes the stored fields of the value with the given `id`.
    ///
    /// # Safety
    /// The caller must exclude concurrent mutation of the value's fields
    /// (hold the owning shard's lock).
    unsafe fn value_hash<'db>(&'db self, id: Id, zalsa: &'db Zalsa) -> u64 {
        let value = zalsa.table().get::<Value<C>>(id);

        // SAFETY: per this function's contract, no concurrent writer.
        unsafe { self.hasher.hash_one(&*value.fields.get()) }
    }

    /// Compares the stored fields of the value with the given `id` against
    /// `key`, stashing the value reference in `found_value` for the caller.
    ///
    /// # Safety
    /// The caller must exclude concurrent mutation of the value's fields
    /// (hold the owning shard's lock).
    unsafe fn value_eq<'db, Key>(
        id: Id,
        key: &Key,
        zalsa: &'db Zalsa,
        found_value: &Cell<Option<&'db Value<C>>>,
    ) -> bool
    where
        C::Fields<'db>: HashEqLike<Key>,
    {
        let value = zalsa.table().get::<Value<C>>(id);
        // Remember the candidate so the caller avoids a second table lookup.
        found_value.set(Some(value));

        // SAFETY: per this function's contract, no concurrent writer.
        let fields = unsafe { &*value.fields.get() };

        HashEqLike::eq(Self::from_internal_data(fields), key)
    }
765
766 #[inline]
768 pub fn database_key_index(&self, id: Id) -> DatabaseKeyIndex {
769 DatabaseKeyIndex::new(self.ingredient_index, id)
770 }
771
    /// Returns the fields of the interned value with the given `id`.
    pub fn data<'db>(&'db self, zalsa: &'db Zalsa, id: Id) -> &'db C::Fields<'db> {
        let value = zalsa.table().get::<Value<C>>(id);

        // Sanity check (debug only): the value must have been interned or
        // validated since the last change at its durability level.
        debug_assert!(
            {
                let _shard = self.shards[value.shard as usize].lock();

                // SAFETY: `shared` is only mutated under the shard lock,
                // which is held above.
                let value_shared = unsafe { &mut *value.shared.get() };

                let last_changed_revision = zalsa.last_changed_revision(value_shared.durability);
                // Braces copy the field out of the packed struct.
                ({ value_shared.last_interned_at }) >= last_changed_revision
            },
            "Data was not interned in the latest revision for its durability."
        );

        // SAFETY(review): assumes no writer is mutating `fields` for a live
        // id (reuse only targets stale values) — confirm.
        unsafe { Self::from_internal_data(&*value.fields.get()) }
    }
794
795 pub fn fields<'db>(&'db self, zalsa: &'db Zalsa, s: C::Struct<'db>) -> &'db C::Fields<'db> {
799 self.data(zalsa, AsId::as_id(&s))
800 }
801
802 pub fn reset(&mut self, zalsa_mut: &mut Zalsa) {
803 _ = zalsa_mut;
804
805 for shard in self.shards.iter_mut() {
806 shard.get_mut().key_map.clear();
808 }
809 }
810
    /// Iterates over all interned values, locking each value's shard while
    /// its entry is produced.
    pub fn entries<'db>(&'db self, zalsa: &'db Zalsa) -> impl Iterator<Item = StructEntry<'db, C>> {
        // SAFETY: `should_lock = true`, so each value's shared state is read
        // under its shard lock.
        unsafe { self.entries_inner(true, zalsa) }
    }

    /// Shared implementation of [`Self::entries`].
    ///
    /// # Safety
    /// If `should_lock` is false, the caller must already hold (or otherwise
    /// exclude writers from) every shard lock.
    unsafe fn entries_inner<'db>(
        &'db self,
        should_lock: bool,
        zalsa: &'db Zalsa,
    ) -> impl Iterator<Item = StructEntry<'db, C>> {
        zalsa.table().slots_of::<Value<C>>().map(move |(_, value)| {
            if should_lock {
                // SAFETY: `value.shard` is always a valid shard index.
                let _shard = unsafe { self.shards.get_unchecked(value.shard as usize) }.lock();
            }

            // Read the current id (including generation) for this slot.
            let id = unsafe { (*value.shared.get()).id };

            StructEntry {
                value,
                key: self.database_key_index(id),
            }
        })
    }
846}
847
/// A snapshot of one interned value: the value slot plus the database key
/// (including the slot's current generation) it held when enumerated.
pub struct StructEntry<'db, C>
where
    C: Configuration,
{
    value: &'db Value<C>,
    key: DatabaseKeyIndex,
}
856
impl<'db, C> StructEntry<'db, C>
where
    C: Configuration,
{
    /// Database key of this entry (ingredient index + id).
    pub fn key(&self) -> DatabaseKeyIndex {
        self.key
    }

    /// Reconstructs the user-facing struct from the entry's id.
    pub fn as_struct(&self) -> C::Struct<'_> {
        FromId::from_id(self.key.key_index())
    }

    /// Raw access to the underlying value slot.
    #[cfg(feature = "salsa_unstable")]
    pub fn value(&self) -> &'db Value<C> {
        self.value
    }
}
876
877impl<C> Ingredient for IngredientImpl<C>
878where
879 C: Configuration,
880{
    /// Source location of the interned struct definition.
    fn location(&self) -> &'static crate::ingredient::Location {
        &C::LOCATION
    }

    /// Index of this ingredient within the database.
    fn ingredient_index(&self) -> IngredientIndex {
        self.ingredient_index
    }
888
    /// Reports whether the interned value `input` may have changed: it has
    /// iff its slot was reclaimed under a newer id generation.
    unsafe fn maybe_changed_after(
        &self,
        zalsa: &crate::zalsa::Zalsa,
        _db: crate::database::RawDatabase<'_>,
        input: Id,
        _revision: Revision,
        _cycle_heads: &mut VerifyCycleHeads,
    ) -> VerifyResult {
        let current_revision = zalsa.current_revision();
        // Validation counts as "use" for the staleness window.
        self.revision_queue.record(current_revision);

        let value = zalsa.table().get::<Value<C>>(input);

        // SAFETY: `value.shard` is always in-bounds for `self.shards`.
        let _shard = unsafe { self.shards.get_unchecked(value.shard as usize) }.lock();

        // SAFETY: `shared` is only mutated under the shard lock, held here.
        let value_shared = unsafe { &mut *value.shared.get() };

        // A newer generation means the slot was reclaimed for a different
        // key, so the caller's `input` no longer exists.
        if value_shared.id.generation() > input.generation() {
            return VerifyResult::changed();
        }

        // Mark the value as validated in this revision so it is not
        // considered stale for reuse.
        value_shared.last_interned_at = current_revision;

        zalsa.event(&|| {
            let index = self.database_key_index(input);

            Event::new(EventKind::DidValidateInternedValue {
                key: index,
                revision: current_revision,
            })
        });

        VerifyResult::unchanged()
    }
929
930 fn collect_minimum_serialized_edges(
931 &self,
932 _zalsa: &Zalsa,
933 edge: QueryEdge,
934 serialized_edges: &mut FxIndexSet<QueryEdge>,
935 _visited_edges: &mut FxHashSet<QueryEdge>,
936 ) {
937 if C::PERSIST && C::REVISIONS != IMMORTAL {
938 serialized_edges.insert(edge);
944 }
945
946 }
948
    /// Debug name of the interned struct.
    fn debug_name(&self) -> &'static str {
        C::DEBUG_NAME
    }

    /// Interned ingredients are struct jars.
    fn jar_kind(&self) -> JarKind {
        JarKind::Struct
    }

    /// Memo types registered for values of this ingredient.
    fn memo_table_types(&self) -> &Arc<MemoTableTypes> {
        &self.memo_table_types
    }

    fn memo_table_types_mut(&mut self) -> &mut Arc<MemoTableTypes> {
        &mut self.memo_table_types
    }
964
    /// Reports per-value memory usage. Every shard lock is held for the
    /// duration so values cannot be mutated mid-report.
    #[cfg(all(not(feature = "shuttle"), feature = "salsa_unstable"))]
    fn memory_usage(&self, db: &dyn crate::Database) -> Option<Vec<crate::database::SlotInfo>> {
        use parking_lot::lock_api::RawMutex;

        // Lock all shards up front via the raw mutex so no guard borrows
        // `self` across the iteration below.
        for shard in self.shards.iter() {
            unsafe { shard.raw().lock() };
        }

        // SAFETY: all shard locks are held, so `should_lock = false` is sound.
        let entries = unsafe { self.entries_inner(false, db.zalsa()) };

        let memory_usage = entries
            // SAFETY: the shard locks exclude concurrent mutation.
            .map(|entry| unsafe { entry.value.memory_usage(&self.memo_table_types) })
            .collect();

        // SAFETY: each shard was locked above and never released since.
        for shard in self.shards.iter() {
            unsafe { shard.raw().unlock() };
        }

        Some(memory_usage)
    }
991
    /// Whether this ingredient participates in persistence at all.
    fn is_persistable(&self) -> bool {
        C::PERSIST
    }

    /// Serialize only when persistence is enabled and at least one value
    /// is interned.
    fn should_serialize(&self, zalsa: &Zalsa) -> bool {
        C::PERSIST && self.entries(zalsa).next().is_some()
    }
999
    /// Serializes all interned values via the `persistence` module.
    ///
    /// # Safety
    /// NOTE(review): presumably requires exclusive access to the database
    /// tables (no concurrent interning) — confirm the trait's contract.
    #[cfg(feature = "persistence")]
    unsafe fn serialize<'db>(
        &'db self,
        zalsa: &'db Zalsa,
        f: &mut dyn FnMut(&dyn erased_serde::Serialize),
    ) {
        f(&persistence::SerializeIngredient {
            zalsa,
            ingredient: self,
        })
    }

    /// Rebuilds this ingredient's value slots and key maps from serialized data.
    #[cfg(feature = "persistence")]
    fn deserialize(
        &mut self,
        zalsa: &mut Zalsa,
        deserializer: &mut dyn erased_serde::Deserializer,
    ) -> Result<(), erased_serde::Error> {
        let deserialize = persistence::DeserializeIngredient {
            zalsa,
            ingredient: self,
        };

        serde::de::DeserializeSeed::deserialize(deserialize, deserializer)
    }
1025}
1026
1027impl<C> std::fmt::Debug for IngredientImpl<C>
1028where
1029 C: Configuration,
1030{
1031 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1032 f.debug_struct(std::any::type_name::<Self>())
1033 .field("index", &self.ingredient_index)
1034 .finish()
1035 }
1036}
1037
// SAFETY(review): `memos`/`memos_mut` hand out the memo table embedded in
// this slot — confirm against the `Slot` trait's documented requirements.
unsafe impl<C> Slot for Value<C>
where
    C: Configuration,
{
    #[inline(always)]
    unsafe fn memos(&self, _current_revision: Revision) -> &MemoTable {
        // SAFETY: caller guarantees no concurrent mutable access (per the
        // `Slot::memos` contract).
        unsafe { &*self.memos.get() }
    }

    #[inline(always)]
    fn memos_mut(&mut self) -> &mut MemoTable {
        self.memos.get_mut()
    }
}
1055
/// A bounded, newest-first window of the most recent `C::REVISIONS`
/// revisions in which this ingredient was used. Values last interned before
/// the oldest tracked revision are considered stale and eligible for reuse.
struct RevisionQueue<C> {
    /// Serializes writers in `record_cold`; readers load atomics lock-free.
    lock: Mutex<()>,
    /// Tracked revisions, newest at index 0; empty for immortal ingredients.
    revisions: Box<[AtomicRevision]>,
    _configuration: PhantomData<fn() -> C>,
}

/// Sentinel `REVISIONS` value meaning ids are never reused ("immortal").
const IMMORTAL: NonZeroUsize = NonZeroUsize::MAX;
1070
1071impl<C: Configuration> Default for RevisionQueue<C> {
1072 fn default() -> RevisionQueue<C> {
1073 let revisions = if C::REVISIONS == IMMORTAL {
1074 Box::default()
1075 } else {
1076 (0..C::REVISIONS.get())
1077 .map(|_| AtomicRevision::start())
1078 .collect()
1079 };
1080
1081 RevisionQueue {
1082 lock: Mutex::new(()),
1083 revisions,
1084 _configuration: PhantomData,
1085 }
1086 }
1087}
1088
1089impl<C: Configuration> RevisionQueue<C> {
    /// Records `revision` as the most recent, if it is newer than the last
    /// one recorded. Cheap no-op on the common path.
    #[inline]
    fn record(&self, revision: Revision) {
        // Immortal ingredients never reuse ids, so no tracking is needed.
        if C::REVISIONS == IMMORTAL {
            return;
        }

        // Lock-free fast path: already recorded this (or a newer) revision.
        if self.revisions[0].load() >= revision {
            return;
        }

        self.record_cold(revision);
    }
1105
1106 #[cold]
1107 fn record_cold(&self, revision: Revision) {
1108 let _lock = self.lock.lock();
1109
1110 for i in (1..C::REVISIONS.get()).rev() {
1114 self.revisions[i].store(self.revisions[i - 1].load());
1115 }
1116
1117 self.revisions[0].store(revision);
1118 }
1119
    /// Whether `revision` has fallen out of the tracked window, i.e. the
    /// last use happened more than `C::REVISIONS` recorded revisions ago.
    #[inline]
    fn is_stale(&self, revision: Revision) -> bool {
        // Immortal ingredients never consider anything stale.
        if C::REVISIONS == IMMORTAL {
            return false;
        }

        let oldest = self.revisions[C::REVISIONS.get() - 1].load();

        // The window has not yet seen enough revisions to judge staleness.
        if oldest == Revision::start() {
            return false;
        }

        revision < oldest
    }
1137
    /// Whether the window is fully populated with real revisions; id reuse
    /// only starts once enough history has been recorded.
    #[inline]
    fn is_primed(&self) -> bool {
        if C::REVISIONS == IMMORTAL {
            return false;
        }

        // The oldest slot has moved past the initial sentinel value.
        self.revisions[C::REVISIONS.get() - 1].load() > Revision::start()
    }
1149}
1150
/// Hash-and-compare stored data against a lookup key of a different type,
/// letting `intern` search by a borrowed key (e.g. `&str`) without first
/// allocating the owned form the table stores (e.g. `String`).
pub trait HashEqLike<O> {
    fn hash<H: Hasher>(&self, h: &mut H);
    fn eq(&self, data: &O) -> bool;
}

/// Converts a lookup key into the owned form that gets stored.
pub trait Lookup<O> {
    fn into_owned(self) -> O;
}
1174
/// Identity lookup: an owned key is stored as-is.
impl<T> Lookup<T> for T {
    fn into_owned(self) -> T {
        self
    }
}

/// Reflexive case: any `Eq + Hash` type can be looked up by itself.
impl<T> HashEqLike<T> for T
where
    T: Hash + Eq,
{
    fn hash<H: Hasher>(&self, h: &mut H) {
        // UFCS picks `std::hash::Hash::hash`; a plain `self.hash(h)` could
        // resolve to this trait method and recurse.
        Hash::hash(self, &mut *h);
    }

    fn eq(&self, data: &T) -> bool {
        self == data
    }
}
1193
/// Look up stored data by a shared reference key.
impl<T> HashEqLike<T> for &T
where
    T: Hash + Eq,
{
    fn hash<H: Hasher>(&self, h: &mut H) {
        // Hash the referent (not the reference) so `&T` and `T` agree.
        Hash::hash(*self, &mut *h);
    }

    fn eq(&self, data: &T) -> bool {
        **self == *data
    }
}

/// Mirror image: stored `T` compared against a `&T` lookup key.
impl<T> HashEqLike<&T> for T
where
    T: Hash + Eq,
{
    fn hash<H: Hasher>(&self, h: &mut H) {
        Hash::hash(self, &mut *h);
    }

    fn eq(&self, data: &&T) -> bool {
        *self == **data
    }
}
1219
1220impl<T> Lookup<T> for &T
1221where
1222 T: Clone,
1223{
1224 fn into_owned(self) -> T {
1225 Clone::clone(self)
1226 }
1227}
1228
/// Look up a stored `Box<T>` by `&T` (supports unsized `T` such as `str`).
impl<'a, T> HashEqLike<&'a T> for Box<T>
where
    T: ?Sized + Hash + Eq,
    Box<T>: From<&'a T>,
{
    fn hash<H: Hasher>(&self, h: &mut H) {
        Hash::hash(self, &mut *h)
    }
    fn eq(&self, data: &&T) -> bool {
        **self == **data
    }
}

/// Build the stored `Box<T>` from the borrowed key on first interning.
impl<'a, T> Lookup<Box<T>> for &'a T
where
    T: ?Sized + Hash + Eq,
    Box<T>: From<&'a T>,
{
    fn into_owned(self) -> Box<T> {
        Box::from(self)
    }
}
1251
/// Look up a stored `Arc<T>` by `&T` (supports unsized `T` such as `str`).
impl<'a, T> HashEqLike<&'a T> for Arc<T>
where
    T: ?Sized + Hash + Eq,
    Arc<T>: From<&'a T>,
{
    fn hash<H: Hasher>(&self, h: &mut H) {
        // Hash the referent so the stored `Arc<T>` and the `&T` key agree.
        Hash::hash(&**self, &mut *h)
    }
    fn eq(&self, data: &&T) -> bool {
        **self == **data
    }
}

/// Build the stored `Arc<T>` from the borrowed key on first interning.
impl<'a, T> Lookup<Arc<T>> for &'a T
where
    T: ?Sized + Hash + Eq,
    Arc<T>: From<&'a T>,
{
    fn into_owned(self) -> Arc<T> {
        Arc::from(self)
    }
}
1274
1275impl Lookup<String> for &str {
1276 fn into_owned(self) -> String {
1277 self.to_owned()
1278 }
1279}
1280
/// Build a stored `CompactString` from a `&str` key on first interning.
#[cfg(feature = "compact_str")]
impl Lookup<compact_str::CompactString> for &str {
    fn into_owned(self) -> compact_str::CompactString {
        compact_str::CompactString::new(self)
    }
}

/// Look up a stored `String` by a `&str` key.
impl HashEqLike<&str> for String {
    fn hash<H: Hasher>(&self, h: &mut H) {
        Hash::hash(self, &mut *h)
    }

    fn eq(&self, data: &&str) -> bool {
        self == *data
    }
}

/// Look up a stored `CompactString` by a `&str` key.
#[cfg(feature = "compact_str")]
impl HashEqLike<&str> for compact_str::CompactString {
    fn hash<H: Hasher>(&self, h: &mut H) {
        Hash::hash(self, &mut *h)
    }

    fn eq(&self, data: &&str) -> bool {
        self == *data
    }
}
1308
/// Look up a stored `Vec<T>` by a slice of a comparable element type `A`.
impl<A, T: Hash + Eq + PartialEq<A>> HashEqLike<&[A]> for Vec<T> {
    fn hash<H: Hasher>(&self, h: &mut H) {
        Hash::hash(self, h);
    }

    fn eq(&self, data: &&[A]) -> bool {
        // Element-wise comparison via `T: PartialEq<A>`.
        self.len() == data.len() && data.iter().enumerate().all(|(i, a)| &self[i] == a)
    }
}

/// Build the stored `Vec<T>` by converting each slice element.
impl<A: Hash + Eq + PartialEq<T> + Clone + Lookup<T>, T> Lookup<Vec<T>> for &[A] {
    fn into_owned(self) -> Vec<T> {
        self.iter().map(|a| Lookup::into_owned(a.clone())).collect()
    }
}

/// Same as the slice case, for fixed-size array keys.
impl<const N: usize, A, T: Hash + Eq + PartialEq<A>> HashEqLike<[A; N]> for Vec<T> {
    fn hash<H: Hasher>(&self, h: &mut H) {
        Hash::hash(self, h);
    }

    fn eq(&self, data: &[A; N]) -> bool {
        self.len() == data.len() && data.iter().enumerate().all(|(i, a)| &self[i] == a)
    }
}

/// Build the stored `Vec<T>` by converting each array element.
impl<const N: usize, A: Hash + Eq + PartialEq<T> + Clone + Lookup<T>, T> Lookup<Vec<T>> for [A; N] {
    fn into_owned(self) -> Vec<T> {
        self.into_iter()
            .map(|a| Lookup::into_owned(a.clone()))
            .collect()
    }
}
1342
/// Look up a stored `PathBuf` by a `&Path` key.
impl HashEqLike<&Path> for PathBuf {
    fn hash<H: Hasher>(&self, h: &mut H) {
        Hash::hash(self, h);
    }

    fn eq(&self, data: &&Path) -> bool {
        self == data
    }
}

/// Build the stored `PathBuf` from the borrowed key on first interning.
impl Lookup<PathBuf> for &Path {
    fn into_owned(self) -> PathBuf {
        self.to_owned()
    }
}
1358
1359#[cfg(feature = "persistence")]
1360mod persistence {
1361 use std::cell::UnsafeCell;
1362 use std::fmt;
1363 use std::hash::BuildHasher;
1364
1365 use intrusive_collections::LinkedListLink;
1366 use serde::ser::{SerializeMap, SerializeStruct};
1367 use serde::{de, Deserialize};
1368
1369 use super::{Configuration, IngredientImpl, Value, ValueShared};
1370 use crate::plumbing::Ingredient;
1371 use crate::table::memo::MemoTable;
1372 use crate::zalsa::Zalsa;
1373 use crate::{Durability, Id, Revision};
1374
    /// Serializes every value of one interned ingredient as a map keyed by
    /// each value's raw id bits.
    pub struct SerializeIngredient<'db, C>
    where
        C: Configuration,
    {
        pub zalsa: &'db Zalsa,
        pub ingredient: &'db IngredientImpl<C>,
    }

    impl<C> serde::Serialize for SerializeIngredient<'_, C>
    where
        C: Configuration,
    {
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            let Self { zalsa, ingredient } = *self;

            // Total entry count across all shards, for the map size hint.
            let count = ingredient
                .shards
                .iter()
                .map(|shard| shard.lock().key_map.len())
                .sum();

            let mut map = serializer.serialize_map(Some(count))?;

            for (_, value) in zalsa.table().slots_of::<Value<C>>() {
                // SAFETY(review): assumes no concurrent mutation during
                // serialization — confirm the caller holds exclusive access.
                let id = unsafe { (*value.shared.get()).id };

                map.serialize_entry(&id.as_bits(), value)?;
            }

            map.end()
        }
    }
1412
1413 impl<C> serde::Serialize for Value<C>
1414 where
1415 C: Configuration,
1416 {
1417 fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
1418 where
1419 S: serde::Serializer,
1420 {
1421 let mut value = serializer.serialize_struct("Value,", 3)?;
1422
1423 let Value {
1424 fields,
1425 shared,
1426 shard: _,
1427 link: _,
1428 memos: _,
1429 } = self;
1430
1431 let fields = unsafe { &*fields.get() };
1434
1435 let ValueShared {
1438 durability,
1439 last_interned_at,
1440 id: _,
1441 } = unsafe { *shared.get() };
1442
1443 value.serialize_field("durability", &durability)?;
1444 value.serialize_field("last_interned_at", &last_interned_at)?;
1445 value.serialize_field("fields", &SerializeFields::<C>(fields))?;
1446
1447 value.end()
1448 }
1449 }
1450
    /// Newtype routing field serialization through `C::serialize`.
    struct SerializeFields<'db, C: Configuration>(&'db C::Fields<'static>);

    impl<C> serde::Serialize for SerializeFields<'_, C>
    where
        C: Configuration,
    {
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            C::serialize(self.0, serializer)
        }
    }
1464
    /// Deserialization seed that rebuilds an interned ingredient in place.
    pub struct DeserializeIngredient<'db, C>
    where
        C: Configuration,
    {
        pub zalsa: &'db mut Zalsa,
        pub ingredient: &'db mut IngredientImpl<C>,
    }

    impl<'de, C> de::DeserializeSeed<'de> for DeserializeIngredient<'_, C>
    where
        C: Configuration,
    {
        type Value = ();

        fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
        where
            D: serde::Deserializer<'de>,
        {
            // Matches `SerializeIngredient`: a map of id bits -> value.
            deserializer.deserialize_map(self)
        }
    }
1486
    impl<'de, C> de::Visitor<'de> for DeserializeIngredient<'_, C>
    where
        C: Configuration,
    {
        type Value = ();

        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
            formatter.write_str("a map")
        }

        /// Re-creates each serialized value in its original table slot and
        /// re-registers it in the appropriate shard's key map.
        fn visit_map<M>(self, mut access: M) -> Result<Self::Value, M::Error>
        where
            M: de::MapAccess<'de>,
        {
            let DeserializeIngredient { zalsa, ingredient } = self;

            while let Some((id, value)) = access.next_entry::<u64, DeserializeValue<C>>()? {
                let id = Id::from_bits(id);
                let (page_idx, _) = crate::table::split_id(id);

                // Shard by the fields' hash, mirroring `intern_id`.
                let hash = ingredient.hasher.hash_one(&value.fields.0);
                let shard_index = ingredient.shard(hash);

                // SAFETY: `shard` always returns an in-bounds shard index.
                let shard = unsafe { &mut *ingredient.shards.get_unchecked(shard_index).lock() };

                let value = Value::<C> {
                    shard: shard_index as u16,
                    link: LinkedListLink::new(),
                    // SAFETY(review): fresh empty memo table built from this
                    // ingredient's registered types — confirm `MemoTable::new`.
                    memos: UnsafeCell::new(unsafe {
                        MemoTable::new(ingredient.memo_table_types())
                    }),
                    fields: UnsafeCell::new(value.fields.0),
                    shared: UnsafeCell::new(ValueShared {
                        id,
                        durability: value.durability,
                        last_interned_at: value.last_interned_at,
                    }),
                };

                // Ensure the page this id lived on exists before allocating.
                zalsa.table_mut().force_page::<Value<C>>(
                    page_idx,
                    ingredient.ingredient_index(),
                    ingredient.memo_table_types(),
                );

                // SAFETY(review): relies on entries arriving in allocation
                // order (asserted below) so slots line up with their old ids.
                let (allocated_id, value) = unsafe {
                    zalsa
                        .table()
                        .page(page_idx)
                        .allocate(page_idx, |_| value)
                        .unwrap_or_else(|_| panic!("serialized an invalid `Id`: {id:?}"))
                };

                assert_eq!(
                    allocated_id.index(),
                    id.index(),
                    "values are serialized in allocation order"
                );

                ingredient.insert_id(id, zalsa, shard, hash, value);
            }

            Ok(())
        }
    }
1561
    /// Mirror of the serialized `Value` struct (field order must match the
    /// serializer: durability, last_interned_at, fields).
    #[derive(Deserialize)]
    #[serde(rename = "Value")]
    pub struct DeserializeValue<C: Configuration> {
        durability: Durability,
        last_interned_at: Revision,
        #[serde(bound = "C: Configuration")]
        fields: DeserializeFields<C>,
    }

    /// Newtype routing field deserialization through `C::deserialize`.
    struct DeserializeFields<C: Configuration>(C::Fields<'static>);

    impl<'de, C> serde::Deserialize<'de> for DeserializeFields<C>
    where
        C: Configuration,
    {
        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: serde::Deserializer<'de>,
        {
            C::deserialize(deserializer)
                .map(DeserializeFields)
                .map_err(de::Error::custom)
        }
    }
1586}