1use std::any::TypeId;
2use std::borrow::Cow;
3use std::cell::{Cell, UnsafeCell};
4use std::fmt;
5use std::hash::{BuildHasher, Hash, Hasher};
6use std::marker::PhantomData;
7use std::num::NonZeroUsize;
8use std::path::{Path, PathBuf};
9
10use crossbeam_utils::CachePadded;
11use intrusive_collections::{intrusive_adapter, LinkedList, LinkedListLink, UnsafeRef};
12use rustc_hash::FxBuildHasher;
13
14use crate::durability::Durability;
15use crate::function::VerifyResult;
16use crate::hash::{FxHashSet, FxIndexSet};
17use crate::id::{AsId, FromId};
18use crate::ingredient::Ingredient;
19use crate::plumbing::{self, Jar, ZalsaLocal};
20use crate::revision::AtomicRevision;
21use crate::sync::{Arc, Mutex, OnceLock};
22use crate::table::memo::{MemoTable, MemoTableTypes, MemoTableWithTypesMut};
23use crate::table::Slot;
24use crate::zalsa::{IngredientIndex, JarKind, Zalsa};
25use crate::zalsa_local::QueryEdge;
26use crate::{DatabaseKeyIndex, Event, EventKind, Id, Revision};
27
/// Configuration for an interned struct, implemented by the
/// `#[salsa::interned]` macro; bundles the struct's metadata, its field
/// type, and the (de)serialization hooks used by persistence.
pub trait Configuration: Sized + 'static {
    /// Source location of the interned struct definition, for diagnostics.
    const LOCATION: crate::ingredient::Location;
    /// Name used in debug output and events.
    const DEBUG_NAME: &'static str;

    /// Whether this ingredient participates in database persistence.
    const PERSIST: bool;

    /// Size of the revision window tracked by `RevisionQueue`: a slot that
    /// has not been read for this many recorded revisions becomes eligible
    /// for reuse. `IMMORTAL` (`NonZeroUsize::MAX`) disables reuse entirely.
    /// Kept small under `cfg(test)` to exercise the reuse paths.
    #[cfg(test)]
    const REVISIONS: NonZeroUsize = NonZeroUsize::new(3).unwrap();

    #[cfg(not(test))] const REVISIONS: NonZeroUsize = NonZeroUsize::new(1).unwrap();

    /// The interned field data; `'db` ties borrows to the database lifetime.
    type Fields<'db>: InternedData;

    /// The user-visible interned struct, a thin `Id` wrapper (hence `Copy`).
    type Struct<'db>: Copy + FromId + AsId;

    /// Heap size of the fields in bytes, if known; used only for
    /// memory-usage reporting. Defaults to `None` (unknown).
    fn heap_size(_value: &Self::Fields<'_>) -> Option<usize> {
        None
    }

    /// Serializes the field data (persistence feature).
    fn serialize<S>(value: &Self::Fields<'_>, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: plumbing::serde::Serializer;

    /// Deserializes field data previously written by [`Configuration::serialize`].
    fn deserialize<'de, D>(deserializer: D) -> Result<Self::Fields<'static>, D::Error>
    where
        D: plumbing::serde::Deserializer<'de>;
}
71
/// Marker trait for data that can be interned: hashable, comparable, and
/// shareable across threads. Blanket-implemented for all qualifying types.
pub trait InternedData: Sized + Eq + Hash + Clone + Sync + Send {}
impl<T: Eq + Hash + Clone + Sync + Send> InternedData for T {}
74
/// The [`Jar`] for an interned struct; stateless, it only carries the
/// configuration type.
pub struct JarImpl<C: Configuration> {
    phantom: PhantomData<C>,
}
78
/// The interning ingredient: a sharded hash table mapping field data to
/// `Id`s, plus the bookkeeping needed to validate and reuse slots across
/// revisions. The field data itself lives in the database table; the shards
/// store only `Id`s.
pub struct IngredientImpl<C: Configuration> {
    /// Index of this ingredient within the database.
    ingredient_index: IngredientIndex,

    /// Hasher used both for shard selection and for the per-shard key maps.
    hasher: FxBuildHasher,

    /// Precomputed shift used by `shard()` to map a hash to a shard index.
    shift: u32,

    /// The shards; cache-padded so the locks don't falsely share cache lines.
    shards: Box<[CachePadded<Mutex<IngredientShard<C>>>]>,

    /// Recently recorded revisions, used to decide when a slot is stale
    /// enough to garbage collect and reuse.
    revision_queue: RevisionQueue<C>,

    memo_table_types: Arc<MemoTableTypes>,

    _marker: PhantomData<fn() -> C>,
}
103
struct IngredientShard<C: Configuration> {
    /// Maps the hash of the interned fields to their `Id`. The fields live
    /// in the database table, so only `Id`s are stored here; hashing and
    /// equality go back through the table (see `value_hash`/`value_eq`).
    key_map: hashbrown::HashTable<Id>,

    /// Intrusive LRU list of reusable values in this shard, most recently
    /// interned at the front.
    lru: LinkedList<ValueAdapter<C>>,
}
114
115impl<C: Configuration> Default for IngredientShard<C> {
116 fn default() -> Self {
117 Self {
118 lru: LinkedList::default(),
119 key_map: hashbrown::HashTable::new(),
120 }
121 }
122}
123
// SAFETY: `Value` contains `UnsafeCell`s, so it is not automatically `Sync`.
// NOTE(review): soundness relies on all mutation of `shared`/`fields`/`memos`
// happening under the owning shard's lock (or exclusive database access), as
// the rest of this module does — confirm when changing locking discipline.
unsafe impl<C: Configuration> Sync for Value<C> {}

// Adapter allowing `Value` to be linked into the shard's intrusive LRU list
// by raw reference (the values are owned by the database table, not the list).
intrusive_adapter!(ValueAdapter<C> = UnsafeRef<Value<C>>: Value<C> { link: LinkedListLink } where C: Configuration);
129
/// A single interned value as stored in the database table.
pub struct Value<C>
where
    C: Configuration,
{
    /// Index of the shard that owns this value's key-map/LRU entries.
    shard: u16,

    /// Intrusive link into the owning shard's LRU list.
    link: LinkedListLink,

    /// The interned fields, stored with a `'static` lifetime; only ever
    /// exposed with the database lifetime re-attached (`from_internal_data`).
    fields: UnsafeCell<C::Fields<'static>>,

    /// Memos attached to this interned value.
    memos: UnsafeCell<MemoTable>,

    /// Mutable bookkeeping (id/generation, durability, last-interned
    /// revision); read and written under the shard lock.
    shared: UnsafeCell<ValueShared>,
}
157
/// Per-value bookkeeping, guarded by the shard lock. `packed` minimizes the
/// struct's size; note that fields of a packed struct may be unaligned, so
/// they must be copied out (the `{ x.field }` idiom below) rather than
/// borrowed.
#[repr(Rust, packed)] #[derive(Clone, Copy)]
struct ValueShared {
    /// Current id (including generation) of this slot; the generation is
    /// bumped each time the slot is reused for a different key.
    id: Id,

    /// The revision in which this value was last interned or validated.
    last_interned_at: Revision,

    /// Maximum durability among the queries that interned this value.
    durability: Durability,
}
186
187impl ValueShared {
188 fn is_reusable<C: Configuration>(&self) -> bool {
190 if C::REVISIONS == IMMORTAL {
192 return false;
193 }
194
195 self.durability == Durability::LOW
202 }
203}
204
impl<C> Value<C>
where
    C: Configuration,
{
    /// Returns a reference to the interned fields.
    #[cfg(feature = "salsa_unstable")]
    pub fn fields(&self) -> &C::Fields<'static> {
        // SAFETY: fields are only replaced when the slot is reused, which
        // requires synchronized access — NOTE(review): confirmed by the shard
        // locking in `intern_id`; revisit if locking changes.
        unsafe { &*self.fields.get() }
    }

    /// Memory-usage report for this slot (metadata, fields, memos).
    ///
    /// # Safety
    ///
    /// Caller must guarantee no concurrent mutation of the memo table
    /// (e.g. all shard locks held, as in `IngredientImpl::memory_usage`).
    #[cfg(all(not(feature = "shuttle"), feature = "salsa_unstable"))]
    unsafe fn memory_usage(&self, memo_table_types: &MemoTableTypes) -> crate::database::SlotInfo {
        let heap_size = C::heap_size(self.fields());
        // SAFETY: per the caller's contract above.
        let memos = unsafe { &*self.memos.get() };
        let memos = unsafe { memo_table_types.attach_memos(memos) };

        crate::database::SlotInfo {
            debug_name: C::DEBUG_NAME,
            size_of_metadata: std::mem::size_of::<Self>() - std::mem::size_of::<C::Fields<'_>>(),
            size_of_fields: std::mem::size_of::<C::Fields<'_>>(),
            heap_size_of_fields: heap_size,
            memos: memos.memory_usage(),
        }
    }
}
242
243impl<C: Configuration> Default for JarImpl<C> {
244 fn default() -> Self {
245 Self {
246 phantom: PhantomData,
247 }
248 }
249}
250
impl<C: Configuration> Jar for JarImpl<C> {
    /// An interned struct contributes exactly one ingredient.
    fn create_ingredients(
        _zalsa: &mut Zalsa,
        first_index: IngredientIndex,
    ) -> Vec<Box<dyn Ingredient>> {
        vec![Box::new(IngredientImpl::<C>::new(first_index)) as _]
    }

    /// Type id of the user-visible struct, used to locate this jar by type.
    fn id_struct_type_id() -> TypeId {
        TypeId::of::<C::Struct<'static>>()
    }
}
263
264impl<C> IngredientImpl<C>
265where
266 C: Configuration,
267{
268 pub fn new(ingredient_index: IngredientIndex) -> Self {
269 static SHARDS: OnceLock<usize> = OnceLock::new();
270 let shards = *SHARDS.get_or_init(|| {
271 let num_cpus = std::thread::available_parallelism()
272 .map(usize::from)
273 .unwrap_or(1);
274
275 (num_cpus * 4).next_power_of_two()
276 });
277
278 Self {
279 ingredient_index,
280 hasher: FxBuildHasher,
281 memo_table_types: Arc::new(MemoTableTypes::default()),
282 revision_queue: RevisionQueue::default(),
283 shift: usize::BITS - shards.trailing_zeros(),
284 shards: (0..shards).map(|_| Default::default()).collect(),
285 _marker: PhantomData,
286 }
287 }
288
    /// Maps a key hash to a shard index.
    ///
    /// Uses the *top* bits of the hash (the hash table inside the shard
    /// consumes the low bits). The `<< 7` discards the highest seven bits,
    /// then the right shift (computed in `new`) keeps exactly
    /// `log2(#shards)` bits, so the result is always in bounds.
    #[inline]
    fn shard(&self, hash: u64) -> usize {
        ((hash as usize) << 7) >> self.shift
    }
297
    /// Erases the database lifetime so the fields can be stored in the table.
    ///
    /// # Safety
    ///
    /// The `'static` lifetime must never escape to user code; fields must
    /// only be handed back out via `from_internal_data`.
    unsafe fn to_internal_data<'db>(&'db self, data: C::Fields<'db>) -> C::Fields<'static> {
        // SAFETY: `Fields` differs only in lifetime; caller upholds the
        // discipline documented above.
        unsafe { std::mem::transmute(data) }
    }

    /// Re-attaches the database lifetime `'db` to stored `'static` fields.
    fn from_internal_data<'db>(data: &'db C::Fields<'static>) -> &'db C::Fields<'db> {
        // SAFETY: this only shortens the lifetime, which is always sound.
        unsafe { std::mem::transmute(data) }
    }
312
    /// Interns `key`, creating the value with `assemble` if not already
    /// present, and returns the user-visible struct wrapper.
    pub fn intern<'db, Key>(
        &'db self,
        zalsa: &'db Zalsa,
        zalsa_local: &'db ZalsaLocal,
        key: Key,
        assemble: impl FnOnce(Id, Key) -> C::Fields<'db>,
    ) -> C::Struct<'db>
    where
        Key: Hash,
        C::Fields<'db>: HashEqLike<Key>,
    {
        FromId::from_id(self.intern_id(zalsa, zalsa_local, key, assemble))
    }
336
    /// Interns `key` and returns its `Id`.
    ///
    /// Fast path: the key is already interned — validate it for the current
    /// revision, refresh its LRU/durability state, and report the read.
    /// Otherwise, try to reuse a stale slot from the back of the shard's LRU
    /// (bumping its generation); failing that, allocate a fresh slot via
    /// `intern_id_cold`.
    pub fn intern_id<'db, Key>(
        &'db self,
        zalsa: &'db Zalsa,
        zalsa_local: &'db ZalsaLocal,
        key: Key,
        assemble: impl FnOnce(Id, Key) -> C::Fields<'db>,
    ) -> crate::Id
    where
        Key: Hash,
        C::Fields<'db>: HashEqLike<Key>,
    {
        let current_revision = zalsa.current_revision();
        self.revision_queue.record(current_revision);

        let hash = self.hasher.hash_one(&key);

        let shard_index = self.shard(hash);
        // SAFETY: `shard()` always returns an in-bounds index. The shard lock
        // guard is kept alive for the rest of the function via temporary
        // lifetime extension, so `shard` remains exclusively held.
        let shard = unsafe { &mut *self.shards.get_unchecked(shard_index).lock() };

        // `value_eq` stashes the matching `&Value` here so we don't have to
        // fetch it from the table again after `find` yields only the `Id`.
        let found_value = Cell::new(None);
        let eq = |id: &_| unsafe { Self::value_eq(*id, &key, zalsa, &found_value) };

        if let Some(&id) = shard.key_map.find(hash, eq) {
            let value = found_value
                .get()
                .expect("found the interned value, so `found_value` should be set");

            let index = self.database_key_index(id);

            // SAFETY: we hold the shard lock.
            let value_shared = unsafe { &mut *value.shared.get() };

            // Braces copy the field out of the packed struct before comparing.
            if { value_shared.last_interned_at } < current_revision {
                // First read in this revision: re-validate the value.
                value_shared.last_interned_at = current_revision;

                zalsa.event(&|| {
                    Event::new(EventKind::DidValidateInternedValue {
                        key: index,
                        revision: current_revision,
                    })
                });

                if value_shared.is_reusable::<C>() {
                    // Move to the front of the LRU: this value was just read.
                    // SAFETY: reusable values are linked into this shard's LRU.
                    unsafe { shard.lru.cursor_mut_from_ptr(value).remove() };

                    unsafe { shard.lru.push_front(UnsafeRef::from_raw(value)) };
                }
            }

            if let Some((_, stamp)) = zalsa_local.active_query() {
                let was_reusable = value_shared.is_reusable::<C>();

                value_shared.durability = std::cmp::max(value_shared.durability, stamp.durability);

                // Raising durability can disqualify the value from reuse, in
                // which case it must leave the LRU.
                if was_reusable && !value_shared.is_reusable::<C>() {
                    // SAFETY: it was reusable, so it is linked into the LRU.
                    unsafe { shard.lru.cursor_mut_from_ptr(value).remove() };
                }
            }

            zalsa_local.report_tracked_read_simple(
                index,
                value_shared.durability,
                current_revision,
            );

            return value_shared.id;
        }

        // Until the revision queue has seen `C::REVISIONS` distinct revisions
        // we cannot prove any slot stale, so go straight to allocation.
        if !self.revision_queue.is_primed() {
            return self.intern_id_cold(
                key,
                zalsa,
                zalsa_local,
                assemble,
                shard,
                shard_index,
                hash,
            );
        }

        // Scan stale candidates from the least-recently-used end.
        let mut cursor = shard.lru.back_mut();

        while let Some(value) = cursor.get() {
            // SAFETY: we hold the shard lock.
            let value_shared = unsafe { &mut *value.shared.get() };

            // The LRU is ordered, so the first non-stale entry ends the scan.
            if !self.revision_queue.is_stale(value_shared.last_interned_at) {
                break;
            }

            debug_assert!({ value_shared.last_interned_at } < current_revision);

            // Outside of a query (e.g. interning inputs) pin the slot forever.
            let (durability, last_interned_at) = zalsa_local
                .active_query()
                .map(|(_, stamp)| (stamp.durability, current_revision))
                .unwrap_or((Durability::MAX, Revision::max()));

            let old_id = value_shared.id;

            // Generation space exhausted: retire the slot entirely and try
            // the next candidate.
            let Some(new_id) = value_shared.id.next_generation() else {
                cursor.remove().unwrap();

                cursor = shard.lru.back_mut();

                continue;
            };

            *value_shared = ValueShared {
                id: new_id,
                durability,
                last_interned_at,
            };

            let index = self.database_key_index(value_shared.id);

            zalsa_local.report_tracked_read_simple(
                index,
                value_shared.durability,
                current_revision,
            );

            zalsa.event(&|| {
                Event::new(EventKind::DidReuseInternedValue {
                    key: index,
                    revision: current_revision,
                })
            });

            // Unlink from the LRU; the raw reference stays valid because the
            // value is owned by the database table.
            let value = unsafe { &*UnsafeRef::into_raw(cursor.remove().unwrap()) };

            // SAFETY: shard lock held; no outstanding borrows of the fields.
            let old_fields = unsafe { &mut *value.fields.get() };

            // Remove the old key-map entry (keyed by the *old* fields' hash)
            // before overwriting the fields.
            let old_hash = self.hasher.hash_one(&*old_fields);
            shard
                .key_map
                .find_entry(old_hash, |found_id: &Id| *found_id == old_id)
                .expect("interned value in LRU so must be in key_map")
                .remove();

            *old_fields = unsafe { self.to_internal_data(assemble(new_id, key)) };

            let hasher = |id: &_| unsafe { self.value_hash(*id, zalsa) };

            shard.key_map.insert_unique(hash, new_id, hasher);

            // SAFETY: shard lock held.
            let memo_table = unsafe { &mut *value.memos.get() };

            // SAFETY: `memo_table` belongs to a value of this ingredient.
            unsafe { self.clear_memos(zalsa, memo_table, new_id) };

            if value_shared.is_reusable::<C>() {
                shard.lru.push_front(unsafe { UnsafeRef::from_raw(value) });
            }

            return new_id;
        }

        self.intern_id_cold(key, zalsa, zalsa_local, assemble, shard, shard_index, hash)
    }
582
    /// Cold path of `intern_id`: allocates a brand-new slot for `key` (no
    /// existing or reusable slot was found).
    #[allow(clippy::too_many_arguments)]
    fn intern_id_cold<'db, Key>(
        &'db self,
        key: Key,
        zalsa: &Zalsa,
        zalsa_local: &ZalsaLocal,
        assemble: impl FnOnce(Id, Key) -> C::Fields<'db>,
        shard: &mut IngredientShard<C>,
        shard_index: usize,
        hash: u64,
    ) -> crate::Id
    where
        Key: Hash,
        C::Fields<'db>: HashEqLike<Key>,
    {
        let current_revision = zalsa.current_revision();

        // Outside of any query (e.g. interning inputs) the value can never be
        // collected: pin it with maximal durability and revision.
        let (durability, last_interned_at) = zalsa_local
            .active_query()
            .map(|(_, stamp)| (stamp.durability, current_revision))
            .unwrap_or((Durability::MAX, Revision::max()));

        let (id, value) = zalsa_local.allocate(zalsa, self.ingredient_index, |id| Value::<C> {
            shard: shard_index as u16,
            link: LinkedListLink::new(),
            memos: UnsafeCell::new(unsafe { MemoTable::new(self.memo_table_types()) }),
            fields: UnsafeCell::new(unsafe { self.to_internal_data(assemble(id, key)) }),
            shared: UnsafeCell::new(ValueShared {
                id,
                durability,
                last_interned_at,
            }),
        });

        self.insert_id(id, zalsa, shard, hash, value);

        let index = self.database_key_index(id);

        zalsa_local.report_tracked_read_simple(index, durability, current_revision);

        zalsa.event(&|| {
            Event::new(EventKind::DidInternValue {
                key: index,
                revision: current_revision,
            })
        });

        id
    }
651
    /// Registers an allocated value with its shard: pushes it onto the LRU
    /// (if collectible) and records its hash in the key map. The caller must
    /// hold the shard lock.
    fn insert_id(
        &self,
        id: Id,
        zalsa: &Zalsa,
        shard: &mut IngredientShard<C>,
        hash: u64,
        value: &Value<C>,
    ) {
        // SAFETY: the shard lock is held by the caller.
        let value_shared = unsafe { &mut *value.shared.get() };

        if value_shared.is_reusable::<C>() {
            // SAFETY: `value` is owned by the database table, so the raw
            // reference in the intrusive list remains valid.
            shard.lru.push_front(unsafe { UnsafeRef::from_raw(value) });
        }

        let hasher = |id: &_| unsafe { self.value_hash(*id, zalsa) };

        shard.key_map.insert_unique(hash, id, hasher);

        // Sanity check: the provided hash must match the stored fields, or
        // later lookups and rehashes would disagree.
        debug_assert_eq!(hash, {
            let value = zalsa.table().get::<Value<C>>(id);

            unsafe { self.hasher.hash_one(&*value.fields.get()) }
        });
    }
685
    /// Clears all memos attached to the slot `id` (about to be reused),
    /// emitting a `DidDiscard` event and removing outputs for each memo.
    ///
    /// # Safety
    ///
    /// `memo_table` must belong to a `Value` of this ingredient.
    pub(crate) unsafe fn clear_memos(&self, zalsa: &Zalsa, memo_table: &mut MemoTable, id: Id) {
        // SAFETY: guaranteed by the caller's contract.
        let table = unsafe { self.memo_table_types.attach_memos_mut(memo_table) };

        // Drop guard: if a `remove_outputs` call panics, the remaining memos
        // are still dropped rather than leaked.
        struct TableDropGuard<'a>(MemoTableWithTypesMut<'a>);

        impl Drop for TableDropGuard<'_> {
            fn drop(&mut self) {
                // SAFETY: we hold `&mut MemoTable` and nothing touches the
                // memos after this guard runs.
                unsafe { self.0.drop() };
            }
        }

        let mut table_guard = TableDropGuard(table);

        // SAFETY: same exclusivity argument as above.
        unsafe {
            table_guard.0.take_memos(|memo_ingredient_index, memo| {
                let ingredient_index =
                    zalsa.ingredient_index_for_memo(self.ingredient_index, memo_ingredient_index);

                let executor = DatabaseKeyIndex::new(ingredient_index, id);

                zalsa.event(&|| Event::new(EventKind::DidDiscard { key: executor }));

                memo.remove_outputs(zalsa, executor);
            })
        };

        // Success path: the memos were taken above, so the guard's drop must
        // not run again.
        std::mem::forget(table_guard);

        memo_table.reset();
    }
729
    /// Hashes the stored fields of value `id`; used when the key map needs
    /// to rehash (the map stores only `Id`s, not the fields).
    ///
    /// # Safety
    ///
    /// The shard lock owning `id` must be held.
    unsafe fn value_hash<'db>(&'db self, id: Id, zalsa: &'db Zalsa) -> u64 {
        let value = zalsa.table().get::<Value<C>>(id);

        // SAFETY: fields are not replaced while the shard lock is held.
        unsafe { self.hasher.hash_one(&*value.fields.get()) }
    }

    /// Compares the stored fields of `id` against a lookup `key`, also
    /// recording the visited value in `found_value` so the caller can reuse
    /// it without a second table lookup.
    ///
    /// # Safety
    ///
    /// The shard lock owning `id` must be held.
    unsafe fn value_eq<'db, Key>(
        id: Id,
        key: &Key,
        zalsa: &'db Zalsa,
        found_value: &Cell<Option<&'db Value<C>>>,
    ) -> bool
    where
        C::Fields<'db>: HashEqLike<Key>,
    {
        let value = zalsa.table().get::<Value<C>>(id);
        found_value.set(Some(value));

        // SAFETY: see `value_hash`.
        let fields = unsafe { &*value.fields.get() };

        HashEqLike::eq(Self::from_internal_data(fields), key)
    }
766
    /// The fully-qualified database key for the interned value `id`.
    #[inline]
    pub fn database_key_index(&self, id: Id) -> DatabaseKeyIndex {
        DatabaseKeyIndex::new(self.ingredient_index, id)
    }
772
    /// Returns the field data of the interned value `id`.
    ///
    /// In debug builds, asserts that the value was interned or validated in
    /// the latest revision for its durability, i.e. that it cannot have been
    /// garbage collected out from under the caller.
    pub fn data<'db>(&'db self, zalsa: &'db Zalsa, id: Id) -> &'db C::Fields<'db> {
        let value = zalsa.table().get::<Value<C>>(id);

        debug_assert!(
            {
                let _shard = self.shards[value.shard as usize].lock();

                // SAFETY: the shard lock is held.
                let value_shared = unsafe { &mut *value.shared.get() };

                let last_changed_revision = zalsa.last_changed_revision(value_shared.durability);
                // Braces copy the field out of the packed struct.
                ({ value_shared.last_interned_at }) >= last_changed_revision
            },
            "Data for `{database_key:?}` was not interned in the latest revision for its durability.",
            database_key = self.database_key_index(id),
        );

        // SAFETY: the fields are not replaced while this value is validated
        // for the current revision (checked above in debug builds).
        unsafe { Self::from_internal_data(&*value.fields.get()) }
    }

    /// Convenience wrapper around [`Self::data`] taking the struct wrapper.
    pub fn fields<'db>(&'db self, zalsa: &'db Zalsa, s: C::Struct<'db>) -> &'db C::Fields<'db> {
        self.data(zalsa, AsId::as_id(&s))
    }
803
804 pub fn reset(&mut self, zalsa_mut: &mut Zalsa) {
805 _ = zalsa_mut;
806
807 for shard in self.shards.iter_mut() {
808 shard.get_mut().key_map.clear();
810 }
811 }
812
    /// Iterates over every interned value of this ingredient, briefly taking
    /// each value's shard lock while reading its id.
    pub fn entries<'db>(&'db self, zalsa: &'db Zalsa) -> impl Iterator<Item = StructEntry<'db, C>> {
        // SAFETY: `should_lock` is `true`, so access is synchronized.
        unsafe { self.entries_inner(true, zalsa) }
    }

    /// Implementation of [`Self::entries`].
    ///
    /// # Safety
    ///
    /// If `should_lock` is `false`, the caller must already hold every shard
    /// lock (see `memory_usage`).
    unsafe fn entries_inner<'db>(
        &'db self,
        should_lock: bool,
        zalsa: &'db Zalsa,
    ) -> impl Iterator<Item = StructEntry<'db, C>> {
        zalsa.table().slots_of::<Value<C>>().map(move |(_, value)| {
            if should_lock {
                // The guard is dropped right away; it only orders this read
                // of `shared` against concurrent writers in the same shard.
                let _shard = unsafe { self.shards.get_unchecked(value.shard as usize) }.lock();
            }

            // SAFETY: synchronized per the above (or the caller's contract).
            let id = unsafe { (*value.shared.get()).id };

            StructEntry {
                value,
                key: self.database_key_index(id),
            }
        })
    }
848}
849
/// An interned value paired with its database key, as yielded by
/// `IngredientImpl::entries`.
pub struct StructEntry<'db, C>
where
    C: Configuration,
{
    #[allow(dead_code)]
    value: &'db Value<C>,
    key: DatabaseKeyIndex,
}
859
impl<'db, C> StructEntry<'db, C>
where
    C: Configuration,
{
    /// The database key of this interned value.
    pub fn key(&self) -> DatabaseKeyIndex {
        self.key
    }

    /// Reconstructs the user-visible struct wrapper from the key.
    pub fn as_struct(&self) -> C::Struct<'_> {
        FromId::from_id(self.key.key_index())
    }

    /// The raw value slot.
    #[cfg(feature = "salsa_unstable")]
    pub fn value(&self) -> &'db Value<C> {
        self.value
    }
}
879
impl<C> Ingredient for IngredientImpl<C>
where
    C: Configuration,
{
    fn location(&self) -> &'static crate::ingredient::Location {
        &C::LOCATION
    }

    fn ingredient_index(&self) -> IngredientIndex {
        self.ingredient_index
    }

    /// An interned value "changes" only if its slot was reused for a
    /// different key since `input` was created, which is detectable by a
    /// generation bump; otherwise the read just re-validates the slot.
    unsafe fn maybe_changed_after(
        &self,
        zalsa: &crate::zalsa::Zalsa,
        _db: crate::database::RawDatabase<'_>,
        input: Id,
        _revision: Revision,
    ) -> VerifyResult {
        let current_revision = zalsa.current_revision();
        self.revision_queue.record(current_revision);

        let value = zalsa.table().get::<Value<C>>(input);

        // SAFETY: `value.shard` is always a valid shard index.
        let _shard = unsafe { self.shards.get_unchecked(value.shard as usize) }.lock();

        // SAFETY: the shard lock is held.
        let value_shared = unsafe { &mut *value.shared.get() };

        // The slot was garbage collected and re-interned for another key.
        if value_shared.id.generation() > input.generation() {
            return VerifyResult::changed();
        }

        // Mark the value as read (and hence valid) in this revision.
        value_shared.last_interned_at = current_revision;

        zalsa.event(&|| {
            let index = self.database_key_index(input);

            Event::new(EventKind::DidValidateInternedValue {
                key: index,
                revision: current_revision,
            })
        });

        VerifyResult::unchanged()
    }

    /// Edges to collectible interned values must be serialized so the
    /// persisted dependency graph stays sound; immortal (or non-persisted)
    /// interned values need no edge.
    fn collect_minimum_serialized_edges(
        &self,
        _zalsa: &Zalsa,
        edge: QueryEdge,
        serialized_edges: &mut FxIndexSet<QueryEdge>,
        _visited_edges: &mut FxHashSet<QueryEdge>,
    ) {
        if C::PERSIST && C::REVISIONS != IMMORTAL {
            serialized_edges.insert(edge);
        }

    }

    fn flatten_cycle_head_dependencies(
        &self,
        _zalsa: &Zalsa,
        id: Id,
        flattened_input_outputs: &mut FxIndexSet<QueryEdge>,
        _seen: &mut FxHashSet<DatabaseKeyIndex>,
    ) {
        flattened_input_outputs.insert(QueryEdge::input(self.database_key_index(id)));
    }

    fn debug_name(&self) -> &'static str {
        C::DEBUG_NAME
    }

    fn jar_kind(&self) -> JarKind {
        JarKind::Struct
    }

    fn memo_table_types(&self) -> &Arc<MemoTableTypes> {
        &self.memo_table_types
    }

    fn memo_table_types_mut(&mut self) -> &mut Arc<MemoTableTypes> {
        &mut self.memo_table_types
    }

    #[cfg(all(not(feature = "shuttle"), feature = "salsa_unstable"))]
    fn memory_usage(&self, db: &dyn crate::Database) -> Option<Vec<crate::database::SlotInfo>> {
        use parking_lot::lock_api::RawMutex;

        // Lock all shards up front so `entries_inner(false, ..)` may walk the
        // table without per-entry locking.
        for shard in self.shards.iter() {
            unsafe { shard.raw().lock() };
        }

        // SAFETY: every shard lock is held (see above).
        let entries = unsafe { self.entries_inner(false, db.zalsa()) };

        let memory_usage = entries
            .map(|entry| unsafe { entry.value.memory_usage(&self.memo_table_types) })
            .collect();

        // SAFETY: we locked each shard above and are its current owner.
        for shard in self.shards.iter() {
            unsafe { shard.raw().unlock() };
        }

        Some(memory_usage)
    }

    fn is_persistable(&self) -> bool {
        C::PERSIST
    }

    /// Only serialize ingredients that actually contain values.
    fn should_serialize(&self, zalsa: &Zalsa) -> bool {
        C::PERSIST && self.entries(zalsa).next().is_some()
    }

    #[cfg(feature = "persistence")]
    unsafe fn serialize<'db>(
        &'db self,
        zalsa: &'db Zalsa,
        f: &mut dyn FnMut(&dyn erased_serde::Serialize),
    ) {
        f(&persistence::SerializeIngredient {
            zalsa,
            ingredient: self,
        })
    }

    #[cfg(feature = "persistence")]
    fn deserialize(
        &mut self,
        zalsa: &mut Zalsa,
        deserializer: &mut dyn erased_serde::Deserializer,
    ) -> Result<(), erased_serde::Error> {
        let deserialize = persistence::DeserializeIngredient {
            zalsa,
            ingredient: self,
        };

        serde::de::DeserializeSeed::deserialize(deserialize, deserializer)
    }
}
1038
1039impl<C> std::fmt::Debug for IngredientImpl<C>
1040where
1041 C: Configuration,
1042{
1043 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1044 f.debug_struct(std::any::type_name::<Self>())
1045 .field("index", &self.ingredient_index)
1046 .finish()
1047 }
1048}
1049
// SAFETY: both accessors return the slot's own `MemoTable`; the raw-pointer
// variant requires the caller to guarantee the slot is alive.
unsafe impl<C> Slot for Value<C>
where
    C: Configuration,
{
    #[inline(always)]
    unsafe fn memos(
        this: *const Self,
        _current_revision: Revision,
    ) -> *const crate::table::memo::MemoTable {
        // SAFETY: caller guarantees `this` points to a live slot.
        unsafe { (*this).memos.get() }
    }

    #[inline(always)]
    fn memos_mut(&mut self) -> &mut crate::table::memo::MemoTable {
        self.memos.get_mut()
    }
}
1071
/// A fixed-size queue of the most recent revisions in which values of this
/// ingredient were interned; a slot not read within this window is stale and
/// may be reused.
struct RevisionQueue<C> {
    /// Serializes writers in `record_cold`; readers use the atomics directly.
    lock: Mutex<()>,
    /// Newest revision first; length is `C::REVISIONS` (empty if immortal).
    revisions: Box<[AtomicRevision]>,
    _configuration: PhantomData<fn() -> C>,
}

/// Sentinel `REVISIONS` value meaning values are never garbage collected.
const IMMORTAL: NonZeroUsize = NonZeroUsize::MAX;
1086
1087impl<C: Configuration> Default for RevisionQueue<C> {
1088 fn default() -> RevisionQueue<C> {
1089 let revisions = if C::REVISIONS == IMMORTAL {
1090 Box::default()
1091 } else {
1092 (0..C::REVISIONS.get())
1093 .map(|_| AtomicRevision::start())
1094 .collect()
1095 };
1096
1097 RevisionQueue {
1098 lock: Mutex::new(()),
1099 revisions,
1100 _configuration: PhantomData,
1101 }
1102 }
1103}
1104
impl<C: Configuration> RevisionQueue<C> {
    /// Records `revision` as the most recent; cheap no-op if it was already
    /// recorded (the common case, checked with a single atomic load).
    #[inline]
    fn record(&self, revision: Revision) {
        if C::REVISIONS == IMMORTAL {
            return;
        }

        // Fast path: `revisions[0]` is always the newest recorded revision.
        if self.revisions[0].load() >= revision {
            return;
        }

        self.record_cold(revision);
    }

    /// Shifts the queue down one slot and stores the new revision at the
    /// front, under the lock.
    ///
    /// NOTE(review): two threads racing past `record`'s fast path with the
    /// same revision will both shift, leaving a duplicate in the queue. That
    /// only shrinks the effective window (more conservative staleness), but
    /// confirm it is intended — a re-check under the lock would avoid it.
    #[cold]
    fn record_cold(&self, revision: Revision) {
        let _lock = self.lock.lock();

        for i in (1..C::REVISIONS.get()).rev() {
            self.revisions[i].store(self.revisions[i - 1].load());
        }

        self.revisions[0].store(revision);
    }

    /// Whether `revision` is older than everything in the window, i.e. the
    /// value has not been read for at least `C::REVISIONS` revisions.
    #[inline]
    fn is_stale(&self, revision: Revision) -> bool {
        if C::REVISIONS == IMMORTAL {
            return false;
        }

        let oldest = self.revisions[C::REVISIONS.get() - 1].load();

        // A start-revision sentinel means the window has not filled yet, so
        // nothing can be proven stale.
        if oldest == Revision::start() {
            return false;
        }

        revision < oldest
    }

    /// Whether the window has filled (seen `C::REVISIONS` revisions), i.e.
    /// staleness checks are meaningful at all.
    #[inline]
    fn is_primed(&self) -> bool {
        if C::REVISIONS == IMMORTAL {
            return false;
        }

        self.revisions[C::REVISIONS.get() - 1].load() > Revision::start()
    }
}
1166
/// Comparison of stored interned fields against a *lookup key* of a
/// different type `O`, without first converting the key: `hash` must agree
/// with the owned form's hash, and `eq` compares across the two types.
pub trait HashEqLike<O> {
    fn hash<H: Hasher>(&self, h: &mut H);
    fn eq(&self, data: &O) -> bool;
}
1172
/// Conversion of a lookup key into the owned form stored in the interner
/// (e.g. `&str -> String`); only invoked when the key is actually interned.
pub trait Lookup<O> {
    fn into_owned(self) -> O;
}

// A key of the stored type itself needs no conversion.
impl<T> Lookup<T> for T {
    fn into_owned(self) -> T {
        self
    }
}
1196
// Identity: a key of the stored type hashes and compares as itself.
impl<T> HashEqLike<T> for T
where
    T: Hash + Eq,
{
    fn hash<H: Hasher>(&self, h: &mut H) {
        Hash::hash(self, &mut *h);
    }

    fn eq(&self, data: &T) -> bool {
        self == data
    }
}

// Stored `&T` compared against an owned key `T`.
impl<T> HashEqLike<T> for &T
where
    T: Hash + Eq,
{
    fn hash<H: Hasher>(&self, h: &mut H) {
        // Hash the referent (`*self`), matching the owned form's hash.
        Hash::hash(*self, &mut *h);
    }

    fn eq(&self, data: &T) -> bool {
        **self == *data
    }
}

// Stored `T` compared against a borrowed key `&T`.
impl<T> HashEqLike<&T> for T
where
    T: Hash + Eq,
{
    fn hash<H: Hasher>(&self, h: &mut H) {
        Hash::hash(self, &mut *h);
    }

    fn eq(&self, data: &&T) -> bool {
        *self == **data
    }
}
1235
// A borrowed key is cloned into its owned form when interned.
impl<T> Lookup<T> for &T
where
    T: Clone,
{
    fn into_owned(self) -> T {
        Clone::clone(self)
    }
}

// Stored `Box<T>` (including unsized `T` like `str`) compared against `&T`.
impl<'a, T> HashEqLike<&'a T> for Box<T>
where
    T: ?Sized + Hash + Eq,
    Box<T>: From<&'a T>,
{
    fn hash<H: Hasher>(&self, h: &mut H) {
        // `Box<T>`'s `Hash` delegates to `T`, so this matches the key's hash.
        Hash::hash(self, &mut *h)
    }
    fn eq(&self, data: &&T) -> bool {
        **self == **data
    }
}

impl<'a, T> Lookup<Box<T>> for &'a T
where
    T: ?Sized + Hash + Eq,
    Box<T>: From<&'a T>,
{
    fn into_owned(self) -> Box<T> {
        Box::from(self)
    }
}
1267
// Stored `Arc<T>` (including unsized `T`) compared against `&T`.
impl<'a, T> HashEqLike<&'a T> for Arc<T>
where
    T: ?Sized + Hash + Eq,
    Arc<T>: From<&'a T>,
{
    fn hash<H: Hasher>(&self, h: &mut H) {
        // Hash the referent, not the `Arc` pointer.
        Hash::hash(&**self, &mut *h)
    }
    fn eq(&self, data: &&T) -> bool {
        **self == **data
    }
}

impl<'a, T> Lookup<Arc<T>> for &'a T
where
    T: ?Sized + Hash + Eq,
    Arc<T>: From<&'a T>,
{
    fn into_owned(self) -> Arc<T> {
        Arc::from(self)
    }
}

// `&str` keys intern into owned `String`s.
impl Lookup<String> for &str {
    fn into_owned(self) -> String {
        self.to_owned()
    }
}
1296
// Optional support for `compact_str::CompactString` as the stored type, with
// `&str` and `Cow<str>` lookup keys.
#[cfg(feature = "compact_str")]
impl Lookup<compact_str::CompactString> for &str {
    fn into_owned(self) -> compact_str::CompactString {
        compact_str::CompactString::new(self)
    }
}

#[cfg(feature = "compact_str")]
impl HashEqLike<&str> for compact_str::CompactString {
    fn hash<H: Hasher>(&self, h: &mut H) {
        Hash::hash(self, &mut *h)
    }

    fn eq(&self, data: &&str) -> bool {
        self == *data
    }
}

#[cfg(feature = "compact_str")]
impl HashEqLike<Cow<'_, str>> for compact_str::CompactString {
    fn hash<H: Hasher>(&self, h: &mut H) {
        // Hash as a `str` so borrowed and owned keys agree.
        self.as_str().hash(h);
    }

    fn eq(&self, data: &Cow<'_, str>) -> bool {
        self.as_str() == data.as_ref()
    }
}

#[cfg(feature = "compact_str")]
impl Lookup<compact_str::CompactString> for Cow<'_, str> {
    fn into_owned(self) -> compact_str::CompactString {
        compact_str::CompactString::new(Cow::into_owned(self))
    }
}
1332
// Stored `String` compared against a `&str` key.
impl HashEqLike<&str> for String {
    fn hash<H: Hasher>(&self, h: &mut H) {
        Hash::hash(self, &mut *h)
    }

    fn eq(&self, data: &&str) -> bool {
        self == *data
    }
}

// Stored `Vec<T>` compared element-wise against a `&[A]` key, where `T` can
// compare against `A` directly.
impl<A, T: Hash + Eq + PartialEq<A>> HashEqLike<&[A]> for Vec<T> {
    fn hash<H: Hasher>(&self, h: &mut H) {
        Hash::hash(self, h);
    }

    fn eq(&self, data: &&[A]) -> bool {
        self.len() == data.len() && data.iter().enumerate().all(|(i, a)| &self[i] == a)
    }
}

impl<A: Hash + Eq + PartialEq<T> + Clone + Lookup<T>, T> Lookup<Vec<T>> for &[A] {
    fn into_owned(self) -> Vec<T> {
        // Convert each element through its own `Lookup` impl.
        self.iter().map(|a| Lookup::into_owned(a.clone())).collect()
    }
}

// Same as above, but for fixed-size array keys `[A; N]`.
impl<const N: usize, A, T: Hash + Eq + PartialEq<A>> HashEqLike<[A; N]> for Vec<T> {
    fn hash<H: Hasher>(&self, h: &mut H) {
        Hash::hash(self, h);
    }

    fn eq(&self, data: &[A; N]) -> bool {
        self.len() == data.len() && data.iter().enumerate().all(|(i, a)| &self[i] == a)
    }
}

impl<const N: usize, A: Hash + Eq + PartialEq<T> + Clone + Lookup<T>, T> Lookup<Vec<T>> for [A; N] {
    fn into_owned(self) -> Vec<T> {
        self.into_iter()
            .map(|a| Lookup::into_owned(a.clone()))
            .collect()
    }
}
1376
// Stored `PathBuf` compared against a `&Path` key.
impl HashEqLike<&Path> for PathBuf {
    fn hash<H: Hasher>(&self, h: &mut H) {
        Hash::hash(self, h);
    }

    fn eq(&self, data: &&Path) -> bool {
        self == data
    }
}

impl Lookup<PathBuf> for &Path {
    fn into_owned(self) -> PathBuf {
        self.to_owned()
    }
}
1392
// Stored `T` compared against a `Cow<T>` key (borrowed or owned).
impl<T: Hash + Eq + Clone> HashEqLike<Cow<'_, T>> for T {
    fn hash<H: Hasher>(&self, h: &mut H) {
        Hash::hash(self, h);
    }

    fn eq(&self, data: &Cow<'_, T>) -> bool {
        self == data.as_ref()
    }
}

impl<T: Clone> Lookup<T> for Cow<'_, T> {
    fn into_owned(self) -> T {
        // Clones only if the Cow is borrowed.
        Cow::into_owned(self)
    }
}

// Stored `String` compared against a `Cow<str>` key.
impl HashEqLike<Cow<'_, str>> for String {
    fn hash<H: Hasher>(&self, h: &mut H) {
        // Hash as `str` so it agrees with the key's hash.
        self.as_str().hash(h);
    }

    fn eq(&self, data: &Cow<'_, str>) -> bool {
        self.as_str() == data.as_ref()
    }
}

impl Lookup<String> for Cow<'_, str> {
    fn into_owned(self) -> String {
        Cow::into_owned(self)
    }
}
1424
// Stored `PathBuf` compared against a `Cow<Path>` key.
impl HashEqLike<Cow<'_, Path>> for PathBuf {
    fn hash<H: Hasher>(&self, h: &mut H) {
        self.as_path().hash(h);
    }

    fn eq(&self, data: &Cow<'_, Path>) -> bool {
        self.as_path() == data.as_ref()
    }
}

impl Lookup<PathBuf> for Cow<'_, Path> {
    fn into_owned(self) -> PathBuf {
        Cow::into_owned(self)
    }
}

// Stored `Box<[T]>` compared against a `Cow<[T]>` key.
impl<T: Hash + Eq + Clone> HashEqLike<Cow<'_, [T]>> for Box<[T]> {
    fn hash<H: Hasher>(&self, h: &mut H) {
        // Hash as a slice so boxed and vec-backed keys agree.
        self.as_ref().hash(h);
    }

    fn eq(&self, data: &Cow<'_, [T]>) -> bool {
        self.as_ref() == data.as_ref()
    }
}

impl<T: Clone> Lookup<Box<[T]>> for Cow<'_, [T]> {
    fn into_owned(self) -> Box<[T]> {
        Cow::into_owned(self).into_boxed_slice()
    }
}

// Stored `Vec<T>` compared against a `Cow<[T]>` key.
impl<T: Hash + Eq + Clone> HashEqLike<Cow<'_, [T]>> for Vec<T> {
    fn hash<H: Hasher>(&self, h: &mut H) {
        self.as_slice().hash(h);
    }

    fn eq(&self, data: &Cow<'_, [T]>) -> bool {
        self.as_slice() == data.as_ref()
    }
}

impl<T: Clone> Lookup<Vec<T>> for Cow<'_, [T]> {
    fn into_owned(self) -> Vec<T> {
        Cow::into_owned(self)
    }
}
1472
1473#[cfg(feature = "persistence")]
1474mod persistence {
1475 use std::cell::UnsafeCell;
1476 use std::fmt;
1477 use std::hash::BuildHasher;
1478
1479 use intrusive_collections::LinkedListLink;
1480 use serde::ser::{SerializeMap, SerializeStruct};
1481 use serde::{de, Deserialize};
1482
1483 use super::{Configuration, IngredientImpl, Value, ValueShared};
1484 use crate::plumbing::Ingredient;
1485 use crate::table::memo::MemoTable;
1486 use crate::zalsa::Zalsa;
1487 use crate::{Durability, Id, Revision};
1488
    /// Serializes every interned value of one ingredient as a map keyed by
    /// the value's `Id` bits.
    pub struct SerializeIngredient<'db, C>
    where
        C: Configuration,
    {
        pub zalsa: &'db Zalsa,
        pub ingredient: &'db IngredientImpl<C>,
    }

    impl<C> serde::Serialize for SerializeIngredient<'_, C>
    where
        C: Configuration,
    {
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            let Self { zalsa, ingredient } = *self;

            // Total number of interned values across all shards.
            let count = ingredient
                .shards
                .iter()
                .map(|shard| shard.lock().key_map.len())
                .sum();

            let mut map = serializer.serialize_map(Some(count))?;

            for (_, value) in zalsa.table().slots_of::<Value<C>>() {
                // SAFETY: `Ingredient::serialize` is unsafe; presumably its
                // contract excludes concurrent mutation — TODO confirm.
                let id = unsafe { (*value.shared.get()).id };

                map.serialize_entry(&id.as_bits(), value)?;
            }

            map.end()
        }
    }
1526
1527 impl<C> serde::Serialize for Value<C>
1528 where
1529 C: Configuration,
1530 {
1531 fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
1532 where
1533 S: serde::Serializer,
1534 {
1535 let mut value = serializer.serialize_struct("Value,", 3)?;
1536
1537 let Value {
1538 fields,
1539 shared,
1540 shard: _,
1541 link: _,
1542 memos: _,
1543 } = self;
1544
1545 let fields = unsafe { &*fields.get() };
1548
1549 let ValueShared {
1552 durability,
1553 last_interned_at,
1554 id: _,
1555 } = unsafe { *shared.get() };
1556
1557 value.serialize_field("durability", &durability)?;
1558 value.serialize_field("last_interned_at", &last_interned_at)?;
1559 value.serialize_field("fields", &SerializeFields::<C>(fields))?;
1560
1561 value.end()
1562 }
1563 }
1564
    /// Wrapper routing `Serialize` for the fields through `C::serialize`.
    struct SerializeFields<'db, C: Configuration>(&'db C::Fields<'static>);

    impl<C> serde::Serialize for SerializeFields<'_, C>
    where
        C: Configuration,
    {
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            C::serialize(self.0, serializer)
        }
    }
1578
    /// Deserializes the map written by [`SerializeIngredient`], rebuilding
    /// the database table pages and the ingredient's shard maps in place.
    pub struct DeserializeIngredient<'db, C>
    where
        C: Configuration,
    {
        pub zalsa: &'db mut Zalsa,
        pub ingredient: &'db mut IngredientImpl<C>,
    }

    impl<'de, C> de::DeserializeSeed<'de> for DeserializeIngredient<'_, C>
    where
        C: Configuration,
    {
        type Value = ();

        fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
        where
            D: serde::Deserializer<'de>,
        {
            deserializer.deserialize_map(self)
        }
    }
1600
    impl<'de, C> de::Visitor<'de> for DeserializeIngredient<'_, C>
    where
        C: Configuration,
    {
        type Value = ();

        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
            formatter.write_str("a map")
        }

        /// For each serialized `(id, value)` entry: reconstruct the `Value`,
        /// force-allocate it at the same table page/index it originally had,
        /// and re-insert it into the owning shard's key map.
        fn visit_map<M>(self, mut access: M) -> Result<Self::Value, M::Error>
        where
            M: de::MapAccess<'de>,
        {
            let DeserializeIngredient { zalsa, ingredient } = self;

            while let Some((id, value)) = access.next_entry::<u64, DeserializeValue<C>>()? {
                let id = Id::from_bits(id);
                let (page_idx, _) = crate::table::split_id(id);

                // Recompute the shard from the deserialized fields, exactly
                // as `intern_id` would.
                let hash = ingredient.hasher.hash_one(&value.fields.0);
                let shard_index = ingredient.shard(hash);

                // SAFETY: `shard()` returns an in-bounds index; the guard is
                // extended for the remainder of the loop iteration.
                let shard = unsafe { &mut *ingredient.shards.get_unchecked(shard_index).lock() };

                let value = Value::<C> {
                    shard: shard_index as u16,
                    link: LinkedListLink::new(),
                    memos: UnsafeCell::new(unsafe {
                        MemoTable::new(ingredient.memo_table_types())
                    }),
                    fields: UnsafeCell::new(value.fields.0),
                    shared: UnsafeCell::new(ValueShared {
                        id,
                        durability: value.durability,
                        last_interned_at: value.last_interned_at,
                    }),
                };

                // Ensure the page this id lived on exists before allocating.
                zalsa.table_mut().force_page::<Value<C>>(
                    page_idx,
                    ingredient.ingredient_index(),
                    ingredient.memo_table_types(),
                );

                let (allocated_id, value) = unsafe {
                    zalsa
                        .table()
                        .page(page_idx)
                        .allocate(page_idx, |_| value)
                        .unwrap_or_else(|_| panic!("serialized an invalid `Id`: {id:?}"))
                };

                // Ids must come back at their original slots, which holds as
                // long as entries are serialized in allocation order.
                assert_eq!(
                    allocated_id.index(),
                    id.index(),
                    "values are serialized in allocation order"
                );

                ingredient.insert_id(id, zalsa, shard, hash, value);
            }

            Ok(())
        }
    }
1675
    /// Mirror of the struct written by `Value::serialize`; the field order
    /// here must stay in sync with the serialization side.
    #[derive(Deserialize)]
    #[serde(rename = "Value")]
    pub struct DeserializeValue<C: Configuration> {
        durability: Durability,
        last_interned_at: Revision,
        #[serde(bound = "C: Configuration")]
        fields: DeserializeFields<C>,
    }

    /// Wrapper routing `Deserialize` for the fields through `C::deserialize`.
    struct DeserializeFields<C: Configuration>(C::Fields<'static>);

    impl<'de, C> serde::Deserialize<'de> for DeserializeFields<C>
    where
        C: Configuration,
    {
        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: serde::Deserializer<'de>,
        {
            C::deserialize(deserializer)
                .map(DeserializeFields)
                .map_err(de::Error::custom)
        }
    }
1700}