lucid_core/lib.rs
1//! # Lucid Core
2//!
3//! High-performance memory retrieval engine implementing ACT-R spreading activation
4//! and MINERVA 2's reconstructive memory model.
5//!
6//! ## Why Reconstructive Memory?
7//!
8//! Most AI memory systems treat memory as storage and retrieval—like a database.
9//! But human memory doesn't work that way. Human memory is *reconstructive*:
10//!
11//! - **Memories evolve over time** - They aren't static records
12//! - **Context shapes retrieval** - What surfaces depends on your current state
13//! - **Associations matter** - Memories activate related memories
14//! - **Details fade, essence persists** - Verbatim decays faster than gist
15//!
16//! This library implements the computational mechanisms that make this possible.
17//!
18//! ## Core Concepts
19//!
20//! ### Activation
21//!
22//! Every memory has an activation level that determines how likely it is to be
23//! retrieved. Activation comes from three sources:
24//!
25//! 1. **Base-level activation** - Recency and frequency of access
26//! ```text
27//! B(m) = ln[Σ(t_k)^(-d)]
28//! ```
29//!
30//! 2. **Probe-trace similarity** - How well the current context matches
31//! ```text
32//! A(i) = S(i)³ (MINERVA 2's nonlinear function)
33//! ```
34//!
35//! 3. **Spreading activation** - Activation from associated memories
36//! ```text
37//! A_j = Σ(W_i/n_i) × S_ij
38//! ```
39//!
40//! ### Retrieval
41//!
42//! The retrieval pipeline combines these into a ranked set of candidates:
43//!
44//! 1. Compute semantic similarity between probe and all memories
45//! 2. Apply nonlinear activation (emphasizes strong matches)
46//! 3. Compute base-level from access history
47//! 4. Spread activation through the association graph
48//! 5. Combine, rank, and filter by retrieval probability
49//!
50//! ## Example
51//!
52//! ```rust
53//! use lucid_core::{
54//! retrieval::{retrieve, RetrievalConfig, RetrievalInput},
55//! spreading::Association,
56//! };
57//!
58//! // Your memory embeddings (from any embedding model)
59//! let memories = vec![
60//! vec![1.0, 0.0, 0.0],
61//! vec![0.5, 0.5, 0.0],
62//! vec![0.0, 1.0, 0.0],
63//! ];
64//!
65//! // Probe embedding (what you're looking for)
66//! let probe = vec![0.9, 0.1, 0.0];
67//!
68//! let input = RetrievalInput {
69//! probe_embedding: &probe,
70//! memory_embeddings: &memories,
71//! access_histories_ms: &[vec![1000.0], vec![500.0], vec![100.0]],
72//! emotional_weights: &[0.5, 0.5, 0.5],
73//! decay_rates: &[0.5, 0.5, 0.5],
74//! working_memory_boosts: &[1.0, 1.0, 1.0], // 1.0 = no boost, up to 2.0
75//! associations: &[], // Optional: links between memories
76//! current_time_ms: 2000.0,
77//! };
78//!
79//! let config = RetrievalConfig::default();
80//! let results = retrieve(&input, &config);
81//!
82//! // Results are ranked by total activation
83//! for candidate in results {
84//! println!(
85//! "Memory {} - activation: {:.3}, probability: {:.3}",
86//! candidate.index,
87//! candidate.total_activation,
88//! candidate.probability
89//! );
90//! }
91//! ```
92//!
93//! ## Performance
94//!
95//! This library is designed for speed because memory should feel like remembering—
96//! not like a database query.
97//!
98//! - Pure Rust implementation
99//! - No heap allocations in hot paths where possible
100//! - Batch operations for embedding comparisons
101//! - Pre-computed norms for similarity calculations
102//!
103//! ## References
104//!
105//! - Anderson, J. R. (1983). *The Architecture of Cognition* - ACT-R theory
106//! - Hintzman, D. L. (1988). *MINERVA 2: A simulation model of human memory* -
107//! Reconstructive retrieval
108//! - Kahana, M. J. (2012). *Foundations of Human Memory* - Memory models
109
// Lint policy: every public item must be documented, and the full default
// clippy lint set is enabled.
#![warn(missing_docs)]
#![warn(clippy::all)]
// NOTE(review): allowed without stated justification — presumably explicit
// `return` is intentional house style in some modules; confirm and document.
#![allow(clippy::needless_return)]

// Activation math: base-level activation, similarity, decay, and related helpers.
pub mod activation;
// Embedding utilities, only compiled when the `embedding` feature is enabled.
#[cfg(feature = "embedding")]
pub mod embedding;
// Spatial memory ("location intuitions"): familiarity and place associations.
pub mod location;
// The end-to-end retrieval pipeline described in the crate docs above.
pub mod retrieval;
// Spreading activation over the association graph (including temporal links).
pub mod spreading;
// Visual memory: frame tagging, consolidation, and pruning.
pub mod visual;
121
// Flatten the activation primitives into the crate root so callers can write
// `lucid_core::compute_base_level` etc. without the module path.
pub use activation::{
    combine_activations,
    // Association Decay
    compute_association_decay,
    compute_base_level,
    // Reconsolidation
    compute_effective_thresholds,
    // Instance Noise
    compute_encoding_strength,
    compute_instance_noise,
    // Session Decay
    compute_session_decay_rate,
    compute_session_decay_rate_batch,
    // Working Memory
    compute_working_memory_boost,
    compute_working_memory_boost_batch,
    cosine_similarity,
    get_decay_tau,
    nonlinear_activation,
    pe_zone,
    reconsolidation_probability,
    reinforce_association,
    retrieval_probability,
    should_prune_association,
    ActivationBreakdown,
    ActivationConfig,
    AssociationDecayConfig,
    AssociationState,
    InstanceNoiseConfig,
    ReconsolidationConfig,
    WorkingMemoryConfig,
    BETA_RECON,
    THETA_HIGH,
    THETA_LOW,
};
// Main entry point: `retrieve` plus its input, config, and result types.
pub use retrieval::{retrieve, RetrievalCandidate, RetrievalConfig, RetrievalInput};
// Spreading activation across the association graph, including the
// temporal (episodic) variants and their configuration/result types.
pub use spreading::{
    // Temporal Spreading (Episodic Memory)
    compute_temporal_link_strength,
    create_episode_links,
    find_temporal_neighbors,
    spread_activation,
    spread_temporal_activation,
    spread_temporal_activation_multi,
    Association,
    SpreadingConfig,
    SpreadingResult,
    TemporalLink,
    TemporalSpreadingConfig,
    TemporalSpreadingResult,
};
173
// Location Intuitions (spatial memory): familiarity computation and decay,
// place associations, and activity-type inference for known locations.
pub use location::{
    compute_association_strength, compute_batch_decay, compute_decayed_familiarity,
    compute_familiarity, get_associated_locations, infer_activity_type, initial_familiarity,
    is_well_known, spread_location_activation, ActivityInference, ActivityType, InferenceSource,
    LocationAssociation, LocationConfig, LocationIntuition,
};
181
// Visual Memory: frame selection/description, tag strength, consolidation
// state, pruning decisions, and visual retrieval.
pub use visual::{
    compute_pruning_candidates, compute_tag_strength, prepare_frame_description_prompt,
    prepare_synthesis_prompt, retrieve_visual, select_frames_for_description, should_prune,
    should_tag, ConsolidationState, ConsolidationWindow, EmotionalContext, FrameCandidate,
    FrameDescriptionConfig, FrameDescriptionResult, PruningCandidate, PruningReason, TagReason,
    TranscriptSegment, VisualConfig, VisualConsolidationState, VisualMemory,
    VisualRetrievalCandidate, VisualRetrievalConfig, VisualRetrievalInput, VisualSource, VisualTag,
};
191
/// The crate's version string, embedded at compile time from
/// `CARGO_PKG_VERSION` (the `version` field of this crate's Cargo.toml).
pub const VERSION: &str = env!("CARGO_PKG_VERSION");
194
/// Initialize the library (placeholder for future setup).
///
/// Currently performs no work: it simply returns a fixed confirmation
/// string. The hook exists so callers can wire up initialization now and
/// pick up any future one-time setup without an API change.
#[must_use]
pub const fn init() -> &'static str {
    // Runtime string reproduced exactly; callers may match on it.
    const READY: &str = "lucid-core initialized";
    READY
}
200
#[cfg(test)]
mod tests {
    use super::*;

    /// A probe identical to one stored embedding must rank that memory
    /// first, with strictly higher probe activation than the runner-up.
    #[test]
    fn test_basic_retrieval() {
        // Three mutually orthogonal unit embeddings.
        let stored = vec![
            vec![1.0, 0.0, 0.0],
            vec![0.0, 1.0, 0.0],
            vec![0.0, 0.0, 1.0],
        ];
        // Probe matches the first embedding exactly.
        let probe = vec![1.0, 0.0, 0.0];
        let current = 10000.0;

        // Single prior access per memory: memory 0 is the most recent.
        let histories = [
            vec![current - 1000.0],
            vec![current - 2000.0],
            vec![current - 3000.0],
        ];

        let input = RetrievalInput {
            probe_embedding: &probe,
            memory_embeddings: &stored,
            access_histories_ms: &histories,
            emotional_weights: &[0.5, 0.5, 0.5],
            decay_rates: &[0.5, 0.5, 0.5],
            working_memory_boosts: &[1.0, 1.0, 1.0],
            associations: &[],
            current_time_ms: current,
        };

        // Drop the probability floor so every memory is returned.
        let config = RetrievalConfig {
            min_probability: 0.0,
            ..RetrievalConfig::default()
        };

        let results = retrieve(&input, &config);

        assert!(!results.is_empty());
        assert_eq!(results[0].index, 0);
        assert!(results[0].probe_activation > results[1].probe_activation);
    }
}
238}