//! ACT-R-style declarative memory activation (base-level learning, spreading
//! activation, retrieval probability and latency, partial matching) together
//! with classic encoding and retrieval effects (levels of processing,
//! generation, testing, encoding specificity).

use serde::{Deserialize, Serialize};
use crate::error::{BodhError, Result, validate_finite, validate_positive};
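
/// Presentation history for a single declarative chunk.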
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChunkHistory {
    /// Elapsed time since each past presentation of the chunk, in the caller's time units.
    pub presentation_ages: Vec<f64>,
}
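
/// ACT-R-style base-level activation: `B = ln(Σ_j t_j^(-d))`, where `t_j` is
/// the age of presentation `j` and `d` is the decay rate. More presentations
/// and more recent presentations both raise activation.
///
/// # Errors
///
/// Returns an error if the history is empty, `decay` is not positive, or any
/// age is non-positive or non-finite.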
#[must_use = "returns the base-level activation without side effects"]
pub fn base_level_activation(history: &ChunkHistory, decay: f64) -> Result<f64> {
if history.presentation_ages.is_empty() {
return Err(BodhError::InvalidParameter(
"presentation_ages must not be empty".into(),
));
}
validate_positive(decay, "decay")?;
let mut sum = 0.0;
for (i, &t) in history.presentation_ages.iter().enumerate() {
if !t.is_finite() || t <= 0.0 {
return Err(BodhError::InvalidParameter(format!(
"presentation_ages[{i}] must be positive, got {t}"
)));
}
sum += t.powf(-decay);
}
Ok(sum.ln())
}
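
/// Total activation `A = B + Σ_j W_j * S_j`: the base level plus spreading
/// activation from associated sources, where each `(W_j, S_j)` pair in
/// `associations` is a source weight and an association strength.
///
/// # Errors
///
/// Returns an error if `base_level` or any weight or strength is non-finite.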
#[must_use = "returns the total activation without side effects"]
pub fn spreading_activation(base_level: f64, associations: &[(f64, f64)]) -> Result<f64> {
validate_finite(base_level, "base_level")?;
let mut spread = 0.0;
for (i, &(weight, strength)) in associations.iter().enumerate() {
if !weight.is_finite() {
return Err(BodhError::InvalidParameter(format!(
"weight[{i}] must be finite, got {weight}"
)));
}
if !strength.is_finite() {
return Err(BodhError::InvalidParameter(format!(
"strength[{i}] must be finite, got {strength}"
)));
}
spread += weight * strength;
}
Ok(base_level + spread)
}
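
/// Probability of retrieval under the logistic recall equation
/// `P = 1 / (1 + e^((t - A) / s))`, with activation `A`, retrieval
/// `threshold` `t`, and activation `noise` `s`.
///
/// # Errors
///
/// Returns an error if `activation` or `threshold` is non-finite, or `noise`
/// is not positive.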
#[inline]
#[must_use = "returns the retrieval probability without side effects"]
pub fn retrieval_probability(activation: f64, threshold: f64, noise: f64) -> Result<f64> {
validate_finite(activation, "activation")?;
validate_finite(threshold, "threshold")?;
validate_positive(noise, "noise")?;
let exponent = (threshold - activation) / noise;
Ok(1.0 / (1.0 + exponent.exp()))
}
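
/// Retrieval latency `T = F * e^(-f * A)`, with `latency_factor` `F`,
/// `latency_exponent` `f`, and activation `A`; higher activation yields a
/// shorter latency.
///
/// # Errors
///
/// Returns an error if `activation` is non-finite or either latency parameter
/// is not positive.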
#[inline]
#[must_use = "returns the retrieval time in seconds without side effects"]
pub fn retrieval_latency(
activation: f64,
latency_factor: f64,
latency_exponent: f64,
) -> Result<f64> {
validate_finite(activation, "activation")?;
validate_positive(latency_factor, "latency_factor")?;
validate_positive(latency_exponent, "latency_exponent")?;
Ok(latency_factor * (-latency_exponent * activation).exp())
}
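
/// Partial-matching penalty `Σ_k P_k * M_k` over `(penalty, similarity)`
/// slot pairs, where each similarity lies in `[-1, 0]` (0 is a perfect slot
/// match). The result is non-positive and can be added to a chunk's
/// activation.
///
/// # Errors
///
/// Returns an error if any penalty is non-finite or any similarity falls
/// outside `[-1, 0]`.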
#[must_use = "returns the partial matching penalty without side effects"]
pub fn partial_matching(slot_penalties: &[(f64, f64)]) -> Result<f64> {
let mut total = 0.0;
for (i, &(penalty, similarity)) in slot_penalties.iter().enumerate() {
if !penalty.is_finite() {
return Err(BodhError::InvalidParameter(format!(
"penalty[{i}] must be finite, got {penalty}"
)));
}
if !similarity.is_finite() || !(-1.0..=0.0).contains(&similarity) {
return Err(BodhError::InvalidParameter(format!(
"similarity[{i}] must be in [-1, 0], got {similarity}"
)));
}
total += penalty * similarity;
}
Ok(total)
}
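
/// Depth of processing at encoding (levels-of-processing framework), ordered
/// from shallowest (`Structural`) to deepest (`Semantic`).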
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
#[non_exhaustive]
pub enum ProcessingLevel {
Structural,
Phonological,
Semantic,
}
impl ProcessingLevel {
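    /// Baseline encoding-strength multiplier associated with this level.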
#[inline]
#[must_use]
pub fn encoding_strength(self) -> f64 {
match self {
Self::Structural => 0.3,
Self::Phonological => 0.5,
Self::Semantic => 1.0,
}
}
}
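
/// Encoding strength from depth of processing, elaboration, and
/// distinctiveness: `S = base(level) * (1 + max(elaboration, 0)) *
/// clamp(distinctiveness, 0, 1)`.
///
/// # Errors
///
/// Returns an error if `elaboration` or `distinctiveness` is non-finite.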
#[inline]
#[must_use = "returns the encoding strength without side effects"]
pub fn encoding_strength(
level: ProcessingLevel,
elaboration: f64,
distinctiveness: f64,
) -> Result<f64> {
validate_finite(elaboration, "elaboration")?;
validate_finite(distinctiveness, "distinctiveness")?;
let elab = elaboration.max(0.0);
let dist = distinctiveness.clamp(0.0, 1.0);
Ok(level.encoding_strength() * (1.0 + elab) * dist)
}
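
/// Generation effect: self-generated material gets a retention boost of
/// `generation_weight`, i.e. `clamp(base_retention * (1 + w), 0, 1)` when
/// `generated` is true and `clamp(base_retention, 0, 1)` otherwise.
///
/// # Errors
///
/// Returns an error if `base_retention` or `generation_weight` is non-finite.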
#[inline]
#[must_use = "returns the boosted retention without side effects"]
pub fn generation_effect(
base_retention: f64,
generated: bool,
generation_weight: f64,
) -> Result<f64> {
validate_finite(base_retention, "base_retention")?;
validate_finite(generation_weight, "generation_weight")?;
let boost = if generated { generation_weight } else { 0.0 };
Ok((base_retention * (1.0 + boost)).clamp(0.0, 1.0))
}
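
/// Testing effect: a successful retrieval adds
/// `retrieval_bonus * clamp(difficulty, 0, 1)` to the old strength, so harder
/// successful retrievals strengthen memory more; a failed retrieval leaves
/// the strength unchanged.
///
/// # Errors
///
/// Returns an error if any argument is non-finite.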
#[inline]
#[must_use = "returns the updated strength without side effects"]
pub fn testing_effect(
old_strength: f64,
retrieval_bonus: f64,
success: bool,
difficulty: f64,
) -> Result<f64> {
validate_finite(old_strength, "old_strength")?;
validate_finite(retrieval_bonus, "retrieval_bonus")?;
validate_finite(difficulty, "difficulty")?;
let diff = difficulty.clamp(0.0, 1.0);
let bonus = if success { retrieval_bonus * diff } else { 0.0 };
Ok(old_strength + bonus)
}
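
/// Encoding specificity: recall probability scaled by how well the retrieval
/// context matches the encoding context,
/// `P = clamp(base_probability * m^specificity, 0, 1)` with context match
/// `m` clamped to `[0, 1]`.
///
/// # Errors
///
/// Returns an error if `base_probability` or `context_match` is non-finite,
/// or `specificity` is not positive.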
#[inline]
#[must_use = "returns the recall probability without side effects"]
pub fn encoding_specificity(
base_probability: f64,
context_match: f64,
specificity: f64,
) -> Result<f64> {
validate_finite(base_probability, "base_probability")?;
validate_finite(context_match, "context_match")?;
validate_positive(specificity, "specificity")?;
let cm = context_match.clamp(0.0, 1.0);
Ok((base_probability * cm.powf(specificity)).clamp(0.0, 1.0))
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_base_level_single_recent() {
let h = ChunkHistory {
presentation_ages: vec![1.0],
};
let b = base_level_activation(&h, 0.5).unwrap();
assert!(b.abs() < 1e-10);
}
#[test]
fn test_base_level_multiple_presentations() {
let h1 = ChunkHistory {
presentation_ages: vec![1.0],
};
let h5 = ChunkHistory {
presentation_ages: vec![1.0, 2.0, 3.0, 4.0, 5.0],
};
let b1 = base_level_activation(&h1, 0.5).unwrap();
let b5 = base_level_activation(&h5, 0.5).unwrap();
assert!(b5 > b1);
}
#[test]
fn test_base_level_recency() {
let recent = ChunkHistory {
presentation_ages: vec![1.0],
};
let old = ChunkHistory {
presentation_ages: vec![100.0],
};
let b_recent = base_level_activation(&recent, 0.5).unwrap();
let b_old = base_level_activation(&old, 0.5).unwrap();
assert!(b_recent > b_old);
}
#[test]
fn test_base_level_empty() {
let h = ChunkHistory {
presentation_ages: vec![],
};
assert!(base_level_activation(&h, 0.5).is_err());
}
#[test]
fn test_base_level_invalid_age() {
let h = ChunkHistory {
presentation_ages: vec![1.0, -1.0],
};
assert!(base_level_activation(&h, 0.5).is_err());
}
#[test]
fn test_base_level_known_value() {
let h = ChunkHistory {
presentation_ages: vec![4.0],
};
let b = base_level_activation(&h, 0.5).unwrap();
assert!((b - 0.5_f64.ln()).abs() < 1e-10);
}
#[test]
fn test_spreading_no_associations() {
let total = spreading_activation(1.0, &[]).unwrap();
assert!((total - 1.0).abs() < 1e-10);
}
#[test]
fn test_spreading_adds_to_base() {
let assocs = vec![(1.0, 0.5), (0.5, 0.3)];
let total = spreading_activation(1.0, &assocs).unwrap();
assert!((total - 1.65).abs() < 1e-10);
}
#[test]
fn test_retrieval_prob_above_threshold() {
let p = retrieval_probability(2.0, 0.0, 0.4).unwrap();
assert!(p > 0.99);
}
#[test]
fn test_retrieval_prob_below_threshold() {
let p = retrieval_probability(-2.0, 0.0, 0.4).unwrap();
assert!(p < 0.01);
}
#[test]
fn test_retrieval_prob_at_threshold() {
let p = retrieval_probability(1.0, 1.0, 0.4).unwrap();
assert!((p - 0.5).abs() < 1e-10);
}
#[test]
fn test_retrieval_latency_higher_activation_faster() {
let t_high = retrieval_latency(2.0, 1.0, 1.0).unwrap();
let t_low = retrieval_latency(0.0, 1.0, 1.0).unwrap();
assert!(t_high < t_low);
}
#[test]
fn test_retrieval_latency_known_value() {
let t = retrieval_latency(0.0, 1.0, 1.0).unwrap();
assert!((t - 1.0).abs() < 1e-10);
}
#[test]
fn test_retrieval_latency_reference() {
let t = retrieval_latency(1.0, 1.0, 1.0).unwrap();
assert!((t - (-1.0_f64).exp()).abs() < 1e-10);
}
#[test]
fn test_partial_matching_perfect() {
let slots = vec![(1.5, 0.0), (1.5, 0.0)];
let mp = partial_matching(&slots).unwrap();
        assert!(mp.abs() < 1e-10);
    }
#[test]
fn test_partial_matching_mismatch() {
let slots = vec![(1.5, -0.5)];
let mp = partial_matching(&slots).unwrap();
assert!((mp - (-0.75)).abs() < 1e-10);
}
#[test]
fn test_partial_matching_invalid_similarity() {
        assert!(partial_matching(&[(1.0, 0.5)]).is_err());
        assert!(partial_matching(&[(1.0, -1.5)]).is_err());
    }
#[test]
fn test_processing_level_ordering() {
assert!(ProcessingLevel::Semantic > ProcessingLevel::Phonological);
assert!(ProcessingLevel::Phonological > ProcessingLevel::Structural);
}
#[test]
fn test_encoding_strength_deeper_better() {
let shallow = encoding_strength(ProcessingLevel::Structural, 0.0, 1.0).unwrap();
let deep = encoding_strength(ProcessingLevel::Semantic, 0.0, 1.0).unwrap();
assert!(deep > shallow);
}
#[test]
fn test_encoding_strength_elaboration_boosts() {
let plain = encoding_strength(ProcessingLevel::Semantic, 0.0, 1.0).unwrap();
let elab = encoding_strength(ProcessingLevel::Semantic, 2.0, 1.0).unwrap();
assert!(elab > plain);
}
#[test]
fn test_generation_effect_boost() {
let read = generation_effect(0.5, false, 0.2).unwrap();
let generated = generation_effect(0.5, true, 0.2).unwrap();
assert!((read - 0.5).abs() < 1e-10);
assert!(generated > read);
}
#[test]
fn test_testing_effect_success() {
let before = 0.5;
let after = testing_effect(before, 0.3, true, 0.8).unwrap();
assert!(after > before);
}
#[test]
fn test_testing_effect_failure() {
let before = 0.5;
let after = testing_effect(before, 0.3, false, 0.8).unwrap();
assert!((after - before).abs() < 1e-10);
}
#[test]
fn test_testing_effect_difficulty_modulates() {
let easy = testing_effect(0.5, 0.3, true, 0.2).unwrap();
let hard = testing_effect(0.5, 0.3, true, 0.9).unwrap();
        assert!(hard > easy);
    }
#[test]
fn test_encoding_specificity_perfect_match() {
let p = encoding_specificity(0.8, 1.0, 2.0).unwrap();
assert!((p - 0.8).abs() < 1e-10);
}
#[test]
fn test_encoding_specificity_mismatch_drops() {
let matched = encoding_specificity(0.8, 1.0, 2.0).unwrap();
let mismatched = encoding_specificity(0.8, 0.5, 2.0).unwrap();
assert!(matched > mismatched);
}
#[test]
fn test_processing_level_serde_roundtrip() {
let level = ProcessingLevel::Semantic;
let json = serde_json::to_string(&level).unwrap();
let back: ProcessingLevel = serde_json::from_str(&json).unwrap();
assert_eq!(level, back);
}
#[test]
fn test_chunk_history_serde_roundtrip() {
let h = ChunkHistory {
presentation_ages: vec![1.0, 5.0, 10.0],
};
let json = serde_json::to_string(&h).unwrap();
let back: ChunkHistory = serde_json::from_str(&json).unwrap();
assert_eq!(h.presentation_ages.len(), back.presentation_ages.len());
}
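
    // End-to-end usage sketch: combine base-level and spreading activation,
    // then derive retrieval probability and latency, using typical ACT-R-like
    // parameter values (decay 0.5, noise 0.4, latency factor and exponent 1.0).
    #[test]
    fn test_activation_pipeline_example() {
        let history = ChunkHistory {
            presentation_ages: vec![2.0, 10.0, 50.0],
        };
        let base = base_level_activation(&history, 0.5).unwrap();
        let total = spreading_activation(base, &[(1.0, 0.8), (0.5, 0.4)]).unwrap();
        assert!(total > base);
        let p = retrieval_probability(total, 0.0, 0.4).unwrap();
        assert!(p > 0.0 && p < 1.0);
        let latency = retrieval_latency(total, 1.0, 1.0).unwrap();
        assert!(latency > 0.0);
    }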
}