pub struct LearningEngine { /* private fields */ }
Expand description
The HELM orchestration hub that bundles all six learning sub-modules.
§Examples
use lmm_agent::cognition::learning::engine::LearningEngine;
use lmm_agent::cognition::learning::config::LearningConfig;
let mut engine = LearningEngine::new(LearningConfig::default());
assert!(engine.q_table().is_empty());
assert_eq!(engine.episode_count(), 0);
Implementations§
Source§impl LearningEngine
impl LearningEngine
Source
pub fn new(config: LearningConfig) -> Self
pub fn new(config: LearningConfig) -> Self
Constructs a new LearningEngine from a LearningConfig.
§Examples
use lmm_agent::cognition::learning::engine::LearningEngine;
use lmm_agent::cognition::learning::config::LearningConfig;
let engine = LearningEngine::new(LearningConfig::default());
assert_eq!(engine.episode_count(), 0);
Source
pub fn config(&self) -> &LearningConfig
pub fn config(&self) -> &LearningConfig
Returns a reference to the current configuration.
Source
pub fn reset_epsilon(&mut self, epsilon: f64)
pub fn reset_epsilon(&mut self, epsilon: f64)
Resets the Q-table exploration epsilon.
Source
pub fn episode_count(&self) -> usize
pub fn episode_count(&self) -> usize
Returns the total number of completed episodes.
Source
pub fn total_reward(&self) -> f64
pub fn total_reward(&self) -> f64
Returns the accumulated total reward.
Source
pub fn record_step(
&mut self,
signal: &CognitionSignal,
state: u64,
action: ActionKey,
next_state: u64,
)
pub fn record_step( &mut self, signal: &CognitionSignal, state: u64, action: ActionKey, next_state: u64, )
Records one ThinkLoop step into all active learning sub-modules.
This should be called inside ThinkLoop::run after each step produces
a CognitionSignal.
§Arguments
signal - The signal produced by this ThinkLoop iteration.
state - FNV-1a state key for the current query.
action - The action applied to produce signal.query.
next_state - FNV-1a state key for the observation after the action.
§Examples
use lmm_agent::cognition::learning::engine::LearningEngine;
use lmm_agent::cognition::learning::config::LearningConfig;
use lmm_agent::cognition::learning::q_table::{ActionKey, QTable};
use lmm_agent::cognition::signal::CognitionSignal;
let mut engine = LearningEngine::new(LearningConfig::default());
let sig = CognitionSignal::new(0, "a b".into(), "a b c".into(), 1.0, 0.0);
let s = QTable::state_key("a b");
let s2 = QTable::state_key("a b c");
engine.record_step(&sig, s, ActionKey::Narrow, s2);
assert!(!engine.q_table().is_empty());
Source
pub fn recommend_action(
&mut self,
state: u64,
goal: &str,
step: usize,
) -> ActionKey
pub fn recommend_action( &mut self, state: u64, goal: &str, step: usize, ) -> ActionKey
Selects the recommended action for state using the Q-table and meta priors.
If a meta-adapter warm-start is available, it is added as a prior bias to the greedy Q selection before applying ε-greedy exploration.
§Examples
use lmm_agent::cognition::learning::engine::LearningEngine;
use lmm_agent::cognition::learning::config::LearningConfig;
use lmm_agent::cognition::learning::q_table::{ActionKey, QTable};
let mut engine = LearningEngine::new(LearningConfig::default());
let s = QTable::state_key("hello world");
let action = engine.recommend_action(s, "hello world", 0);
assert!(ActionKey::all().contains(&action));
Source
pub fn end_of_episode(
&mut self,
cold: &ColdStore,
index: &mut KnowledgeIndex,
goal: &str,
avg_reward: f64,
)
pub fn end_of_episode( &mut self, cold: &ColdStore, index: &mut KnowledgeIndex, goal: &str, avg_reward: f64, )
Finalises a completed episode: distils knowledge, stores meta-prototype, synthesises PMI facts, and decays ε.
§Arguments
cold - The agent’s cold store after drain_to_cold.
index - The agent’s knowledge index for distillation output.
goal - The natural-language goal that was pursued.
avg_reward - Mean reward across all steps in this episode.
§Examples
use lmm_agent::cognition::learning::engine::LearningEngine;
use lmm_agent::cognition::learning::config::LearningConfig;
use lmm_agent::cognition::memory::ColdStore;
use lmm_agent::cognition::knowledge::KnowledgeIndex;
let mut engine = LearningEngine::new(LearningConfig::default());
let cold = ColdStore::default();
let mut idx = KnowledgeIndex::new();
engine.end_of_episode(&cold, &mut idx, "test goal", 0.5);
assert_eq!(engine.episode_count(), 1);
Source
pub fn federate(&mut self, snapshot: &AgentSnapshot)
pub fn federate(&mut self, snapshot: &AgentSnapshot)
Merges a remote AgentSnapshot into the local Q-table (federated step).
Only active when LearningMode::Federated is enabled.
§Examples
use lmm_agent::cognition::learning::engine::LearningEngine;
use lmm_agent::cognition::learning::config::LearningConfig;
use lmm_agent::cognition::learning::q_table::{ActionKey, QTable};
use lmm_agent::types::AgentSnapshot;
let mut engine = LearningEngine::new(LearningConfig::default());
let remote_qt = QTable::new(0.1, 0.9, 0.0, 1.0, 0.0);
let snap = AgentSnapshot { agent_id: "remote".into(), q_table: remote_qt, total_reward: 1.0 };
engine.federate(&snap);
assert_eq!(engine.aggregator().merge_count, 1);
Source
pub fn export_snapshot(&self, agent_id: impl Into&lt;String&gt;) -> AgentSnapshot
pub fn export_snapshot(&self, agent_id: impl Into<String>) -> AgentSnapshot
Exports an AgentSnapshot for federated sharing with other agents.
§Examples
use lmm_agent::cognition::learning::engine::LearningEngine;
use lmm_agent::cognition::learning::config::LearningConfig;
let engine = LearningEngine::new(LearningConfig::default());
let snap = engine.export_snapshot("agent-1");
assert_eq!(snap.agent_id, "agent-1");
Source
pub fn aggregator(&self) -> &FederatedAggregator
pub fn aggregator(&self) -> &FederatedAggregator
Returns a reference to the federated aggregator.
Source
pub fn elastic(&self) -> &ElasticMemoryGuard
pub fn elastic(&self) -> &ElasticMemoryGuard
Returns a reference to the elastic memory guard.
Source
pub fn informal(&self) -> &InformalLearner
pub fn informal(&self) -> &InformalLearner
Returns a reference to the informal learner.
Source
pub fn meta(&self) -> &MetaAdapter
pub fn meta(&self) -> &MetaAdapter
Returns a reference to the meta-adapter.
Source
pub fn distiller(&self) -> &KnowledgeDistiller
pub fn distiller(&self) -> &KnowledgeDistiller
Returns a reference to the knowledge distiller.
Trait Implementations§
Source§impl Clone for LearningEngine
impl Clone for LearningEngine
Source§
fn clone(&self) -> LearningEngine
fn clone(&self) -> LearningEngine
1.0.0 · Source§
fn clone_from(&mut self, source: &Self)
fn clone_from(&mut self, source: &Self)
Performs copy-assignment from source. Read more