use std::env;
use noos::regulator::correction::MIN_CORRECTIONS_FOR_PATTERN as MIN_FOR_PATTERN;
use noos::{Decision, LLMEvent, Regulator, RegulatorState};
#[path = "regulator_common/mod.rs"]
mod regulator_common;
use regulator_common::{call_anthropic, call_ollama};
// Fixed demo user; the regulator keys its learned state by user id.
const USER_ID: &str = "user_123";
// Four phrasings of the same "make auth async" request. The first three drive
// the Phase 1 learning turns; the fourth (index 3) is held back as the Phase 3
// new-session probe — presumably hashing to the same topic cluster as the
// first three (the Phase 3 output confirms or refutes this at runtime).
const USER_MESSAGES: &[&str] = &[
"Make my auth module async",
"Refactor auth to support async",
"Change my auth function to async",
"Add async handling to my auth",
];
// Scripted LLM replies used in "canned" mode and as the live-backend fallback.
// Each one deliberately adds a doc comment — the habit the user keeps correcting.
const CANNED_RESPONSES: &[&str] = &[
"async fn auth() { /* ... */ }\n/// This function authenticates requests.",
"pub async fn auth() -> Result<User, AuthError> { /* ... */ }\n/// Authenticates the incoming user.",
"async fn auth_validate() { /* ... */ }\n/// Params: token (&str). Returns Session.",
];
// One user correction per learning turn; its length bounds the Phase 1 loop.
const CORRECTIONS: &[&str] = &[
"Don't add docstrings to the refactor output please",
"Skip the doc comments — just show the code",
"No new docstrings. I want minimal diffs this time",
];
// Fixed per-turn cost figures reported via LLMEvent::Cost for canned turns.
const PER_TURN_TOKENS_IN: u32 = 25;
const PER_TURN_TOKENS_OUT: u32 = 120;
const PER_TURN_WALLCLOCK_MS: u32 = 900;
fn main() {
let mode = env::args().nth(1).unwrap_or_else(|| "canned".into());
println!("╔════════════════════════════════════════════════════════════════╗");
println!("║ Regulator demo 3 — Procedural correction memory ║");
println!("║ Session 23 / Path 2 flagship (clearest Mem0/Letta contrast) ║");
println!("╚════════════════════════════════════════════════════════════════╝\n");
println!("Mode: {mode}");
println!("User: {USER_ID}\n");
println!("══════ Phase 1: Learning ({} corrections incoming) ══════\n", CORRECTIONS.len());
let mut regulator = Regulator::for_user(USER_ID);
for i in 0..CORRECTIONS.len() {
let turn_num = i + 1;
let user_msg = USER_MESSAGES[i];
let correction = CORRECTIONS[i];
let (response, tokens_in, tokens_out, wallclock_ms, provider_tag) =
match mode.as_str() {
"canned" => canned_turn(CANNED_RESPONSES[i], "canned"),
"ollama" => match call_ollama(user_msg) {
Ok((r, ti, to, wc)) => (r, ti, to, wc, "ollama"),
Err(e) => {
eprintln!("⚠ Turn {turn_num} Ollama failed: {e}; fallback\n");
canned_turn(CANNED_RESPONSES[i], "canned-fallback")
}
},
"anthropic" => match call_anthropic(user_msg) {
Ok((r, ti, to, wc)) => (r, ti, to, wc, "anthropic"),
Err(e) => {
eprintln!("⚠ Turn {turn_num} Anthropic failed: {e}; fallback\n");
canned_turn(CANNED_RESPONSES[i], "canned-fallback")
}
},
other => {
eprintln!("Unknown mode {other:?}. Use canned|ollama|anthropic.");
std::process::exit(2);
}
};
regulator.on_event(LLMEvent::TurnStart {
user_message: user_msg.into(),
});
regulator.on_event(LLMEvent::TurnComplete {
full_response: response.clone(),
});
regulator.on_event(LLMEvent::Cost {
tokens_in,
tokens_out,
wallclock_ms,
provider: Some(provider_tag.into()),
});
regulator.on_event(LLMEvent::UserCorrection {
correction_message: correction.into(),
corrects_last: true,
});
println!("── Turn {turn_num} ──");
println!("User: {user_msg}");
println!("LLM: {response}");
println!("User corrects: {correction}");
let snapshot = regulator.export();
let threshold_reached = !snapshot.correction_patterns.is_empty();
println!(
"Corrections so far on this cluster: {turn_num} / {MIN_FOR_PATTERN} (threshold: {})",
if threshold_reached { "REACHED" } else { "not yet" }
);
println!();
}
println!("══════ Phase 2: Persist → restart ══════\n");
let state: RegulatorState = regulator.export();
println!(
"Exported state has {} correction pattern(s) for {USER_ID}:",
state.correction_patterns.len()
);
for (cluster, pattern) in &state.correction_patterns {
print_pattern(cluster, pattern, " ");
}
let json = match serde_json::to_string(&state) {
Ok(s) => s,
Err(e) => {
eprintln!("Could not serialise RegulatorState: {e}");
std::process::exit(1);
}
};
println!("\nJSON snapshot: {} bytes.", json.len());
println!("(A real app persists this to disk / database / session store.)\n");
drop(regulator);
let restored_state: RegulatorState = match serde_json::from_str(&json) {
Ok(s) => s,
Err(e) => {
eprintln!("Could not deserialise RegulatorState: {e}");
std::process::exit(1);
}
};
let mut regulator = Regulator::import(restored_state);
println!("Regulator restored from snapshot.\n");
println!("══════ Phase 3: Next session — ProceduralWarning pre-generation ══════\n");
let next_msg = USER_MESSAGES[3];
println!("User (new session): {next_msg}");
regulator.on_event(LLMEvent::TurnStart {
user_message: next_msg.into(),
});
match regulator.decide() {
Decision::ProceduralWarning { patterns } => {
println!(
"Regulator (pre-generation): [ProceduralWarning] — {} pattern(s) apply",
patterns.len()
);
for pattern in &patterns {
print_pattern(&pattern.topic_cluster, pattern, " ");
}
println!();
println!("The app / LLM can now read these examples BEFORE generating,");
println!("avoiding the same class of mistake this user has corrected repeatedly.");
println!();
println!("── Path B: prompt injection helper (0.2.2) ────────────");
let injected = regulator.inject_corrections(next_msg);
for line in injected.lines() {
println!(" {line}");
}
println!();
}
Decision::Continue => {
println!("(Unexpected) Regulator returned Continue.");
println!("Check whether the new message hashes to the same cluster as Phase 1.");
}
other => {
println!("(Unexpected) Regulator returned {other:?}");
}
}
println!();
println!("══════ Take-away ══════");
println!("Baseline (Mem0 / Letta / LangChain memory):");
println!(" • store every correction message verbatim;");
println!(" • on every new turn, run a semantic-search query to retrieve");
println!(" similar past corrections;");
println!(" • effectiveness depends on embedding quality + similarity threshold.");
println!();
println!("Regulator:");
println!(" • counts per-cluster corrections structurally (no embedding);");
println!(" • fires ProceduralWarning proactively once MIN threshold trips;");
println!(" • raw example_corrections ride along so the LLM can read intent;");
println!(" • P9b-compliant: pattern_name is opaque (`corrections_on_{{cluster}}`),");
println!(" no English regex parses the correction text for rule extraction.");
println!();
println!("This is the \"extract behavioral patterns, not just store content\"");
println!("differentiation — demonstrably absent from every content-retrieval");
println!("memory system in the Rust or Python LLM ecosystem as of 2026-04.");
}
/// Builds a synthetic LLM turn from a canned response string.
///
/// Returns the same `(response, tokens_in, tokens_out, wallclock_ms, provider)`
/// tuple shape the live backends produce, with fixed per-turn cost figures.
fn canned_turn(
    response: &str,
    provider_tag: &'static str,
) -> (String, u32, u32, u32, &'static str) {
    let body = String::from(response);
    let costs = (PER_TURN_TOKENS_IN, PER_TURN_TOKENS_OUT, PER_TURN_WALLCLOCK_MS);
    (body, costs.0, costs.1, costs.2, provider_tag)
}
/// Pretty-prints one learned correction pattern under the given `indent`:
/// cluster id, opaque pattern name, provenance turns, confidence, and the
/// raw example corrections the pattern was learned from.
fn print_pattern(cluster: &str, pattern: &noos::CorrectionPattern, indent: &str) {
    println!("{indent}cluster: {cluster}");
    println!("{indent} pattern_name: {}", pattern.pattern_name);
    println!("{indent} learned_from_turns: {}", pattern.learned_from_turns);
    println!("{indent} confidence: {:.2}", pattern.confidence);
    let examples = &pattern.example_corrections;
    println!("{indent} example_corrections ({}):", examples.len());
    for example in examples {
        println!("{indent} • {example}");
    }
}