use crate::graph::{FactMetadata, MemKind, MemNode, Provenance};
use crate::trajectory::{Trajectory, TrajectoryOutcome};
use car_ir::json_extract::extract_json_object;
use chrono::Utc;
use serde::{Deserialize, Serialize};
/// A single actionable learning extracted from a session, produced either by
/// the substring heuristics in this module (`heuristic_reflect`,
/// `reflect_from_trajectories`) or by parsing an LLM reflection response
/// (`parse_reflection_response`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReflectionInsight {
    /// What was observed (e.g. the user's correcting message, truncated).
    pub fact: String,
    /// What to do differently (or keep doing) next time.
    pub recommendation: String,
    /// One of "correction" | "anti_pattern" | "preference" | "friction"
    /// (free-form string; LLM responses default to "correction" when absent).
    pub category: String,
    /// "high" | "medium" | "low" (free-form string, not an enum).
    pub confidence: String,
    /// Free-form labels such as "user_feedback" or "tool:<name>".
    pub tags: Vec<String>,
    /// References back to the raw evidence, e.g. "conv:<id>" or
    /// "trajectory:<id>:event:<idx>". Defaults to empty when missing from
    /// serialized input.
    #[serde(default)]
    pub trace_refs: Vec<String>,
}
/// Summary counters for one reflection pass.
// NOTE(review): only declared here — nothing in this file populates these
// fields; confirm the caller fills them in after ingesting insights.
#[derive(Debug, Clone, Default)]
pub struct ReflectionReport {
    /// Number of "correction" insights detected.
    pub corrections_found: usize,
    /// Number of "preference" insights detected.
    pub preferences_found: usize,
    /// Number of "friction" insights detected.
    pub friction_points_found: usize,
    /// Number of insights actually persisted to the memory graph.
    pub insights_ingested: usize,
}
/// Phrases signalling that the user is correcting or redirecting the
/// assistant.
///
/// These are substring-matched against the *lowercased* turn text in
/// `heuristic_reflect`, so every entry MUST be lowercase — the previous
/// `"I said"` / `"I meant"` entries contained an uppercase `I` and could
/// never match.
const CORRECTION_MARKERS: &[&str] = &[
    "no, ",
    "no not",
    "don't do",
    "stop doing",
    "that's wrong",
    "actually,",
    "instead,",
    "not that",
    "i said",
    "i meant",
    "please don't",
    "undo that",
    "revert",
    "that's not what",
    "wrong approach",
    "bad idea",
];
/// Phrases signalling an explicit user preference or positive reinforcement
/// of the current approach. Substring-matched against lowercased turn text
/// in `heuristic_reflect`, so all entries are lowercase.
const PREFERENCE_MARKERS: &[&str] = &[
    "i prefer",
    "always use",
    "never use",
    "from now on",
    "in the future",
    "remember that",
    "keep doing",
    // The entries below are positive reinforcement; `heuristic_reflect`
    // special-cases them with a "Continue this approach." recommendation.
    "good job",
    "yes exactly",
    "perfect",
    "that's right",
];
/// Phrases signalling that the user is repeating themselves (friction).
/// Substring-matched against lowercased turn text in `heuristic_reflect`.
// NOTE(review): "again" is matched as a bare substring and will also fire on
// words like "against" or "bargain" — confirm the false-positive rate is
// acceptable before tightening to " again" (which would miss sentence-start
// uses).
const FRICTION_MARKERS: &[&str] = &[
    "again",
    "like i said",
    "i already told you",
    "for the third time",
    "as i mentioned",
    "same as before",
    "we discussed this",
    "i keep having to",
];
/// Scans user conversation turns for correction, preference, and friction
/// signals using substring heuristics (no model call).
///
/// Only `MemKind::Conversation` nodes that look like user turns — value
/// starting with `"user:"` (case-insensitive) or key equal to `"user"` —
/// are examined. Each matching turn can yield up to one insight per
/// category, carrying `trace_refs` back to the originating turn (and, for
/// corrections, the preceding user turn it likely reacts to).
pub fn heuristic_reflect(conversations: &[&MemNode]) -> Vec<ReflectionInsight> {
    let user_turns: Vec<&MemNode> = conversations
        .iter()
        .filter(|n| n.kind == MemKind::Conversation)
        .filter(|n| {
            let lower = n.value.to_lowercase();
            lower.starts_with("user:") || n.key == "user"
        })
        .copied()
        .collect();
    let mut insights = Vec::new();
    for (i, turn) in user_turns.iter().enumerate() {
        let lower = turn.value.to_lowercase();
        let cur_ref = turn_ref(turn);
        let prev = if i > 0 { Some(user_turns[i - 1]) } else { None };
        if CORRECTION_MARKERS.iter().any(|m| lower.contains(m)) {
            // Record what the correction was reacting to when a previous user
            // turn exists. Context is appended conditionally so that a first
            // turn no longer produces a fact with a dangling trailing space.
            let mut fact = format!("User correction: {}", truncate(&turn.value, 200));
            let mut refs = vec![cur_ref.clone()];
            if let Some(p) = prev {
                fact.push_str(&format!(" (following: {})", truncate(&p.value, 100)));
                refs.push(turn_ref(p));
            }
            insights.push(ReflectionInsight {
                fact,
                recommendation: extract_recommendation(&turn.value),
                category: "correction".to_string(),
                confidence: "high".to_string(),
                tags: vec!["user_feedback".to_string(), "correction".to_string()],
                trace_refs: refs,
            });
        }
        if PREFERENCE_MARKERS.iter().any(|m| lower.contains(m)) {
            // Positive reinforcement means "keep doing this"; anything else is
            // treated as a directive and mined for a concrete recommendation.
            let is_positive = [
                "good job",
                "yes exactly",
                "perfect",
                "that's right",
                "keep doing",
            ]
            .iter()
            .any(|m| lower.contains(m));
            insights.push(ReflectionInsight {
                fact: format!("User preference: {}", truncate(&turn.value, 200)),
                recommendation: if is_positive {
                    "Continue this approach.".to_string()
                } else {
                    extract_recommendation(&turn.value)
                },
                category: "preference".to_string(),
                confidence: "high".to_string(),
                tags: vec!["user_feedback".to_string(), "preference".to_string()],
                trace_refs: vec![cur_ref.clone()],
            });
        }
        if FRICTION_MARKERS.iter().any(|m| lower.contains(m)) {
            insights.push(ReflectionInsight {
                fact: format!("Friction point: {}", truncate(&turn.value, 200)),
                recommendation: "Automate or remember this to avoid user repetition.".to_string(),
                category: "friction".to_string(),
                confidence: "medium".to_string(),
                tags: vec!["user_feedback".to_string(), "friction".to_string()],
                trace_refs: vec![cur_ref.clone()],
            });
        }
    }
    insights
}
/// Builds a stable trace reference for a conversation node: the fact id when
/// one exists, otherwise the node key plus creation timestamp in millis.
fn turn_ref(node: &MemNode) -> String {
    match &node.fact_id {
        Some(fid) => format!("conv:{}", fid),
        None => format!("conv:{}@{}", node.key, node.created_at.timestamp_millis()),
    }
}
/// Derives anti-pattern insights from failed trajectories.
///
/// Every `action_failed` event inside a `Failed` or `ReplanExhausted`
/// trajectory becomes one high-confidence insight whose `trace_refs` point
/// back at the exact event (`trajectory:<id>:event:<idx>`). Successful
/// trajectories contribute nothing.
pub fn reflect_from_trajectories(trajs: &[Trajectory]) -> Vec<ReflectionInsight> {
    let mut insights = Vec::new();
    for t in trajs {
        // Guard clause: only mine trajectories that ended badly.
        if !matches!(
            t.outcome,
            TrajectoryOutcome::Failed | TrajectoryOutcome::ReplanExhausted
        ) {
            continue;
        }
        let failed_events = t
            .events
            .iter()
            .enumerate()
            .filter(|(_, ev)| ev.kind == "action_failed");
        for (idx, ev) in failed_events {
            let tool = ev.tool.clone().unwrap_or_else(|| "<unknown>".into());
            let err = ev.data.get("error").and_then(|v| v.as_str()).unwrap_or("");
            let event_ref = format!("trajectory:{}:event:{}", t.proposal_id, idx);
            let fact = if err.is_empty() {
                format!("Tool `{}` failed in proposal {}", tool, t.proposal_id)
            } else {
                format!(
                    "Tool `{}` failed in proposal {}: {}",
                    tool,
                    t.proposal_id,
                    truncate(err, 200)
                )
            };
            insights.push(ReflectionInsight {
                fact,
                recommendation: format!(
                    "Inspect raw trace ({}) before retrying `{}`.",
                    event_ref, tool
                ),
                category: "anti_pattern".to_string(),
                confidence: "high".to_string(),
                tags: vec!["tool_failure".to_string(), format!("tool:{}", tool)],
                trace_refs: vec![event_ref],
            });
        }
    }
    insights
}
/// Builds the LLM reflection prompt from the session's conversation turns.
///
/// The model is instructed to reply with a single JSON object that
/// `parse_reflection_response` can decode.
pub fn reflection_prompt(conversations: &[&MemNode]) -> String {
    // Collect conversation turns verbatim, borrowing rather than cloning.
    let mut turns: Vec<&str> = Vec::new();
    for node in conversations {
        if node.kind == MemKind::Conversation {
            turns.push(node.value.as_str());
        }
    }
    format!(
        r#"Analyze this conversation session for learning opportunities.
## Conversation
{turns}
Look for:
1. **Corrections**: Where the user corrected or redirected the assistant
2. **Anti-patterns**: Approaches that failed or were rejected
3. **Preferences**: User-stated preferences about how to work
4. **Friction**: Things the user had to repeat or explain multiple times
For each finding, extract an actionable insight.
Respond with ONLY a JSON object:
```json
{{
"insights": [
{{
"fact": "What was observed",
"recommendation": "What to do differently",
"category": "correction|anti_pattern|preference|friction",
"confidence": "high|medium|low",
"tags": ["tag1", "tag2"]
}}
]
}}
```"#,
        turns = turns.join("\n"),
    )
}
/// Parses an LLM reflection response into insights.
///
/// Extracts the first JSON object from `response`, decodes it, and reads the
/// `"insights"` array. Items missing `fact` or `recommendation` are dropped;
/// `category` defaults to "correction", `confidence` to "medium", and the
/// list fields to empty. Any parse failure yields an empty vector.
pub fn parse_reflection_response(response: &str) -> Vec<ReflectionInsight> {
    let Some(json_str) = extract_json_object(response) else {
        return Vec::new();
    };
    let Ok(parsed) = serde_json::from_str::<serde_json::Value>(&json_str) else {
        return Vec::new();
    };
    let Some(items) = parsed.get("insights").and_then(|i| i.as_array()) else {
        return Vec::new();
    };
    items.iter().filter_map(parse_insight).collect()
}

/// Decodes one element of the `"insights"` array; `None` when the required
/// `fact` / `recommendation` string fields are missing.
fn parse_insight(item: &serde_json::Value) -> Option<ReflectionInsight> {
    // Shared reader for the two optional string-array fields.
    let string_list = |key: &str| -> Vec<String> {
        item.get(key)
            .and_then(|t| t.as_array())
            .map(|a| {
                a.iter()
                    .filter_map(|v| v.as_str().map(String::from))
                    .collect()
            })
            .unwrap_or_default()
    };
    Some(ReflectionInsight {
        fact: item.get("fact")?.as_str()?.to_string(),
        recommendation: item.get("recommendation")?.as_str()?.to_string(),
        category: item
            .get("category")
            .and_then(|c| c.as_str())
            .unwrap_or("correction")
            .to_string(),
        confidence: item
            .get("confidence")
            .and_then(|c| c.as_str())
            .unwrap_or("medium")
            .to_string(),
        tags: string_list("tags"),
        trace_refs: string_list("trace_refs"),
    })
}
pub fn insight_metadata(insight: &ReflectionInsight) -> FactMetadata {
let now = Utc::now();
let mut provenance = vec![Provenance {
source: "reflection".to_string(),
reference: "conversation analysis".to_string(),
date: Some(now),
}];
for r in &insight.trace_refs {
provenance.push(Provenance {
source: "trace".to_string(),
reference: r.clone(),
date: Some(now),
});
}
FactMetadata {
confidence: insight.confidence.clone(),
provenance,
affected_files: Vec::new(),
tags: insight.tags.clone(),
category: insight.category.clone(),
usage_count: 0,
helpful_count: 0,
outdated_reports: 0,
tenant_id: None,
}
}
/// Truncates `s` to at most `max` bytes without splitting a UTF-8 code point.
///
/// Returns `s` unchanged when it already fits; otherwise cuts at the largest
/// char boundary not exceeding `max`, so a multi-byte character straddling
/// the limit is dropped entirely rather than split.
fn truncate(s: &str, max: usize) -> &str {
    if s.len() <= max {
        return s;
    }
    // Walk back from `max` to the nearest char boundary. Equivalent to the
    // unstable `str::floor_char_boundary`, but compiles on stable Rust; a
    // boundary is always reached within at most 3 steps (UTF-8 max width 4).
    let mut end = max;
    while !s.is_char_boundary(end) {
        end -= 1;
    }
    &s[..end]
}
/// Pulls an actionable phrase out of a correction-style message by scanning
/// for directive markers ("instead,", "use ", ...) and returning the text
/// from the marker to the end of that sentence (capped at 150 bytes when no
/// period follows). Falls back to a generic instruction when nothing matches.
fn extract_recommendation(text: &str) -> String {
    let lower = text.to_lowercase();
    for marker in &["instead,", "actually,", "please ", "use ", "don't "] {
        if let Some(pos) = lower.find(marker) {
            // `pos` is a byte offset into the lowercased copy. Lowercasing can
            // change byte lengths for non-ASCII text, so the offset may be out
            // of range or mid-character in `text`; previously this sliced
            // unconditionally and could panic. Skip the marker instead.
            // (`is_char_boundary` also returns false for out-of-range indices.)
            if !text.is_char_boundary(pos) {
                continue;
            }
            let remainder = &text[pos..];
            let mut end = remainder
                .find('.')
                .unwrap_or_else(|| remainder.len().min(150));
            // The 150-byte cap can land inside a multi-byte character; back up
            // to the nearest char boundary so the slice below cannot panic.
            while !remainder.is_char_boundary(end) {
                end -= 1;
            }
            return remainder[..end].trim().to_string();
        }
    }
    "Apply the user's correction.".to_string()
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::graph::{ContentType, MemKind};

    /// Builds a conversation node in the `"speaker: text"` shape that
    /// `heuristic_reflect` recognizes (key carries the speaker, so nodes
    /// built with speaker "user" pass the user-turn filter).
    fn conv(speaker: &str, text: &str) -> MemNode {
        MemNode {
            kind: MemKind::Conversation,
            layer: 3,
            key: speaker.to_string(),
            value: format!("{}: {}", speaker, text),
            fact_id: None,
            scope: "global".to_string(),
            authority: "peer".to_string(),
            is_constraint: false,
            created_at: Utc::now(),
            expires_at: None,
            content_type: ContentType::NaturalLanguage,
            metadata: FactMetadata::default(),
        }
    }

    // "No, ..." should trip a CORRECTION_MARKER; assistant turns are ignored.
    #[test]
    fn detects_corrections() {
        let turns = vec![
            conv("user", "Add a REST endpoint"),
            conv("assistant", "I'll add a GraphQL mutation"),
            conv("user", "No, not GraphQL. I said REST endpoint."),
        ];
        let refs: Vec<&MemNode> = turns.iter().collect();
        let insights = heuristic_reflect(&refs);
        assert!(!insights.is_empty());
        assert!(insights.iter().any(|i| i.category == "correction"));
    }

    // "i prefer" is a PREFERENCE_MARKER.
    #[test]
    fn detects_preferences() {
        let turns = vec![conv(
            "user",
            "I prefer using snake_case for all function names.",
        )];
        let refs: Vec<&MemNode> = turns.iter().collect();
        let insights = heuristic_reflect(&refs);
        assert!(insights.iter().any(|i| i.category == "preference"));
    }

    // "as i mentioned" is a FRICTION_MARKER.
    #[test]
    fn detects_friction() {
        let turns = vec![conv("user", "As I mentioned, use PostgreSQL not SQLite.")];
        let refs: Vec<&MemNode> = turns.iter().collect();
        let insights = heuristic_reflect(&refs);
        assert!(insights.iter().any(|i| i.category == "friction"));
    }

    // Marker-free small talk must produce no insights at all.
    #[test]
    fn no_insights_from_normal_conversation() {
        let turns = vec![
            conv("user", "What's the weather like?"),
            conv("assistant", "I can't check the weather."),
        ];
        let refs: Vec<&MemNode> = turns.iter().collect();
        let insights = heuristic_reflect(&refs);
        assert!(insights.is_empty());
    }

    // Heuristic insights must always point back at the originating turn via
    // a "conv:"-prefixed trace ref (fact_id is None here, so the key@millis
    // fallback of `turn_ref` is exercised).
    #[test]
    fn heuristic_attaches_trace_refs() {
        let turns = vec![conv(
            "user",
            "I prefer using snake_case for all function names.",
        )];
        let refs: Vec<&MemNode> = turns.iter().collect();
        let insights = heuristic_reflect(&refs);
        assert!(!insights.is_empty());
        assert!(
            insights.iter().all(|i| !i.trace_refs.is_empty()),
            "every insight must preserve at least one trace_ref"
        );
        assert!(insights[0].trace_refs[0].starts_with("conv:"));
    }

    // Only the `action_failed` event (index 1) yields an insight, and its
    // trace ref must encode both proposal id and event index; the error
    // string from the event payload must surface in the fact text.
    #[test]
    fn reflect_from_failed_trajectories_preserves_event_refs() {
        use crate::{TraceEvent, Trajectory, TrajectoryOutcome};
        let traj = Trajectory {
            proposal_id: "p-42".into(),
            source: "test".into(),
            action_count: 2,
            events: vec![
                TraceEvent {
                    kind: "action_succeeded".into(),
                    action_id: Some("a1".into()),
                    tool: Some("search".into()),
                    data: serde_json::json!({}),
                    ..Default::default()
                },
                TraceEvent {
                    kind: "action_failed".into(),
                    action_id: Some("a2".into()),
                    tool: Some("write_file".into()),
                    data: serde_json::json!({"error": "permission denied"}),
                    ..Default::default()
                },
            ],
            outcome: TrajectoryOutcome::Failed,
            timestamp: Utc::now(),
            duration_ms: 120.0,
            replan_attempts: 0,
        };
        let insights = reflect_from_trajectories(&[traj]);
        assert_eq!(insights.len(), 1);
        assert_eq!(insights[0].category, "anti_pattern");
        assert_eq!(
            insights[0].trace_refs,
            vec!["trajectory:p-42:event:1".to_string()]
        );
        assert!(insights[0].fact.contains("permission denied"));
    }

    // Successful trajectories are skipped by the outcome guard.
    #[test]
    fn reflect_from_successful_trajectories_emits_nothing() {
        use crate::{Trajectory, TrajectoryOutcome};
        let traj = Trajectory {
            proposal_id: "p-ok".into(),
            source: "test".into(),
            action_count: 0,
            events: vec![],
            outcome: TrajectoryOutcome::Success,
            timestamp: Utc::now(),
            duration_ms: 10.0,
            replan_attempts: 0,
        };
        assert!(reflect_from_trajectories(&[traj]).is_empty());
    }

    // Provenance layout: entry 0 is the fixed "reflection" record, followed
    // by one "trace" record per trace_ref, in order.
    #[test]
    fn insight_metadata_persists_trace_refs_as_provenance() {
        let insight = ReflectionInsight {
            fact: "x".into(),
            recommendation: "y".into(),
            category: "correction".into(),
            confidence: "high".into(),
            tags: vec![],
            trace_refs: vec!["conv:abc@123".into()],
        };
        let meta = insight_metadata(&insight);
        assert_eq!(meta.provenance.len(), 2);
        assert_eq!(meta.provenance[1].source, "trace");
        assert_eq!(meta.provenance[1].reference, "conv:abc@123");
    }

    // The JSON object is fenced in a markdown code block, exercising
    // `extract_json_object` before deserialization.
    #[test]
    fn parse_reflection_json() {
        let response = r#"```json
{"insights": [{"fact": "User prefers Rust", "recommendation": "Use Rust", "category": "preference", "confidence": "high", "tags": ["language"]}]}
```"#;
        let insights = parse_reflection_response(response);
        assert_eq!(insights.len(), 1);
        assert_eq!(insights[0].category, "preference");
    }

    // Plain ASCII: cut lands exactly on a byte/char boundary.
    #[test]
    fn truncate_ascii() {
        assert_eq!(truncate("hello world", 5), "hello");
    }

    // Strings already within the limit are returned unchanged.
    #[test]
    fn truncate_no_op() {
        assert_eq!(truncate("hi", 10), "hi");
    }

    // 4-byte emoji: byte limit 5 must floor to 4, keeping one whole emoji.
    #[test]
    fn truncate_emoji() {
        let s = "\u{1F600}\u{1F601}\u{1F602}"; let result = truncate(s, 5);
        assert_eq!(result, "\u{1F600}");
    }

    // 2-byte "é" straddles the 4-byte limit and must be dropped entirely.
    #[test]
    fn truncate_multibyte_boundary() {
        let s = "caf\u{00E9}"; let result = truncate(s, 4);
        assert_eq!(result, "caf");
    }
}