// skilllite_agent/chat_session.rs
1//! Chat session: persistent conversation with transcript and memory.
2//!
3//! Ported from Python `ChatSession`. Directly calls executor module
4//! (same process, no IPC). Handles transcript persistence, auto-compaction,
5//! and memory integration.
6
7use anyhow::Result;
8use std::path::{Path, PathBuf};
9
10use skilllite_executor::{memory as executor_memory, session, transcript};
11
12use skilllite_core::config::env_keys::evolution as evo_env_keys;
13
14use super::agent_loop;
15use super::evolution;
16use super::extensions;
17use super::llm::LlmClient;
18use super::long_text;
19use super::skills::LoadedSkill;
20use super::types::*;
21
22// Compaction threshold/keep are configurable via types::get_compaction_threshold()
23// and types::get_compaction_keep_recent() (SKILLLITE_COMPACTION_* env vars).
24
/// Persistent chat session.
///
/// Owns the conversation state for one `session_key`: transcript persistence,
/// auto-compaction, memory integration, and the A9 periodic-evolution timer.
///
/// Storage layout (matching Python SDK, stored in `~/.skilllite/`):
///   sessions.json            — session metadata
///   transcripts/{key}-{date}.jsonl — append-only transcript
pub struct ChatSession {
    /// Agent configuration (model, API endpoint, workspace, feature flags).
    config: AgentConfig,
    /// Stable key identifying this conversation across process restarts.
    session_key: String,
    /// Created lazily by `ensure_session()`; `None` until the first turn.
    session_id: Option<String>,
    /// Data root for sessions/transcripts/memory — always `~/.skilllite/`.
    /// NOT the user's workspace directory.
    data_root: PathBuf,
    /// Skills made available to the agent loop for this session.
    skills: Vec<LoadedSkill>,
    /// A9: handle for periodic evolution (every N minutes, does not reset on turn).
    periodic_evolution_handle: Option<tokio::task::JoinHandle<()>>,
}
41
42impl ChatSession {
43    /// Full constructor: starts periodic evolution timer. Use for long-lived chat.
44    pub fn new(config: AgentConfig, session_key: &str, skills: Vec<LoadedSkill>) -> Self {
45        let mut session = Self::new_inner(config, session_key, skills);
46        session.start_periodic_evolution_timer();
47        session
48    }
49
    /// For one-off clear-session: no Tokio spawn. Avoids "no reactor running" when run from sync CLI.
    /// Identical to `new` except the periodic evolution timer is never started.
    pub fn new_for_clear(config: AgentConfig, session_key: &str, skills: Vec<LoadedSkill>) -> Self {
        Self::new_inner(config, session_key, skills)
    }
54
55    fn new_inner(config: AgentConfig, session_key: &str, skills: Vec<LoadedSkill>) -> Self {
56        let data_root = skilllite_executor::chat_root();
57        skilllite_evolution::seed::ensure_seed_data(&data_root);
58        Self {
59            config,
60            session_key: session_key.to_string(),
61            session_id: None,
62            data_root,
63            skills,
64            periodic_evolution_handle: None,
65        }
66    }
67
68    /// Ensure session and transcript exist, return session_id.
69    fn ensure_session(&mut self) -> Result<String> {
70        if let Some(ref id) = self.session_id {
71            return Ok(id.clone());
72        }
73
74        // Ensure data_root directory exists
75        if !self.data_root.exists() {
76            skilllite_fs::create_dir_all(&self.data_root)?;
77        }
78
79        let sessions_path = self.data_root.join("sessions.json");
80        let mut store = session::SessionStore::load(&sessions_path)?;
81        let entry = store.create_or_get(&self.session_key);
82        let session_id = entry.session_id.clone();
83        store.save(&sessions_path)?;
84
85        // Ensure transcript
86        let transcripts_dir = self.data_root.join("transcripts");
87        let t_path = transcript::transcript_path_today(&transcripts_dir, &self.session_key);
88        transcript::ensure_session_header(&t_path, &session_id, Some(&self.config.workspace))?;
89
90        self.session_id = Some(session_id.clone());
91        Ok(session_id)
92    }
93
94    /// Read transcript entries and convert to ChatMessages.
95    fn read_history(&self) -> Result<Vec<ChatMessage>> {
96        let transcripts_dir = self.data_root.join("transcripts");
97        let entries = transcript::read_entries_for_session(&transcripts_dir, &self.session_key)?;
98
99        let mut messages = Vec::new();
100        let mut use_from_compaction = false;
101        let mut compaction_summary: Option<String> = None;
102
103        // Check for compaction — if present, use summary + entries after it
104        for entry in entries.iter().rev() {
105            if let transcript::TranscriptEntry::Compaction { summary, .. } = entry {
106                use_from_compaction = true;
107                compaction_summary = summary.clone();
108                break;
109            }
110        }
111
112        if use_from_compaction {
113            // Add compaction summary as system context
114            if let Some(summary) = compaction_summary {
115                messages.push(ChatMessage::system(&format!(
116                    "[Previous conversation summary]\n{}",
117                    summary
118                )));
119            }
120
121            // Find the compaction entry and take entries after it
122            let mut past_compaction = false;
123            for entry in &entries {
124                if let transcript::TranscriptEntry::Compaction { .. } = entry {
125                    past_compaction = true;
126                    continue;
127                }
128                if past_compaction {
129                    if let Some(msg) = transcript_entry_to_message(entry) {
130                        messages.push(msg);
131                    }
132                }
133            }
134        } else {
135            // No compaction, use all message entries
136            for entry in &entries {
137                if let Some(msg) = transcript_entry_to_message(entry) {
138                    messages.push(msg);
139                }
140            }
141        }
142
143        Ok(messages)
144    }
145
    /// Run one conversation turn.
    ///
    /// History is loaded from the persisted transcript; see `run_turn_inner`
    /// for the full per-turn pipeline.
    pub async fn run_turn(
        &mut self,
        user_message: &str,
        event_sink: &mut dyn EventSink,
    ) -> Result<AgentResult> {
        self.run_turn_inner(user_message, event_sink, None).await
    }
154
    /// A13: Run with overridden history (for --resume from checkpoint).
    ///
    /// `history_override` replaces the transcript-derived history for this turn;
    /// the transcript is still appended to as usual.
    pub async fn run_turn_with_history(
        &mut self,
        user_message: &str,
        event_sink: &mut dyn EventSink,
        history_override: Vec<ChatMessage>,
    ) -> Result<AgentResult> {
        self.run_turn_inner(user_message, event_sink, Some(history_override))
            .await
    }
165
    /// Core turn implementation shared by `run_turn` and `run_turn_with_history`.
    ///
    /// Order of operations is deliberate:
    /// 1. classify feedback for the PREVIOUS turn from this message,
    /// 2. load history (or accept the override), run early memory flush near compaction,
    /// 3. compact if over threshold, truncate oversized historical user messages,
    /// 4. inject memory context, compress the current message, persist it,
    /// 5. run the agent loop, then persist plan / tool events / response / decision.
    async fn run_turn_inner(
        &mut self,
        user_message: &str,
        event_sink: &mut dyn EventSink,
        history_override: Option<Vec<ChatMessage>>,
    ) -> Result<AgentResult> {
        let _session_id = self.ensure_session()?;

        // EVO-1: Classify previous turn's user feedback from this message.
        // The feedback is attributed to the PREVIOUS decision, not the current one.
        self.update_previous_feedback(user_message);

        // Read history from transcript (or use override for resume)
        let history = if let Some(h) = history_override {
            h
        } else {
            self.read_history()?
        };
        if !history.is_empty() {
            tracing::debug!(
                session_key = %self.session_key,
                history_len = history.len(),
                "Loaded conversation history from transcript"
            );
        }

        // Early memory flush: run when history approaches compaction (OpenClaw-style).
        // Lower SKILLLITE_MEMORY_FLUSH_THRESHOLD (default 12) = more frequent triggers.
        let flush_threshold = get_memory_flush_threshold();
        let compaction_threshold = get_compaction_threshold();
        if self.config.enable_memory
            && get_memory_flush_enabled()
            && history.len() >= flush_threshold
        {
            let sessions_path = self.data_root.join("sessions.json");
            if let Ok(store) = session::SessionStore::load(&sessions_path) {
                if let Some(entry) = store.get(&self.session_key) {
                    // Flush at most once per upcoming compaction cycle:
                    // skip if we already flushed for compaction N+1.
                    let next_compaction = entry.compaction_count + 1;
                    let need_flush = entry.memory_flush_compaction_count != Some(next_compaction);
                    if need_flush {
                        if let Err(e) = self.run_memory_flush_turn(&history).await {
                            tracing::warn!("Early memory flush failed: {}", e);
                        } else {
                            // Re-load the store: the flush turn may have taken a while
                            // and other writers could have touched sessions.json.
                            if let Ok(mut store) = session::SessionStore::load(&sessions_path) {
                                if let Some(se) = store.sessions.get_mut(&self.session_key) {
                                    se.memory_flush_compaction_count = Some(next_compaction);
                                    se.memory_flush_at = Some(chrono::Utc::now().to_rfc3339());
                                    let _ = store.save(&sessions_path);
                                }
                            }
                            tracing::debug!(
                                "Early memory flush completed (threshold={})",
                                flush_threshold
                            );
                        }
                    }
                }
            }
        }

        // Check if compaction is needed
        let mut history = if history.len() >= compaction_threshold {
            self.compact_history(history).await?
        } else {
            history
        };

        // ── Guard #1: truncate oversized user messages already in history ──────
        // Handles old transcripts written before the compression fix.
        // Sync simple truncation only — no LLM call here, too expensive per-turn.
        {
            let max_chars = get_user_input_max_chars();
            for msg in history.iter_mut() {
                if msg.role == "user" {
                    if let Some(ref content) = msg.content {
                        if content.len() > max_chars {
                            tracing::debug!(
                                len = content.len(),
                                max_chars,
                                "Truncating oversized historical user message"
                            );
                            msg.content = Some(long_text::truncate_content(content, max_chars));
                        }
                    }
                }
            }
        }

        // Build memory context (if enabled) — inject relevant memories as system context
        // Uses original user_message for accurate intent-based vector search.
        if self.config.enable_memory {
            let workspace = std::path::Path::new(&self.config.workspace);
            if let Some(mem_ctx) =
                extensions::build_memory_context(workspace, "default", user_message)
            {
                history.push(ChatMessage::system(&mem_ctx));
            }
        }

        // ── Guard #2: compress current user message if oversized ─────────────
        // Processed BEFORE transcript write so the stored version is already
        // compressed — read_history on next turn gets the compressed version directly.
        let client = LlmClient::new(&self.config.api_base, &self.config.api_key)?;
        let effective_user_message =
            long_text::maybe_process_user_input(&client, &self.config.model, user_message).await;

        // Append (compressed) user message to transcript
        self.append_message("user", &effective_user_message)?;

        event_sink.on_turn_start();

        // Run the agent loop — receives the already-compressed message.
        // Note: update_previous_feedback and build_memory_context above intentionally
        // use the original user_message for accurate intent matching.
        let result = agent_loop::run_agent_loop(
            &self.config,
            history,
            &effective_user_message,
            &self.skills,
            event_sink,
            Some(&self.session_key),
        )
        .await?;

        // Persist task plan to plans/ directory (if non-empty)
        if !result.task_plan.is_empty() {
            if let Err(e) = self.persist_plan(user_message, &result.task_plan) {
                tracing::warn!("Failed to persist task plan: {}", e);
            }
        }

        // Append intermediate tool calls & results to transcript so they survive restart
        self.save_intermediate_events(&result.messages);

        // Append assistant response to transcript
        self.append_message("assistant", &result.response)?;

        // EVO-1: Record execution decision (async-safe, <1ms with WAL).
        // Only record meaningful turns (at least 1 tool call).
        if result.feedback.total_tools >= 1 {
            self.record_decision(&result.feedback);
            // A9: Decision-count trigger — if unprocessed decisions >= threshold, spawn evolution
            self.maybe_trigger_evolution_by_decision_count();
        }

        Ok(result)
    }
313
    /// Graceful shutdown: flush evolution metrics, cancel evolution timers.
    /// The timer handle is `take`n, so a second call only re-runs
    /// `shutdown_evolution` — the task is not aborted twice.
    pub fn shutdown(&mut self) {
        if let Some(handle) = self.periodic_evolution_handle.take() {
            // abort() cancels the periodic task; we do not await its completion.
            handle.abort();
        }
        shutdown_evolution(&self.data_root);
    }
321
322    // ─── A9: Periodic + decision-count evolution triggers ────────────────────
323
324    /// Start periodic evolution timer (every 30 min). Does not reset on user turns.
325    fn start_periodic_evolution_timer(&mut self) {
326        if skilllite_evolution::EvolutionMode::from_env().is_disabled() {
327            return;
328        }
329        let interval_secs: u64 = std::env::var(evo_env_keys::SKILLLITE_EVOLUTION_INTERVAL_SECS)
330            .ok()
331            .and_then(|v| v.parse().ok())
332            .unwrap_or(1800); // 30 min default
333        let data_root = self.data_root.clone();
334        let workspace = self.config.workspace.clone();
335        let api_base = self.config.api_base.clone();
336        let api_key = self.config.api_key.clone();
337        let model = self.config.model.clone();
338        if let Some(handle) = spawn_periodic_evolution(
339            data_root,
340            workspace,
341            api_base,
342            api_key,
343            model,
344            interval_secs,
345        ) {
346            self.periodic_evolution_handle = Some(handle);
347        }
348    }
349
350    /// A9: If unprocessed decisions >= threshold, spawn evolution (runs even when user is active).
351    /// No-op when not inside a Tokio runtime.
352    fn maybe_trigger_evolution_by_decision_count(&self) {
353        if skilllite_evolution::EvolutionMode::from_env().is_disabled() {
354            return;
355        }
356        if tokio::runtime::Handle::try_current().is_err() {
357            return;
358        }
359        let threshold: i64 = std::env::var(evo_env_keys::SKILLLITE_EVOLUTION_DECISION_THRESHOLD)
360            .ok()
361            .and_then(|v| v.parse().ok())
362            .unwrap_or(10);
363        let Ok(conn) = skilllite_evolution::feedback::open_evolution_db(&self.data_root) else {
364            return;
365        };
366        let Ok(count) = skilllite_evolution::feedback::count_unprocessed_decisions(&conn) else {
367            return;
368        };
369        if count >= threshold {
370            tracing::debug!(
371                "Decision-count trigger: {} unprocessed >= {}, spawning evolution",
372                count,
373                threshold
374            );
375            let data_root = self.data_root.clone();
376            let workspace = self.config.workspace.clone();
377            let api_base = self.config.api_base.clone();
378            let api_key = self.config.api_key.clone();
379            let model = self.config.model.clone();
380            let _ = spawn_evolution_once(data_root, workspace, api_base, api_key, model);
381        }
382    }
383
384    // ─── EVO-1: Feedback collection helpers ─────────────────────────────────
385
386    /// Record an execution decision to the evolution DB.
387    fn record_decision(&self, feedback: &ExecutionFeedback) {
388        if let Ok(conn) = skilllite_evolution::feedback::open_evolution_db(&self.data_root) {
389            let input = evolution::execution_feedback_to_decision_input(feedback);
390            if let Err(e) = skilllite_evolution::feedback::insert_decision(
391                &conn,
392                Some(&self.session_key),
393                &input,
394                evolution::to_evolution_feedback(FeedbackSignal::Neutral),
395            ) {
396                tracing::warn!("Failed to record evolution decision: {}", e);
397            }
398            let _ = skilllite_evolution::feedback::update_daily_metrics(&conn);
399        }
400    }
401
402    /// Update the previous decision's feedback signal based on the current user message.
403    fn update_previous_feedback(&self, user_message: &str) {
404        let signal = classify_user_feedback(user_message);
405        if signal == FeedbackSignal::Neutral {
406            return;
407        }
408        if let Ok(conn) = skilllite_evolution::feedback::open_evolution_db(&self.data_root) {
409            if let Err(e) = skilllite_evolution::feedback::update_last_decision_feedback(
410                &conn,
411                &self.session_key,
412                evolution::to_evolution_feedback(signal),
413            ) {
414                tracing::debug!("Failed to update previous feedback: {}", e);
415            }
416        }
417    }
418
419    /// Append a message entry to the transcript.
420    fn append_message(&self, role: &str, content: &str) -> Result<()> {
421        let transcripts_dir = self.data_root.join("transcripts");
422        let t_path = transcript::transcript_path_today(&transcripts_dir, &self.session_key);
423        let entry = transcript::TranscriptEntry::Message {
424            id: uuid::Uuid::new_v4().to_string(),
425            parent_id: None,
426            role: role.to_string(),
427            content: Some(content.to_string()),
428            tool_calls: None,
429        };
430        transcript::append_entry(&t_path, &entry)
431    }
432
433    /// Save tool calls and results from agent loop messages to transcript.
434    /// Skips system/user/final-assistant messages (those are handled separately).
435    fn save_intermediate_events(&self, messages: &[ChatMessage]) {
436        let transcripts_dir = self.data_root.join("transcripts");
437        let t_path = transcript::transcript_path_today(&transcripts_dir, &self.session_key);
438        let ts = chrono::Utc::now().to_rfc3339();
439
440        for msg in messages {
441            if msg.role == "system" || msg.role == "user" {
442                continue;
443            }
444            // Assistant message with tool calls → save each tool call
445            if let Some(ref tool_calls) = msg.tool_calls {
446                for tc in tool_calls {
447                    let entry = transcript::TranscriptEntry::ToolCall {
448                        id: uuid::Uuid::new_v4().to_string(),
449                        parent_id: None,
450                        tool_call_id: tc.id.clone(),
451                        name: tc.function.name.clone(),
452                        arguments: tc.function.arguments.clone(),
453                        timestamp: ts.clone(),
454                    };
455                    if let Err(e) = transcript::append_entry(&t_path, &entry) {
456                        tracing::debug!("Failed to save tool_call entry: {}", e);
457                    }
458                }
459            }
460            // Tool result message → save as ToolResult
461            if msg.role == "tool" {
462                let content = msg.content.as_deref().unwrap_or("");
463                let is_error = content.starts_with("Error:")
464                    || content.starts_with("error:")
465                    || content.starts_with("Command failed");
466                let name = msg.name.as_deref().unwrap_or("").to_string();
467                let tool_call_id = msg.tool_call_id.as_deref().unwrap_or("").to_string();
468                let brief = if content.len() > 2000 {
469                    format!("{}…", &content[..2000])
470                } else {
471                    content.to_string()
472                };
473                let entry = transcript::TranscriptEntry::ToolResult {
474                    id: uuid::Uuid::new_v4().to_string(),
475                    parent_id: None,
476                    tool_call_id,
477                    name,
478                    result: brief,
479                    is_error,
480                    elapsed_ms: None,
481                    timestamp: ts.clone(),
482                };
483                if let Err(e) = transcript::append_entry(&t_path, &entry) {
484                    tracing::debug!("Failed to save tool_result entry: {}", e);
485                }
486            }
487        }
488    }
489
490    /// Persist the task plan to plans/{session_key}-{date}.jsonl (append).
491    /// Each plan is appended, preserving history. OpenClaw-style.
492    fn persist_plan(&self, user_message: &str, tasks: &[super::types::Task]) -> Result<()> {
493        let plans_dir = self.data_root.join("plans");
494
495        let mut steps = Vec::with_capacity(tasks.len());
496        let mut current_step_id: u32 = 0;
497        let mut found_running = false;
498        for task in tasks {
499            let status = if task.completed {
500                "completed"
501            } else if !found_running {
502                found_running = true;
503                current_step_id = task.id;
504                "running"
505            } else {
506                "pending"
507            };
508            steps.push(serde_json::json!({
509                "id": task.id,
510                "description": task.description,
511                "tool_hint": task.tool_hint,
512                "status": status,
513            }));
514        }
515        if current_step_id == 0 {
516            if let Some(last) = tasks.last() {
517                current_step_id = last.id;
518            }
519        }
520
521        let plan_json = serde_json::json!({
522            "session_key": self.session_key,
523            "task": user_message,
524            "steps": steps,
525            "current_step_id": current_step_id,
526            "updated_at": chrono::Utc::now().to_rfc3339(),
527        });
528
529        skilllite_executor::plan::append_plan(&plans_dir, &self.session_key, &plan_json)?;
530        tracing::info!("Task plan appended to plans/{}", self.session_key);
531        Ok(())
532    }
533
    /// Compact old messages: summarize via LLM, write compaction entry.
    /// Before compaction, runs pre-compaction memory flush (OpenClaw-style) when enabled:
    /// a silent agent turn reminds the model to write durable memories to memory/YYYY-MM-DD.md.
    ///
    /// The flush runs at most once per compaction cycle, tracked via
    /// `memory_flush_compaction_count` in sessions.json.
    async fn compact_history(&mut self, history: Vec<ChatMessage>) -> Result<Vec<ChatMessage>> {
        let threshold = get_compaction_threshold();
        // Below threshold: return history unchanged.
        if history.len() < threshold {
            return Ok(history);
        }

        // Pre-compaction memory flush (OpenClaw-style): give model a chance to save to memory
        // before we summarize away the conversation. Runs once per compaction cycle.
        if self.config.enable_memory && get_memory_flush_enabled() {
            let sessions_path = self.data_root.join("sessions.json");
            if let Ok(store) = session::SessionStore::load(&sessions_path) {
                if let Some(entry) = store.get(&self.session_key) {
                    // Compare against the UPCOMING compaction number: skip the
                    // flush if we already ran it for this cycle.
                    let next_compaction_count = entry.compaction_count + 1;
                    let need_flush =
                        entry.memory_flush_compaction_count != Some(next_compaction_count);
                    if need_flush {
                        if let Err(e) = self.run_memory_flush_turn(&history).await {
                            tracing::warn!(
                                "Memory flush failed (continuing with compaction): {}",
                                e
                            );
                        // Re-load the store after the (slow) flush turn before
                        // marking the cycle as flushed — avoids clobbering
                        // concurrent writes to sessions.json.
                        } else if let Ok(mut store) = session::SessionStore::load(&sessions_path) {
                            if let Some(session_entry) = store.sessions.get_mut(&self.session_key) {
                                session_entry.memory_flush_compaction_count =
                                    Some(next_compaction_count);
                                session_entry.memory_flush_at =
                                    Some(chrono::Utc::now().to_rfc3339());
                                let _ = store.save(&sessions_path);
                            }
                        }
                    }
                }
            }
        }

        self.compact_history_inner(history, threshold).await
    }
574
575    /// Run a silent agent turn to remind the model to write durable memories before compaction.
576    /// OpenClaw-style: system + user prompt, model may call memory_write, we don't show/output.
577    async fn run_memory_flush_turn(&self, history: &[ChatMessage]) -> Result<()> {
578        let today = chrono::Local::now().format("%Y-%m-%d").to_string();
579        let memory_flush_reminder = format!(
580            "Session nearing compaction. Store durable memories now. \
581             Use memory_write to save key context (preferences, decisions, file paths, summaries) \
582             to memory/{}.md. Reply with NO_REPLY if nothing to store.",
583            today
584        );
585        let memory_flush_prompt = format!(
586            "Write any lasting notes to memory/{}.md; reply with NO_REPLY if nothing to store.",
587            today
588        );
589
590        let mut flush_messages: Vec<ChatMessage> = history.to_vec();
591        flush_messages.push(ChatMessage::system(&memory_flush_reminder));
592
593        let mut silent_sink = SilentEventSink;
594        tracing::debug!("Running pre-compaction memory flush");
595        let _ = agent_loop::run_agent_loop(
596            &self.config,
597            flush_messages,
598            &memory_flush_prompt,
599            &self.skills,
600            &mut silent_sink,
601            Some(&self.session_key),
602        )
603        .await?;
604        Ok(())
605    }
606
    /// Inner compaction logic. `min_threshold`: use 0 for force_compact to bypass.
    ///
    /// Splits history into old/recent at `len - keep_recent`, asks the LLM to
    /// summarize the old part, records a Compaction entry in the transcript,
    /// bumps the session's compaction count, and returns
    /// `[summary-as-system-message] + recent`. On LLM failure the original
    /// history is returned untouched.
    async fn compact_history_inner(
        &mut self,
        history: Vec<ChatMessage>,
        min_threshold: usize,
    ) -> Result<Vec<ChatMessage>> {
        let keep_count = get_compaction_keep_recent();
        // Nothing to do if under threshold or everything fits the keep window.
        if history.len() < min_threshold || history.len() <= keep_count {
            return Ok(history);
        }

        let split_point = history.len().saturating_sub(keep_count);
        let old_messages = &history[..split_point];
        let recent_messages = &history[split_point..];

        // Build summary of old messages via LLM
        let client = LlmClient::new(&self.config.api_base, &self.config.api_key)?;
        let summary_prompt = format!(
            "Please summarize the following conversation concisely, preserving key context, decisions, and results:\n\n{}",
            old_messages
                .iter()
                .filter_map(|m| {
                    let content = m.content.as_deref().unwrap_or("");
                    if content.is_empty() { None }
                    else { Some(format!("[{}] {}", m.role, content)) }
                })
                .collect::<Vec<_>>()
                .join("\n")
        );

        // Low temperature (0.3) for a stable, factual summary.
        let summary = match client
            .chat_completion(
                &self.config.model,
                &[ChatMessage::user(&summary_prompt)],
                None,
                Some(0.3),
            )
            .await
        {
            Ok(resp) => resp
                .choices
                .first()
                .and_then(|c| c.message.content.clone())
                .unwrap_or_else(|| "[Compaction summary unavailable]".to_string()),
            Err(e) => {
                // Fail open: keep the full history rather than lose context.
                tracing::warn!("Compaction summary failed: {}, keeping all messages", e);
                return Ok(history);
            }
        };

        // Write compaction entry to transcript
        let transcripts_dir = self.data_root.join("transcripts");
        let t_path = transcript::transcript_path_today(&transcripts_dir, &self.session_key);
        let compaction_entry = transcript::TranscriptEntry::Compaction {
            id: uuid::Uuid::new_v4().to_string(),
            parent_id: None,
            first_kept_entry_id: String::new(),
            tokens_before: (old_messages.len() * 100) as u64, // rough estimate
            summary: Some(summary.clone()),
        };
        transcript::append_entry(&t_path, &compaction_entry)?;

        // Update session compaction count
        let sessions_path = self.data_root.join("sessions.json");
        if let Ok(mut store) = session::SessionStore::load(&sessions_path) {
            if let Some(entry) = store.sessions.get_mut(&self.session_key) {
                entry.compaction_count += 1;
                entry.updated_at = chrono::Utc::now().to_rfc3339();
                let _ = store.save(&sessions_path);
            }
        }

        // Return summary + recent messages
        let mut result = Vec::new();
        result.push(ChatMessage::system(&format!(
            "[Previous conversation summary]\n{}",
            summary
        )));
        result.extend(recent_messages.to_vec());

        Ok(result)
    }
689
690    /// Force compaction: summarize history via LLM regardless of threshold.
691    /// Returns true if compaction was performed, false if history was too short.
692    pub async fn force_compact(&mut self) -> Result<bool> {
693        let _ = self.ensure_session()?;
694        let history = self.read_history()?;
695        let keep_count = get_compaction_keep_recent();
696        if history.len() <= keep_count {
697            return Ok(false);
698        }
699        let _ = self.compact_history_inner(history, 0).await?;
700        Ok(true)
701    }
702
703    /// Full clear (OpenClaw-style): summarize to memory, archive transcript, reset counts.
704    /// Used by Assistant /new and `skilllite clear-session`.
705    pub async fn clear_full(&mut self) -> Result<()> {
706        if let Ok(history) = self.read_history() {
707            if !history.is_empty() {
708                let _ = self.summarize_for_memory(&history).await;
709            }
710        }
711        self.archive_transcript()?;
712        self.reset_session_counts()?;
713        self.session_id = None;
714        Ok(())
715    }
716
717    fn archive_transcript(&self) -> Result<()> {
718        let transcripts_dir = self.data_root.join("transcripts");
719        let paths = transcript::list_transcript_files(&transcripts_dir, &self.session_key)?;
720        let timestamp = std::time::SystemTime::now()
721            .duration_since(std::time::UNIX_EPOCH)
722            .map(|d| d.as_secs())
723            .unwrap_or(0);
724        for path in paths {
725            let archived =
726                std::path::PathBuf::from(format!("{}.archived.{}", path.display(), timestamp));
727            skilllite_fs::rename(&path, &archived)?;
728        }
729        Ok(())
730    }
731
732    fn reset_session_counts(&self) -> Result<()> {
733        let sessions_path = self.data_root.join("sessions.json");
734        if let Ok(mut store) = session::SessionStore::load(&sessions_path) {
735            store.reset_compaction_state(&self.session_key);
736            let _ = store.save(&sessions_path);
737        }
738        Ok(())
739    }
740
741    /// Clear session: summarize conversation to memory, then reset (CLI /clear, transcript kept).
742    pub async fn clear(&mut self) -> Result<()> {
743        // If we have a session, summarize the conversation before clearing
744        if self.session_id.is_some() {
745            if let Ok(history) = self.read_history() {
746                if !history.is_empty() {
747                    let _ = self.summarize_for_memory(&history).await;
748                }
749            }
750        }
751        self.session_id = None;
752        Ok(())
753    }
754
    /// Summarize conversation history and write to memory.
    /// Called before clearing a session to preserve key context.
    ///
    /// Best-effort pipeline (a failure at any step logs and returns `Ok`):
    ///   1. LLM-summarize the transcript (skipped when no API key is set).
    ///   2. Append the summary to `memory/YYYY-MM-DD.md` under `data_root`.
    ///   3. Add the file to the BM25 index so memory search can find it.
    ///   4. Append a compaction entry to today's transcript so later
    ///      `read_history` calls surface the summary (CLI /clear case).
    async fn summarize_for_memory(&self, history: &[ChatMessage]) -> Result<()> {
        // clear-session should still finish quickly without an API key.
        if self.config.api_key.trim().is_empty() {
            tracing::info!("Skipping memory summary on clear: OPENAI_API_KEY is empty");
            return Ok(());
        }

        let client = LlmClient::new(&self.config.api_base, &self.config.api_key)?;

        // Flatten the history into "[role] content" lines; messages with empty
        // content (e.g. tool-call-only assistant turns) are dropped.
        let conversation: Vec<String> = history
            .iter()
            .filter_map(|m| {
                let content = m.content.as_deref().unwrap_or("");
                if content.is_empty() {
                    None
                } else {
                    Some(format!("[{}] {}", m.role, content))
                }
            })
            .collect();

        if conversation.is_empty() {
            return Ok(());
        }

        let summary_prompt = format!(
            "Please summarize this conversation concisely for long-term memory. \
             Preserve key decisions, results, file paths, and important context:\n\n{}",
            conversation.join("\n")
        );

        // Low temperature (0.3) for a stable, factual summary. An LLM error is
        // non-fatal: warn and return Ok so session clearing still completes.
        let summary = match client
            .chat_completion(
                &self.config.model,
                &[ChatMessage::user(&summary_prompt)],
                None,
                Some(0.3),
            )
            .await
        {
            Ok(resp) => resp
                .choices
                .first()
                .and_then(|c| c.message.content.clone())
                .unwrap_or_default(),
            Err(e) => {
                tracing::warn!("Memory summarization failed: {}", e);
                return Ok(());
            }
        };

        if summary.is_empty() {
            return Ok(());
        }

        // "---" separator plus a dated heading keeps multiple summaries per
        // day readable when appended to the same file.
        let memory_entry = format!(
            "\n\n---\n\n## [Session cleared — {}]\n\n{}",
            chrono::Local::now().format("%Y-%m-%d %H:%M"),
            summary
        );

        // Write to memory/YYYY-MM-DD.md (durable, searchable)
        let today = chrono::Local::now().format("%Y-%m-%d").to_string();
        let memory_dir = self.data_root.join("memory");
        skilllite_fs::create_dir_all(&memory_dir)?;
        let memory_path = memory_dir.join(format!("{}.md", today));
        // Append to today's file if present; otherwise strip the leading
        // separator so a fresh file does not start with blank lines.
        let final_content = if memory_path.exists() {
            format!(
                "{}\n{}",
                skilllite_fs::read_file(&memory_path).unwrap_or_default(),
                memory_entry
            )
        } else {
            memory_entry.trim_start().to_string()
        };
        skilllite_fs::write_file(&memory_path, &final_content)?;

        // Index for BM25 search
        let rel_path = format!("{}.md", today);
        let idx_path = executor_memory::index_path(&self.data_root, &self.session_key);
        if let Some(parent) = idx_path.parent() {
            skilllite_fs::create_dir_all(parent)?;
        }
        // Indexing is best-effort: a SQLite failure must not abort the clear.
        if let Ok(conn) = rusqlite::Connection::open(&idx_path) {
            let _ = executor_memory::ensure_index(&conn)
                .and_then(|_| executor_memory::index_file(&conn, &rel_path, &final_content));
        }

        tracing::info!("Session memory summary written to memory/{}", rel_path);

        // Also append compaction to transcript so read_history returns summary (CLI /clear case)
        let transcripts_dir = self.data_root.join("transcripts");
        let t_path = transcript::transcript_path_today(&transcripts_dir, &self.session_key);
        let entry = transcript::TranscriptEntry::Compaction {
            id: uuid::Uuid::new_v4().to_string(),
            parent_id: None,
            first_kept_entry_id: String::new(),
            tokens_before: 0,
            summary: Some(format!("[Session cleared — memory summary]\n{}", summary)),
        };
        let _ = transcript::append_entry(&t_path, &entry);

        Ok(())
    }
861}
862
863// ─── A9: Evolution triggers (periodic + decision-count) ─────────────────────
864
865/// Run evolution once and emit summary. Shared by periodic and decision-count triggers.
866/// workspace: project root for skill evolution (skills written to workspace/.skills/_evolved/).
867async fn run_evolution_and_emit_summary(
868    data_root: &Path,
869    workspace: &str,
870    api_base: &str,
871    api_key: &str,
872    model: &str,
873) {
874    let skills_root = if workspace.is_empty() {
875        None
876    } else {
877        let ws = std::path::Path::new(workspace);
878        let sr = if ws.is_absolute() {
879            ws.join(".skills")
880        } else {
881            std::env::current_dir()
882                .unwrap_or_else(|_| std::path::PathBuf::from("."))
883                .join(workspace)
884                .join(".skills")
885        };
886        Some(sr)
887    };
888    let llm = match LlmClient::new(api_base, api_key) {
889        Ok(c) => c,
890        Err(e) => {
891            tracing::error!("LLM client build failed for evolution: {}", e);
892            return;
893        }
894    };
895    let adapter = evolution::EvolutionLlmAdapter { llm: &llm };
896    let skills_root_ref = skills_root.as_deref();
897    match skilllite_evolution::run_evolution(
898        data_root,
899        skills_root_ref,
900        &adapter,
901        api_base,
902        api_key,
903        model,
904        false,
905    )
906    .await
907    {
908        Ok(skilllite_evolution::EvolutionRunResult::Completed(Some(txn_id))) => {
909            tracing::info!("Evolution completed: {}", txn_id);
910            if let Ok(conn) = skilllite_evolution::feedback::open_evolution_db(data_root) {
911                let changes = skilllite_evolution::query_changes_by_txn(&conn, &txn_id);
912                for msg in &skilllite_evolution::format_evolution_changes(&changes) {
913                    eprintln!("{}", msg);
914                }
915                let _ = skilllite_evolution::check_auto_rollback(&conn, data_root);
916                // 若本次进化写入了记忆知识,将其加入 memory 索引,以便 memory_search / build_memory_context 能搜到
917                if changes.iter().any(|(t, _)| t == "memory_knowledge_added") {
918                    let _ = extensions::index_evolution_knowledge(data_root, "default");
919                }
920            }
921        }
922        Ok(skilllite_evolution::EvolutionRunResult::SkippedBusy) => {
923            tracing::warn!("Evolution skipped: another run in progress");
924        }
925        Ok(skilllite_evolution::EvolutionRunResult::NoScope)
926        | Ok(skilllite_evolution::EvolutionRunResult::Completed(None)) => {
927            tracing::debug!("Evolution: nothing to evolve");
928        }
929        Err(e) => tracing::warn!("Evolution failed: {}", e),
930    }
931}
932
933/// A9: Periodic evolution trigger — runs every N seconds, even when user is active.
934/// Returns None when not inside a Tokio runtime (e.g. clear-session CLI), so no panic.
935pub fn spawn_periodic_evolution(
936    data_root: PathBuf,
937    workspace: String,
938    api_base: String,
939    api_key: String,
940    model: String,
941    interval_secs: u64,
942) -> Option<tokio::task::JoinHandle<()>> {
943    let _handle = tokio::runtime::Handle::try_current().ok()?;
944    Some(_handle.spawn(async move {
945        if skilllite_evolution::EvolutionMode::from_env().is_disabled() {
946            tracing::debug!("Evolution disabled, skipping periodic trigger");
947            return;
948        }
949        let interval = std::time::Duration::from_secs(interval_secs);
950        loop {
951            tokio::time::sleep(interval).await;
952            tracing::debug!(
953                "Periodic evolution trigger fired (every {}s)",
954                interval_secs
955            );
956            run_evolution_and_emit_summary(
957                &data_root,
958                workspace.as_str(),
959                &api_base,
960                &api_key,
961                &model,
962            )
963            .await;
964        }
965    }))
966}
967
968/// A9: Decision-count trigger — spawn evolution once when threshold is met.
969/// No-op when not inside a Tokio runtime (returns None).
970pub fn spawn_evolution_once(
971    data_root: PathBuf,
972    workspace: String,
973    api_base: String,
974    api_key: String,
975    model: String,
976) -> Option<tokio::task::JoinHandle<()>> {
977    let handle = tokio::runtime::Handle::try_current().ok()?;
978    Some(handle.spawn(async move {
979        if skilllite_evolution::EvolutionMode::from_env().is_disabled() {
980            return;
981        }
982        tracing::debug!("Decision-count evolution trigger fired");
983        run_evolution_and_emit_summary(&data_root, workspace.as_str(), &api_base, &api_key, &model)
984            .await;
985    }))
986}
987
988/// Shutdown hook: flush metrics, no LLM calls. Called before process exit.
989pub fn shutdown_evolution(data_root: &std::path::Path) {
990    skilllite_evolution::on_shutdown(data_root);
991}
992
993/// Convert a transcript entry to a ChatMessage.
994fn transcript_entry_to_message(entry: &transcript::TranscriptEntry) -> Option<ChatMessage> {
995    match entry {
996        transcript::TranscriptEntry::Message { role, content, .. } => Some(ChatMessage {
997            role: role.clone(),
998            content: content.clone(),
999            tool_calls: None,
1000            tool_call_id: None,
1001            name: None,
1002        }),
1003        transcript::TranscriptEntry::Compaction { summary, .. } => summary
1004            .as_ref()
1005            .map(|s| ChatMessage::system(&format!("[Previous conversation summary]\n{}", s))),
1006        _ => None,
1007    }
1008}