// codetether_agent/session/mod.rs

1//! Session management
2//!
3//! Sessions track the conversation history and state for agent interactions.
4
5use crate::agent::ToolUse;
6use crate::audit::{AuditCategory, AuditOutcome, try_audit_log};
7use crate::provider::{Message, Usage};
8use crate::tool::ToolRegistry;
9use anyhow::Result;
10use chrono::{DateTime, Utc};
11use serde::{Deserialize, Serialize};
12use serde_json::json;
13use std::path::PathBuf;
14use std::sync::Arc;
15use tokio::fs;
16use uuid::Uuid;
17
18#[cfg(feature = "functiongemma")]
19use crate::cognition::tool_router::{ToolCallRouter, ToolRouterConfig};
20
/// True for tools that need a human in the loop ("question") and therefore
/// must be blocked inside the non-interactive session loop.
fn is_interactive_tool(tool_name: &str) -> bool {
    tool_name == "question"
}
24
/// Pick a default provider deterministically from the available list.
///
/// Walks a fixed preference order and returns the first configured match;
/// falls back to the first available provider, or `None` when the list is
/// empty.
fn choose_default_provider<'a>(providers: &'a [&'a str]) -> Option<&'a str> {
    // Keep Google as an explicit option, but don't default to it first because
    // some environments expose API keys that are not valid for ChatCompletions.
    const PREFERRED: [&str; 9] = [
        "zai",
        "openai",
        "github-copilot",
        "anthropic",
        "minimax",
        "openrouter",
        "novita",
        "moonshotai",
        "google",
    ];
    PREFERRED
        .iter()
        .find_map(|name| providers.iter().copied().find(|p| p == name))
        .or_else(|| providers.first().copied())
}
46
/// Whether this model id should be driven at temperature 1.0.
///
/// Matches case-insensitively on substrings so both short aliases and
/// provider-qualified ids (e.g. "moonshotai/kimi-k2.5") are caught.
fn prefers_temperature_one(model: &str) -> bool {
    let id = model.to_ascii_lowercase();
    ["kimi-k2", "glm-", "minimax"]
        .iter()
        .any(|needle| id.contains(needle))
}
51
/// A conversation session
///
/// Tracks the full message history, tool activity, and token usage for one
/// agent interaction. Serialized to disk as JSON via `save`/`load`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Session {
    /// Unique session identifier (UUID v4 string).
    pub id: String,
    /// Human-readable title; generated on the first user prompt when unset.
    pub title: Option<String>,
    /// Creation timestamp (UTC).
    pub created_at: DateTime<Utc>,
    /// Timestamp of the most recent mutation (UTC); bumped by `add_message`.
    pub updated_at: DateTime<Utc>,
    /// Full conversation history: user, assistant, and tool messages.
    pub messages: Vec<Message>,
    /// Tool invocations associated with this session.
    pub tool_uses: Vec<ToolUse>,
    /// Accumulated token usage for the session.
    pub usage: Usage,
    /// Name of the agent driving this session (defaults to "build").
    pub agent: String,
    /// Auxiliary settings: workspace directory, model selection, sharing.
    pub metadata: SessionMetadata,
}
65
/// Auxiliary per-session settings.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct SessionMetadata {
    /// Workspace directory the session was created in; used to scope
    /// `last_for_directory` lookups.
    pub directory: Option<PathBuf>,
    /// Model selector: "provider/model", a bare provider name, or a bare
    /// model name (parsed by `parse_model_string`).
    pub model: Option<String>,
    /// Whether the session has been shared.
    pub shared: bool,
    /// Public share URL, when the session is shared.
    pub share_url: Option<String>,
}
73
74impl Session {
75    fn default_model_for_provider(provider: &str) -> String {
76        match provider {
77            "moonshotai" => "kimi-k2.5".to_string(),
78            "anthropic" => "claude-sonnet-4-20250514".to_string(),
79            "minimax" => "MiniMax-M2.5".to_string(),
80            "openai" => "gpt-4o".to_string(),
81            "google" => "gemini-2.5-pro".to_string(),
82            "zhipuai" | "zai" => "glm-5".to_string(),
83            // OpenRouter uses model IDs like "z-ai/glm-5".
84            "openrouter" => "z-ai/glm-5".to_string(),
85            "novita" => "qwen/qwen3-coder-next".to_string(),
86            "github-copilot" | "github-copilot-enterprise" => "gpt-5-mini".to_string(),
87            _ => "glm-5".to_string(),
88        }
89    }
90
91    /// Create a new session
92    pub async fn new() -> Result<Self> {
93        let id = Uuid::new_v4().to_string();
94        let now = Utc::now();
95
96        Ok(Self {
97            id,
98            title: None,
99            created_at: now,
100            updated_at: now,
101            messages: Vec::new(),
102            tool_uses: Vec::new(),
103            usage: Usage::default(),
104            agent: "build".to_string(),
105            metadata: SessionMetadata {
106                directory: Some(std::env::current_dir()?),
107                ..Default::default()
108            },
109        })
110    }
111
112    /// Load an existing session
113    pub async fn load(id: &str) -> Result<Self> {
114        let path = Self::session_path(id)?;
115        let content = fs::read_to_string(&path).await?;
116        let session: Session = serde_json::from_str(&content)?;
117        Ok(session)
118    }
119
120    /// Load the last session, optionally scoped to a workspace directory
121    ///
122    /// When `workspace` is Some, only considers sessions created in that directory.
123    /// When None, returns the most recent session globally (legacy behavior).
124    pub async fn last_for_directory(workspace: Option<&std::path::Path>) -> Result<Self> {
125        let sessions_dir = Self::sessions_dir()?;
126
127        if !sessions_dir.exists() {
128            anyhow::bail!("No sessions found");
129        }
130
131        let mut entries: Vec<tokio::fs::DirEntry> = Vec::new();
132        let mut read_dir = fs::read_dir(&sessions_dir).await?;
133        while let Some(entry) = read_dir.next_entry().await? {
134            entries.push(entry);
135        }
136
137        if entries.is_empty() {
138            anyhow::bail!("No sessions found");
139        }
140
141        // Sort by modification time (most recent first)
142        // Use std::fs::metadata since we can't await in sort_by_key
143        entries.sort_by_key(|e| {
144            std::cmp::Reverse(
145                std::fs::metadata(e.path())
146                    .ok()
147                    .and_then(|m| m.modified().ok())
148                    .unwrap_or(std::time::SystemTime::UNIX_EPOCH),
149            )
150        });
151
152        let canonical_workspace =
153            workspace.map(|w| w.canonicalize().unwrap_or_else(|_| w.to_path_buf()));
154
155        for entry in &entries {
156            let content: String = fs::read_to_string(entry.path()).await?;
157            if let Ok(session) = serde_json::from_str::<Session>(&content) {
158                // If workspace scoping requested, filter by directory
159                if let Some(ref ws) = canonical_workspace {
160                    if let Some(ref dir) = session.metadata.directory {
161                        let canonical_dir = dir.canonicalize().unwrap_or_else(|_| dir.clone());
162                        if &canonical_dir == ws {
163                            return Ok(session);
164                        }
165                    }
166                    continue;
167                }
168                return Ok(session);
169            }
170        }
171
172        anyhow::bail!("No sessions found")
173    }
174
175    /// Load the last session (global, unscoped — legacy compatibility)
176    pub async fn last() -> Result<Self> {
177        Self::last_for_directory(None).await
178    }
179
180    /// Save the session to disk
181    pub async fn save(&self) -> Result<()> {
182        let path = Self::session_path(&self.id)?;
183
184        if let Some(parent) = path.parent() {
185            fs::create_dir_all(parent).await?;
186        }
187
188        let content = serde_json::to_string_pretty(self)?;
189        fs::write(&path, content).await?;
190
191        Ok(())
192    }
193
194    /// Add a message to the session
195    pub fn add_message(&mut self, message: Message) {
196        self.messages.push(message);
197        self.updated_at = Utc::now();
198    }
199
    /// Execute a prompt and get the result
    ///
    /// Runs a full agentic loop: selects a provider and model, appends the
    /// user message, then repeatedly calls the provider with the conversation
    /// plus tool definitions, executing any tool calls the model emits and
    /// feeding results back, until the model stops calling tools or
    /// `max_steps` is reached. The session is saved to disk before returning.
    ///
    /// # Errors
    /// Fails when no provider is configured, a provider call fails, title
    /// generation fails, or the session cannot be saved.
    pub async fn prompt(&mut self, message: &str) -> Result<SessionResult> {
        use crate::provider::{
            CompletionRequest, ContentPart, ProviderRegistry, Role, parse_model_string,
        };

        // Load providers from Vault
        let registry = ProviderRegistry::from_vault().await?;

        let providers = registry.list();
        if providers.is_empty() {
            anyhow::bail!(
                "No providers available. Configure API keys in HashiCorp Vault (for Copilot use `codetether auth copilot`)."
            );
        }

        tracing::info!("Available providers: {:?}", providers);

        // Parse model string (format: "provider/model", "provider", or just "model")
        let (provider_name, model_id) = if let Some(ref model_str) = self.metadata.model {
            let (prov, model) = parse_model_string(model_str);
            // Normalize the legacy "zhipuai" alias to "zai".
            let prov = prov.map(|p| if p == "zhipuai" { "zai" } else { p });
            if prov.is_some() {
                // Format: provider/model
                (prov.map(|s| s.to_string()), model.to_string())
            } else if providers.contains(&model) {
                // Format: just provider name (e.g., "novita")
                (Some(model.to_string()), String::new())
            } else {
                // Format: just model name
                (None, model.to_string())
            }
        } else {
            (None, String::new())
        };

        // Determine which provider to use with deterministic fallback ordering.
        let selected_provider = provider_name
            .as_deref()
            .filter(|p| providers.contains(p))
            .or_else(|| choose_default_provider(providers.as_slice()))
            .ok_or_else(|| anyhow::anyhow!("No providers available"))?;

        let provider = registry
            .get(selected_provider)
            .ok_or_else(|| anyhow::anyhow!("Provider {} not found", selected_provider))?;

        // Add user message to session using add_message
        self.add_message(Message {
            role: Role::User,
            content: vec![ContentPart::Text {
                text: message.to_string(),
            }],
        });

        // Generate title if this is the first user message and no title exists
        if self.title.is_none() {
            self.generate_title().await?;
        }

        // Determine model to use: explicit model id wins, otherwise the
        // provider's default.
        let model = if !model_id.is_empty() {
            model_id
        } else {
            Self::default_model_for_provider(selected_provider)
        };

        // Create tool registry with all available tools, excluding tools
        // that require human interaction (blocked in this loop).
        let tool_registry = ToolRegistry::with_provider_arc(Arc::clone(&provider), model.clone());
        let tool_definitions: Vec<_> = tool_registry
            .definitions()
            .into_iter()
            .filter(|tool| !is_interactive_tool(&tool.name))
            .collect();

        // Some models behave best with temperature=1.0.
        // - Kimi K2.x requires temperature=1.0
        // - GLM (Z.AI) defaults to temperature 1.0 for coding workflows
        // Use contains() to match both short aliases and provider-qualified IDs.
        let temperature = if prefers_temperature_one(&model) {
            Some(1.0)
        } else {
            Some(0.7)
        };

        tracing::info!("Using model: {} via provider: {}", model, selected_provider);
        tracing::info!("Available tools: {}", tool_definitions.len());

        // All current providers support native tool calling.  Hardcode to
        // true so we skip the expensive list_models() API call on every message.
        #[cfg(feature = "functiongemma")]
        let model_supports_tools = true;

        // Build system prompt with AGENTS.md
        let cwd = self
            .metadata
            .directory
            .clone()
            .unwrap_or_else(|| std::env::current_dir().unwrap_or_default());
        let system_prompt = crate::agent::builtin::build_system_prompt(&cwd);

        // Run agentic loop with tool execution; max_steps bounds runaway
        // tool-call loops.
        let max_steps = 50;
        let mut final_output = String::new();

        // Initialise the FunctionGemma tool-call router (feature-gated, opt-in).
        #[cfg(feature = "functiongemma")]
        let tool_router: Option<ToolCallRouter> = {
            let cfg = ToolRouterConfig::from_env();
            match ToolCallRouter::from_config(&cfg) {
                Ok(r) => r,
                Err(e) => {
                    tracing::warn!(error = %e, "FunctionGemma tool router init failed; disabled");
                    None
                }
            }
        };

        for step in 1..=max_steps {
            tracing::info!(step = step, "Agent step starting");

            // Build messages with system prompt first
            let mut messages = vec![Message {
                role: Role::System,
                content: vec![ContentPart::Text {
                    text: system_prompt.clone(),
                }],
            }];
            messages.extend(self.messages.clone());

            // Create completion request with tools
            let request = CompletionRequest {
                messages,
                tools: tool_definitions.clone(),
                model: model.clone(),
                temperature,
                top_p: None,
                max_tokens: Some(8192),
                stop: Vec::new(),
            };

            // Call the provider
            let response = provider.complete(request).await?;

            // Optionally route text-only responses through FunctionGemma to
            // produce structured tool calls.  Skipped when the model natively
            // supports tool calling (which all current providers do).
            #[cfg(feature = "functiongemma")]
            let response = if let Some(ref router) = tool_router {
                router
                    .maybe_reformat(response, &tool_definitions, model_supports_tools)
                    .await
            } else {
                response
            };

            // Record token usage
            crate::telemetry::TOKEN_USAGE.record_model_usage(
                &model,
                response.usage.prompt_tokens as u64,
                response.usage.completion_tokens as u64,
            );

            // Extract tool calls from response
            let tool_calls: Vec<(String, String, serde_json::Value)> = response
                .message
                .content
                .iter()
                .filter_map(|part| {
                    if let ContentPart::ToolCall {
                        id,
                        name,
                        arguments,
                    } = part
                    {
                        // Parse arguments JSON string into Value; malformed
                        // arguments degrade to an empty object rather than
                        // failing the step.
                        let args: serde_json::Value =
                            serde_json::from_str(arguments).unwrap_or(serde_json::json!({}));
                        Some((id.clone(), name.clone(), args))
                    } else {
                        None
                    }
                })
                .collect();

            // Collect text output
            for part in &response.message.content {
                if let ContentPart::Text { text } = part {
                    if !text.is_empty() {
                        final_output.push_str(text);
                        final_output.push('\n');
                    }
                }
            }

            // If no tool calls, we're done
            if tool_calls.is_empty() {
                self.add_message(response.message.clone());
                break;
            }

            // Add assistant message with tool calls
            self.add_message(response.message.clone());

            tracing::info!(
                step = step,
                num_tools = tool_calls.len(),
                "Executing tool calls"
            );

            // Execute each tool call
            for (tool_id, tool_name, tool_input) in tool_calls {
                tracing::info!(tool = %tool_name, tool_id = %tool_id, "Executing tool");

                // Interactive tools are filtered out of tool_definitions, but
                // the model may still hallucinate a call; block it explicitly.
                if is_interactive_tool(&tool_name) {
                    tracing::warn!(tool = %tool_name, "Blocking interactive tool in session loop");
                    self.add_message(Message {
                        role: Role::Tool,
                        content: vec![ContentPart::ToolResult {
                            tool_call_id: tool_id,
                            content: "Error: Interactive tool 'question' is disabled in this interface. Ask the user directly in assistant text.".to_string(),
                        }],
                    });
                    continue;
                }

                // Get and execute the tool; every outcome (success, failure,
                // unknown tool) is audit-logged and fed back to the model.
                let exec_start = std::time::Instant::now();
                let content = if let Some(tool) = tool_registry.get(&tool_name) {
                    match tool.execute(tool_input.clone()).await {
                        Ok(result) => {
                            let duration_ms = exec_start.elapsed().as_millis() as u64;
                            tracing::info!(tool = %tool_name, success = result.success, "Tool execution completed");
                            if let Some(audit) = try_audit_log() {
                                audit.log(
                                    AuditCategory::ToolExecution,
                                    format!("tool:{}", tool_name),
                                    if result.success { AuditOutcome::Success } else { AuditOutcome::Failure },
                                    None,
                                    Some(json!({ "duration_ms": duration_ms, "output_len": result.output.len() })),
                                ).await;
                            }
                            result.output
                        }
                        Err(e) => {
                            let duration_ms = exec_start.elapsed().as_millis() as u64;
                            tracing::warn!(tool = %tool_name, error = %e, "Tool execution failed");
                            if let Some(audit) = try_audit_log() {
                                audit.log(
                                    AuditCategory::ToolExecution,
                                    format!("tool:{}", tool_name),
                                    AuditOutcome::Failure,
                                    None,
                                    Some(json!({ "duration_ms": duration_ms, "error": e.to_string() })),
                                ).await;
                            }
                            format!("Error: {}", e)
                        }
                    }
                } else {
                    tracing::warn!(tool = %tool_name, "Tool not found");
                    if let Some(audit) = try_audit_log() {
                        audit
                            .log(
                                AuditCategory::ToolExecution,
                                format!("tool:{}", tool_name),
                                AuditOutcome::Failure,
                                None,
                                Some(json!({ "error": "unknown_tool" })),
                            )
                            .await;
                    }
                    format!("Error: Unknown tool '{}'", tool_name)
                };

                // Add tool result message
                self.add_message(Message {
                    role: Role::Tool,
                    content: vec![ContentPart::ToolResult {
                        tool_call_id: tool_id,
                        content,
                    }],
                });
            }
        }

        // Save session after each prompt to persist messages
        self.save().await?;

        Ok(SessionResult {
            text: final_output.trim().to_string(),
            session_id: self.id.clone(),
        })
    }
494
495    /// Process a user message with real-time event streaming for UI updates.
496    /// Events are sent through the provided channel as tool calls execute.
497    ///
498    /// Accepts a pre-loaded `ProviderRegistry` to avoid re-fetching secrets
499    /// from Vault on every message (which was the primary TUI performance
500    /// bottleneck).
501    pub async fn prompt_with_events(
502        &mut self,
503        message: &str,
504        event_tx: tokio::sync::mpsc::Sender<SessionEvent>,
505        registry: std::sync::Arc<crate::provider::ProviderRegistry>,
506    ) -> Result<SessionResult> {
507        use crate::provider::{CompletionRequest, ContentPart, Role, parse_model_string};
508
509        let _ = event_tx.send(SessionEvent::Thinking).await;
510
511        let providers = registry.list();
512        if providers.is_empty() {
513            anyhow::bail!(
514                "No providers available. Configure API keys in HashiCorp Vault (for Copilot use `codetether auth copilot`)."
515            );
516        }
517        tracing::info!("Available providers: {:?}", providers);
518
519        // Parse model string (format: "provider/model", "provider", or just "model")
520        let (provider_name, model_id) = if let Some(ref model_str) = self.metadata.model {
521            let (prov, model) = parse_model_string(model_str);
522            let prov = prov.map(|p| if p == "zhipuai" { "zai" } else { p });
523            if prov.is_some() {
524                (prov.map(|s| s.to_string()), model.to_string())
525            } else if providers.contains(&model) {
526                (Some(model.to_string()), String::new())
527            } else {
528                (None, model.to_string())
529            }
530        } else {
531            (None, String::new())
532        };
533
534        // Determine which provider to use with deterministic fallback ordering.
535        let selected_provider = provider_name
536            .as_deref()
537            .filter(|p| providers.contains(p))
538            .or_else(|| choose_default_provider(providers.as_slice()))
539            .ok_or_else(|| anyhow::anyhow!("No providers available"))?;
540
541        let provider = registry
542            .get(selected_provider)
543            .ok_or_else(|| anyhow::anyhow!("Provider {} not found", selected_provider))?;
544
545        // Add user message
546        self.add_message(Message {
547            role: Role::User,
548            content: vec![ContentPart::Text {
549                text: message.to_string(),
550            }],
551        });
552
553        // Generate title if needed
554        if self.title.is_none() {
555            self.generate_title().await?;
556        }
557
558        // Determine model
559        let model = if !model_id.is_empty() {
560            model_id
561        } else {
562            Self::default_model_for_provider(selected_provider)
563        };
564
565        // Create tool registry
566        let tool_registry = ToolRegistry::with_provider_arc(Arc::clone(&provider), model.clone());
567        let tool_definitions: Vec<_> = tool_registry
568            .definitions()
569            .into_iter()
570            .filter(|tool| !is_interactive_tool(&tool.name))
571            .collect();
572
573        let temperature = if prefers_temperature_one(&model) {
574            Some(1.0)
575        } else {
576            Some(0.7)
577        };
578
579        tracing::info!("Using model: {} via provider: {}", model, selected_provider);
580        tracing::info!("Available tools: {}", tool_definitions.len());
581
582        // All current providers support native tool calling.  Hardcode to
583        // true so we skip the expensive list_models() API call on every message.
584        #[cfg(feature = "functiongemma")]
585        let model_supports_tools = true;
586
587        // Build system prompt
588        let cwd = std::env::var("PWD")
589            .map(std::path::PathBuf::from)
590            .unwrap_or_else(|_| std::env::current_dir().unwrap_or_default());
591        let system_prompt = crate::agent::builtin::build_system_prompt(&cwd);
592
593        let mut final_output = String::new();
594        let max_steps = 50;
595
596        // Initialise the FunctionGemma tool-call router (feature-gated, opt-in).
597        #[cfg(feature = "functiongemma")]
598        let tool_router: Option<ToolCallRouter> = {
599            let cfg = ToolRouterConfig::from_env();
600            match ToolCallRouter::from_config(&cfg) {
601                Ok(r) => r,
602                Err(e) => {
603                    tracing::warn!(error = %e, "FunctionGemma tool router init failed; disabled");
604                    None
605                }
606            }
607        };
608
609        for step in 1..=max_steps {
610            tracing::info!(step = step, "Agent step starting");
611            let _ = event_tx.send(SessionEvent::Thinking).await;
612
613            // Build messages with system prompt first
614            let mut messages = vec![Message {
615                role: Role::System,
616                content: vec![ContentPart::Text {
617                    text: system_prompt.clone(),
618                }],
619            }];
620            messages.extend(self.messages.clone());
621
622            let request = CompletionRequest {
623                messages,
624                tools: tool_definitions.clone(),
625                model: model.clone(),
626                temperature,
627                top_p: None,
628                max_tokens: Some(8192),
629                stop: Vec::new(),
630            };
631
632            let llm_start = std::time::Instant::now();
633            let response = provider.complete(request).await?;
634            let llm_duration_ms = llm_start.elapsed().as_millis() as u64;
635
636            // Optionally route text-only responses through FunctionGemma to
637            // produce structured tool calls.  Skipped for native tool-calling models.
638            #[cfg(feature = "functiongemma")]
639            let response = if let Some(ref router) = tool_router {
640                router
641                    .maybe_reformat(response, &tool_definitions, model_supports_tools)
642                    .await
643            } else {
644                response
645            };
646
647            crate::telemetry::TOKEN_USAGE.record_model_usage(
648                &model,
649                response.usage.prompt_tokens as u64,
650                response.usage.completion_tokens as u64,
651            );
652
653            // Emit usage report for TUI display
654            let _ = event_tx
655                .send(SessionEvent::UsageReport {
656                    prompt_tokens: response.usage.prompt_tokens,
657                    completion_tokens: response.usage.completion_tokens,
658                    duration_ms: llm_duration_ms,
659                    model: model.clone(),
660                })
661                .await;
662
663            // Extract tool calls
664            let tool_calls: Vec<(String, String, serde_json::Value)> = response
665                .message
666                .content
667                .iter()
668                .filter_map(|part| {
669                    if let ContentPart::ToolCall {
670                        id,
671                        name,
672                        arguments,
673                    } = part
674                    {
675                        let args: serde_json::Value =
676                            serde_json::from_str(arguments).unwrap_or(serde_json::json!({}));
677                        Some((id.clone(), name.clone(), args))
678                    } else {
679                        None
680                    }
681                })
682                .collect();
683
684            // Collect text output for this step
685            // Collect thinking and text output
686            let mut thinking_text = String::new();
687            let mut step_text = String::new();
688            for part in &response.message.content {
689                match part {
690                    ContentPart::Thinking { text } => {
691                        if !text.is_empty() {
692                            thinking_text.push_str(text);
693                            thinking_text.push('\n');
694                        }
695                    }
696                    ContentPart::Text { text } => {
697                        if !text.is_empty() {
698                            step_text.push_str(text);
699                            step_text.push('\n');
700                        }
701                    }
702                    _ => {}
703                }
704            }
705
706            // Emit thinking output first
707            if !thinking_text.trim().is_empty() {
708                let _ = event_tx
709                    .send(SessionEvent::ThinkingComplete(
710                        thinking_text.trim().to_string(),
711                    ))
712                    .await;
713            }
714
715            // Emit this step's text BEFORE tool calls so it appears in correct
716            // chronological order in the TUI chat display.
717            if !step_text.trim().is_empty() {
718                let trimmed = step_text.trim().to_string();
719                let _ = event_tx
720                    .send(SessionEvent::TextChunk(trimmed.clone()))
721                    .await;
722                let _ = event_tx.send(SessionEvent::TextComplete(trimmed)).await;
723                final_output.push_str(&step_text);
724            }
725
726            if tool_calls.is_empty() {
727                self.add_message(response.message.clone());
728                break;
729            }
730
731            self.add_message(response.message.clone());
732
733            tracing::info!(
734                step = step,
735                num_tools = tool_calls.len(),
736                "Executing tool calls"
737            );
738
739            // Execute each tool call with events
740            for (tool_id, tool_name, tool_input) in tool_calls {
741                let args_str = serde_json::to_string(&tool_input).unwrap_or_default();
742                let _ = event_tx
743                    .send(SessionEvent::ToolCallStart {
744                        name: tool_name.clone(),
745                        arguments: args_str,
746                    })
747                    .await;
748
749                tracing::info!(tool = %tool_name, tool_id = %tool_id, "Executing tool");
750
751                if is_interactive_tool(&tool_name) {
752                    tracing::warn!(tool = %tool_name, "Blocking interactive tool in session loop");
753                    let content = "Error: Interactive tool 'question' is disabled in this interface. Ask the user directly in assistant text.".to_string();
754                    let _ = event_tx
755                        .send(SessionEvent::ToolCallComplete {
756                            name: tool_name.clone(),
757                            output: content.clone(),
758                            success: false,
759                        })
760                        .await;
761                    self.add_message(Message {
762                        role: Role::Tool,
763                        content: vec![ContentPart::ToolResult {
764                            tool_call_id: tool_id,
765                            content,
766                        }],
767                    });
768                    continue;
769                }
770
771                let exec_start = std::time::Instant::now();
772                let (content, success) = if let Some(tool) = tool_registry.get(&tool_name) {
773                    match tool.execute(tool_input.clone()).await {
774                        Ok(result) => {
775                            let duration_ms = exec_start.elapsed().as_millis() as u64;
776                            tracing::info!(tool = %tool_name, success = result.success, "Tool execution completed");
777                            if let Some(audit) = try_audit_log() {
778                                audit.log(
779                                    AuditCategory::ToolExecution,
780                                    format!("tool:{}", tool_name),
781                                    if result.success { AuditOutcome::Success } else { AuditOutcome::Failure },
782                                    None,
783                                    Some(json!({ "duration_ms": duration_ms, "output_len": result.output.len() })),
784                                ).await;
785                            }
786                            (result.output, result.success)
787                        }
788                        Err(e) => {
789                            let duration_ms = exec_start.elapsed().as_millis() as u64;
790                            tracing::warn!(tool = %tool_name, error = %e, "Tool execution failed");
791                            if let Some(audit) = try_audit_log() {
792                                audit.log(
793                                    AuditCategory::ToolExecution,
794                                    format!("tool:{}", tool_name),
795                                    AuditOutcome::Failure,
796                                    None,
797                                    Some(json!({ "duration_ms": duration_ms, "error": e.to_string() })),
798                                ).await;
799                            }
800                            (format!("Error: {}", e), false)
801                        }
802                    }
803                } else {
804                    tracing::warn!(tool = %tool_name, "Tool not found");
805                    if let Some(audit) = try_audit_log() {
806                        audit
807                            .log(
808                                AuditCategory::ToolExecution,
809                                format!("tool:{}", tool_name),
810                                AuditOutcome::Failure,
811                                None,
812                                Some(json!({ "error": "unknown_tool" })),
813                            )
814                            .await;
815                    }
816                    (format!("Error: Unknown tool '{}'", tool_name), false)
817                };
818
819                let _ = event_tx
820                    .send(SessionEvent::ToolCallComplete {
821                        name: tool_name.clone(),
822                        output: content.clone(),
823                        success,
824                    })
825                    .await;
826
827                self.add_message(Message {
828                    role: Role::Tool,
829                    content: vec![ContentPart::ToolResult {
830                        tool_call_id: tool_id,
831                        content,
832                    }],
833                });
834            }
835        }
836
837        self.save().await?;
838
839        // Text was already sent per-step via TextComplete events.
840        // Send updated session state so the caller can sync back.
841        let _ = event_tx.send(SessionEvent::SessionSync(self.clone())).await;
842        let _ = event_tx.send(SessionEvent::Done).await;
843
844        Ok(SessionResult {
845            text: final_output.trim().to_string(),
846            session_id: self.id.clone(),
847        })
848    }
849
850    /// Generate a title for the session based on the first message
851    /// Only sets title if not already set (for initial title generation)
852    pub async fn generate_title(&mut self) -> Result<()> {
853        if self.title.is_some() {
854            return Ok(());
855        }
856
857        // Get first user message
858        let first_message = self
859            .messages
860            .iter()
861            .find(|m| m.role == crate::provider::Role::User);
862
863        if let Some(msg) = first_message {
864            let text: String = msg
865                .content
866                .iter()
867                .filter_map(|p| match p {
868                    crate::provider::ContentPart::Text { text } => Some(text.clone()),
869                    _ => None,
870                })
871                .collect::<Vec<_>>()
872                .join(" ");
873
874            // Truncate to reasonable length
875            self.title = Some(truncate_with_ellipsis(&text, 47));
876        }
877
878        Ok(())
879    }
880
881    /// Regenerate the title based on the first message, even if already set
882    /// Use this for on-demand title updates or after context changes
883    pub async fn regenerate_title(&mut self) -> Result<()> {
884        // Get first user message
885        let first_message = self
886            .messages
887            .iter()
888            .find(|m| m.role == crate::provider::Role::User);
889
890        if let Some(msg) = first_message {
891            let text: String = msg
892                .content
893                .iter()
894                .filter_map(|p| match p {
895                    crate::provider::ContentPart::Text { text } => Some(text.clone()),
896                    _ => None,
897                })
898                .collect::<Vec<_>>()
899                .join(" ");
900
901            // Truncate to reasonable length
902            self.title = Some(truncate_with_ellipsis(&text, 47));
903        }
904
905        Ok(())
906    }
907
908    /// Set a custom title for the session
909    pub fn set_title(&mut self, title: impl Into<String>) {
910        self.title = Some(title.into());
911        self.updated_at = Utc::now();
912    }
913
914    /// Clear the title, allowing it to be regenerated
915    pub fn clear_title(&mut self) {
916        self.title = None;
917        self.updated_at = Utc::now();
918    }
919
920    /// Handle context change - updates metadata and optionally regenerates title
921    /// Call this when the session context changes (e.g., directory change, model change)
922    pub async fn on_context_change(&mut self, regenerate_title: bool) -> Result<()> {
923        self.updated_at = Utc::now();
924
925        if regenerate_title {
926            self.regenerate_title().await?;
927        }
928
929        Ok(())
930    }
931
932    /// Import an OpenCode session into CodeTether
933    ///
934    /// Loads messages and parts from OpenCode storage and converts them
935    /// into a CodeTether session that can be resumed.
936    pub async fn from_opencode(
937        session_id: &str,
938        storage: &crate::opencode::OpenCodeStorage,
939    ) -> Result<Self> {
940        let oc_session = storage.load_session(session_id).await?;
941        let oc_messages = storage.load_messages(session_id).await?;
942
943        let mut messages_with_parts = Vec::new();
944        for msg in oc_messages {
945            let parts = storage.load_parts(&msg.id).await?;
946            messages_with_parts.push((msg, parts));
947        }
948
949        crate::opencode::convert::to_codetether_session(&oc_session, messages_with_parts).await
950    }
951
952    /// Try to load the last OpenCode session for a directory as a fallback
953    pub async fn last_opencode_for_directory(dir: &std::path::Path) -> Result<Self> {
954        let storage = crate::opencode::OpenCodeStorage::new()
955            .ok_or_else(|| anyhow::anyhow!("OpenCode storage directory not found"))?;
956
957        if !storage.exists() {
958            anyhow::bail!("OpenCode storage does not exist");
959        }
960
961        let oc_session = storage.last_session_for_directory(dir).await?;
962        Self::from_opencode(&oc_session.id, &storage).await
963    }
964
965    /// Delete a session by ID
966    pub async fn delete(id: &str) -> Result<()> {
967        let path = Self::session_path(id)?;
968        if path.exists() {
969            tokio::fs::remove_file(&path).await?;
970        }
971        Ok(())
972    }
973
974    /// Get the sessions directory
975    fn sessions_dir() -> Result<PathBuf> {
976        crate::config::Config::data_dir()
977            .map(|d| d.join("sessions"))
978            .ok_or_else(|| anyhow::anyhow!("Could not determine data directory"))
979    }
980
981    /// Get the path for a session file
982    fn session_path(id: &str) -> Result<PathBuf> {
983        Ok(Self::sessions_dir()?.join(format!("{}.json", id)))
984    }
985}
986
/// Result from a session prompt
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SessionResult {
    /// Final assistant text, trimmed of surrounding whitespace.
    pub text: String,
    /// ID of the session the prompt ran in, usable for follow-up prompts.
    pub session_id: String,
}
993
/// Events emitted during session processing for real-time UI updates
#[derive(Debug, Clone)]
pub enum SessionEvent {
    /// Agent is thinking/processing
    Thinking,
    /// Tool call started; `arguments` is the JSON-serialized tool input
    ToolCallStart { name: String, arguments: String },
    /// Tool call completed with result; `success` is false for blocked,
    /// unknown, or failed tools
    ToolCallComplete {
        name: String,
        output: String,
        success: bool,
    },
    /// Partial text output (for streaming)
    TextChunk(String),
    /// Final text output
    TextComplete(String),
    /// Model thinking/reasoning output
    ThinkingComplete(String),
    /// Token usage for one LLM round-trip
    UsageReport {
        prompt_tokens: usize,
        completion_tokens: usize,
        duration_ms: u64,
        model: String,
    },
    /// Updated session state for caller to sync back
    SessionSync(Session),
    /// Processing complete
    Done,
    /// Error occurred
    Error(String),
}
1027
1028/// List all sessions
1029pub async fn list_sessions() -> Result<Vec<SessionSummary>> {
1030    let sessions_dir = crate::config::Config::data_dir()
1031        .map(|d| d.join("sessions"))
1032        .ok_or_else(|| anyhow::anyhow!("Could not determine data directory"))?;
1033
1034    if !sessions_dir.exists() {
1035        return Ok(Vec::new());
1036    }
1037
1038    let mut summaries = Vec::new();
1039    let mut entries = fs::read_dir(&sessions_dir).await?;
1040
1041    while let Some(entry) = entries.next_entry().await? {
1042        let path = entry.path();
1043        if path.extension().map(|e| e == "json").unwrap_or(false) {
1044            if let Ok(content) = fs::read_to_string(&path).await {
1045                if let Ok(session) = serde_json::from_str::<Session>(&content) {
1046                    summaries.push(SessionSummary {
1047                        id: session.id,
1048                        title: session.title,
1049                        created_at: session.created_at,
1050                        updated_at: session.updated_at,
1051                        message_count: session.messages.len(),
1052                        agent: session.agent,
1053                        directory: session.metadata.directory,
1054                    });
1055                }
1056            }
1057        }
1058    }
1059
1060    summaries.sort_by(|a, b| b.updated_at.cmp(&a.updated_at));
1061    Ok(summaries)
1062}
1063
1064/// List sessions scoped to a specific directory (workspace)
1065///
1066/// Only returns sessions whose `metadata.directory` matches the given path.
1067/// This prevents sessions from other workspaces "leaking" into the TUI.
1068pub async fn list_sessions_for_directory(dir: &std::path::Path) -> Result<Vec<SessionSummary>> {
1069    let all = list_sessions().await?;
1070    let canonical = dir.canonicalize().unwrap_or_else(|_| dir.to_path_buf());
1071    Ok(all
1072        .into_iter()
1073        .filter(|s| {
1074            s.directory
1075                .as_ref()
1076                .map(|d| d.canonicalize().unwrap_or_else(|_| d.clone()) == canonical)
1077                .unwrap_or(false)
1078        })
1079        .collect())
1080}
1081
1082/// List sessions including OpenCode sessions for a directory.
1083///
1084/// Merges CodeTether sessions with any discovered OpenCode sessions,
1085/// sorted by most recently updated first. OpenCode sessions are
1086/// prefixed with `opencode_` in their ID.
1087pub async fn list_sessions_with_opencode(dir: &std::path::Path) -> Result<Vec<SessionSummary>> {
1088    let mut sessions = list_sessions_for_directory(dir).await?;
1089
1090    // Also include OpenCode sessions if available
1091    if let Some(storage) = crate::opencode::OpenCodeStorage::new() {
1092        if storage.exists() {
1093            if let Ok(oc_sessions) = storage.list_sessions_for_directory(dir).await {
1094                for oc in oc_sessions {
1095                    // Skip if we already have a CodeTether import of this session
1096                    let import_id = format!("opencode_{}", oc.id);
1097                    if sessions.iter().any(|s| s.id == import_id) {
1098                        continue;
1099                    }
1100
1101                    sessions.push(SessionSummary {
1102                        id: import_id,
1103                        title: Some(format!("[opencode] {}", oc.title)),
1104                        created_at: oc.created_at,
1105                        updated_at: oc.updated_at,
1106                        message_count: oc.message_count,
1107                        agent: "build".to_string(),
1108                        directory: Some(PathBuf::from(&oc.directory)),
1109                    });
1110                }
1111            }
1112        }
1113    }
1114
1115    // Re-sort merged list by updated_at descending
1116    sessions.sort_by(|a, b| b.updated_at.cmp(&a.updated_at));
1117    Ok(sessions)
1118}
1119
/// Summary of a session for listing
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SessionSummary {
    /// Session ID; OpenCode imports are prefixed with `opencode_`.
    pub id: String,
    /// Human-readable title, if one has been generated or set.
    pub title: Option<String>,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
    /// Number of messages in the session's history.
    pub message_count: usize,
    /// Agent name the session ran with.
    pub agent: String,
    /// The working directory this session was created in
    #[serde(default)]
    pub directory: Option<PathBuf>,
}
1133
/// Truncate `value` to at most `max_chars` characters (not bytes), appending
/// `"..."` when anything was cut off.
///
/// Slicing at a `char_indices` boundary keeps the cut safe for multi-byte
/// UTF-8 input. A `max_chars` of 0 yields an empty string, and strings of
/// exactly `max_chars` characters are returned unchanged (no ellipsis).
/// Replaces a hand-rolled char-pushing loop with the idiomatic
/// `char_indices().nth()` boundary lookup.
fn truncate_with_ellipsis(value: &str, max_chars: usize) -> String {
    if max_chars == 0 {
        return String::new();
    }

    // Byte offset of the (max_chars + 1)-th character. `None` means the
    // string has at most max_chars characters and already fits.
    match value.char_indices().nth(max_chars) {
        Some((cut, _)) => format!("{}...", &value[..cut]),
        None => value.to_string(),
    }
}
1155
1156// Async helper for Vec - kept for potential future use
1157#[allow(dead_code)]
1158use futures::StreamExt;
1159
/// Async analogue of `Iterator::collect` for streams; currently unused but
/// kept for potential future use (see note above the `futures` import).
#[allow(dead_code)]
trait AsyncCollect<T> {
    /// Drain the stream to completion and return every item in arrival order.
    async fn collect(self) -> Vec<T>;
}
1164
/// Blanket impl: any `Unpin` stream gains `collect`.
/// NOTE(review): the method name shadows `futures::StreamExt::collect` for
/// types that have both in scope; callers may need disambiguation.
#[allow(dead_code)]
impl<S, T> AsyncCollect<T> for S
where
    S: futures::Stream<Item = T> + Unpin,
{
    async fn collect(mut self) -> Vec<T> {
        let mut items = Vec::new();
        // Poll until the stream is exhausted, accumulating items in order.
        while let Some(item) = self.next().await {
            items.push(item);
        }
        items
    }
}