codetether_agent/session/mod.rs

//! Session management
//!
//! Sessions track the conversation history and state for agent interactions.
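//!
//! A minimal end-to-end sketch (illustrative only; it assumes the crate is consumed
//! as a library named `codetether_agent` and that provider API keys are already
//! configured in HashiCorp Vault):
//!
//! ```ignore
//! use codetether_agent::session::Session;
//!
//! async fn demo() -> anyhow::Result<()> {
//!     let mut session = Session::new().await?;
//!     let result = session.prompt("summarise the README in this repository").await?;
//!     println!("{}", result.text);
//!
//!     // `prompt` persists the session, so it can be resumed later by id.
//!     let resumed = Session::load(&result.session_id).await?;
//!     assert_eq!(resumed.id, result.session_id);
//!     Ok(())
//! }
//! ```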

use crate::agent::ToolUse;
use crate::provider::{Message, Usage};
use crate::tool::ToolRegistry;
use anyhow::Result;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use std::sync::Arc;
use tokio::fs;
use uuid::Uuid;

#[cfg(feature = "functiongemma")]
use crate::cognition::tool_router::{ToolCallRouter, ToolRouterConfig};

fn is_interactive_tool(tool_name: &str) -> bool {
    matches!(tool_name, "question")
}

fn choose_default_provider<'a>(providers: &'a [&'a str]) -> Option<&'a str> {
    // Keep Google as an explicit option, but don't default to it first because
    // some environments expose API keys that are not valid for ChatCompletions.
    let preferred = [
        "zhipuai",
        "openai",
        "github-copilot",
        "anthropic",
        "openrouter",
        "novita",
        "moonshotai",
        "google",
    ];
    for name in preferred {
        if let Some(found) = providers.iter().copied().find(|p| *p == name) {
            return Some(found);
        }
    }
    providers.first().copied()
}

/// A conversation session
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Session {
    pub id: String,
    pub title: Option<String>,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
    pub messages: Vec<Message>,
    pub tool_uses: Vec<ToolUse>,
    pub usage: Usage,
    pub agent: String,
    pub metadata: SessionMetadata,
}

#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct SessionMetadata {
    pub directory: Option<PathBuf>,
    pub model: Option<String>,
    pub shared: bool,
    pub share_url: Option<String>,
}

impl Session {
    fn default_model_for_provider(provider: &str) -> String {
        match provider {
            "moonshotai" => "kimi-k2.5".to_string(),
            "anthropic" => "claude-sonnet-4-20250514".to_string(),
            "openai" => "gpt-4o".to_string(),
            "google" => "gemini-2.5-pro".to_string(),
            "zhipuai" => "glm-4.7".to_string(),
            "openrouter" => "z-ai/glm-4.7".to_string(),
            "novita" => "qwen/qwen3-coder-next".to_string(),
            "github-copilot" | "github-copilot-enterprise" => "gpt-5-mini".to_string(),
            _ => "glm-4.7".to_string(),
        }
    }

    /// Create a new session
    pub async fn new() -> Result<Self> {
        let id = Uuid::new_v4().to_string();
        let now = Utc::now();

        Ok(Self {
            id,
            title: None,
            created_at: now,
            updated_at: now,
            messages: Vec::new(),
            tool_uses: Vec::new(),
            usage: Usage::default(),
            agent: "build".to_string(),
            metadata: SessionMetadata {
                directory: Some(std::env::current_dir()?),
                ..Default::default()
            },
        })
    }

    /// Load an existing session
    pub async fn load(id: &str) -> Result<Self> {
        let path = Self::session_path(id)?;
        let content = fs::read_to_string(&path).await?;
        let session: Session = serde_json::from_str(&content)?;
        Ok(session)
    }

    /// Load the last session, optionally scoped to a workspace directory
    ///
    /// When `workspace` is Some, only considers sessions created in that directory.
    /// When None, returns the most recent session globally (legacy behavior).
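    ///
    /// Usage sketch (illustrative only):
    ///
    /// ```ignore
    /// // Most recent session for the current workspace, if any.
    /// let cwd = std::env::current_dir()?;
    /// let session = Session::last_for_directory(Some(cwd.as_path())).await?;
    ///
    /// // Most recent session regardless of workspace (same as `Session::last()`).
    /// let latest = Session::last_for_directory(None).await?;
    /// ```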
    pub async fn last_for_directory(workspace: Option<&std::path::Path>) -> Result<Self> {
        let sessions_dir = Self::sessions_dir()?;

        if !sessions_dir.exists() {
            anyhow::bail!("No sessions found");
        }

        let mut entries: Vec<tokio::fs::DirEntry> = Vec::new();
        let mut read_dir = fs::read_dir(&sessions_dir).await?;
        while let Some(entry) = read_dir.next_entry().await? {
            entries.push(entry);
        }

        if entries.is_empty() {
            anyhow::bail!("No sessions found");
        }

        // Sort by modification time (most recent first)
        // Use std::fs::metadata since we can't await in sort_by_key
        entries.sort_by_key(|e| {
            std::cmp::Reverse(
                std::fs::metadata(e.path())
                    .ok()
                    .and_then(|m| m.modified().ok())
                    .unwrap_or(std::time::SystemTime::UNIX_EPOCH),
            )
        });

        let canonical_workspace =
            workspace.map(|w| w.canonicalize().unwrap_or_else(|_| w.to_path_buf()));

        for entry in &entries {
            let content: String = fs::read_to_string(entry.path()).await?;
            if let Ok(session) = serde_json::from_str::<Session>(&content) {
                // If workspace scoping requested, filter by directory
                if let Some(ref ws) = canonical_workspace {
                    if let Some(ref dir) = session.metadata.directory {
                        let canonical_dir = dir.canonicalize().unwrap_or_else(|_| dir.clone());
                        if &canonical_dir == ws {
                            return Ok(session);
                        }
                    }
                    continue;
                }
                return Ok(session);
            }
        }

        anyhow::bail!("No sessions found")
    }

    /// Load the last session (global, unscoped — legacy compatibility)
    pub async fn last() -> Result<Self> {
        Self::last_for_directory(None).await
    }

    /// Save the session to disk
    pub async fn save(&self) -> Result<()> {
        let path = Self::session_path(&self.id)?;

        if let Some(parent) = path.parent() {
            fs::create_dir_all(parent).await?;
        }

        let content = serde_json::to_string_pretty(self)?;
        fs::write(&path, content).await?;

        Ok(())
    }

    /// Add a message to the session
    pub fn add_message(&mut self, message: Message) {
        self.messages.push(message);
        self.updated_at = Utc::now();
    }

    /// Execute a prompt and get the result
    pub async fn prompt(&mut self, message: &str) -> Result<SessionResult> {
        use crate::provider::{
            CompletionRequest, ContentPart, ProviderRegistry, Role, parse_model_string,
        };

        // Load providers from Vault
        let registry = ProviderRegistry::from_vault().await?;

        let providers = registry.list();
        if providers.is_empty() {
            anyhow::bail!(
                "No providers available. Configure API keys in HashiCorp Vault (for Copilot use `codetether auth copilot`)."
            );
        }

        tracing::info!("Available providers: {:?}", providers);

        // Parse model string (format: "provider/model", "provider", or just "model")
        let (provider_name, model_id) = if let Some(ref model_str) = self.metadata.model {
            let (prov, model) = parse_model_string(model_str);
            if prov.is_some() {
                // Format: provider/model
                (prov.map(|s| s.to_string()), model.to_string())
            } else if providers.contains(&model) {
                // Format: just provider name (e.g., "novita")
                (Some(model.to_string()), String::new())
            } else {
                // Format: just model name
                (None, model.to_string())
            }
        } else {
            (None, String::new())
        };

        // Determine which provider to use with deterministic fallback ordering.
        let selected_provider = provider_name
            .as_deref()
            .filter(|p| providers.contains(p))
            .or_else(|| choose_default_provider(providers.as_slice()))
            .ok_or_else(|| anyhow::anyhow!("No providers available"))?;

        let provider = registry
            .get(selected_provider)
            .ok_or_else(|| anyhow::anyhow!("Provider {} not found", selected_provider))?;

        // Add user message to session using add_message
        self.add_message(Message {
            role: Role::User,
            content: vec![ContentPart::Text {
                text: message.to_string(),
            }],
        });

        // Generate title if this is the first user message and no title exists
        if self.title.is_none() {
            self.generate_title().await?;
        }

        // Determine model to use
        let model = if !model_id.is_empty() {
            model_id
        } else {
            Self::default_model_for_provider(selected_provider)
        };

        // Create tool registry with all available tools
        let tool_registry = ToolRegistry::with_provider_arc(Arc::clone(&provider), model.clone());
        let tool_definitions: Vec<_> = tool_registry
            .definitions()
            .into_iter()
            .filter(|tool| !is_interactive_tool(&tool.name))
            .collect();

        // Kimi K2.5 requires temperature=1.0
        let temperature = if model.starts_with("kimi-k2") {
            Some(1.0)
        } else {
            Some(0.7)
        };

        tracing::info!("Using model: {} via provider: {}", model, selected_provider);
        tracing::info!("Available tools: {}", tool_definitions.len());

        // All current providers support native tool calling.  Hardcode to
        // true so we skip the expensive list_models() API call on every message.
        #[cfg(feature = "functiongemma")]
        let model_supports_tools = true;

        // Build system prompt with AGENTS.md
        let cwd = self
            .metadata
            .directory
            .clone()
            .unwrap_or_else(|| std::env::current_dir().unwrap_or_default());
        let system_prompt = crate::agent::builtin::build_system_prompt(&cwd);

        // Run agentic loop with tool execution
        let max_steps = 50;
        let mut final_output = String::new();

        // Initialise the FunctionGemma tool-call router (feature-gated, opt-in).
        #[cfg(feature = "functiongemma")]
        let tool_router: Option<ToolCallRouter> = {
            let cfg = ToolRouterConfig::from_env();
            match ToolCallRouter::from_config(&cfg) {
                Ok(r) => r,
                Err(e) => {
                    tracing::warn!(error = %e, "FunctionGemma tool router init failed; disabled");
                    None
                }
            }
        };

        for step in 1..=max_steps {
            tracing::info!(step = step, "Agent step starting");

            // Build messages with system prompt first
            let mut messages = vec![Message {
                role: Role::System,
                content: vec![ContentPart::Text {
                    text: system_prompt.clone(),
                }],
            }];
            messages.extend(self.messages.clone());

            // Create completion request with tools
            let request = CompletionRequest {
                messages,
                tools: tool_definitions.clone(),
                model: model.clone(),
                temperature,
                top_p: None,
                max_tokens: Some(8192),
                stop: Vec::new(),
            };

            // Call the provider
            let response = provider.complete(request).await?;

            // Optionally route text-only responses through FunctionGemma to
            // produce structured tool calls.  Skipped when the model natively
            // supports tool calling (which all current providers do).
            #[cfg(feature = "functiongemma")]
            let response = if let Some(ref router) = tool_router {
                router
                    .maybe_reformat(response, &tool_definitions, model_supports_tools)
                    .await
            } else {
                response
            };

            // Record token usage
            crate::telemetry::TOKEN_USAGE.record_model_usage(
                &model,
                response.usage.prompt_tokens as u64,
                response.usage.completion_tokens as u64,
            );

            // Extract tool calls from response
            let tool_calls: Vec<(String, String, serde_json::Value)> = response
                .message
                .content
                .iter()
                .filter_map(|part| {
                    if let ContentPart::ToolCall {
                        id,
                        name,
                        arguments,
                    } = part
                    {
                        // Parse arguments JSON string into Value
                        let args: serde_json::Value =
                            serde_json::from_str(arguments).unwrap_or(serde_json::json!({}));
                        Some((id.clone(), name.clone(), args))
                    } else {
                        None
                    }
                })
                .collect();

            // Collect text output
            for part in &response.message.content {
                if let ContentPart::Text { text } = part {
                    if !text.is_empty() {
                        final_output.push_str(text);
                        final_output.push('\n');
                    }
                }
            }

            // If no tool calls, we're done
            if tool_calls.is_empty() {
                self.add_message(response.message.clone());
                break;
            }

            // Add assistant message with tool calls
            self.add_message(response.message.clone());

            tracing::info!(
                step = step,
                num_tools = tool_calls.len(),
                "Executing tool calls"
            );

            // Execute each tool call
            for (tool_id, tool_name, tool_input) in tool_calls {
                tracing::info!(tool = %tool_name, tool_id = %tool_id, "Executing tool");

                if is_interactive_tool(&tool_name) {
                    tracing::warn!(tool = %tool_name, "Blocking interactive tool in session loop");
                    self.add_message(Message {
                        role: Role::Tool,
                        content: vec![ContentPart::ToolResult {
                            tool_call_id: tool_id,
                            content: "Error: Interactive tool 'question' is disabled in this interface. Ask the user directly in assistant text.".to_string(),
                        }],
                    });
                    continue;
                }

                // Get and execute the tool
                let content = if let Some(tool) = tool_registry.get(&tool_name) {
                    match tool.execute(tool_input.clone()).await {
                        Ok(result) => {
                            tracing::info!(tool = %tool_name, success = result.success, "Tool execution completed");
                            result.output
                        }
                        Err(e) => {
                            tracing::warn!(tool = %tool_name, error = %e, "Tool execution failed");
                            format!("Error: {}", e)
                        }
                    }
                } else {
                    tracing::warn!(tool = %tool_name, "Tool not found");
                    format!("Error: Unknown tool '{}'", tool_name)
                };

                // Add tool result message
                self.add_message(Message {
                    role: Role::Tool,
                    content: vec![ContentPart::ToolResult {
                        tool_call_id: tool_id,
                        content,
                    }],
                });
            }
        }

        // Save session after each prompt to persist messages
        self.save().await?;

        Ok(SessionResult {
            text: final_output.trim().to_string(),
            session_id: self.id.clone(),
        })
    }

    /// Process a user message with real-time event streaming for UI updates.
    /// Events are sent through the provided channel as tool calls execute.
    ///
    /// Accepts a pre-loaded `ProviderRegistry` to avoid re-fetching secrets
    /// from Vault on every message (which was the primary TUI performance
    /// bottleneck).
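    ///
    /// A minimal consumption sketch (illustrative only; imports are elided and it
    /// assumes a Tokio runtime, a mutable `session`, and an arbitrary channel capacity):
    ///
    /// ```ignore
    /// let registry = std::sync::Arc::new(ProviderRegistry::from_vault().await?);
    /// let (tx, mut rx) = tokio::sync::mpsc::channel(64);
    ///
    /// let ui = tokio::spawn(async move {
    ///     while let Some(event) = rx.recv().await {
    ///         match event {
    ///             SessionEvent::TextComplete(text) => println!("{text}"),
    ///             SessionEvent::ToolCallStart { name, .. } => println!("running tool: {name}"),
    ///             SessionEvent::Done | SessionEvent::Error(_) => break,
    ///             _ => {}
    ///         }
    ///     }
    /// });
    ///
    /// session.prompt_with_events("list the open TODOs", tx, registry).await?;
    /// ui.await?;
    /// ```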
    pub async fn prompt_with_events(
        &mut self,
        message: &str,
        event_tx: tokio::sync::mpsc::Sender<SessionEvent>,
        registry: std::sync::Arc<crate::provider::ProviderRegistry>,
    ) -> Result<SessionResult> {
        use crate::provider::{CompletionRequest, ContentPart, Role, parse_model_string};

        let _ = event_tx.send(SessionEvent::Thinking).await;

        let providers = registry.list();
        if providers.is_empty() {
            anyhow::bail!(
                "No providers available. Configure API keys in HashiCorp Vault (for Copilot use `codetether auth copilot`)."
            );
        }
        tracing::info!("Available providers: {:?}", providers);

        // Parse model string (format: "provider/model", "provider", or just "model")
        let (provider_name, model_id) = if let Some(ref model_str) = self.metadata.model {
            let (prov, model) = parse_model_string(model_str);
            if prov.is_some() {
                (prov.map(|s| s.to_string()), model.to_string())
            } else if providers.contains(&model) {
                (Some(model.to_string()), String::new())
            } else {
                (None, model.to_string())
            }
        } else {
            (None, String::new())
        };

        // Determine which provider to use with deterministic fallback ordering.
        let selected_provider = provider_name
            .as_deref()
            .filter(|p| providers.contains(p))
            .or_else(|| choose_default_provider(providers.as_slice()))
            .ok_or_else(|| anyhow::anyhow!("No providers available"))?;

        let provider = registry
            .get(selected_provider)
            .ok_or_else(|| anyhow::anyhow!("Provider {} not found", selected_provider))?;

        // Add user message
        self.add_message(Message {
            role: Role::User,
            content: vec![ContentPart::Text {
                text: message.to_string(),
            }],
        });

        // Generate title if needed
        if self.title.is_none() {
            self.generate_title().await?;
        }

        // Determine model
        let model = if !model_id.is_empty() {
            model_id
        } else {
            Self::default_model_for_provider(selected_provider)
        };

        // Create tool registry
        let tool_registry = ToolRegistry::with_provider_arc(Arc::clone(&provider), model.clone());
        let tool_definitions: Vec<_> = tool_registry
            .definitions()
            .into_iter()
            .filter(|tool| !is_interactive_tool(&tool.name))
            .collect();

        let temperature = if model.starts_with("kimi-k2") {
            Some(1.0)
        } else {
            Some(0.7)
        };

        tracing::info!("Using model: {} via provider: {}", model, selected_provider);
        tracing::info!("Available tools: {}", tool_definitions.len());

        // All current providers support native tool calling.  Hardcode to
        // true so we skip the expensive list_models() API call on every message.
        #[cfg(feature = "functiongemma")]
        let model_supports_tools = true;

        // Build system prompt
        let cwd = std::env::var("PWD")
            .map(std::path::PathBuf::from)
            .unwrap_or_else(|_| std::env::current_dir().unwrap_or_default());
        let system_prompt = crate::agent::builtin::build_system_prompt(&cwd);

        let mut final_output = String::new();
        let max_steps = 50;

        // Initialise the FunctionGemma tool-call router (feature-gated, opt-in).
        #[cfg(feature = "functiongemma")]
        let tool_router: Option<ToolCallRouter> = {
            let cfg = ToolRouterConfig::from_env();
            match ToolCallRouter::from_config(&cfg) {
                Ok(r) => r,
                Err(e) => {
                    tracing::warn!(error = %e, "FunctionGemma tool router init failed; disabled");
                    None
                }
            }
        };

        for step in 1..=max_steps {
            tracing::info!(step = step, "Agent step starting");
            let _ = event_tx.send(SessionEvent::Thinking).await;

            // Build messages with system prompt first
            let mut messages = vec![Message {
                role: Role::System,
                content: vec![ContentPart::Text {
                    text: system_prompt.clone(),
                }],
            }];
            messages.extend(self.messages.clone());

            let request = CompletionRequest {
                messages,
                tools: tool_definitions.clone(),
                model: model.clone(),
                temperature,
                top_p: None,
                max_tokens: Some(8192),
                stop: Vec::new(),
            };

            let response = provider.complete(request).await?;

            // Optionally route text-only responses through FunctionGemma to
            // produce structured tool calls.  Skipped for native tool-calling models.
            #[cfg(feature = "functiongemma")]
            let response = if let Some(ref router) = tool_router {
                router
                    .maybe_reformat(response, &tool_definitions, model_supports_tools)
                    .await
            } else {
                response
            };

            crate::telemetry::TOKEN_USAGE.record_model_usage(
                &model,
                response.usage.prompt_tokens as u64,
                response.usage.completion_tokens as u64,
            );

            // Extract tool calls
            let tool_calls: Vec<(String, String, serde_json::Value)> = response
                .message
                .content
                .iter()
                .filter_map(|part| {
                    if let ContentPart::ToolCall {
                        id,
                        name,
                        arguments,
                    } = part
                    {
                        let args: serde_json::Value =
                            serde_json::from_str(arguments).unwrap_or(serde_json::json!({}));
                        Some((id.clone(), name.clone(), args))
                    } else {
                        None
                    }
                })
                .collect();

            // Collect text output for this step
            let mut step_text = String::new();
            for part in &response.message.content {
                if let ContentPart::Text { text } = part {
                    if !text.is_empty() {
                        step_text.push_str(text);
                        step_text.push('\n');
                    }
                }
            }

            // Emit this step's text BEFORE tool calls so it appears in correct
            // chronological order in the TUI chat display.
            if !step_text.trim().is_empty() {
                let trimmed = step_text.trim().to_string();
                let _ = event_tx.send(SessionEvent::TextChunk(trimmed.clone())).await;
                let _ = event_tx.send(SessionEvent::TextComplete(trimmed)).await;
                final_output.push_str(&step_text);
            }

            if tool_calls.is_empty() {
                self.add_message(response.message.clone());
                break;
            }

            self.add_message(response.message.clone());

            tracing::info!(
                step = step,
                num_tools = tool_calls.len(),
                "Executing tool calls"
            );

            // Execute each tool call with events
            for (tool_id, tool_name, tool_input) in tool_calls {
                let args_str = serde_json::to_string(&tool_input).unwrap_or_default();
                let _ = event_tx
                    .send(SessionEvent::ToolCallStart {
                        name: tool_name.clone(),
                        arguments: args_str,
                    })
                    .await;

                tracing::info!(tool = %tool_name, tool_id = %tool_id, "Executing tool");

                if is_interactive_tool(&tool_name) {
                    tracing::warn!(tool = %tool_name, "Blocking interactive tool in session loop");
                    let content = "Error: Interactive tool 'question' is disabled in this interface. Ask the user directly in assistant text.".to_string();
                    let _ = event_tx
                        .send(SessionEvent::ToolCallComplete {
                            name: tool_name.clone(),
                            output: content.clone(),
                            success: false,
                        })
                        .await;
                    self.add_message(Message {
                        role: Role::Tool,
                        content: vec![ContentPart::ToolResult {
                            tool_call_id: tool_id,
                            content,
                        }],
                    });
                    continue;
                }

                let (content, success) = if let Some(tool) = tool_registry.get(&tool_name) {
                    match tool.execute(tool_input.clone()).await {
                        Ok(result) => {
                            tracing::info!(tool = %tool_name, success = result.success, "Tool execution completed");
                            (result.output, result.success)
                        }
                        Err(e) => {
                            tracing::warn!(tool = %tool_name, error = %e, "Tool execution failed");
                            (format!("Error: {}", e), false)
                        }
                    }
                } else {
                    tracing::warn!(tool = %tool_name, "Tool not found");
                    (format!("Error: Unknown tool '{}'", tool_name), false)
                };

                let _ = event_tx
                    .send(SessionEvent::ToolCallComplete {
                        name: tool_name.clone(),
                        output: content.clone(),
                        success,
                    })
                    .await;

                self.add_message(Message {
                    role: Role::Tool,
                    content: vec![ContentPart::ToolResult {
                        tool_call_id: tool_id,
                        content,
                    }],
                });
            }
        }

        self.save().await?;

        // Text was already sent per-step via TextComplete events.
        // Send updated session state so the caller can sync back.
        let _ = event_tx.send(SessionEvent::SessionSync(self.clone())).await;
        let _ = event_tx.send(SessionEvent::Done).await;

        Ok(SessionResult {
            text: final_output.trim().to_string(),
            session_id: self.id.clone(),
        })
    }

    /// Generate a title for the session based on the first message
    /// Only sets title if not already set (for initial title generation)
    pub async fn generate_title(&mut self) -> Result<()> {
        if self.title.is_some() {
            return Ok(());
        }

        // Get first user message
        let first_message = self
            .messages
            .iter()
            .find(|m| m.role == crate::provider::Role::User);

        if let Some(msg) = first_message {
            let text: String = msg
                .content
                .iter()
                .filter_map(|p| match p {
                    crate::provider::ContentPart::Text { text } => Some(text.clone()),
                    _ => None,
                })
                .collect::<Vec<_>>()
                .join(" ");

            // Truncate to reasonable length
            self.title = Some(truncate_with_ellipsis(&text, 47));
        }

        Ok(())
    }

    /// Regenerate the title based on the first message, even if already set
    /// Use this for on-demand title updates or after context changes
    pub async fn regenerate_title(&mut self) -> Result<()> {
        // Get first user message
        let first_message = self
            .messages
            .iter()
            .find(|m| m.role == crate::provider::Role::User);

        if let Some(msg) = first_message {
            let text: String = msg
                .content
                .iter()
                .filter_map(|p| match p {
                    crate::provider::ContentPart::Text { text } => Some(text.clone()),
                    _ => None,
                })
                .collect::<Vec<_>>()
                .join(" ");

            // Truncate to reasonable length
            self.title = Some(truncate_with_ellipsis(&text, 47));
        }

        Ok(())
    }

    /// Set a custom title for the session
    pub fn set_title(&mut self, title: impl Into<String>) {
        self.title = Some(title.into());
        self.updated_at = Utc::now();
    }

    /// Clear the title, allowing it to be regenerated
    pub fn clear_title(&mut self) {
        self.title = None;
        self.updated_at = Utc::now();
    }

    /// Handle context change - updates metadata and optionally regenerates title
    /// Call this when the session context changes (e.g., directory change, model change)
    pub async fn on_context_change(&mut self, regenerate_title: bool) -> Result<()> {
        self.updated_at = Utc::now();

        if regenerate_title {
            self.regenerate_title().await?;
        }

        Ok(())
    }

    /// Delete a session by ID
    pub async fn delete(id: &str) -> Result<()> {
        let path = Self::session_path(id)?;
        if path.exists() {
            tokio::fs::remove_file(&path).await?;
        }
        Ok(())
    }

    /// Get the sessions directory
    fn sessions_dir() -> Result<PathBuf> {
        crate::config::Config::data_dir()
            .map(|d| d.join("sessions"))
            .ok_or_else(|| anyhow::anyhow!("Could not determine data directory"))
    }

    /// Get the path for a session file
    fn session_path(id: &str) -> Result<PathBuf> {
        Ok(Self::sessions_dir()?.join(format!("{}.json", id)))
    }
}

/// Result from a session prompt
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SessionResult {
    pub text: String,
    pub session_id: String,
}

/// Events emitted during session processing for real-time UI updates
#[derive(Debug, Clone)]
pub enum SessionEvent {
    /// Agent is thinking/processing
    Thinking,
    /// Tool call started
    ToolCallStart { name: String, arguments: String },
    /// Tool call completed with result
    ToolCallComplete {
        name: String,
        output: String,
        success: bool,
    },
    /// Partial text output (for streaming)
    TextChunk(String),
    /// Final text output
    TextComplete(String),
    /// Updated session state for caller to sync back
    SessionSync(Session),
    /// Processing complete
    Done,
    /// Error occurred
    Error(String),
}

/// List all sessions
pub async fn list_sessions() -> Result<Vec<SessionSummary>> {
    let sessions_dir = crate::config::Config::data_dir()
        .map(|d| d.join("sessions"))
        .ok_or_else(|| anyhow::anyhow!("Could not determine data directory"))?;

    if !sessions_dir.exists() {
        return Ok(Vec::new());
    }

    let mut summaries = Vec::new();
    let mut entries = fs::read_dir(&sessions_dir).await?;

    while let Some(entry) = entries.next_entry().await? {
        let path = entry.path();
        if path.extension().map(|e| e == "json").unwrap_or(false) {
            if let Ok(content) = fs::read_to_string(&path).await {
                if let Ok(session) = serde_json::from_str::<Session>(&content) {
                    summaries.push(SessionSummary {
                        id: session.id,
                        title: session.title,
                        created_at: session.created_at,
                        updated_at: session.updated_at,
                        message_count: session.messages.len(),
                        agent: session.agent,
                        directory: session.metadata.directory,
                    });
                }
            }
        }
    }

    summaries.sort_by(|a, b| b.updated_at.cmp(&a.updated_at));
    Ok(summaries)
}

/// List sessions scoped to a specific directory (workspace)
///
/// Only returns sessions whose `metadata.directory` matches the given path.
/// This prevents sessions from other workspaces "leaking" into the TUI.
pub async fn list_sessions_for_directory(dir: &std::path::Path) -> Result<Vec<SessionSummary>> {
    let all = list_sessions().await?;
    let canonical = dir.canonicalize().unwrap_or_else(|_| dir.to_path_buf());
    Ok(all
        .into_iter()
        .filter(|s| {
            s.directory
                .as_ref()
                .map(|d| d.canonicalize().unwrap_or_else(|_| d.clone()) == canonical)
                .unwrap_or(false)
        })
        .collect())
}

/// Summary of a session for listing
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SessionSummary {
    pub id: String,
    pub title: Option<String>,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
    pub message_count: usize,
    pub agent: String,
    /// The working directory this session was created in
    #[serde(default)]
    pub directory: Option<PathBuf>,
}

fn truncate_with_ellipsis(value: &str, max_chars: usize) -> String {
    if max_chars == 0 {
        return String::new();
    }

    let mut chars = value.chars();
    let mut output = String::new();
    for _ in 0..max_chars {
        if let Some(ch) = chars.next() {
            output.push(ch);
        } else {
            return value.to_string();
        }
    }

    if chars.next().is_some() {
        format!("{output}...")
    } else {
        output
    }
}

// Async helper for Vec - kept for potential future use
#[allow(dead_code)]
use futures::StreamExt;

#[allow(dead_code)]
trait AsyncCollect<T> {
    async fn collect(self) -> Vec<T>;
}

#[allow(dead_code)]
impl<S, T> AsyncCollect<T> for S
where
    S: futures::Stream<Item = T> + Unpin,
{
    async fn collect(mut self) -> Vec<T> {
        let mut items = Vec::new();
        while let Some(item) = self.next().await {
            items.push(item);
        }
        items
    }
}
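
// Minimal sanity tests for the pure helpers above (an illustrative sketch, not
// exhaustive coverage; expected values follow directly from the functions'
// definitions in this file).
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn default_provider_prefers_known_order_over_google() {
        // "novita" precedes "google" in the preferred list, so it wins.
        assert_eq!(
            choose_default_provider(&["google", "novita"]),
            Some("novita")
        );
        // Providers outside the preferred list fall back to the first entry.
        assert_eq!(choose_default_provider(&["custom"]), Some("custom"));
        assert_eq!(choose_default_provider(&[]), None);
    }

    #[test]
    fn truncate_with_ellipsis_appends_dots_only_when_needed() {
        assert_eq!(truncate_with_ellipsis("hello world", 5), "hello...");
        assert_eq!(truncate_with_ellipsis("hi", 5), "hi");
        assert_eq!(truncate_with_ellipsis("hi", 0), "");
    }
}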