Skip to main content

syncable_cli/agent/session/
mod.rs

1//! Interactive chat session with /model and /provider commands
2//!
3//! Provides a rich REPL experience similar to Claude Code with:
4//! - `/model` - Select from available models based on configured API keys
5//! - `/provider` - Switch provider (prompts for API key if not set)
6//! - `/cost` - Show token usage and estimated cost
7//! - `/help` - Show available commands
8//! - `/clear` - Clear conversation history
9//! - `/exit` or `/quit` - Exit the session
10
11// Submodules
12mod commands;
13mod plan_mode;
14mod providers;
15mod ui;
16
17// Re-exports for backward compatibility
18pub use plan_mode::{IncompletePlan, PlanMode, find_incomplete_plans};
19pub use providers::{get_available_models, get_configured_providers, prompt_api_key};
20
21use crate::agent::commands::TokenUsage;
22use crate::agent::{AgentResult, ProviderType};
23use crate::platform::PlatformSession;
24use colored::Colorize;
25use std::io;
26use std::path::Path;
27
/// Chat session state
///
/// Holds everything the interactive REPL keeps between turns: the active
/// provider/model pair, the conversation transcript, token accounting for
/// `/cost`, planning-mode state, and the persisted platform context.
pub struct ChatSession {
    /// LLM provider currently backing the session.
    pub provider: ProviderType,
    /// Model identifier sent to the provider.
    pub model: String,
    /// Root directory of the project the session operates on.
    pub project_path: std::path::PathBuf,
    /// Conversation transcript; cleared by the `/clear` command.
    pub history: Vec<(String, String)>, // (role, content)
    /// Accumulated token counts, reported by the `/cost` command.
    pub token_usage: TokenUsage,
    /// Current planning mode state
    pub plan_mode: PlanMode,
    /// Whether the previous turn used generation mode (write/shell tools active).
    /// Used so short follow-up messages ("sure", "go ahead", "yes") inherit the
    /// tool set from the previous turn instead of losing write/shell access.
    pub last_was_generation: bool,
    /// Session loaded via /resume command, to be processed by main loop
    pub pending_resume: Option<crate::agent::persistence::ConversationRecord>,
    /// Platform session state (selected project/org context)
    pub platform_session: PlatformSession,
}
46
47impl ChatSession {
48    pub fn new(project_path: &Path, provider: ProviderType, model: Option<String>) -> Self {
49        let default_model = match provider {
50            ProviderType::OpenAI => "gpt-5.2".to_string(),
51            ProviderType::Anthropic => "claude-sonnet-4-5-20250929".to_string(),
52            ProviderType::Bedrock => "global.anthropic.claude-sonnet-4-20250514-v1:0".to_string(),
53        };
54
55        // Load platform session from disk (returns default if not exists)
56        let platform_session = PlatformSession::load().unwrap_or_default();
57
58        Self {
59            provider,
60            model: model.unwrap_or(default_model),
61            project_path: project_path.to_path_buf(),
62            history: Vec::new(),
63            token_usage: TokenUsage::new(),
64            plan_mode: PlanMode::default(),
65            last_was_generation: false,
66            pending_resume: None,
67            platform_session,
68        }
69    }
70
71    /// Update the platform session and save to disk
72    pub fn update_platform_session(&mut self, session: PlatformSession) {
73        self.platform_session = session;
74        if let Err(e) = self.platform_session.save() {
75            eprintln!(
76                "{}",
77                format!("Warning: Failed to save platform session: {}", e).yellow()
78            );
79        }
80    }
81
    /// Toggle planning mode and return the new mode
    ///
    /// The transition itself is delegated to `PlanMode::toggle`; the new
    /// state is stored on the session and echoed back to the caller.
    pub fn toggle_plan_mode(&mut self) -> PlanMode {
        self.plan_mode = self.plan_mode.toggle();
        self.plan_mode
    }
87
    /// Check if currently in planning mode
    ///
    /// Convenience accessor delegating to `PlanMode::is_planning`.
    pub fn is_planning(&self) -> bool {
        self.plan_mode.is_planning()
    }
92
    /// Check if API key is configured for a provider (env var OR config file)
    ///
    /// Thin wrapper kept on `ChatSession` for backward compatibility;
    /// delegates to `providers::has_api_key`.
    pub fn has_api_key(provider: ProviderType) -> bool {
        providers::has_api_key(provider)
    }
97
    /// Load API key from config if not in env, and set it in env for use
    ///
    /// Thin wrapper delegating to `providers::load_api_key_to_env`.
    pub fn load_api_key_to_env(provider: ProviderType) {
        providers::load_api_key_to_env(provider)
    }
102
    /// Prompt user to enter API key for a provider
    ///
    /// Thin wrapper delegating to `providers::prompt_api_key`.
    pub fn prompt_api_key(provider: ProviderType) -> AgentResult<String> {
        providers::prompt_api_key(provider)
    }
107
    /// Handle /model command - interactive model selection
    ///
    /// Delegates to `commands::handle_model_command`, which mutates this
    /// session's model selection.
    pub fn handle_model_command(&mut self) -> AgentResult<()> {
        commands::handle_model_command(self)
    }
112
    /// Handle /provider command - switch provider with API key prompt if needed
    ///
    /// Delegates to `commands::handle_provider_command`.
    pub fn handle_provider_command(&mut self) -> AgentResult<()> {
        commands::handle_provider_command(self)
    }
117
    /// Handle /reset command - reset provider credentials
    ///
    /// Delegates to `commands::handle_reset_command`.
    pub fn handle_reset_command(&mut self) -> AgentResult<()> {
        commands::handle_reset_command(self)
    }
122
    /// Handle /profile command - manage global profiles
    ///
    /// Delegates to `commands::handle_profile_command`.
    pub fn handle_profile_command(&mut self) -> AgentResult<()> {
        commands::handle_profile_command(self)
    }
127
    /// Handle /plans command - show incomplete plans and offer to continue
    ///
    /// Read-only (`&self`); delegates to `commands::handle_plans_command`.
    pub fn handle_plans_command(&self) -> AgentResult<()> {
        commands::handle_plans_command(self)
    }
132
    /// Handle /resume command - browse and select a session to resume
    /// Returns true if a session was loaded and should be displayed
    ///
    /// Delegates to `commands::handle_resume_command`; on success the loaded
    /// record is stored in `self.pending_resume` for the main loop to pick up.
    pub fn handle_resume_command(&mut self) -> AgentResult<bool> {
        commands::handle_resume_command(self)
    }
138
    /// Handle /sessions command - list available sessions
    ///
    /// Delegates to `commands::handle_list_sessions_command`; infallible,
    /// output goes straight to the terminal.
    pub fn handle_list_sessions_command(&self) {
        commands::handle_list_sessions_command(self)
    }
143
    /// Handle /help command - delegates to ui module
    pub fn print_help() {
        ui::print_help()
    }
148
    /// Print session banner with colorful SYNCABLE ASCII art - delegates to ui module
    pub fn print_logo() {
        ui::print_logo()
    }
153
    /// Print the welcome banner - delegates to ui module
    ///
    /// Takes `&self` so the banner can reflect current session state.
    pub fn print_banner(&self) {
        ui::print_banner(self)
    }
158
159    /// Process a command (returns true if should continue, false if should exit)
160    pub fn process_command(&mut self, input: &str) -> AgentResult<bool> {
161        let cmd = input.trim().to_lowercase();
162
163        // Handle bare "/" - now handled interactively in read_input
164        // Just show help if they somehow got here
165        if cmd == "/" {
166            Self::print_help();
167            return Ok(true);
168        }
169
170        match cmd.as_str() {
171            "/exit" | "/quit" | "/q" => {
172                println!("\n{}", "👋 Goodbye!".green());
173                return Ok(false);
174            }
175            "/help" | "/h" | "/?" => {
176                Self::print_help();
177            }
178            "/model" | "/m" => {
179                self.handle_model_command()?;
180            }
181            "/provider" | "/p" => {
182                self.handle_provider_command()?;
183            }
184            "/cost" => {
185                self.token_usage.print_report(&self.model);
186            }
187            "/clear" | "/c" => {
188                self.history.clear();
189                println!("{}", "✓ Conversation history cleared".green());
190            }
191            "/reset" | "/r" => {
192                self.handle_reset_command()?;
193            }
194            "/profile" => {
195                self.handle_profile_command()?;
196            }
197            "/plans" => {
198                self.handle_plans_command()?;
199            }
200            "/resume" | "/s" => {
201                // Resume loads session into self.pending_resume
202                // Main loop in mod.rs will detect and process it
203                let _ = self.handle_resume_command()?;
204            }
205            "/sessions" | "/ls" => {
206                self.handle_list_sessions_command();
207            }
208            _ => {
209                if cmd.starts_with('/') {
210                    // Unknown command - interactive picker already handled in read_input
211                    println!(
212                        "{}",
213                        format!(
214                            "Unknown command: {}. Type /help for available commands.",
215                            cmd
216                        )
217                        .yellow()
218                    );
219                }
220            }
221        }
222
223        Ok(true)
224    }
225
226    /// Check if input is a command
227    pub fn is_command(input: &str) -> bool {
228        input.trim().starts_with('/')
229    }
230
231    /// Strip @ prefix from file/folder references for AI consumption
232    /// Keeps the path but removes the leading @ that was used for autocomplete
233    /// e.g., "check @src/main.rs for issues" -> "check src/main.rs for issues"
234    fn strip_file_references(input: &str) -> String {
235        let mut result = String::with_capacity(input.len());
236        let chars: Vec<char> = input.chars().collect();
237        let mut i = 0;
238
239        while i < chars.len() {
240            if chars[i] == '@' {
241                // Check if this @ is at start or after whitespace (valid file reference trigger)
242                let is_valid_trigger = i == 0 || chars[i - 1].is_whitespace();
243
244                if is_valid_trigger {
245                    // Check if there's a path after @ (not just @ followed by space/end)
246                    let has_path = i + 1 < chars.len() && !chars[i + 1].is_whitespace();
247
248                    if has_path {
249                        // Skip the @ but keep the path
250                        i += 1;
251                        continue;
252                    }
253                }
254            }
255            result.push(chars[i]);
256            i += 1;
257        }
258
259        result
260    }
261
262    /// Read user input with prompt - with interactive file picker support
263    /// Uses custom terminal handling for @ file references and / commands
264    /// Returns InputResult which the main loop should handle
265    pub fn read_input(&self) -> io::Result<crate::agent::ui::input::InputResult> {
266        use crate::agent::ui::input::read_input_with_file_picker;
267
268        // Build prompt with platform context if project is selected
269        let prompt = if self.platform_session.is_project_selected() {
270            format!("{} >", self.platform_session.display_context())
271        } else {
272            ">".to_string()
273        };
274
275        Ok(read_input_with_file_picker(
276            &prompt,
277            &self.project_path,
278            self.plan_mode.is_planning(),
279        ))
280    }
281
282    /// Process a submitted input text - strips @ references and handles suggestion format
283    pub fn process_submitted_text(text: &str) -> String {
284        let trimmed = text.trim();
285        // Handle case where full suggestion was submitted (e.g., "/model        Description")
286        // Extract just the command if it looks like a suggestion format
287        if trimmed.starts_with('/') && trimmed.contains("  ") {
288            // This looks like a suggestion format, extract just the command
289            if let Some(cmd) = trimmed.split_whitespace().next() {
290                return cmd.to_string();
291            }
292        }
293        // Strip @ prefix from file references before sending to AI
294        // The @ is for UI autocomplete, but the AI should see just the path
295        Self::strip_file_references(trimmed)
296    }
297}