// syncable_cli/agent/mod.rs
pub mod commands;
pub mod session;
pub mod tools;
pub mod ui;

use std::path::Path;
use std::sync::Arc;

use colored::Colorize;
use rig::{
    client::{CompletionClient, ProviderClient},
    completion::Prompt,
    providers::{anthropic, openai},
};

use commands::TokenUsage;
use session::ChatSession;
use ui::{ResponseFormatter, Spinner, ToolDisplayHook, spawn_tool_display_handler};

/// The LLM backend the agent talks to.
///
/// Defaults to [`ProviderType::OpenAI`]. Parsed from user input via
/// [`std::str::FromStr`] and rendered lowercase via [`std::fmt::Display`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum ProviderType {
    #[default]
    OpenAI,
    Anthropic,
}

52impl std::fmt::Display for ProviderType {
53 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
54 match self {
55 ProviderType::OpenAI => write!(f, "openai"),
56 ProviderType::Anthropic => write!(f, "anthropic"),
57 }
58 }
59}
60
61impl std::str::FromStr for ProviderType {
62 type Err = String;
63
64 fn from_str(s: &str) -> Result<Self, Self::Err> {
65 match s.to_lowercase().as_str() {
66 "openai" => Ok(ProviderType::OpenAI),
67 "anthropic" => Ok(ProviderType::Anthropic),
68 _ => Err(format!("Unknown provider: {}", s)),
69 }
70 }
71}
72
/// Errors surfaced by the agent module.
#[derive(Debug, thiserror::Error)]
pub enum AgentError {
    /// No API key is available for the selected provider; the payload is
    /// the name of the environment variable the user should set.
    #[error("Missing API key. Set {0} environment variable.")]
    MissingApiKey(String),

    /// The underlying provider/completion call failed; the payload is the
    /// stringified provider error.
    #[error("Provider error: {0}")]
    ProviderError(String),

    /// A tool invocation failed.
    #[error("Tool error: {0}")]
    ToolError(String),
}

/// Convenience alias for results produced by this module.
pub type AgentResult<T> = Result<T, AgentError>;

/// Builds the system prompt (preamble) handed to the LLM, embedding the
/// project path and describing the five available analysis tools plus
/// usage guidelines. The literal text is the model-facing contract and
/// must stay in sync with the tools actually registered on the agent.
fn get_system_prompt(project_path: &Path) -> String {
    format!(
        r#"You are a helpful AI assistant integrated into the Syncable CLI tool. You help developers understand and improve their codebases.

## Project Context
You are currently working with a project located at: {}

## Your Capabilities
You have access to tools to help analyze and understand the project:

1. **analyze_project** - Analyze the project to detect languages, frameworks, dependencies, and architecture
2. **security_scan** - Perform security analysis to find potential vulnerabilities and secrets
3. **check_vulnerabilities** - Check dependencies for known security vulnerabilities
4. **read_file** - Read the contents of a file in the project
5. **list_directory** - List files and directories in a path

## Guidelines
- Use the available tools to gather information before answering questions about the project
- Be concise but thorough in your explanations
- When you find issues, suggest specific fixes
- Format code examples using markdown code blocks"#,
        project_path.display()
    )
}

/// Runs the interactive chat loop (REPL) for the agent.
///
/// Builds a `ChatSession`, makes sure an API key is available for the
/// chosen provider, then repeatedly: reads input, handles `/` commands
/// locally, and forwards everything else to a freshly-built provider
/// agent equipped with the project-analysis tools.
///
/// # Errors
/// Returns an error only if prompting the user for a missing API key
/// fails; per-turn provider errors are printed and the loop continues.
pub async fn run_interactive(
    project_path: &Path,
    provider: ProviderType,
    model: Option<String>,
) -> AgentResult<()> {
    use tools::*;

    let mut session = ChatSession::new(project_path, provider, model);

    // Load any persisted key into the process environment so the provider
    // clients (constructed via `from_env`) can pick it up.
    ChatSession::load_api_key_to_env(session.provider);

    if !ChatSession::has_api_key(session.provider) {
        ChatSession::prompt_api_key(session.provider)?;
    }

    session.print_banner();

    loop {
        // EOF or a read failure ends the session.
        let input = match session.read_input() {
            Ok(input) => input,
            Err(_) => break,
        };

        if input.is_empty() {
            continue;
        }

        // Slash-commands are handled by the session; `Ok(false)` means quit.
        if ChatSession::is_command(&input) {
            match session.process_command(&input) {
                Ok(true) => continue,
                Ok(false) => break,
                Err(e) => {
                    eprintln!("{}", format!("Error: {}", e).red());
                    continue;
                }
            }
        }

        // A command above may have switched provider/cleared the key; re-check.
        if !ChatSession::has_api_key(session.provider) {
            eprintln!("{}", "No API key configured. Use /provider to set one.".yellow());
            continue;
        }

        println!();
        let spinner = Arc::new(Spinner::new("Thinking..."));

        // Hook + receiver pair that streams tool-call events to a background
        // display task, which also coordinates with the spinner.
        let (hook, receiver) = ToolDisplayHook::new();
        let spinner_clone = spinner.clone();
        let _tool_display_handle = spawn_tool_display_handler(receiver, spinner_clone);

        let project_path_buf = session.project_path.clone();
        let preamble = get_system_prompt(&session.project_path);

        let response = match session.provider {
            ProviderType::OpenAI => {
                let client = openai::Client::from_env();
                // Reasoning-capable model families get an explicit effort hint.
                // NOTE(review): prefix check assumes every "gpt-5*"/"o1*"
                // model accepts a `reasoning` parameter — confirm against
                // the OpenAI API.
                let reasoning_params = if session.model.starts_with("gpt-5") || session.model.starts_with("o1") {
                    Some(serde_json::json!({
                        "reasoning": {
                            "effort": "medium"
                        }
                    }))
                } else {
                    None
                };

                let mut builder = client
                    .agent(&session.model)
                    .preamble(&preamble)
                    .max_tokens(4096)
                    .tool(AnalyzeTool::new(project_path_buf.clone()))
                    .tool(SecurityScanTool::new(project_path_buf.clone()))
                    .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                    .tool(ReadFileTool::new(project_path_buf.clone()))
                    .tool(ListDirectoryTool::new(project_path_buf));

                if let Some(params) = reasoning_params {
                    builder = builder.additional_params(params);
                }

                let agent = builder.build();
                // Up to 10 tool-call round-trips per user turn.
                agent.prompt(&input).with_hook(hook.clone()).multi_turn(10).await
            }
            ProviderType::Anthropic => {
                let client = anthropic::Client::from_env();
                let agent = client
                    .agent(&session.model)
                    .preamble(&preamble)
                    .max_tokens(4096)
                    .tool(AnalyzeTool::new(project_path_buf.clone()))
                    .tool(SecurityScanTool::new(project_path_buf.clone()))
                    .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                    .tool(ReadFileTool::new(project_path_buf.clone()))
                    .tool(ListDirectoryTool::new(project_path_buf))
                    .build();

                agent.prompt(&input).with_hook(hook.clone()).multi_turn(10).await
            }
        };

        match response {
            Ok(text) => {
                spinner.stop().await;
                ResponseFormatter::print_response(&text);

                // Heuristic local token accounting (no provider usage data
                // is read here — see `TokenUsage::estimate_tokens`).
                let prompt_tokens = TokenUsage::estimate_tokens(&input);
                let completion_tokens = TokenUsage::estimate_tokens(&text);
                session.token_usage.add_request(prompt_tokens, completion_tokens);

                session.history.push(("user".to_string(), input));
                session.history.push(("assistant".to_string(), text));
            }
            Err(e) => {
                spinner.stop().await;
                eprintln!("{}", format!("Error: {}", e).red());
            }
        }
        println!();
    }

    Ok(())
}

/// Runs a single one-shot query against the selected provider and returns
/// the model's final answer as plain text.
///
/// The agent is given the same project-analysis tools as interactive mode
/// and may take up to 10 tool-call turns before producing its answer.
///
/// # Errors
/// Returns [`AgentError::ProviderError`] when the completion request fails.
pub async fn run_query(
    project_path: &Path,
    query: &str,
    provider: ProviderType,
    model: Option<String>,
) -> AgentResult<String> {
    use tools::*;

    let project_path_buf = project_path.to_path_buf();
    let preamble = get_system_prompt(project_path);

    match provider {
        ProviderType::OpenAI => {
            let client = openai::Client::from_env();
            // Default model when the caller does not specify one.
            let model_name = model.as_deref().unwrap_or("gpt-5.2");

            // Reasoning-capable model families get an explicit effort hint.
            // NOTE(review): prefix check assumes every "gpt-5*"/"o1*" model
            // accepts a `reasoning` parameter — confirm against the API.
            let reasoning_params = if model_name.starts_with("gpt-5") || model_name.starts_with("o1") {
                Some(serde_json::json!({
                    "reasoning": {
                        "effort": "medium"
                    }
                }))
            } else {
                None
            };

            let mut builder = client
                .agent(model_name)
                .preamble(&preamble)
                .max_tokens(4096)
                .tool(AnalyzeTool::new(project_path_buf.clone()))
                .tool(SecurityScanTool::new(project_path_buf.clone()))
                .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                .tool(ReadFileTool::new(project_path_buf.clone()))
                .tool(ListDirectoryTool::new(project_path_buf));

            if let Some(params) = reasoning_params {
                builder = builder.additional_params(params);
            }

            let agent = builder.build();

            agent
                .prompt(query)
                .multi_turn(10)
                .await
                .map_err(|e| AgentError::ProviderError(e.to_string()))
        }
        ProviderType::Anthropic => {
            let client = anthropic::Client::from_env();
            // Default model when the caller does not specify one.
            let model_name = model.as_deref().unwrap_or("claude-sonnet-4-20250514");

            let agent = client
                .agent(model_name)
                .preamble(&preamble)
                .max_tokens(4096)
                .tool(AnalyzeTool::new(project_path_buf.clone()))
                .tool(SecurityScanTool::new(project_path_buf.clone()))
                .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                .tool(ReadFileTool::new(project_path_buf.clone()))
                .tool(ListDirectoryTool::new(project_path_buf))
                .build();

            agent
                .prompt(query)
                .multi_turn(10)
                .await
                .map_err(|e| AgentError::ProviderError(e.to_string()))
        }
    }
}