// syncable_cli/agent/mod.rs
//
// Agent module root: wires together the chat session state, the analysis
// tool set, and the terminal UI helpers used by the interactive agent.
pub mod session;
pub mod tools;
pub mod ui;
31use colored::Colorize;
32use rig::{
33 client::{CompletionClient, ProviderClient},
34 completion::Prompt,
35 providers::{anthropic, openai},
36};
37use session::ChatSession;
38use std::path::Path;
39use std::sync::Arc;
40use ui::{ResponseFormatter, Spinner, ToolDisplayHook, spawn_tool_display_handler};
41
/// LLM backend the agent talks to. Parsed from CLI input via `FromStr`
/// and rendered back with `Display` (lowercase names).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum ProviderType {
    /// OpenAI models (the default when no provider is specified).
    #[default]
    OpenAI,
    /// Anthropic (Claude) models.
    Anthropic,
}
49
50impl std::fmt::Display for ProviderType {
51 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
52 match self {
53 ProviderType::OpenAI => write!(f, "openai"),
54 ProviderType::Anthropic => write!(f, "anthropic"),
55 }
56 }
57}
58
59impl std::str::FromStr for ProviderType {
60 type Err = String;
61
62 fn from_str(s: &str) -> Result<Self, Self::Err> {
63 match s.to_lowercase().as_str() {
64 "openai" => Ok(ProviderType::OpenAI),
65 "anthropic" => Ok(ProviderType::Anthropic),
66 _ => Err(format!("Unknown provider: {}", s)),
67 }
68 }
69}
70
/// Errors produced by the agent module; messages are rendered by `thiserror`.
#[derive(Debug, thiserror::Error)]
pub enum AgentError {
    /// No API key available; the payload names the environment variable
    /// the user must set.
    #[error("Missing API key. Set {0} environment variable.")]
    MissingApiKey(String),

    /// Failure reported by the LLM provider client, stringified.
    #[error("Provider error: {0}")]
    ProviderError(String),

    /// Failure raised while executing one of the agent's tools.
    #[error("Tool error: {0}")]
    ToolError(String),
}

/// Shorthand result type used by the agent entry points below.
pub type AgentResult<T> = Result<T, AgentError>;
85
/// Renders the system prompt (agent preamble) for a project rooted at
/// `project_path`, interpolating the path so the model knows which
/// codebase it is operating on.
///
/// The tool list in the prompt must stay in sync with the `.tool(...)`
/// registrations in `run_interactive` / `run_query`.
fn get_system_prompt(project_path: &Path) -> String {
    // Bind the Display adapter up front; it is consumed by `format!`.
    let shown_path = project_path.display();
    format!(
        r#"You are a helpful AI assistant integrated into the Syncable CLI tool. You help developers understand and improve their codebases.

## Project Context
You are currently working with a project located at: {}

## Your Capabilities
You have access to tools to help analyze and understand the project:

1. **analyze_project** - Analyze the project to detect languages, frameworks, dependencies, and architecture
2. **security_scan** - Perform security analysis to find potential vulnerabilities and secrets
3. **check_vulnerabilities** - Check dependencies for known security vulnerabilities
4. **read_file** - Read the contents of a file in the project
5. **list_directory** - List files and directories in a path

## Guidelines
- Use the available tools to gather information before answering questions about the project
- Be concise but thorough in your explanations
- When you find issues, suggest specific fixes
- Format code examples using markdown code blocks"#,
        shown_path
    )
}
111
/// Runs the interactive chat session against the selected provider,
/// wiring up the analysis tools and the terminal spinner/tool-display UI.
///
/// Loops reading user input until a read error (e.g. EOF) or an exit
/// command. `/`-style inputs are dispatched as session commands instead
/// of being sent to the model. Returns `Err` only if prompting for an
/// API key fails; per-turn provider errors are printed and the loop
/// continues.
pub async fn run_interactive(
    project_path: &Path,
    provider: ProviderType,
    model: Option<String>,
) -> AgentResult<()> {
    use tools::*;

    let mut session = ChatSession::new(project_path, provider, model);

    // Best-effort: hydrate the provider's env var before checking for a key.
    ChatSession::load_api_key_to_env(session.provider);

    if !ChatSession::has_api_key(session.provider) {
        ChatSession::prompt_api_key(session.provider)?;
    }

    session.print_banner();

    loop {
        let input = match session.read_input() {
            Ok(input) => input,
            // Input failure (e.g. Ctrl-D / closed stdin) ends the session.
            Err(_) => break,
        };

        if input.is_empty() {
            continue;
        }

        // Session commands are handled locally, never sent to the model.
        if ChatSession::is_command(&input) {
            match session.process_command(&input) {
                Ok(true) => continue, // command handled; keep looping
                Ok(false) => break,   // command requested exit
                Err(e) => {
                    eprintln!("{}", format!("Error: {}", e).red());
                    continue;
                }
            }
        }

        // Re-check: a command may have switched provider or cleared the key.
        if !ChatSession::has_api_key(session.provider) {
            eprintln!("{}", "No API key configured. Use /provider to set one.".yellow());
            continue;
        }

        println!();
        let spinner = Arc::new(Spinner::new("Thinking..."));

        // Hook + background task render tool invocations as they happen,
        // sharing the spinner so output does not collide with it.
        let (hook, receiver) = ToolDisplayHook::new();
        let spinner_clone = spinner.clone();
        let _tool_display_handle = spawn_tool_display_handler(receiver, spinner_clone);

        let project_path_buf = session.project_path.clone();
        let preamble = get_system_prompt(&session.project_path);

        let response = match session.provider {
            ProviderType::OpenAI => {
                let client = openai::Client::from_env();
                // Reasoning-capable families (gpt-5*, o1*) take an extra
                // `reasoning` parameter; other models reject it.
                let reasoning_params = if session.model.starts_with("gpt-5") || session.model.starts_with("o1") {
                    Some(serde_json::json!({
                        "reasoning": {
                            "effort": "medium"
                        }
                    }))
                } else {
                    None
                };

                let mut builder = client
                    .agent(&session.model)
                    .preamble(&preamble)
                    .max_tokens(4096)
                    .tool(AnalyzeTool::new(project_path_buf.clone()))
                    .tool(SecurityScanTool::new(project_path_buf.clone()))
                    .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                    .tool(ReadFileTool::new(project_path_buf.clone()))
                    .tool(ListDirectoryTool::new(project_path_buf));

                if let Some(params) = reasoning_params {
                    builder = builder.additional_params(params);
                }

                let agent = builder.build();
                // multi_turn(10): allow up to 10 tool-call round trips per prompt.
                agent.prompt(&input).with_hook(hook.clone()).multi_turn(10).await
            }
            ProviderType::Anthropic => {
                let client = anthropic::Client::from_env();
                let agent = client
                    .agent(&session.model)
                    .preamble(&preamble)
                    .max_tokens(4096)
                    .tool(AnalyzeTool::new(project_path_buf.clone()))
                    .tool(SecurityScanTool::new(project_path_buf.clone()))
                    .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                    .tool(ReadFileTool::new(project_path_buf.clone()))
                    .tool(ListDirectoryTool::new(project_path_buf))
                    .build();

                agent.prompt(&input).with_hook(hook.clone()).multi_turn(10).await
            }
        };

        match response {
            Ok(text) => {
                spinner.stop().await;
                ResponseFormatter::print_response(&text);
                // Record the exchange in session history for later commands.
                session.history.push(("user".to_string(), input));
                session.history.push(("assistant".to_string(), text));
            }
            Err(e) => {
                spinner.stop().await;
                eprintln!("{}", format!("Error: {}", e).red());
            }
        }
        println!();
    }

    Ok(())
}
244
/// Runs a single non-interactive query against the project and returns
/// the model's final answer as plain text.
///
/// Unlike `run_interactive`, there is no spinner, no tool-display hook,
/// and no session history — this is the one-shot CLI path.
///
/// # Errors
/// Returns `AgentError::ProviderError` (with the provider error
/// stringified) when the prompt call fails.
pub async fn run_query(
    project_path: &Path,
    query: &str,
    provider: ProviderType,
    model: Option<String>,
) -> AgentResult<String> {
    use tools::*;

    let project_path_buf = project_path.to_path_buf();
    let preamble = get_system_prompt(project_path);

    match provider {
        ProviderType::OpenAI => {
            let client = openai::Client::from_env();
            // NOTE(review): confirm "gpt-5.2" is a valid default model id
            // and keep it aligned with ChatSession's default.
            let model_name = model.as_deref().unwrap_or("gpt-5.2");

            // Reasoning-capable families (gpt-5*, o1*) take an extra
            // `reasoning` parameter; other models reject it.
            let reasoning_params = if model_name.starts_with("gpt-5") || model_name.starts_with("o1") {
                Some(serde_json::json!({
                    "reasoning": {
                        "effort": "medium"
                    }
                }))
            } else {
                None
            };

            let mut builder = client
                .agent(model_name)
                .preamble(&preamble)
                .max_tokens(4096)
                .tool(AnalyzeTool::new(project_path_buf.clone()))
                .tool(SecurityScanTool::new(project_path_buf.clone()))
                .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                .tool(ReadFileTool::new(project_path_buf.clone()))
                .tool(ListDirectoryTool::new(project_path_buf));

            if let Some(params) = reasoning_params {
                builder = builder.additional_params(params);
            }

            let agent = builder.build();

            // multi_turn(10): allow up to 10 tool-call round trips.
            agent
                .prompt(query)
                .multi_turn(10)
                .await
                .map_err(|e| AgentError::ProviderError(e.to_string()))
        }
        ProviderType::Anthropic => {
            let client = anthropic::Client::from_env();
            let model_name = model.as_deref().unwrap_or("claude-sonnet-4-20250514");

            let agent = client
                .agent(model_name)
                .preamble(&preamble)
                .max_tokens(4096)
                .tool(AnalyzeTool::new(project_path_buf.clone()))
                .tool(SecurityScanTool::new(project_path_buf.clone()))
                .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                .tool(ReadFileTool::new(project_path_buf.clone()))
                .tool(ListDirectoryTool::new(project_path_buf))
                .build();

            agent
                .prompt(query)
                .multi_turn(10)
                .await
                .map_err(|e| AgentError::ProviderError(e.to_string()))
        }
    }
}