syncable_cli/agent/tools/analyze.rs

use super::compression::{CompressionConfig, compress_analysis_output};
use super::error::{ErrorCategory, format_error_for_llm};
use rig::completion::ToolDefinition;
use rig::tool::Tool;
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::path::PathBuf;

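/// Arguments for the `analyze_project` tool. `path` optionally narrows the
/// analysis to a subdirectory relative to the project root.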
#[derive(Debug, Deserialize)]
pub struct AnalyzeArgs {
    pub path: Option<String>,
}

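/// Error returned when the tool itself fails (currently only when serializing
/// the analysis results). Recoverable problems are reported back to the model
/// as formatted strings via `format_error_for_llm` rather than as `Err` values.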
#[derive(Debug, thiserror::Error)]
#[error("Analysis error: {0}")]
pub struct AnalyzeError(String);

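/// Tool that analyzes the project (or monorepo) rooted at `project_path` and
/// returns a compressed summary for the LLM.
///
/// A minimal usage sketch, assuming the tool is driven directly rather than
/// through an agent (only the types defined in this file are used):
///
/// ```ignore
/// let tool = AnalyzeTool::new(PathBuf::from("/path/to/project"));
/// let summary = tool.call(AnalyzeArgs { path: None }).await?;
/// println!("{summary}");
/// ```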
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnalyzeTool {
    project_path: PathBuf,
}

impl AnalyzeTool {
    pub fn new(project_path: PathBuf) -> Self {
        Self { project_path }
    }
}

impl Tool for AnalyzeTool {
    const NAME: &'static str = "analyze_project";

    type Error = AnalyzeError;
    type Args = AnalyzeArgs;
    type Output = String;

    async fn definition(&self, _prompt: String) -> ToolDefinition {
        ToolDefinition {
            name: Self::NAME.to_string(),
            description: r#"Analyze the project to detect programming languages, frameworks, dependencies, build tools, and architecture patterns.

**What gets analyzed:**
- Languages: Java, Go, JavaScript/TypeScript, Rust, Python
- Frameworks: Spring Boot, Express, React, Vue, Django, FastAPI, Actix, etc.
- Dependencies: package.json, go.mod, Cargo.toml, pom.xml, requirements.txt
- Build tools: Maven, Gradle, npm/yarn/pnpm, Cargo, Make
- Architecture: microservices, monolith, monorepo structure

**Monorepo detection:**
Automatically detects and analyzes all sub-projects in monorepos (Nx, Turborepo, Lerna, Yarn workspaces, etc.). Returns analysis for each discovered project.

**Output format:**
Returns a compressed summary with key findings. The full analysis is stored and can be retrieved with the `retrieve_output` tool using the returned `retrieval_id`.

**When to use:**
- At the start of analysis, to understand the project structure
- After major changes, to verify the project configuration
- To identify all languages/frameworks before linting or optimization"#.to_string(),
            parameters: json!({
                "type": "object",
                "properties": {
                    "path": {
                        "type": "string",
                        "description": "Subdirectory path to analyze (relative to the project root). Use it to target a specific sub-project in a monorepo. Leave it empty or omit it to analyze the entire project from the root."
                    }
                }
            }),
        }
    }

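    // call(): resolve the optional sub-path, sanity-check the directory,
    // estimate project size, then run the analyzer and compress its output.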
    async fn call(&self, args: Self::Args) -> Result<Self::Output, Self::Error> {
        let path = if let Some(ref subpath) = args.path {
            let joined = self.project_path.join(subpath);
            if !joined.exists() {
                return Ok(format_error_for_llm(
                    "analyze_project",
                    ErrorCategory::FileNotFound,
                    &format!("Path not found: {}", subpath),
                    Some(vec![
                        "Check if the path exists",
                        "Use list_directory to explore available paths",
                        "Omit path parameter to analyze the entire project",
                    ]),
                ));
            }
            joined
        } else {
            self.project_path.clone()
        };

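        // Quick sanity checks before invoking the full analyzer.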
        let entries: Vec<_> = match std::fs::read_dir(&path) {
            Ok(dir) => dir.filter_map(Result::ok).collect(),
            Err(e) => {
                return Ok(format_error_for_llm(
                    "analyze_project",
                    ErrorCategory::PermissionDenied,
                    &format!("Cannot read directory: {}", e),
                    Some(vec![
                        "Check file permissions",
                        "Ensure the path is a directory, not a file",
                    ]),
                ));
            }
        };

        if entries.is_empty() {
            return Ok(format_error_for_llm(
                "analyze_project",
                ErrorCategory::ValidationFailed,
                "Directory appears to be empty",
                Some(vec![
                    "Check if the path is correct",
                    "Hidden files (starting with .) are included in analysis",
                    "Use list_directory to see what's in this path",
                ]),
            ));
        }

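        // File counting is capped at 15,000; at 10,000 or more a size warning
        // is prepended to the result so the model expects a slower analysis.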
        let file_count = count_files_recursive(&path, 15000);
        let large_project_warning = if file_count >= 10000 {
            Some(format!(
                "Note: Large project detected (~{}+ files). Analysis may take longer.",
                file_count
            ))
        } else {
            None
        };

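        // Run the analyzer. Most failures are returned as Ok(formatted error)
        // so the model can read them; only serialization failures become Err.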
        match crate::analyzer::analyze_monorepo(&path) {
            Ok(analysis) => {
                if analysis.technology_summary.languages.is_empty() {
                    return Ok(format_error_for_llm(
                        "analyze_project",
                        ErrorCategory::ValidationFailed,
                        "No supported programming languages detected in this directory",
                        Some(vec![
                            "Supported languages: Java, Go, JavaScript/TypeScript, Rust, Python",
                            "Check if source files exist in this directory or subdirectories",
                            "For non-code projects, use list_directory to explore contents",
                            "Try analyzing a specific subdirectory if this is a monorepo",
                        ]),
                    ));
                }

                let json_value = serde_json::to_value(&analysis).map_err(|e| {
                    AnalyzeError(format!("Failed to serialize analysis results: {}", e))
                })?;

                let config = CompressionConfig::default();
                let mut result = compress_analysis_output(&json_value, &config);

                if let Some(warning) = large_project_warning {
                    result = format!("{}\n\n{}", warning, result);
                }

                Ok(result)
            }
            Err(e) => {
                let error_str = e.to_string();
                let (category, suggestions) = if error_str.contains("permission")
                    || error_str.contains("Permission")
                {
                    (
                        ErrorCategory::PermissionDenied,
                        vec!["Check file permissions", "Try a different subdirectory"],
                    )
                } else if error_str.contains("not found") || error_str.contains("No such file") {
                    (
                        ErrorCategory::FileNotFound,
                        vec!["Verify the path exists", "Use list_directory to explore"],
                    )
                } else {
                    (
                        ErrorCategory::InternalError,
                        vec!["Try analyzing a subdirectory", "Check project structure"],
                    )
                };

                Ok(format_error_for_llm(
                    "analyze_project",
                    category,
                    &format!("Analysis failed: {}", e),
                    Some(suggestions),
                ))
            }
        }
    }
}

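/// Iteratively counts files under `path`, skipping common dependency and build
/// directories, and stops once `limit` is reached so huge trees don't stall the tool.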
fn count_files_recursive(path: &std::path::Path, limit: usize) -> usize {
    let mut count = 0;
    let mut dirs_to_visit = vec![path.to_path_buf()];

    while let Some(dir) = dirs_to_visit.pop() {
        if count >= limit {
            break;
        }

        if let Ok(entries) = std::fs::read_dir(&dir) {
            for entry in entries.filter_map(Result::ok) {
                if count >= limit {
                    break;
                }

                let path = entry.path();
                if let Some(name) = path.file_name().and_then(|n| n.to_str()) {
                    if matches!(
                        name,
                        "node_modules"
                            | "target"
                            | ".git"
                            | "vendor"
                            | "dist"
                            | "build"
                            | "__pycache__"
                            | ".venv"
                            | "venv"
                    ) {
                        continue;
                    }
                }

                if path.is_file() {
                    count += 1;
                } else if path.is_dir() {
                    dirs_to_visit.push(path);
                }
            }
        }
    }

    count
}

#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;

    #[test]
    fn test_count_files_empty_dir() {
        let dir = tempdir().unwrap();
        let count = count_files_recursive(dir.path(), 10000);
        assert_eq!(count, 0);
    }

    #[test]
    fn test_count_files_with_files() {
        let dir = tempdir().unwrap();
        std::fs::write(dir.path().join("file1.rs"), "fn main() {}").unwrap();
        std::fs::write(dir.path().join("file2.go"), "package main").unwrap();
        let count = count_files_recursive(dir.path(), 10000);
        assert_eq!(count, 2);
    }

    #[tokio::test]
    async fn test_analyze_nonexistent_path() {
        let dir = tempdir().unwrap();
        let tool = AnalyzeTool::new(dir.path().to_path_buf());
        let args = AnalyzeArgs {
            path: Some("nonexistent".to_string()),
        };

        let result = tool.call(args).await.unwrap();
        assert!(
            result.contains("error")
                || result.contains("not found")
                || result.contains("Path not found")
        );
    }
}