use crate::config::constants::tools;
use crate::tools::ToolRegistry;
use anyhow::Result;
use console::style;
use indexmap::IndexMap;
use serde_json::json;
use std::fs;
use std::path::{Path, PathBuf};
use walkdir::WalkDir;

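/// Everything the analyzer learns about the workspace; consumed by
/// `generate_agents_md` to tailor the generated AGENTS.md.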
#[derive(Debug, Clone)]
struct ProjectAnalysis {
    project_name: String,
    languages: Vec<String>,
    build_systems: Vec<String>,
    dependencies: IndexMap<String, Vec<String>>,

    source_dirs: Vec<String>,
    test_patterns: Vec<String>,
    config_files: Vec<String>,
    documentation_files: Vec<String>,

    commit_patterns: Vec<String>,
    has_git_history: bool,

    is_library: bool,
    is_application: bool,
    has_ci_cd: bool,
    has_docker: bool,

    estimated_word_count: usize,
}

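/// Entry point for the `init` command: analyzes the workspace, generates
/// AGENTS.md content, and writes it to the workspace root.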
pub async fn handle_init_command(registry: &mut ToolRegistry, workspace: &PathBuf) -> Result<()> {
    println!(
        "{}",
        style("Initializing project with AGENTS.md...")
            .cyan()
            .bold()
    );

    println!("{}", style("1. Analyzing project structure...").dim());
    let analysis = analyze_project(registry, workspace).await?;

    println!("{}", style("2. Generating AGENTS.md content...").dim());
    let agents_md_content = generate_agents_md(&analysis)?;

    println!("{}", style("3. Writing AGENTS.md file...").dim());
    let agents_md_path = workspace.join("AGENTS.md");

    registry
        .execute_tool(
            tools::WRITE_FILE,
            json!({
                "path": agents_md_path.to_string_lossy(),
                "content": agents_md_content,
                "overwrite": true
            }),
        )
        .await?;

    println!(
        "{} {}",
        style("✓").green().bold(),
        style("AGENTS.md generated successfully!").green()
    );
    println!(
        "{} {}",
        style(" Location:").blue(),
        agents_md_path.display()
    );

    Ok(())
}

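/// Walks the workspace to detect languages, build systems, dependencies,
/// source layout, test conventions, and git history.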
async fn analyze_project(
    registry: &mut ToolRegistry,
    workspace: &PathBuf,
) -> Result<ProjectAnalysis> {
    let project_name = workspace
        .file_name()
        .and_then(|n| n.to_str())
        .unwrap_or("project")
        .to_string();

    let mut analysis = ProjectAnalysis {
        project_name,
        languages: Vec::new(),
        build_systems: Vec::new(),
        dependencies: IndexMap::new(),
        source_dirs: Vec::new(),
        test_patterns: Vec::new(),
        config_files: Vec::new(),
        documentation_files: Vec::new(),
        commit_patterns: Vec::new(),
        has_git_history: false,
        is_library: false,
        is_application: false,
        has_ci_cd: false,
        has_docker: false,
        estimated_word_count: 0,
    };

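    // Classify every entry returned by the root listing.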
    let root_files = registry
        .execute_tool(tools::LIST_FILES, json!({"path": ".", "max_items": 100}))
        .await?;

    if let Some(files) = root_files.get("files") {
        if let Some(files_array) = files.as_array() {
            for file_obj in files_array {
                if let Some(path) = file_obj.get("path").and_then(|p| p.as_str()) {
                    analyze_file(&mut analysis, path, registry).await?;
                }
            }
        }
    }

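    // Probe for conventional source directories that exist on disk.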
    let common_src_dirs = vec!["src", "lib", "pkg", "internal", "cmd", "app", "core"];
    for dir in common_src_dirs {
        // Guard against duplicates: the root listing above may already have
        // recorded this directory.
        if workspace.join(dir).exists() && !analysis.source_dirs.contains(&dir.to_string()) {
            analysis.source_dirs.push(dir.to_string());
        }
    }

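    // Detect test-file naming conventions used in the project.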
    let test_patterns = vec!["test_", "_test", ".test.", ".spec.", "__tests__"];
    for pattern in test_patterns {
        if files_contain_pattern(&analysis, workspace, pattern) {
            analysis.test_patterns.push(pattern.to_string());
        }
    }

    analyze_git_history(&mut analysis, registry).await?;

    analyze_project_characteristics(&mut analysis);

    Ok(analysis)
}

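/// Classifies a single root-level entry, recording languages, build systems,
/// dependency manifests, and notable config/documentation files.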
async fn analyze_file(
    analysis: &mut ProjectAnalysis,
    path: &str,
    registry: &mut ToolRegistry,
) -> Result<()> {
    match path {
        "Cargo.toml" => {
            analysis.languages.push("Rust".to_string());
            analysis.build_systems.push("Cargo".to_string());
            // Record the manifest so `analyze_project_characteristics` can see it.
            analysis.config_files.push("Cargo.toml".to_string());

            let cargo_content = registry
                .execute_tool(
                    tools::READ_FILE,
                    json!({"path": "Cargo.toml", "max_bytes": 5000}),
                )
                .await?;

            if let Some(content) = cargo_content.get("content").and_then(|c| c.as_str()) {
                extract_cargo_dependencies(analysis, content);
            }
        }
        "Cargo.lock" => {
            analysis.config_files.push("Cargo.lock".to_string());
        }

        "package.json" => {
            analysis.languages.push("JavaScript/TypeScript".to_string());
            analysis.build_systems.push("npm/yarn/pnpm".to_string());
            analysis.config_files.push("package.json".to_string());

            let package_content = registry
                .execute_tool(
                    tools::READ_FILE,
                    json!({"path": "package.json", "max_bytes": 5000}),
                )
                .await?;

            if let Some(content) = package_content.get("content").and_then(|c| c.as_str()) {
                extract_package_dependencies(analysis, content);
            }
        }
        "yarn.lock" | "package-lock.json" | "pnpm-lock.yaml" => {
            analysis.config_files.push(path.to_string());
        }

        "requirements.txt" | "pyproject.toml" | "setup.py" | "Pipfile" => {
            // Several Python manifests may coexist; record each fact once.
            if !analysis.languages.contains(&"Python".to_string()) {
                analysis.languages.push("Python".to_string());
            }
            if !analysis.build_systems.contains(&"pip/poetry".to_string()) {
                analysis.build_systems.push("pip/poetry".to_string());
            }
            analysis.config_files.push(path.to_string());
        }

        "go.mod" | "go.sum" => {
            // Both files usually exist together; avoid double-counting Go.
            if !analysis.languages.contains(&"Go".to_string()) {
                analysis.languages.push("Go".to_string());
                analysis.build_systems.push("Go Modules".to_string());
            }
            analysis.config_files.push(path.to_string());
        }

        "pom.xml" | "build.gradle" | "build.gradle.kts" => {
            if !analysis.languages.contains(&"Java/Kotlin".to_string()) {
                analysis.languages.push("Java/Kotlin".to_string());
                analysis.build_systems.push("Maven/Gradle".to_string());
            }
            analysis.config_files.push(path.to_string());
        }

        "README.md" | "CHANGELOG.md" | "CONTRIBUTING.md" | "LICENSE" | "LICENSE.md" => {
            analysis.documentation_files.push(path.to_string());
        }

        ".gitignore" | ".editorconfig" | ".prettierrc" | ".eslintrc" | ".eslintrc.js"
        | ".eslintrc.json" => {
            analysis.config_files.push(path.to_string());
        }

        "Dockerfile" | "docker-compose.yml" | "docker-compose.yaml" | ".dockerignore" => {
            analysis.config_files.push(path.to_string());
        }

        "Jenkinsfile" | ".travis.yml" | "azure-pipelines.yml" | ".circleci/config.yml" => {
            analysis.config_files.push(path.to_string());
        }

        path if path.starts_with(".github/workflows/") => {
            analysis.config_files.push(path.to_string());
        }

        "src" | "lib" | "pkg" | "internal" | "cmd" | "app" | "core" => {
            analysis.source_dirs.push(path.to_string());
        }

        _ => {}
    }

    Ok(())
}

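/// Heuristically scans Cargo.toml for dependency names without a TOML parser:
/// tracks `[...dependencies]` section headers and takes the key before `=`.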
fn extract_cargo_dependencies(analysis: &mut ProjectAnalysis, content: &str) {
    let mut deps = Vec::new();
    let mut in_deps_section = false;

    for line in content.lines() {
        let line = line.trim();
        if line.starts_with('[') {
            // Section header such as [dependencies] or [dev-dependencies].
            in_deps_section = line
                .trim_start_matches('[')
                .trim_end_matches(']')
                .ends_with("dependencies");
            continue;
        }
        if in_deps_section && line.contains('=') && !line.starts_with('#') {
            if let Some(dep_name) = line.split('=').next() {
                let dep_name = dep_name.trim().trim_matches('"');
                if !dep_name.is_empty() {
                    deps.push(dep_name.to_string());
                }
            }
        }
    }

    if !deps.is_empty() {
        analysis
            .dependencies
            .insert("Rust (Cargo)".to_string(), deps);
    }
}

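/// Heuristically scans package.json for dependency names without a JSON
/// parser: collects keys inside "dependencies" / "devDependencies" blocks.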
fn extract_package_dependencies(analysis: &mut ProjectAnalysis, content: &str) {
    let mut deps = Vec::new();
    let mut in_deps_block = false;

    for line in content.lines() {
        let line = line.trim();
        if line.starts_with("\"dependencies\"") || line.starts_with("\"devDependencies\"") {
            in_deps_block = true;
            continue;
        }
        if in_deps_block {
            if line.starts_with('}') {
                // End of the current dependencies block.
                in_deps_block = false;
                continue;
            }
            // Entries look like `"name": "^1.2.3",`; take the quoted key.
            if let Some(dep_name) = line.split('"').nth(1) {
                if !dep_name.is_empty() {
                    deps.push(dep_name.to_string());
                }
            }
        }
    }

    if !deps.is_empty() {
        analysis
            .dependencies
            .insert("JavaScript/TypeScript (npm)".to_string(), deps);
    }
}

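/// Returns true when the test-file naming `pattern` matches a file name under
/// the detected source directories, or appears in a recorded config file.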
fn files_contain_pattern(analysis: &ProjectAnalysis, workspace: &Path, pattern: &str) -> bool {
    // Test conventions are naming patterns (`foo_test.go`, `bar.spec.ts`),
    // so match them against file names under the detected source directories.
    for dir in &analysis.source_dirs {
        let dir_path = workspace.join(dir);
        if dir_path.is_dir() {
            for entry in WalkDir::new(&dir_path).into_iter().filter_map(|e| e.ok()) {
                if entry.file_type().is_file()
                    && entry.file_name().to_string_lossy().contains(pattern)
                {
                    return true;
                }
            }
        }
    }

    // Fall back to config files, whose contents may reference the pattern
    // (e.g. a CI config invoking `pytest test_*.py`).
    for file in &analysis.config_files {
        if let Ok(content) = fs::read_to_string(workspace.join(file)) {
            if content.contains(pattern) {
                return true;
            }
        }
    }

    false
}

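/// Checks for a git repository and classifies recent commit messages as
/// Conventional Commits or standard style.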
async fn analyze_git_history(
    analysis: &mut ProjectAnalysis,
    registry: &mut ToolRegistry,
) -> Result<()> {
    let git_check = registry
        .execute_tool(tools::LIST_FILES, json!({"path": ".git", "max_items": 1}))
        .await;

    if git_check.is_ok() {
        analysis.has_git_history = true;

        let git_log_result = registry
            .execute_tool(
                tools::RUN_TERMINAL_CMD,
                json!({
                    "command": "git log --oneline -20 --pretty=format:'%s'",
                    "timeout": 5000
                }),
            )
            .await;

        if let Ok(output) = git_log_result {
            if let Some(stdout) = output.get("stdout").and_then(|s| s.as_str()) {
                let mut conventional_count = 0;
                let mut total_commits = 0;

                for line in stdout.lines() {
                    total_commits += 1;
                    let line = line.trim();

                    // Conventional Commits start with a type, optionally
                    // scoped: `feat: ...`, `fix(parser): ...`, and so on.
                    let is_conventional =
                        ["feat", "fix", "docs", "style", "refactor", "test", "chore"]
                            .into_iter()
                            .any(|t| {
                                line.strip_prefix(t).is_some_and(|rest| {
                                    rest.starts_with(':') || rest.starts_with('(')
                                })
                            });
                    if is_conventional {
                        conventional_count += 1;
                    }
                }

                if total_commits > 0 && (conventional_count * 100 / total_commits) > 50 {
                    analysis
                        .commit_patterns
                        .push("Conventional Commits".to_string());
                } else {
                    analysis
                        .commit_patterns
                        .push("Standard commit messages".to_string());
                }
            }
        } else {
            analysis
                .commit_patterns
                .push("Standard commit messages".to_string());
        }
    } else {
        analysis.has_git_history = false;
        analysis
            .commit_patterns
            .push("No version control detected".to_string());
    }

    Ok(())
}

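/// Derives coarse project traits (library vs. application, CI/CD, Docker)
/// from the files collected during analysis.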
fn analyze_project_characteristics(analysis: &mut ProjectAnalysis) {
    analysis.is_library = analysis.config_files.iter().any(|f| {
        f == "Cargo.toml" && analysis.languages.contains(&"Rust".to_string())
            || f == "package.json"
                && analysis
                    .languages
                    .contains(&"JavaScript/TypeScript".to_string())
            || f == "setup.py"
            || f == "pyproject.toml"
    });

    analysis.is_application = analysis.source_dirs.contains(&"src".to_string())
        || analysis.source_dirs.contains(&"cmd".to_string())
        || analysis.source_dirs.contains(&"app".to_string());

    analysis.has_ci_cd = analysis.config_files.iter().any(|f| {
        f.contains(".github/workflows")
            || f.contains(".gitlab-ci")
            || f.contains(".travis")
            || f == "Jenkinsfile"
            || f == ".circleci/config.yml"
            || f == "azure-pipelines.yml"
    });

    analysis.has_docker = analysis.config_files.iter().any(|f| {
        f == "Dockerfile"
            || f == "docker-compose.yml"
            || f == "docker-compose.yaml"
            || f == ".dockerignore"
    });
}

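/// Renders AGENTS.md from the analysis. A running word count keeps the
/// document concise: later sections are skipped once the budget is spent.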
fn generate_agents_md(analysis: &ProjectAnalysis) -> Result<String> {
    let mut content = String::new();
    let mut word_count = 0;

    content.push_str("# Repository Guidelines\n\n");
    word_count += 2;

    let intro = format!(
        "This document serves as a contributor guide for the {} repository.\n\n",
        analysis.project_name
    );
    content.push_str(&intro);
    word_count += intro.split_whitespace().count();

    if !analysis.source_dirs.is_empty() || !analysis.languages.is_empty() {
        content.push_str("## Project Structure & Module Organization\n\n");
        word_count += 5;

        if !analysis.source_dirs.is_empty() {
            for dir in &analysis.source_dirs {
                let line = format!("- `{}/` - Source code\n", dir);
                content.push_str(&line);
                word_count += line.split_whitespace().count();
            }
        }

        for language in &analysis.languages {
            match language.as_str() {
                "Rust" => {
                    content.push_str(
                        "- `tests/` - Integration tests\n- `examples/` - Usage examples\n",
                    );
                    word_count += 8;
                }
                "JavaScript/TypeScript" => {
                    content.push_str(
                        "- `test/` or `__tests__/` - Test files\n- `dist/` - Built assets\n",
                    );
                    word_count += 10;
                }
                "Python" => {
                    content.push_str("- `tests/` - Test files\n- Package modules in root\n");
                    word_count += 9;
                }
                _ => {}
            }
        }
        content.push('\n');
    }

    if !analysis.build_systems.is_empty() && word_count < 300 {
        content.push_str("## Build, Test, and Development Commands\n\n");
        word_count += 6;

        for system in &analysis.build_systems {
            match system.as_str() {
                "Cargo" => {
                    content.push_str("- `cargo build` - Build project\n- `cargo test` - Run tests\n- `cargo run` - Run application\n");
                    word_count += 15;
                }
                "npm/yarn/pnpm" => {
                    content.push_str("- `npm install` - Install dependencies\n- `npm test` - Run tests\n- `npm run build` - Build for production\n");
                    word_count += 18;
                }
                "pip/poetry" => {
                    content.push_str("- `python -m pytest` - Run tests\n- `pip install -r requirements.txt` - Install dependencies\n");
                    word_count += 15;
                }
                _ => {}
            }
        }
        content.push('\n');
    }

    if !analysis.languages.is_empty() && word_count < 350 {
        content.push_str("## Coding Style & Naming Conventions\n\n");
        word_count += 5;

        for language in &analysis.languages {
            match language.as_str() {
                "Rust" => {
                    content.push_str("- **Indentation:** 4 spaces\n- **Naming:** snake_case functions, PascalCase types\n- **Formatting:** `cargo fmt`\n\n");
                    word_count += 15;
                }
                "JavaScript/TypeScript" => {
                    content.push_str("- **Indentation:** 2 spaces\n- **Naming:** camelCase variables, PascalCase classes\n- **Formatting:** Prettier\n\n");
                    word_count += 14;
                }
                "Python" => {
                    content.push_str("- **Style:** PEP 8\n- **Indentation:** 4 spaces\n- **Formatting:** Black\n\n");
                    word_count += 10;
                }
                _ => {}
            }
        }
    }

    if !analysis.test_patterns.is_empty() && word_count < 370 {
        content.push_str("## Testing Guidelines\n\n");
        word_count += 3;

        let test_info = format!(
            "- Test files: {}\n- Run tests using build system commands above\n\n",
            analysis.test_patterns.join(", ")
        );
        content.push_str(&test_info);
        word_count += test_info.split_whitespace().count();
    }

    if word_count < 380 {
        content.push_str("## Commit & Pull Request Guidelines\n\n");
        word_count += 5;

        if analysis
            .commit_patterns
            .contains(&"Conventional Commits".to_string())
        {
            content.push_str("- Use conventional commit format: `type(scope): description`\n");
            content.push_str("- Types: `feat`, `fix`, `docs`, `refactor`, `test`, `chore`\n");
            word_count += 14;
        } else {
            content.push_str("- Write clear, descriptive commit messages\n");
            content.push_str("- Use imperative mood: \"Add feature\" not \"Added feature\"\n");
            word_count += 13;
        }

        content.push_str("- Link issues with `Fixes #123` or `Closes #123`\n");
        content.push_str("- Ensure tests pass before submitting PRs\n\n");
        word_count += 12;
    }

    if word_count < 390 {
        content.push_str("## Agent-Specific Instructions\n\n");
        content.push_str("- Follow established patterns above\n");
        content.push_str("- Include tests for new functionality\n");
        content.push_str("- Update documentation for API changes\n");
        word_count += 15;
    }

    // The word count exists only to cap section emission above; nothing
    // downstream consumes it.
    Ok(content)
}