1#![allow(non_snake_case, unused_variables)]
2
3use std::collections::HashMap;
4use std::fs;
5use std::io::Write;
6use std::path::{Path, PathBuf};
7
8use regex::Regex;
9use serde::{Deserialize, Serialize};
10
11use super::{CRT_DRAW_MS, colors, icons, theme};
12
/// A single issue discovered by one of the defrag passes.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DefragFinding {
    /// File the finding refers to.
    pub file: PathBuf,
    /// 1-based line number, when the finding points at a specific line.
    pub line: Option<usize>,
    /// How serious the finding is (info / warning / error).
    pub severity: Severity,
    /// Human-readable description of the problem.
    pub message: String,
    /// Description of the automated fix, when one exists.
    pub fix_description: Option<String>,
    /// Whether `apply_fixes` can resolve this finding automatically.
    pub fixable: bool,
    /// Name of the pass that produced the finding (e.g. "refs", "drift").
    pub pass_name: String,
}

/// Severity of a finding; serialized in lowercase ("info"/"warning"/"error").
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum Severity {
    Info,
    Warning,
    Error,
}
33
/// Aggregate statistics for a full defrag run; included in JSON output.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DefragSummary {
    /// Total number of findings across all passes.
    pub total_findings: usize,
    /// How many of those findings can be fixed automatically.
    pub fixable_count: usize,
    /// Findings broken down by severity.
    pub by_severity: SeverityCounts,
    /// Per-pass totals, in pass execution order.
    pub by_pass: Vec<PassSummary>,
}

/// Number of findings per severity level.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct SeverityCounts {
    pub info: usize,
    pub warning: usize,
    pub error: usize,
}

/// Result of a single defrag pass.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PassSummary {
    /// Pass name (e.g. "refs", "drift").
    pub name: String,
    /// Number of findings the pass produced.
    pub findings: usize,
    /// Whether the pass came back clean or with findings.
    pub status: PassStatus,
}

/// Outcome of running a pass.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum PassStatus {
    /// The pass ran and found nothing.
    Clean,
    /// The pass ran and produced findings.
    Findings,
    // NOTE(review): `Error` is never constructed by the passes visible in
    // this file — presumably reserved for passes that fail to run; confirm.
    Error,
}
62
/// Mapping from retired brand spellings to their current replacements.
fn migration_map() -> HashMap<&'static str, &'static str> {
    HashMap::from([
        ("legacy", "roboticus"),
        ("open_legacy", "roboticus"),
        ("Legacy", "Roboticus"),
        ("olegacy", "roboticus"),
    ])
}
73
/// Recursively collect files under `dir`, keeping only those whose extension
/// appears in `extensions` (an empty slice keeps every file). Hidden
/// directories, `target`, and `node_modules` are never descended into.
fn walk_files(dir: &Path, extensions: &[&str]) -> Vec<PathBuf> {
    let mut collected = Vec::new();
    walk_files_inner(dir, extensions, &mut collected);
    collected
}

/// Depth-first worker for [`walk_files`]; appends matching files to `out`.
fn walk_files_inner(dir: &Path, extensions: &[&str], out: &mut Vec<PathBuf>) {
    // Unreadable directories are silently skipped.
    let Ok(entries) = fs::read_dir(dir) else {
        return;
    };
    for entry in entries.flatten() {
        let path = entry.path();
        if path.is_dir() {
            let dir_name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
            let skip =
                dir_name.starts_with('.') || matches!(dir_name, "target" | "node_modules");
            if !skip {
                walk_files_inner(&path, extensions, out);
            }
        } else if path.is_file() {
            let keep = extensions.is_empty()
                || path
                    .extension()
                    .and_then(|e| e.to_str())
                    .is_some_and(|ext| extensions.contains(&ext));
            if keep {
                out.push(path);
            }
        }
    }
}
105
/// Recursively list every entry (files and directories) beneath `dir`.
/// Unlike [`walk_files`] nothing is filtered or skipped.
fn walk_all_entries(dir: &Path) -> Vec<PathBuf> {
    let mut acc = Vec::new();
    walk_all_entries_inner(dir, &mut acc);
    acc
}

/// Depth-first worker for [`walk_all_entries`].
fn walk_all_entries_inner(dir: &Path, out: &mut Vec<PathBuf>) {
    let Ok(entries) = fs::read_dir(dir) else {
        return;
    };
    for entry in entries.flatten() {
        let path = entry.path();
        out.push(path.clone());
        if path.is_dir() {
            walk_all_entries_inner(&path, out);
        }
    }
}
125
/// True when `path` is a readable directory containing no entries.
/// Nonexistent or unreadable paths report `false`.
fn is_dir_empty(path: &Path) -> bool {
    fs::read_dir(path)
        .map(|mut entries| entries.next().is_none())
        .unwrap_or(false)
}
132
133pub fn pass_refs(workspace: &Path) -> Vec<DefragFinding> {
136 let mut findings = Vec::new();
137 let map = migration_map();
138 let extensions = &["md", "sh", "py", "js", "toml", "json"];
139 let files = walk_files(workspace, extensions);
140
141 let pattern = map.keys().copied().collect::<Vec<_>>().join("|");
143 let re = match Regex::new(&pattern) {
144 Ok(r) => r,
145 Err(_) => return findings,
146 };
147
148 for file in files {
149 let content = match fs::read_to_string(&file) {
150 Ok(c) => c,
151 Err(_) => continue,
152 };
153 for (line_num, line) in content.lines().enumerate() {
154 for m in re.find_iter(line) {
155 let old = m.as_str();
156 let new = map.get(old).copied().unwrap_or("roboticus");
157 findings.push(DefragFinding {
158 file: file.clone(),
159 line: Some(line_num + 1),
160 severity: Severity::Warning,
161 message: format!("stale reference '{old}' should be '{new}'"),
162 fix_description: Some(format!("replace '{old}' with '{new}'")),
163 fixable: true,
164 pass_name: "refs".to_string(),
165 });
166 }
167 }
168 }
169 findings
170}
171
172pub fn pass_drift(workspace: &Path) -> Vec<DefragFinding> {
175 let mut findings = Vec::new();
176 let config_path = workspace.join("roboticus.toml");
177 let config_content = match fs::read_to_string(&config_path) {
178 Ok(c) => c,
179 Err(_) => return findings, };
181
182 let config: toml::Value = match config_content.parse() {
183 Ok(v) => v,
184 Err(_) => return findings,
185 };
186
187 let port = config
189 .get("server")
190 .and_then(|s| s.get("port"))
191 .and_then(|p| p.as_integer())
192 .map(|p| p.to_string());
193 let bind = config
194 .get("server")
195 .and_then(|s| s.get("bind"))
196 .and_then(|b| b.as_str())
197 .map(|s| s.to_string());
198 let agent_name = config
199 .get("agent")
200 .and_then(|a| a.get("name"))
201 .and_then(|n| n.as_str())
202 .map(|s| s.to_string());
203
204 let port_re = Regex::new(r"(?:port\s*[=:]\s*|localhost:)(\d{4,5})").ok();
205 let bind_re = Regex::new(r"(?:bind\s*[=:]\s*)([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)").ok();
206 let name_re = Regex::new(r#"(?:agent[_\s-]?name\s*[=:"]\s*)(\w+)"#).ok();
207
208 let md_files = walk_files(workspace, &["md"]);
209 for file in md_files {
210 let content = match fs::read_to_string(&file) {
211 Ok(c) => c,
212 Err(_) => continue,
213 };
214 if let Some(ref cfg_port) = port
216 && let Some(ref port_re) = port_re
217 {
218 for (line_num, line) in content.lines().enumerate() {
219 for cap in port_re.captures_iter(line) {
220 if let Some(found_port) = cap.get(1) {
221 let found = found_port.as_str();
222 if found != cfg_port.as_str() {
223 findings.push(DefragFinding {
224 file: file.clone(),
225 line: Some(line_num + 1),
226 severity: Severity::Info,
227 message: format!(
228 "references port {found} but config uses {cfg_port}"
229 ),
230 fix_description: None,
231 fixable: false,
232 pass_name: "drift".to_string(),
233 });
234 }
235 }
236 }
237 }
238 }
239 if let Some(ref cfg_bind) = bind
241 && let Some(ref bind_re) = bind_re
242 {
243 for (line_num, line) in content.lines().enumerate() {
244 for cap in bind_re.captures_iter(line) {
245 if let Some(found_bind) = cap.get(1) {
246 let found = found_bind.as_str();
247 if found != cfg_bind.as_str() {
248 findings.push(DefragFinding {
249 file: file.clone(),
250 line: Some(line_num + 1),
251 severity: Severity::Info,
252 message: format!(
253 "references bind address {found} but config uses {cfg_bind}"
254 ),
255 fix_description: None,
256 fixable: false,
257 pass_name: "drift".to_string(),
258 });
259 }
260 }
261 }
262 }
263 }
264 if let Some(ref cfg_name) = agent_name
266 && let Some(ref name_re) = name_re
267 {
268 for (line_num, line) in content.lines().enumerate() {
269 for cap in name_re.captures_iter(line) {
270 if let Some(found_name) = cap.get(1) {
271 let found = found_name.as_str();
272 if found != cfg_name.as_str() {
273 findings.push(DefragFinding {
274 file: file.clone(),
275 line: Some(line_num + 1),
276 severity: Severity::Info,
277 message: format!(
278 "references agent name '{found}' but config uses '{cfg_name}'"
279 ),
280 fix_description: None,
281 fixable: false,
282 pass_name: "drift".to_string(),
283 });
284 }
285 }
286 }
287 }
288 }
289 }
290 findings
291}
292
293pub fn pass_artifacts(workspace: &Path) -> Vec<DefragFinding> {
296 let mut findings = Vec::new();
297 let skills_dir = workspace.join("skills");
298 if !skills_dir.is_dir() {
299 return findings;
300 }
301
302 let entries = walk_all_entries(&skills_dir);
303 for path in entries {
304 let name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
305 let ext = path.extension().and_then(|e| e.to_str()).unwrap_or("");
306
307 if ext == "log" && path.is_file() {
308 findings.push(DefragFinding {
309 file: path,
310 line: None,
311 severity: Severity::Info,
312 message: "stale log file in skills directory".to_string(),
313 fix_description: Some("delete log file".to_string()),
314 fixable: true,
315 pass_name: "artifacts".to_string(),
316 });
317 } else if name == "__pycache__" && path.is_dir() {
318 findings.push(DefragFinding {
319 file: path,
320 line: None,
321 severity: Severity::Info,
322 message: "__pycache__ directory in skills".to_string(),
323 fix_description: Some("delete __pycache__ directory".to_string()),
324 fixable: true,
325 pass_name: "artifacts".to_string(),
326 });
327 } else if ext == "bak" && path.is_file() {
328 findings.push(DefragFinding {
329 file: path,
330 line: None,
331 severity: Severity::Warning,
332 message: "backup file in skills directory".to_string(),
333 fix_description: Some("delete backup file".to_string()),
334 fixable: true,
335 pass_name: "artifacts".to_string(),
336 });
337 } else if path.is_dir() && is_dir_empty(&path) {
338 findings.push(DefragFinding {
339 file: path,
340 line: None,
341 severity: Severity::Info,
342 message: "empty directory in skills".to_string(),
343 fix_description: Some("remove empty directory".to_string()),
344 fixable: true,
345 pass_name: "artifacts".to_string(),
346 });
347 }
348 }
349 findings
350}
351
352pub fn pass_stale(workspace: &Path) -> Vec<DefragFinding> {
355 let mut findings = Vec::new();
356 let state_path = workspace.join(".roboticus").join("update_state.json");
357 let content = match fs::read_to_string(&state_path) {
358 Ok(c) => c,
359 Err(_) => return findings,
360 };
361
362 let state: serde_json::Value = match serde_json::from_str(&content) {
363 Ok(v) => v,
364 Err(_) => return findings,
365 };
366
367 if let Some(obj) = state.as_object() {
369 for (key, value) in obj {
370 if let Some(path_str) = value.as_str() {
371 let target = workspace.join(path_str);
372 if !target.exists() {
373 findings.push(DefragFinding {
374 file: state_path.clone(),
375 line: None,
376 severity: Severity::Warning,
377 message: format!("ghost entry '{key}' references missing file: {path_str}"),
378 fix_description: Some(format!(
379 "remove entry '{key}' from update_state.json"
380 )),
381 fixable: true,
382 pass_name: "stale".to_string(),
383 });
384 }
385 }
386 if let Some(inner_obj) = value.as_object()
388 && let Some(path_str) = inner_obj.get("path").and_then(|p| p.as_str())
389 {
390 let target = workspace.join(path_str);
391 if !target.exists() {
392 findings.push(DefragFinding {
393 file: state_path.clone(),
394 line: None,
395 severity: Severity::Warning,
396 message: format!("ghost entry '{key}' references missing file: {path_str}"),
397 fix_description: Some(format!(
398 "remove entry '{key}' from update_state.json"
399 )),
400 fixable: true,
401 pass_name: "stale".to_string(),
402 });
403 }
404 }
405 }
406 }
407 findings
408}
409
410pub fn pass_identity(workspace: &Path) -> Vec<DefragFinding> {
413 let mut findings = Vec::new();
414 let map = migration_map();
415 let files = walk_files(workspace, &["toml", "json"]);
416
417 let old_brands: Vec<&str> = map.keys().copied().collect();
418 let brand_pattern = old_brands.join("|");
419 let re = match Regex::new(&format!(
420 r#"(?:generated_by|brand)\s*[=:]\s*["']?({brand_pattern})"#
421 )) {
422 Ok(r) => r,
423 Err(_) => return findings,
424 };
425
426 for file in files {
427 let content = match fs::read_to_string(&file) {
428 Ok(c) => c,
429 Err(_) => continue,
430 };
431 for (line_num, line) in content.lines().enumerate() {
432 for cap in re.captures_iter(line) {
433 if let Some(m) = cap.get(1) {
434 let old = m.as_str();
435 let new = map.get(old).copied().unwrap_or("roboticus");
436 findings.push(DefragFinding {
437 file: file.clone(),
438 line: Some(line_num + 1),
439 severity: Severity::Warning,
440 message: format!("brand identity field references '{old}'"),
441 fix_description: Some(format!("replace '{old}' with '{new}'")),
442 fixable: true,
443 pass_name: "identity".to_string(),
444 });
445 }
446 }
447 }
448 }
449 findings
450}
451
452pub fn pass_scripts(workspace: &Path) -> Vec<DefragFinding> {
455 let mut findings = Vec::new();
456 let skills_dir = workspace.join("skills");
457 if !skills_dir.is_dir() {
458 return findings;
459 }
460
461 let script_files = walk_files(&skills_dir, &["sh", "py", "rb", "pl"]);
462 let shebang_re = Regex::new(r"^#!.+").unwrap();
463 let hardcoded_re = Regex::new(r"/usr/local/bin/legacy").unwrap();
464
465 for file in script_files {
466 let content = match fs::read_to_string(&file) {
467 Ok(c) => c,
468 Err(_) => continue,
469 };
470 let lines: Vec<&str> = content.lines().collect();
471
472 if lines.is_empty() || !shebang_re.is_match(lines[0]) {
474 findings.push(DefragFinding {
475 file: file.clone(),
476 line: Some(1),
477 severity: Severity::Warning,
478 message: "script missing valid shebang line".to_string(),
479 fix_description: None,
480 fixable: false,
481 pass_name: "scripts".to_string(),
482 });
483 }
484
485 for (line_num, line) in lines.iter().enumerate() {
487 if hardcoded_re.is_match(line) {
488 findings.push(DefragFinding {
489 file: file.clone(),
490 line: Some(line_num + 1),
491 severity: Severity::Error,
492 message: "hardcoded path '/usr/local/bin/legacy'".to_string(),
493 fix_description: None,
494 fixable: false,
495 pass_name: "scripts".to_string(),
496 });
497 }
498 }
499 }
500 findings
501}
502
503fn apply_fixes(workspace: &Path, findings: &[DefragFinding]) -> usize {
506 let mut fixed = 0;
507
508 let refs_findings: Vec<&DefragFinding> = findings
510 .iter()
511 .filter(|f| f.fixable && f.pass_name == "refs")
512 .collect();
513 let artifact_findings: Vec<&DefragFinding> = findings
514 .iter()
515 .filter(|f| f.fixable && f.pass_name == "artifacts")
516 .collect();
517 let stale_findings: Vec<&DefragFinding> = findings
518 .iter()
519 .filter(|f| f.fixable && f.pass_name == "stale")
520 .collect();
521 let identity_findings: Vec<&DefragFinding> = findings
522 .iter()
523 .filter(|f| f.fixable && f.pass_name == "identity")
524 .collect();
525
526 if !refs_findings.is_empty() {
528 let map = migration_map();
529 let mut patched_files: HashMap<PathBuf, String> = HashMap::new();
530 for f in &refs_findings {
531 patched_files.entry(f.file.clone()).or_insert_with(|| {
532 match fs::read_to_string(&f.file) {
533 Ok(content) => content,
534 Err(e) => {
535 tracing::warn!(error = %e, path = %f.file.display(), "failed to read file for defrag, skipping");
536 String::new()
537 }
538 }
539 });
540 }
541 patched_files.retain(|_, content| !content.is_empty());
543 for (path, content) in &mut patched_files {
544 let mut updated = content.clone();
545 for (old, new) in &map {
546 updated = updated.replace(old, new);
547 }
548 if updated != *content && fs::write(path, &updated).is_ok() {
549 fixed += 1;
550 }
551 }
552 }
553
554 for f in &artifact_findings {
556 let ok = if f.file.is_dir() {
557 fs::remove_dir_all(&f.file).is_ok()
558 } else {
559 fs::remove_file(&f.file).is_ok()
560 };
561 if ok {
562 fixed += 1;
563 }
564 }
565
566 if !stale_findings.is_empty() {
568 let state_path = workspace.join(".roboticus").join("update_state.json");
569 if let Ok(content) = fs::read_to_string(&state_path)
570 && let Ok(mut state) = serde_json::from_str::<serde_json::Value>(&content)
571 && let Some(obj) = state.as_object_mut()
572 {
573 for f in &stale_findings {
574 if let Some(key) = f
576 .message
577 .strip_prefix("ghost entry '")
578 .and_then(|s| s.split('\'').next())
579 {
580 obj.remove(key);
581 }
582 }
583 if let Ok(json) = serde_json::to_string_pretty(&state)
584 && fs::write(&state_path, json).is_ok()
585 {
586 fixed += stale_findings.len();
587 }
588 }
589 }
590
591 if !identity_findings.is_empty() {
593 let map = migration_map();
594 let mut patched_files: HashMap<PathBuf, String> = HashMap::new();
595 for f in &identity_findings {
596 patched_files
597 .entry(f.file.clone())
598 .or_insert_with(|| fs::read_to_string(&f.file).inspect_err(|e| tracing::warn!(error = %e, path = %f.file.display(), "failed to read file for defrag")).unwrap_or_default());
599 }
600 for (path, content) in &mut patched_files {
601 let mut updated = content.clone();
602 for (old, new) in &map {
603 updated = updated.replace(old, new);
604 }
605 if updated != *content && fs::write(path, &updated).is_ok() {
606 fixed += 1;
607 }
608 }
609 }
610
611 fixed
612}
613
614pub fn cmd_defrag(
617 workspace: &Path,
618 fix: bool,
619 yes: bool,
620 json_output: bool,
621) -> roboticus_core::Result<()> {
622 let (DIM, BOLD, ACCENT, GREEN, YELLOW, RED, CYAN, RESET, MONO) = colors();
623 let (OK, ACTION, WARN, DETAIL, ERR) = icons();
624
625 let pass_names = ["refs", "drift", "artifacts", "stale", "identity", "scripts"];
627 let pass_results: Vec<(&str, Vec<DefragFinding>)> = vec![
628 ("refs", pass_refs(workspace)),
629 ("drift", pass_drift(workspace)),
630 ("artifacts", pass_artifacts(workspace)),
631 ("stale", pass_stale(workspace)),
632 ("identity", pass_identity(workspace)),
633 ("scripts", pass_scripts(workspace)),
634 ];
635
636 let mut all_findings: Vec<DefragFinding> = Vec::new();
638 let mut by_pass: Vec<PassSummary> = Vec::new();
639 let mut severity_counts = SeverityCounts::default();
640
641 for (name, findings) in &pass_results {
642 let count = findings.len();
643 let status = if count == 0 {
644 PassStatus::Clean
645 } else {
646 PassStatus::Findings
647 };
648 by_pass.push(PassSummary {
649 name: name.to_string(),
650 findings: count,
651 status,
652 });
653 for f in findings {
654 match f.severity {
655 Severity::Info => severity_counts.info += 1,
656 Severity::Warning => severity_counts.warning += 1,
657 Severity::Error => severity_counts.error += 1,
658 }
659 }
660 all_findings.extend(findings.iter().cloned());
661 }
662
663 let fixable_count = all_findings.iter().filter(|f| f.fixable).count();
664 let total_findings = all_findings.len();
665
666 let summary = DefragSummary {
667 total_findings,
668 fixable_count,
669 by_severity: severity_counts.clone(),
670 by_pass: by_pass.clone(),
671 };
672
673 if json_output {
674 let output = serde_json::json!({
675 "findings": all_findings,
676 "summary": summary,
677 });
678 let json_str = serde_json::to_string_pretty(&output).unwrap_or_else(|_| "{}".to_string());
679 std::io::stdout()
681 .write_all(json_str.as_bytes())
682 .and_then(|_| std::io::stdout().write_all(b"\n"))
683 .and_then(|_| std::io::stdout().flush())?;
684 return Ok(());
685 }
686
687 eprintln!();
689 eprintln!(" {BOLD}Workspace Defrag{RESET}");
690 eprintln!(" {DIM}{}{RESET}", "\u{2500}".repeat(40));
691 eprintln!();
692
693 for ps in &by_pass {
694 let dots = ".".repeat(20usize.saturating_sub(ps.name.len()));
695 let status_str = if ps.findings == 0 {
696 format!("{GREEN}clean{RESET}")
697 } else {
698 let plural = if ps.findings == 1 {
699 "finding"
700 } else {
701 "findings"
702 };
703 format!("{YELLOW}{} {plural}{RESET}", ps.findings)
704 };
705 eprintln!(
706 " {DIM}\u{25a0}{RESET} {BOLD}{}{RESET} {DIM}{dots}{RESET} {status_str}",
707 ps.name
708 );
709 }
710
711 eprintln!();
712 let fixable_str = if fixable_count > 0 {
713 format!(" ({fixable_count} fixable)")
714 } else {
715 String::new()
716 };
717 eprintln!(" {BOLD}Summary:{RESET} {total_findings} findings{fixable_str}");
718 eprintln!(
719 " {CYAN}info:{RESET} {} {DIM}|{RESET} {YELLOW}warning:{RESET} {} {DIM}|{RESET} {RED}error:{RESET} {}",
720 severity_counts.info, severity_counts.warning, severity_counts.error
721 );
722
723 if !all_findings.is_empty() {
725 eprintln!();
726 for f in &all_findings {
727 let sev_color = match f.severity {
728 Severity::Info => CYAN,
729 Severity::Warning => YELLOW,
730 Severity::Error => RED,
731 };
732 let sev_label = match f.severity {
733 Severity::Info => "info",
734 Severity::Warning => "warn",
735 Severity::Error => "error",
736 };
737 let loc = match f.line {
738 Some(l) => format!("{}:{l}", f.file.display()),
739 None => format!("{}", f.file.display()),
740 };
741 eprintln!(
742 " {sev_color}[{sev_label}]{RESET} {DIM}{}{RESET} {loc}",
743 f.pass_name
744 );
745 eprintln!(" {}", f.message);
746 }
747 }
748
749 if fix && fixable_count > 0 {
751 let proceed = if yes {
752 true
753 } else {
754 eprint!("\n Apply {fixable_count} fixable findings? [y/N] ");
755 std::io::stderr().flush().ok();
756 let mut input = String::new();
757 std::io::stdin().read_line(&mut input).unwrap_or(0);
758 matches!(input.trim(), "y" | "Y" | "yes" | "Yes" | "YES")
759 };
760
761 if proceed {
762 let fixed = apply_fixes(workspace, &all_findings);
763 eprintln!();
764 eprintln!(" {OK} {GREEN}Applied fixes ({fixed} items){RESET}");
765 } else {
766 eprintln!();
767 eprintln!(" {DIM}No changes made.{RESET}");
768 }
769 } else if fix && fixable_count == 0 {
770 eprintln!();
771 eprintln!(" {OK} {GREEN}Nothing to fix{RESET}");
772 }
773
774 eprintln!();
775 Ok(())
776}
777
778#[cfg(test)]
783mod tests {
784 use std::fs;
785
786 use tempfile::TempDir;
787
788 use super::*;
789
    // --- migration_map and basic type invariants ---

    #[test]
    fn migration_map_contains_expected_keys() {
        let m = migration_map();
        assert!(m.contains_key("legacy"));
        assert!(m.contains_key("Legacy"));
        assert!(m.contains_key("open_legacy"));
        assert!(m.contains_key("olegacy"));
    }

    #[test]
    fn migration_map_maps_to_roboticus() {
        let m = migration_map();
        assert_eq!(m["legacy"], "roboticus");
        assert_eq!(m["Legacy"], "Roboticus");
        assert_eq!(m["open_legacy"], "roboticus");
        assert_eq!(m["olegacy"], "roboticus");
    }

    #[test]
    fn severity_variants_are_distinct() {
        assert_ne!(Severity::Info, Severity::Warning);
        assert_ne!(Severity::Warning, Severity::Error);
        assert_ne!(Severity::Info, Severity::Error);
    }

    #[test]
    fn pass_status_variants_are_distinct() {
        assert_ne!(PassStatus::Clean, PassStatus::Findings);
        assert_ne!(PassStatus::Findings, PassStatus::Error);
        assert_ne!(PassStatus::Clean, PassStatus::Error);
    }

    #[test]
    fn severity_counts_default_is_zero() {
        let c = SeverityCounts::default();
        assert_eq!(c.info, 0);
        assert_eq!(c.warning, 0);
        assert_eq!(c.error, 0);
    }
833
    // --- DefragFinding serde behavior ---

    #[test]
    fn defrag_finding_roundtrips_via_serialize() {
        let finding = DefragFinding {
            file: std::path::PathBuf::from("/tmp/test.md"),
            line: Some(42),
            severity: Severity::Warning,
            message: "stale reference 'legacy'".to_string(),
            fix_description: Some("replace with 'roboticus'".to_string()),
            fixable: true,
            pass_name: "refs".to_string(),
        };
        let json = serde_json::to_string(&finding).unwrap();
        let parsed: DefragFinding = serde_json::from_str(&json).unwrap();
        assert_eq!(parsed.line, Some(42));
        assert_eq!(parsed.severity, Severity::Warning);
        assert_eq!(parsed.pass_name, "refs");
        assert!(parsed.fixable);
    }

    #[test]
    fn defrag_finding_no_line_serializes_as_null() {
        let finding = DefragFinding {
            file: std::path::PathBuf::from("/tmp/test.md"),
            line: None,
            severity: Severity::Info,
            message: "empty dir".to_string(),
            fix_description: None,
            fixable: false,
            pass_name: "artifacts".to_string(),
        };
        let json = serde_json::to_string(&finding).unwrap();
        // `line` has no skip-serializing attribute, so None becomes null.
        assert!(json.contains("\"line\":null"));
    }
869
    // --- is_dir_empty ---

    #[test]
    fn is_dir_empty_returns_true_for_empty_dir() {
        let tmp = TempDir::new().unwrap();
        assert!(is_dir_empty(tmp.path()));
    }

    #[test]
    fn is_dir_empty_returns_false_for_non_empty_dir() {
        let tmp = TempDir::new().unwrap();
        fs::write(tmp.path().join("file.txt"), "content").unwrap();
        assert!(!is_dir_empty(tmp.path()));
    }

    #[test]
    fn is_dir_empty_returns_false_for_nonexistent_path() {
        assert!(!is_dir_empty(std::path::Path::new("/nonexistent/path/xyz")));
    }
889
    // --- walk_files / walk_all_entries traversal behavior ---

    #[test]
    fn walk_files_empty_extensions_returns_all_files() {
        let tmp = TempDir::new().unwrap();
        fs::write(tmp.path().join("a.md"), "").unwrap();
        fs::write(tmp.path().join("b.toml"), "").unwrap();
        let files = walk_files(tmp.path(), &[]);
        assert_eq!(files.len(), 2);
    }

    #[test]
    fn walk_files_filters_by_extension() {
        let tmp = TempDir::new().unwrap();
        fs::write(tmp.path().join("a.md"), "").unwrap();
        fs::write(tmp.path().join("b.toml"), "").unwrap();
        fs::write(tmp.path().join("c.json"), "").unwrap();
        let files = walk_files(tmp.path(), &["md"]);
        assert_eq!(files.len(), 1);
        assert!(files[0].ends_with("a.md"));
    }

    #[test]
    fn walk_files_skips_hidden_directories() {
        let tmp = TempDir::new().unwrap();
        let hidden = tmp.path().join(".hidden");
        fs::create_dir_all(&hidden).unwrap();
        fs::write(hidden.join("secret.md"), "hide me").unwrap();
        fs::write(tmp.path().join("visible.md"), "see me").unwrap();
        let files = walk_files(tmp.path(), &["md"]);
        assert_eq!(files.len(), 1);
        assert!(files[0].ends_with("visible.md"));
    }

    #[test]
    fn walk_files_skips_target_directory() {
        let tmp = TempDir::new().unwrap();
        let target = tmp.path().join("target");
        fs::create_dir_all(&target).unwrap();
        fs::write(target.join("build.toml"), "skip me").unwrap();
        fs::write(tmp.path().join("real.toml"), "find me").unwrap();
        let files = walk_files(tmp.path(), &["toml"]);
        assert_eq!(files.len(), 1);
        assert!(files[0].ends_with("real.toml"));
    }

    #[test]
    fn walk_files_skips_node_modules() {
        let tmp = TempDir::new().unwrap();
        let nm = tmp.path().join("node_modules");
        fs::create_dir_all(&nm).unwrap();
        fs::write(nm.join("pkg.json"), "skip me").unwrap();
        fs::write(tmp.path().join("project.json"), "find me").unwrap();
        let files = walk_files(tmp.path(), &["json"]);
        assert_eq!(files.len(), 1);
    }

    #[test]
    fn walk_files_recurses_into_subdirectories() {
        let tmp = TempDir::new().unwrap();
        let sub = tmp.path().join("subdir");
        fs::create_dir_all(&sub).unwrap();
        fs::write(sub.join("nested.md"), "nested").unwrap();
        fs::write(tmp.path().join("top.md"), "top").unwrap();
        let files = walk_files(tmp.path(), &["md"]);
        assert_eq!(files.len(), 2);
    }

    #[test]
    fn walk_all_entries_includes_dirs_and_files() {
        let tmp = TempDir::new().unwrap();
        let sub = tmp.path().join("subdir");
        fs::create_dir_all(&sub).unwrap();
        fs::write(sub.join("file.txt"), "").unwrap();
        let entries = walk_all_entries(tmp.path());
        // Both the subdirectory itself and the nested file are listed.
        assert!(entries.len() >= 2);
    }
970
    // --- pass_refs ---

    #[test]
    fn pass_refs_detects_stale_references() {
        let tmp = TempDir::new().unwrap();
        fs::write(
            tmp.path().join("README.md"),
            "This is a legacy tool for testing.\n",
        )
        .unwrap();
        let findings = pass_refs(tmp.path());
        assert!(!findings.is_empty());
        assert!(findings.iter().all(|f| f.pass_name == "refs"));
        assert!(findings.iter().all(|f| f.fixable));
        assert!(findings.iter().all(|f| f.severity == Severity::Warning));
    }

    #[test]
    fn pass_refs_no_findings_for_clean_file() {
        let tmp = TempDir::new().unwrap();
        fs::write(tmp.path().join("README.md"), "This is a roboticus tool.\n").unwrap();
        let findings = pass_refs(tmp.path());
        assert!(findings.is_empty());
    }

    #[test]
    fn pass_refs_detects_all_stale_terms() {
        let tmp = TempDir::new().unwrap();
        fs::write(
            tmp.path().join("notes.md"),
            "legacy\nopen_legacy\nLegacy\nolegacy\n",
        )
        .unwrap();
        let findings = pass_refs(tmp.path());
        // One finding per line at minimum (each line carries a stale term).
        assert!(findings.len() >= 4);
    }

    #[test]
    fn pass_refs_reports_correct_line_numbers() {
        let tmp = TempDir::new().unwrap();
        fs::write(
            tmp.path().join("notes.md"),
            "clean line\nlegacy here\nclean\n",
        )
        .unwrap();
        let findings = pass_refs(tmp.path());
        assert_eq!(findings.len(), 1);
        assert_eq!(findings[0].line, Some(2));
    }
1022
    // --- pass_artifacts ---

    #[test]
    fn pass_artifacts_finds_log_files() {
        let tmp = TempDir::new().unwrap();
        let skills = tmp.path().join("skills");
        fs::create_dir_all(&skills).unwrap();
        fs::write(skills.join("output.log"), "log content").unwrap();
        let findings = pass_artifacts(tmp.path());
        assert_eq!(findings.len(), 1);
        assert_eq!(findings[0].severity, Severity::Info);
        assert!(findings[0].fixable);
    }

    #[test]
    fn pass_artifacts_finds_bak_files() {
        let tmp = TempDir::new().unwrap();
        let skills = tmp.path().join("skills");
        fs::create_dir_all(&skills).unwrap();
        fs::write(skills.join("skill.bak"), "backup").unwrap();
        let findings = pass_artifacts(tmp.path());
        assert_eq!(findings.len(), 1);
        assert_eq!(findings[0].severity, Severity::Warning);
        assert!(findings[0].fixable);
    }

    #[test]
    fn pass_artifacts_finds_pycache_dirs() {
        let tmp = TempDir::new().unwrap();
        let skills = tmp.path().join("skills");
        let pycache = skills.join("__pycache__");
        fs::create_dir_all(&pycache).unwrap();
        fs::write(pycache.join("cached.pyc"), "bytecode").unwrap();
        let findings = pass_artifacts(tmp.path());
        assert!(!findings.is_empty());
        assert!(findings.iter().any(|f| f.message.contains("__pycache__")));
    }

    #[test]
    fn pass_artifacts_finds_empty_directories() {
        let tmp = TempDir::new().unwrap();
        let skills = tmp.path().join("skills");
        let empty_sub = skills.join("empty_skill");
        fs::create_dir_all(&empty_sub).unwrap();
        let findings = pass_artifacts(tmp.path());
        assert!(!findings.is_empty());
        assert!(findings.iter().any(|f| f.message.contains("empty")));
    }

    #[test]
    fn pass_artifacts_returns_empty_when_no_skills_dir() {
        let tmp = TempDir::new().unwrap();
        let findings = pass_artifacts(tmp.path());
        assert!(findings.is_empty());
    }
1078
    // --- pass_stale ---

    #[test]
    fn pass_stale_detects_ghost_string_entries() {
        let tmp = TempDir::new().unwrap();
        let roboticus_dir = tmp.path().join(".roboticus");
        fs::create_dir_all(&roboticus_dir).unwrap();
        let state = serde_json::json!({
            "existing_file": "real_file.toml",
            "missing_file": "nonexistent/path/file.toml"
        });
        fs::write(
            roboticus_dir.join("update_state.json"),
            serde_json::to_string(&state).unwrap(),
        )
        .unwrap();
        // Only "existing_file" resolves; "missing_file" becomes the ghost.
        fs::write(tmp.path().join("real_file.toml"), "content").unwrap();
        let findings = pass_stale(tmp.path());
        assert_eq!(findings.len(), 1);
        assert!(findings[0].message.contains("missing_file"));
        assert!(findings[0].fixable);
    }

    #[test]
    fn pass_stale_returns_empty_when_no_state_file() {
        let tmp = TempDir::new().unwrap();
        let findings = pass_stale(tmp.path());
        assert!(findings.is_empty());
    }

    #[test]
    fn pass_stale_returns_empty_when_all_entries_exist() {
        let tmp = TempDir::new().unwrap();
        let roboticus_dir = tmp.path().join(".roboticus");
        fs::create_dir_all(&roboticus_dir).unwrap();
        fs::write(tmp.path().join("roboticus.toml"), "content").unwrap();
        let state = serde_json::json!({
            "config": "roboticus.toml"
        });
        fs::write(
            roboticus_dir.join("update_state.json"),
            serde_json::to_string(&state).unwrap(),
        )
        .unwrap();
        let findings = pass_stale(tmp.path());
        assert!(findings.is_empty());
    }

    #[test]
    fn pass_stale_detects_ghost_object_entries() {
        let tmp = TempDir::new().unwrap();
        let roboticus_dir = tmp.path().join(".roboticus");
        fs::create_dir_all(&roboticus_dir).unwrap();
        let state = serde_json::json!({
            "component": { "path": "missing_component.toml" }
        });
        fs::write(
            roboticus_dir.join("update_state.json"),
            serde_json::to_string(&state).unwrap(),
        )
        .unwrap();
        let findings = pass_stale(tmp.path());
        assert_eq!(findings.len(), 1);
        assert!(findings[0].message.contains("component"));
        assert_eq!(findings[0].severity, Severity::Warning);
    }
1146
    // --- pass_scripts ---

    #[test]
    fn pass_scripts_returns_empty_when_no_skills_dir() {
        let tmp = TempDir::new().unwrap();
        let findings = pass_scripts(tmp.path());
        assert!(findings.is_empty());
    }

    #[test]
    fn pass_scripts_detects_missing_shebang() {
        let tmp = TempDir::new().unwrap();
        let skills = tmp.path().join("skills");
        fs::create_dir_all(&skills).unwrap();
        fs::write(skills.join("bad_script.sh"), "echo hello\n").unwrap();
        let findings = pass_scripts(tmp.path());
        assert_eq!(findings.len(), 1);
        assert!(findings[0].message.contains("shebang"));
        assert_eq!(findings[0].severity, Severity::Warning);
        assert!(!findings[0].fixable);
    }

    #[test]
    fn pass_scripts_accepts_valid_shebang() {
        let tmp = TempDir::new().unwrap();
        let skills = tmp.path().join("skills");
        fs::create_dir_all(&skills).unwrap();
        fs::write(skills.join("good_script.sh"), "#!/bin/bash\necho hello\n").unwrap();
        let findings = pass_scripts(tmp.path());
        assert!(findings.is_empty());
    }

    #[test]
    fn pass_scripts_detects_hardcoded_legacy_path() {
        let tmp = TempDir::new().unwrap();
        let skills = tmp.path().join("skills");
        fs::create_dir_all(&skills).unwrap();
        fs::write(
            skills.join("broken.sh"),
            "#!/bin/bash\n/usr/local/bin/legacy run\n",
        )
        .unwrap();
        let findings = pass_scripts(tmp.path());
        assert!(findings.iter().any(|f| f.severity == Severity::Error));
        assert!(
            findings
                .iter()
                .any(|f| f.message.contains("hardcoded path"))
        );
    }

    #[test]
    fn pass_scripts_handles_empty_script_file() {
        let tmp = TempDir::new().unwrap();
        let skills = tmp.path().join("skills");
        fs::create_dir_all(&skills).unwrap();
        fs::write(skills.join("empty.sh"), "").unwrap();
        let findings = pass_scripts(tmp.path());
        assert_eq!(findings.len(), 1);
        assert!(findings[0].message.contains("shebang"));
    }
1209
1210 #[test]
1213 fn pass_drift_returns_empty_when_no_config() {
1214 let tmp = TempDir::new().unwrap();
1215 let findings = pass_drift(tmp.path());
1216 assert!(findings.is_empty());
1217 }
1218
1219 #[test]
1220 fn pass_drift_detects_port_mismatch() {
1221 let tmp = TempDir::new().unwrap();
1222 fs::write(
1223 tmp.path().join("roboticus.toml"),
1224 "[server]\nport = 18789\n",
1225 )
1226 .unwrap();
1227 fs::write(
1228 tmp.path().join("README.md"),
1229 "Connect to localhost:9000 for the API.\n",
1230 )
1231 .unwrap();
1232 let findings = pass_drift(tmp.path());
1233 assert!(!findings.is_empty());
1234 assert!(findings.iter().any(|f| f.message.contains("port")));
1235 }
1236
1237 #[test]
1238 fn pass_drift_no_findings_when_port_matches() {
1239 let tmp = TempDir::new().unwrap();
1240 fs::write(
1241 tmp.path().join("roboticus.toml"),
1242 "[server]\nport = 18789\n",
1243 )
1244 .unwrap();
1245 fs::write(
1246 tmp.path().join("README.md"),
1247 "Connect to localhost:18789 for the API.\n",
1248 )
1249 .unwrap();
1250 let findings = pass_drift(tmp.path());
1251 assert!(findings.is_empty());
1252 }
1253
1254 #[test]
1257 fn pass_identity_returns_empty_when_no_files() {
1258 let tmp = TempDir::new().unwrap();
1259 let findings = pass_identity(tmp.path());
1260 assert!(findings.is_empty());
1261 }
1262
1263 #[test]
1264 fn pass_identity_detects_old_brand_in_toml() {
1265 let tmp = TempDir::new().unwrap();
1266 fs::write(
1267 tmp.path().join("config.toml"),
1268 "[meta]\ngenerated_by = \"legacy\"\n",
1269 )
1270 .unwrap();
1271 let findings = pass_identity(tmp.path());
1272 assert!(!findings.is_empty());
1273 assert!(findings.iter().all(|f| f.pass_name == "identity"));
1274 assert!(findings.iter().all(|f| f.fixable));
1275 }
1276
1277 #[test]
1278 fn pass_identity_returns_empty_for_clean_brand() {
1279 let tmp = TempDir::new().unwrap();
1280 fs::write(
1281 tmp.path().join("config.toml"),
1282 "[meta]\ngenerated_by = \"roboticus\"\n",
1283 )
1284 .unwrap();
1285 let findings = pass_identity(tmp.path());
1286 assert!(findings.is_empty());
1287 }
1288
1289 #[test]
1292 fn defrag_summary_serializes_correctly() {
1293 let summary = DefragSummary {
1294 total_findings: 5,
1295 fixable_count: 3,
1296 by_severity: SeverityCounts {
1297 info: 2,
1298 warning: 2,
1299 error: 1,
1300 },
1301 by_pass: vec![PassSummary {
1302 name: "refs".to_string(),
1303 findings: 5,
1304 status: PassStatus::Findings,
1305 }],
1306 };
1307 let json = serde_json::to_string(&summary).unwrap();
1308 let parsed: serde_json::Value = serde_json::from_str(&json).unwrap();
1309 assert_eq!(parsed["total_findings"], 5);
1310 assert_eq!(parsed["fixable_count"], 3);
1311 }
1312
1313 #[test]
1316 fn cmd_defrag_runs_on_empty_workspace() {
1317 let tmp = TempDir::new().unwrap();
1318 let result = cmd_defrag(tmp.path(), false, false, false);
1319 assert!(result.is_ok());
1320 }
1321
1322 #[test]
1323 fn cmd_defrag_json_output_on_empty_workspace() {
1324 let tmp = TempDir::new().unwrap();
1325 let result = cmd_defrag(tmp.path(), false, false, true);
1326 assert!(result.is_ok());
1327 }
1328
1329 #[test]
1330 fn cmd_defrag_no_fix_when_no_findings() {
1331 let tmp = TempDir::new().unwrap();
1332 let result = cmd_defrag(tmp.path(), true, true, false);
1334 assert!(result.is_ok());
1335 }
1336}