1#![allow(non_snake_case, unused_variables)]
2
3use std::collections::HashMap;
4use std::fs;
5use std::io::Write;
6use std::path::{Path, PathBuf};
7
8use regex::Regex;
9use serde::{Deserialize, Serialize};
10
11use super::{CRT_DRAW_MS, colors, icons, theme};
12
/// One concrete issue discovered by a defrag pass, with enough context
/// to report it and — when `fixable` is set — to apply an automatic fix.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DefragFinding {
    /// File the finding was detected in (or the file it refers to).
    pub file: PathBuf,
    /// 1-based line number when the finding is tied to a specific line.
    pub line: Option<usize>,
    /// How serious the finding is.
    pub severity: Severity,
    /// Human-readable description of the problem.
    pub message: String,
    /// Description of the automatic remedy, when one exists.
    pub fix_description: Option<String>,
    /// Whether the fixer knows how to resolve this finding automatically.
    pub fixable: bool,
    /// Name of the pass that produced the finding (e.g. "refs", "drift").
    pub pass_name: String,
}
25
/// Severity level of a finding; serialized in lowercase
/// ("info" / "warning" / "error") for JSON output.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum Severity {
    Info,
    Warning,
    Error,
}
33
/// Aggregate report over all passes: totals plus per-severity and
/// per-pass breakdowns, suitable for JSON emission.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DefragSummary {
    /// Total number of findings across every pass.
    pub total_findings: usize,
    /// How many of those findings can be fixed automatically.
    pub fixable_count: usize,
    /// Finding counts grouped by severity.
    pub by_severity: SeverityCounts,
    /// Finding counts grouped by pass, in pass-execution order.
    pub by_pass: Vec<PassSummary>,
}
41
/// Count of findings at each severity level.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct SeverityCounts {
    // Number of Severity::Info findings.
    pub info: usize,
    // Number of Severity::Warning findings.
    pub warning: usize,
    // Number of Severity::Error findings.
    pub error: usize,
}
48
/// Per-pass result line: pass name, how many findings it produced,
/// and its overall status.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PassSummary {
    /// Pass identifier (matches `DefragFinding::pass_name`).
    pub name: String,
    /// Number of findings the pass produced.
    pub findings: usize,
    /// Clean / Findings / Error status of the pass.
    pub status: PassStatus,
}
55
/// Outcome of a single pass: no findings, some findings, or the pass
/// itself failed to run.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum PassStatus {
    Clean,
    Findings,
    Error,
}
62
/// Map of stale brand names to their current replacements.
/// Shared by the scanning passes and the fixer so detection and
/// replacement always agree.
fn migration_map() -> HashMap<&'static str, &'static str> {
    [
        ("legacy", "roboticus"),
        ("open_legacy", "roboticus"),
        ("Legacy", "Roboticus"),
        ("olegacy", "roboticus"),
    ]
    .into_iter()
    .collect()
}
73
74fn walk_files(dir: &Path, extensions: &[&str]) -> Vec<PathBuf> {
75 let mut result = Vec::new();
76 walk_files_inner(dir, extensions, &mut result);
77 result
78}
79
/// Depth-first worker for [`walk_files`]: appends matching file paths
/// to `out`. Unreadable directories are silently skipped.
fn walk_files_inner(dir: &Path, extensions: &[&str], out: &mut Vec<PathBuf>) {
    let Ok(entries) = fs::read_dir(dir) else {
        return;
    };
    for entry in entries.flatten() {
        let path = entry.path();
        if path.is_dir() {
            // Skip dot-directories and well-known build/dependency trees.
            let skip = path
                .file_name()
                .and_then(|n| n.to_str())
                .is_some_and(|n| n.starts_with('.') || n == "target" || n == "node_modules");
            if !skip {
                walk_files_inner(&path, extensions, out);
            }
        } else if path.is_file() {
            // Empty extension list means "collect every file".
            let wanted = extensions.is_empty()
                || path
                    .extension()
                    .and_then(|e| e.to_str())
                    .is_some_and(|e| extensions.contains(&e));
            if wanted {
                out.push(path);
            }
        }
    }
}
105
106fn walk_all_entries(dir: &Path) -> Vec<PathBuf> {
107 let mut result = Vec::new();
108 walk_all_entries_inner(dir, &mut result);
109 result
110}
111
/// Depth-first worker for [`walk_all_entries`]: pushes each entry, then
/// recurses into directories. Unreadable directories are skipped.
fn walk_all_entries_inner(dir: &Path, out: &mut Vec<PathBuf>) {
    let Ok(entries) = fs::read_dir(dir) else {
        return;
    };
    for entry in entries.flatten() {
        let path = entry.path();
        if path.is_dir() {
            // Record the directory itself before descending into it.
            out.push(path.clone());
            walk_all_entries_inner(&path, out);
        } else {
            out.push(path);
        }
    }
}
125
/// True only when `path` is a readable directory that contains no
/// entries; unreadable or missing paths report `false`.
fn is_dir_empty(path: &Path) -> bool {
    fs::read_dir(path).is_ok_and(|mut entries| entries.next().is_none())
}
132
133pub fn pass_refs(workspace: &Path) -> Vec<DefragFinding> {
136 let mut findings = Vec::new();
137 let map = migration_map();
138 let extensions = &["md", "sh", "py", "js", "toml", "json"];
139 let files = walk_files(workspace, extensions);
140
141 let pattern = map.keys().copied().collect::<Vec<_>>().join("|");
143 let re = match Regex::new(&pattern) {
144 Ok(r) => r,
145 Err(_) => return findings,
146 };
147
148 for file in files {
149 let content = match fs::read_to_string(&file) {
150 Ok(c) => c,
151 Err(_) => continue,
152 };
153 for (line_num, line) in content.lines().enumerate() {
154 for m in re.find_iter(line) {
155 let old = m.as_str();
156 let new = map.get(old).copied().unwrap_or("roboticus");
157 findings.push(DefragFinding {
158 file: file.clone(),
159 line: Some(line_num + 1),
160 severity: Severity::Warning,
161 message: format!("stale reference '{old}' should be '{new}'"),
162 fix_description: Some(format!("replace '{old}' with '{new}'")),
163 fixable: true,
164 pass_name: "refs".to_string(),
165 });
166 }
167 }
168 }
169 findings
170}
171
172pub fn pass_drift(workspace: &Path) -> Vec<DefragFinding> {
175 let mut findings = Vec::new();
176 let config_path = workspace.join("roboticus.toml");
177 let config_content = match fs::read_to_string(&config_path) {
178 Ok(c) => c,
179 Err(_) => return findings, };
181
182 let config: toml::Value = match config_content.parse() {
183 Ok(v) => v,
184 Err(_) => return findings,
185 };
186
187 let port = config
189 .get("server")
190 .and_then(|s| s.get("port"))
191 .and_then(|p| p.as_integer())
192 .map(|p| p.to_string());
193 let bind = config
194 .get("server")
195 .and_then(|s| s.get("bind"))
196 .and_then(|b| b.as_str())
197 .map(|s| s.to_string());
198 let agent_name = config
199 .get("agent")
200 .and_then(|a| a.get("name"))
201 .and_then(|n| n.as_str())
202 .map(|s| s.to_string());
203
204 let port_re = Regex::new(r"(?:port\s*[=:]\s*|localhost:)(\d{4,5})").ok();
205 let bind_re = Regex::new(r"(?:bind\s*[=:]\s*)([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)").ok();
206 let name_re = Regex::new(r#"(?:agent[_\s-]?name\s*[=:"]\s*)(\w+)"#).ok();
207
208 let md_files = walk_files(workspace, &["md"]);
209 for file in md_files {
210 let content = match fs::read_to_string(&file) {
211 Ok(c) => c,
212 Err(_) => continue,
213 };
214 if let Some(ref cfg_port) = port
216 && let Some(ref port_re) = port_re
217 {
218 for (line_num, line) in content.lines().enumerate() {
219 for cap in port_re.captures_iter(line) {
220 if let Some(found_port) = cap.get(1) {
221 let found = found_port.as_str();
222 if found != cfg_port.as_str() {
223 findings.push(DefragFinding {
224 file: file.clone(),
225 line: Some(line_num + 1),
226 severity: Severity::Info,
227 message: format!(
228 "references port {found} but config uses {cfg_port}"
229 ),
230 fix_description: None,
231 fixable: false,
232 pass_name: "drift".to_string(),
233 });
234 }
235 }
236 }
237 }
238 }
239 if let Some(ref cfg_bind) = bind
241 && let Some(ref bind_re) = bind_re
242 {
243 for (line_num, line) in content.lines().enumerate() {
244 for cap in bind_re.captures_iter(line) {
245 if let Some(found_bind) = cap.get(1) {
246 let found = found_bind.as_str();
247 if found != cfg_bind.as_str() {
248 findings.push(DefragFinding {
249 file: file.clone(),
250 line: Some(line_num + 1),
251 severity: Severity::Info,
252 message: format!(
253 "references bind address {found} but config uses {cfg_bind}"
254 ),
255 fix_description: None,
256 fixable: false,
257 pass_name: "drift".to_string(),
258 });
259 }
260 }
261 }
262 }
263 }
264 if let Some(ref cfg_name) = agent_name
266 && let Some(ref name_re) = name_re
267 {
268 for (line_num, line) in content.lines().enumerate() {
269 for cap in name_re.captures_iter(line) {
270 if let Some(found_name) = cap.get(1) {
271 let found = found_name.as_str();
272 if found != cfg_name.as_str() {
273 findings.push(DefragFinding {
274 file: file.clone(),
275 line: Some(line_num + 1),
276 severity: Severity::Info,
277 message: format!(
278 "references agent name '{found}' but config uses '{cfg_name}'"
279 ),
280 fix_description: None,
281 fixable: false,
282 pass_name: "drift".to_string(),
283 });
284 }
285 }
286 }
287 }
288 }
289 }
290 findings
291}
292
293pub fn pass_artifacts(workspace: &Path) -> Vec<DefragFinding> {
296 let mut findings = Vec::new();
297 let skills_dir = workspace.join("skills");
298 if !skills_dir.is_dir() {
299 return findings;
300 }
301
302 let entries = walk_all_entries(&skills_dir);
303 for path in entries {
304 let name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
305 let ext = path.extension().and_then(|e| e.to_str()).unwrap_or("");
306
307 if ext == "log" && path.is_file() {
308 findings.push(DefragFinding {
309 file: path,
310 line: None,
311 severity: Severity::Info,
312 message: "stale log file in skills directory".to_string(),
313 fix_description: Some("delete log file".to_string()),
314 fixable: true,
315 pass_name: "artifacts".to_string(),
316 });
317 } else if name == "__pycache__" && path.is_dir() {
318 findings.push(DefragFinding {
319 file: path,
320 line: None,
321 severity: Severity::Info,
322 message: "__pycache__ directory in skills".to_string(),
323 fix_description: Some("delete __pycache__ directory".to_string()),
324 fixable: true,
325 pass_name: "artifacts".to_string(),
326 });
327 } else if ext == "bak" && path.is_file() {
328 findings.push(DefragFinding {
329 file: path,
330 line: None,
331 severity: Severity::Warning,
332 message: "backup file in skills directory".to_string(),
333 fix_description: Some("delete backup file".to_string()),
334 fixable: true,
335 pass_name: "artifacts".to_string(),
336 });
337 } else if path.is_dir() && is_dir_empty(&path) {
338 findings.push(DefragFinding {
339 file: path,
340 line: None,
341 severity: Severity::Info,
342 message: "empty directory in skills".to_string(),
343 fix_description: Some("remove empty directory".to_string()),
344 fixable: true,
345 pass_name: "artifacts".to_string(),
346 });
347 }
348 }
349 findings
350}
351
352pub fn pass_stale(workspace: &Path) -> Vec<DefragFinding> {
355 let mut findings = Vec::new();
356 let state_path = workspace.join(".roboticus").join("update_state.json");
357 let content = match fs::read_to_string(&state_path) {
358 Ok(c) => c,
359 Err(_) => return findings,
360 };
361
362 let state: serde_json::Value = match serde_json::from_str(&content) {
363 Ok(v) => v,
364 Err(_) => return findings,
365 };
366
367 if let Some(obj) = state.as_object() {
369 for (key, value) in obj {
370 if let Some(path_str) = value.as_str() {
371 let target = workspace.join(path_str);
372 if !target.exists() {
373 findings.push(DefragFinding {
374 file: state_path.clone(),
375 line: None,
376 severity: Severity::Warning,
377 message: format!("ghost entry '{key}' references missing file: {path_str}"),
378 fix_description: Some(format!(
379 "remove entry '{key}' from update_state.json"
380 )),
381 fixable: true,
382 pass_name: "stale".to_string(),
383 });
384 }
385 }
386 if let Some(inner_obj) = value.as_object()
388 && let Some(path_str) = inner_obj.get("path").and_then(|p| p.as_str())
389 {
390 let target = workspace.join(path_str);
391 if !target.exists() {
392 findings.push(DefragFinding {
393 file: state_path.clone(),
394 line: None,
395 severity: Severity::Warning,
396 message: format!("ghost entry '{key}' references missing file: {path_str}"),
397 fix_description: Some(format!(
398 "remove entry '{key}' from update_state.json"
399 )),
400 fixable: true,
401 pass_name: "stale".to_string(),
402 });
403 }
404 }
405 }
406 }
407 findings
408}
409
410pub fn pass_identity(workspace: &Path) -> Vec<DefragFinding> {
413 let mut findings = Vec::new();
414 let map = migration_map();
415 let files = walk_files(workspace, &["toml", "json"]);
416
417 let old_brands: Vec<&str> = map.keys().copied().collect();
418 let brand_pattern = old_brands.join("|");
419 let re = match Regex::new(&format!(
420 r#"(?:generated_by|brand)\s*[=:]\s*["']?({brand_pattern})"#
421 )) {
422 Ok(r) => r,
423 Err(_) => return findings,
424 };
425
426 for file in files {
427 let content = match fs::read_to_string(&file) {
428 Ok(c) => c,
429 Err(_) => continue,
430 };
431 for (line_num, line) in content.lines().enumerate() {
432 for cap in re.captures_iter(line) {
433 if let Some(m) = cap.get(1) {
434 let old = m.as_str();
435 let new = map.get(old).copied().unwrap_or("roboticus");
436 findings.push(DefragFinding {
437 file: file.clone(),
438 line: Some(line_num + 1),
439 severity: Severity::Warning,
440 message: format!("brand identity field references '{old}'"),
441 fix_description: Some(format!("replace '{old}' with '{new}'")),
442 fixable: true,
443 pass_name: "identity".to_string(),
444 });
445 }
446 }
447 }
448 }
449 findings
450}
451
452pub fn pass_scripts(workspace: &Path) -> Vec<DefragFinding> {
455 let mut findings = Vec::new();
456 let skills_dir = workspace.join("skills");
457 if !skills_dir.is_dir() {
458 return findings;
459 }
460
461 let script_files = walk_files(&skills_dir, &["sh", "py", "rb", "pl"]);
462 let shebang_re = Regex::new(r"^#!.+").unwrap();
463 let hardcoded_re = Regex::new(r"/usr/local/bin/legacy").unwrap();
464
465 for file in script_files {
466 let content = match fs::read_to_string(&file) {
467 Ok(c) => c,
468 Err(_) => continue,
469 };
470 let lines: Vec<&str> = content.lines().collect();
471
472 if lines.is_empty() || !shebang_re.is_match(lines[0]) {
474 findings.push(DefragFinding {
475 file: file.clone(),
476 line: Some(1),
477 severity: Severity::Warning,
478 message: "script missing valid shebang line".to_string(),
479 fix_description: None,
480 fixable: false,
481 pass_name: "scripts".to_string(),
482 });
483 }
484
485 for (line_num, line) in lines.iter().enumerate() {
487 if hardcoded_re.is_match(line) {
488 findings.push(DefragFinding {
489 file: file.clone(),
490 line: Some(line_num + 1),
491 severity: Severity::Error,
492 message: "hardcoded path '/usr/local/bin/legacy'".to_string(),
493 fix_description: None,
494 fixable: false,
495 pass_name: "scripts".to_string(),
496 });
497 }
498 }
499 }
500 findings
501}
502
503fn apply_fixes(workspace: &Path, findings: &[DefragFinding]) -> usize {
506 let mut fixed = 0;
507
508 let refs_findings: Vec<&DefragFinding> = findings
510 .iter()
511 .filter(|f| f.fixable && f.pass_name == "refs")
512 .collect();
513 let artifact_findings: Vec<&DefragFinding> = findings
514 .iter()
515 .filter(|f| f.fixable && f.pass_name == "artifacts")
516 .collect();
517 let stale_findings: Vec<&DefragFinding> = findings
518 .iter()
519 .filter(|f| f.fixable && f.pass_name == "stale")
520 .collect();
521 let identity_findings: Vec<&DefragFinding> = findings
522 .iter()
523 .filter(|f| f.fixable && f.pass_name == "identity")
524 .collect();
525
526 if !refs_findings.is_empty() {
528 let map = migration_map();
529 let mut patched_files: HashMap<PathBuf, String> = HashMap::new();
530 for f in &refs_findings {
531 patched_files.entry(f.file.clone()).or_insert_with(|| {
532 match fs::read_to_string(&f.file) {
533 Ok(content) => content,
534 Err(e) => {
535 tracing::warn!(error = %e, path = %f.file.display(), "failed to read file for defrag, skipping");
536 String::new()
537 }
538 }
539 });
540 }
541 patched_files.retain(|_, content| !content.is_empty());
543 for (path, content) in &mut patched_files {
544 let mut updated = content.clone();
545 for (old, new) in &map {
546 updated = updated.replace(old, new);
547 }
548 if updated != *content && fs::write(path, &updated).is_ok() {
549 fixed += 1;
550 }
551 }
552 }
553
554 for f in &artifact_findings {
556 let ok = if f.file.is_dir() {
557 fs::remove_dir_all(&f.file).is_ok()
558 } else {
559 fs::remove_file(&f.file).is_ok()
560 };
561 if ok {
562 fixed += 1;
563 }
564 }
565
566 if !stale_findings.is_empty() {
568 let state_path = workspace.join(".roboticus").join("update_state.json");
569 if let Ok(content) = fs::read_to_string(&state_path)
570 && let Ok(mut state) = serde_json::from_str::<serde_json::Value>(&content)
571 && let Some(obj) = state.as_object_mut()
572 {
573 for f in &stale_findings {
574 if let Some(key) = f
576 .message
577 .strip_prefix("ghost entry '")
578 .and_then(|s| s.split('\'').next())
579 {
580 obj.remove(key);
581 }
582 }
583 if let Ok(json) = serde_json::to_string_pretty(&state)
584 && fs::write(&state_path, json).is_ok()
585 {
586 fixed += stale_findings.len();
587 }
588 }
589 }
590
591 if !identity_findings.is_empty() {
593 let map = migration_map();
594 let mut patched_files: HashMap<PathBuf, String> = HashMap::new();
595 for f in &identity_findings {
596 patched_files
597 .entry(f.file.clone())
598 .or_insert_with(|| fs::read_to_string(&f.file).inspect_err(|e| tracing::warn!(error = %e, path = %f.file.display(), "failed to read file for defrag")).unwrap_or_default());
599 }
600 for (path, content) in &mut patched_files {
601 let mut updated = content.clone();
602 for (old, new) in &map {
603 updated = updated.replace(old, new);
604 }
605 if updated != *content && fs::write(path, &updated).is_ok() {
606 fixed += 1;
607 }
608 }
609 }
610
611 fixed
612}
613
614pub fn cmd_defrag(
617 workspace: &Path,
618 fix: bool,
619 yes: bool,
620 json_output: bool,
621) -> roboticus_core::Result<()> {
622 let (DIM, BOLD, ACCENT, GREEN, YELLOW, RED, CYAN, RESET, MONO) = colors();
623 let (OK, ACTION, WARN, DETAIL, ERR) = icons();
624
625 let pass_names = ["refs", "drift", "artifacts", "stale", "identity", "scripts"];
627 let pass_results: Vec<(&str, Vec<DefragFinding>)> = vec![
628 ("refs", pass_refs(workspace)),
629 ("drift", pass_drift(workspace)),
630 ("artifacts", pass_artifacts(workspace)),
631 ("stale", pass_stale(workspace)),
632 ("identity", pass_identity(workspace)),
633 ("scripts", pass_scripts(workspace)),
634 ];
635
636 let mut all_findings: Vec<DefragFinding> = Vec::new();
638 let mut by_pass: Vec<PassSummary> = Vec::new();
639 let mut severity_counts = SeverityCounts::default();
640
641 for (name, findings) in &pass_results {
642 let count = findings.len();
643 let status = if count == 0 {
644 PassStatus::Clean
645 } else {
646 PassStatus::Findings
647 };
648 by_pass.push(PassSummary {
649 name: name.to_string(),
650 findings: count,
651 status,
652 });
653 for f in findings {
654 match f.severity {
655 Severity::Info => severity_counts.info += 1,
656 Severity::Warning => severity_counts.warning += 1,
657 Severity::Error => severity_counts.error += 1,
658 }
659 }
660 all_findings.extend(findings.iter().cloned());
661 }
662
663 let fixable_count = all_findings.iter().filter(|f| f.fixable).count();
664 let total_findings = all_findings.len();
665
666 let summary = DefragSummary {
667 total_findings,
668 fixable_count,
669 by_severity: severity_counts.clone(),
670 by_pass: by_pass.clone(),
671 };
672
673 if json_output {
674 let output = serde_json::json!({
675 "findings": all_findings,
676 "summary": summary,
677 });
678 let json_str = serde_json::to_string_pretty(&output).unwrap_or_else(|_| "{}".to_string());
679 std::io::stdout()
681 .write_all(json_str.as_bytes())
682 .and_then(|_| std::io::stdout().write_all(b"\n"))
683 .and_then(|_| std::io::stdout().flush())?;
684 return Ok(());
685 }
686
687 eprintln!();
689 eprintln!(" {BOLD}Workspace Defrag{RESET}");
690 eprintln!(" {DIM}{}{RESET}", "\u{2500}".repeat(40));
691 eprintln!();
692
693 for ps in &by_pass {
694 let dots = ".".repeat(20usize.saturating_sub(ps.name.len()));
695 let status_str = if ps.findings == 0 {
696 format!("{GREEN}clean{RESET}")
697 } else {
698 let plural = if ps.findings == 1 {
699 "finding"
700 } else {
701 "findings"
702 };
703 format!("{YELLOW}{} {plural}{RESET}", ps.findings)
704 };
705 eprintln!(
706 " {DIM}\u{25a0}{RESET} {BOLD}{}{RESET} {DIM}{dots}{RESET} {status_str}",
707 ps.name
708 );
709 }
710
711 eprintln!();
712 let fixable_str = if fixable_count > 0 {
713 format!(" ({fixable_count} fixable)")
714 } else {
715 String::new()
716 };
717 eprintln!(" {BOLD}Summary:{RESET} {total_findings} findings{fixable_str}");
718 eprintln!(
719 " {CYAN}info:{RESET} {} {DIM}|{RESET} {YELLOW}warning:{RESET} {} {DIM}|{RESET} {RED}error:{RESET} {}",
720 severity_counts.info, severity_counts.warning, severity_counts.error
721 );
722
723 if !all_findings.is_empty() {
725 eprintln!();
726 for f in &all_findings {
727 let sev_color = match f.severity {
728 Severity::Info => CYAN,
729 Severity::Warning => YELLOW,
730 Severity::Error => RED,
731 };
732 let sev_label = match f.severity {
733 Severity::Info => "info",
734 Severity::Warning => "warn",
735 Severity::Error => "error",
736 };
737 let loc = match f.line {
738 Some(l) => format!("{}:{l}", f.file.display()),
739 None => format!("{}", f.file.display()),
740 };
741 eprintln!(
742 " {sev_color}[{sev_label}]{RESET} {DIM}{}{RESET} {loc}",
743 f.pass_name
744 );
745 eprintln!(" {}", f.message);
746 }
747 }
748
749 if fix && fixable_count > 0 {
751 let proceed = if yes {
752 true
753 } else {
754 eprint!("\n Apply {fixable_count} fixable findings? [y/N] ");
755 std::io::stderr().flush().ok();
756 let mut input = String::new();
757 std::io::stdin().read_line(&mut input).unwrap_or(0);
758 matches!(input.trim(), "y" | "Y" | "yes" | "Yes" | "YES")
759 };
760
761 if proceed {
762 let fixed = apply_fixes(workspace, &all_findings);
763 eprintln!();
764 eprintln!(" {OK} {GREEN}Applied fixes ({fixed} items){RESET}");
765 } else {
766 eprintln!();
767 eprintln!(" {DIM}No changes made.{RESET}");
768 }
769 } else if fix && fixable_count == 0 {
770 eprintln!();
771 eprintln!(" {OK} {GREEN}Nothing to fix{RESET}");
772 }
773
774 eprintln!();
775 Ok(())
776}