1use crate::{analyzer::analyze_monorepo, generator};
2
3pub fn handle_generate(
4 path: std::path::PathBuf,
5 _output: Option<std::path::PathBuf>,
6 dockerfile: bool,
7 compose: bool,
8 terraform: bool,
9 all: bool,
10 dry_run: bool,
11 _force: bool,
12) -> crate::Result<()> {
13 println!("π Analyzing project for generation: {}", path.display());
14
15 let monorepo_analysis = analyze_monorepo(&path)?;
16
17 println!("β
Analysis complete. Generating IaC files...");
18
19 if monorepo_analysis.is_monorepo {
20 println!(
21 "π¦ Detected monorepo with {} projects",
22 monorepo_analysis.projects.len()
23 );
24 println!(
25 "π§ Monorepo IaC generation is coming soon! For now, generating for the overall structure."
26 );
27 println!(
28 "π‘ Tip: You can run generate commands on individual project directories for now."
29 );
30 }
31
32 let main_project = &monorepo_analysis.projects[0];
35
36 let generate_all = all || (!dockerfile && !compose && !terraform);
37
38 if generate_all || dockerfile {
39 println!("\nπ³ Generating Dockerfile...");
40 let dockerfile_content = generator::generate_dockerfile(&main_project.analysis)?;
41
42 if dry_run {
43 println!("--- Dockerfile (dry run) ---");
44 println!("{}", dockerfile_content);
45 } else {
46 std::fs::write("Dockerfile", dockerfile_content)?;
47 println!("β
Dockerfile generated successfully!");
48 }
49 }
50
51 if generate_all || compose {
52 println!("\nπ Generating Docker Compose file...");
53 let compose_content = generator::generate_compose(&main_project.analysis)?;
54
55 if dry_run {
56 println!("--- docker-compose.yml (dry run) ---");
57 println!("{}", compose_content);
58 } else {
59 std::fs::write("docker-compose.yml", compose_content)?;
60 println!("β
Docker Compose file generated successfully!");
61 }
62 }
63
64 if generate_all || terraform {
65 println!("\nποΈ Generating Terraform configuration...");
66 let terraform_content = generator::generate_terraform(&main_project.analysis)?;
67
68 if dry_run {
69 println!("--- main.tf (dry run) ---");
70 println!("{}", terraform_content);
71 } else {
72 std::fs::write("main.tf", terraform_content)?;
73 println!("β
Terraform configuration generated successfully!");
74 }
75 }
76
77 if !dry_run {
78 println!("\nπ Generation complete! IaC files have been created in the current directory.");
79
80 if monorepo_analysis.is_monorepo {
81 println!("π§ Note: Generated files are based on the main project structure.");
82 println!(" Advanced monorepo support with per-project generation is coming soon!");
83 }
84 }
85
86 Ok(())
87}
88
/// Validates all IaC files discovered under `path` and returns a
/// pretty-printed JSON report with per-violation details and aggregate counts.
///
/// Supported linters: Dockerfile (hadolint-style), Docker Compose (dclint),
/// Kubernetes manifests (kubelint), and Helm charts (helmlint).
///
/// * `types` — optional linter filter; accepts repeated values and/or
///   comma-separated lists (e.g. `["dockerfile,compose"]`), case-insensitive.
/// * `fix` — attempt auto-fixes (currently only wired up for Compose files).
/// * `quiet` — suppress human-readable progress output on stdout.
///
/// # Errors
/// Fails only when serializing the final JSON report fails.
pub fn handle_validate(
    path: std::path::PathBuf,
    types: Option<Vec<String>>,
    fix: bool,
    quiet: bool,
) -> crate::Result<String> {
    use crate::analyzer::{dclint, hadolint, helmlint, kubelint};
    use std::path::Path;

    // Prefer the canonical path for display and prefix-stripping; fall back
    // to the input path when canonicalization fails (e.g. path not found).
    let project_path = path.canonicalize().unwrap_or_else(|_| path.clone());

    if !quiet {
        println!("π Validating IaC files in: {}", project_path.display());
    }

    // Normalize the requested types: split comma-separated entries, trim,
    // and lowercase so the matching below is case-insensitive.
    let type_filter: Option<Vec<String>> = types.map(|t| {
        t.iter()
            .flat_map(|s| s.split(','))
            .map(|s| s.trim().to_lowercase())
            .collect()
    });
    let check_all = type_filter.is_none();
    // A linter runs when no filter was supplied or its name appears in it.
    let should_check = |name: &str| {
        check_all
            || type_filter
                .as_ref()
                .map_or(false, |f| f.iter().any(|t| t == name))
    };

    // Aggregates across all linters for the final summary / JSON payload.
    let mut all_results: Vec<serde_json::Value> = Vec::new();
    let mut total_errors = 0usize;
    let mut total_warnings = 0usize;
    let mut total_info = 0usize;
    let mut files_checked = 0usize;

    // --- Dockerfiles ------------------------------------------------------
    if should_check("dockerfile") {
        let dockerfiles = find_dockerfiles(&project_path);
        if !dockerfiles.is_empty() {
            if !quiet {
                println!("\nπ³ Checking {} Dockerfile(s)...", dockerfiles.len());
            }
            let config = hadolint::HadolintConfig::default();
            for df in &dockerfiles {
                let result = hadolint::lint_file(df, &config);
                files_checked += 1;
                // Report paths relative to the project root where possible.
                let rel = df.strip_prefix(&project_path).unwrap_or(df);
                let (e, w, i) = count_severities_hadolint(&result);
                total_errors += e;
                total_warnings += w;
                total_info += i;
                if !quiet && result.has_failures() {
                    println!(" {} β {} error(s), {} warning(s)", rel.display(), e, w);
                }
                for f in &result.failures {
                    all_results.push(serde_json::json!({
                        "type": "dockerfile",
                        "file": rel.display().to_string(),
                        "line": f.line,
                        "code": f.code.to_string(),
                        "severity": format!("{:?}", f.severity),
                        "message": f.message,
                    }));
                }
            }
        }
    }

    // --- Docker Compose files ---------------------------------------------
    if should_check("compose") {
        let compose_files = find_compose_files(&project_path);
        if !compose_files.is_empty() {
            if !quiet {
                println!("\nπ Checking {} Compose file(s)...", compose_files.len());
            }
            let config = dclint::DclintConfig::default();
            for cf in &compose_files {
                let result = dclint::lint_file(cf, &config);
                files_checked += 1;
                let rel = cf.strip_prefix(&project_path).unwrap_or(cf);
                let (e, w, i) = count_severities_dclint(&result);
                total_errors += e;
                total_warnings += w;
                total_info += i;
                if !quiet && result.has_failures() {
                    println!(" {} β {} error(s), {} warning(s)", rel.display(), e, w);
                }
                for f in &result.failures {
                    all_results.push(serde_json::json!({
                        "type": "compose",
                        "file": rel.display().to_string(),
                        "line": f.line,
                        "code": f.code.to_string(),
                        "severity": format!("{:?}", f.severity),
                        "message": f.message,
                    }));
                }

                if fix {
                    if let Ok(Some(fixed)) = dclint::fix_file(cf, &config, false) {
                        if !quiet {
                            println!(" β
Auto-fixed {}", rel.display());
                        }
                        // NOTE(review): the returned fixed content is discarded
                        // here — presumably fix_file persists changes itself;
                        // confirm against its implementation.
                        let _ = fixed;
                    }
                }
            }
        }
    }

    // --- Kubernetes manifests ---------------------------------------------
    if should_check("kubernetes") || should_check("k8s") {
        let k8s_dirs = find_k8s_dirs(&project_path);
        if !k8s_dirs.is_empty() {
            if !quiet {
                println!(
                    "\nβΈοΈ Checking {} K8s manifest location(s)...",
                    k8s_dirs.len()
                );
            }
            let config = kubelint::KubelintConfig::default();
            for dir in &k8s_dirs {
                let result = kubelint::lint(dir, &config);
                let rel = dir.strip_prefix(&project_path).unwrap_or(dir);
                // K8s linting contributes analyzed objects, not file counts.
                files_checked += result.summary.objects_analyzed;
                let (e, w, i) = count_severities_kubelint(&result);
                total_errors += e;
                total_warnings += w;
                total_info += i;
                if !quiet && result.has_failures() {
                    println!(" {} β {} error(s), {} warning(s)", rel.display(), e, w);
                }
                for f in &result.failures {
                    all_results.push(serde_json::json!({
                        "type": "kubernetes",
                        "object": format!("{}/{}", f.object_kind, f.object_name),
                        "file": f.file_path.display().to_string(),
                        "code": f.code.to_string(),
                        "severity": format!("{:?}", f.severity),
                        "message": f.message,
                        "remediation": f.remediation,
                    }));
                }
            }
        }
    }

    // --- Helm charts --------------------------------------------------------
    if should_check("helm") {
        let helm_charts = find_helm_charts_validate(&project_path);
        if !helm_charts.is_empty() {
            if !quiet {
                println!("\nβ Checking {} Helm chart(s)...", helm_charts.len());
            }
            let config = helmlint::HelmlintConfig::default();
            for chart in &helm_charts {
                let result = helmlint::lint_chart(chart, &config);
                files_checked += result.files_checked;
                let rel = chart.strip_prefix(&project_path).unwrap_or(chart);
                let (e, w, i) = count_severities_helmlint(&result);
                total_errors += e;
                total_warnings += w;
                total_info += i;
                if !quiet && result.has_failures() {
                    println!(" {} β {} error(s), {} warning(s)", rel.display(), e, w);
                }
                for f in &result.failures {
                    all_results.push(serde_json::json!({
                        "type": "helm",
                        "file": f.file.display().to_string(),
                        "line": f.line,
                        "code": f.code.to_string(),
                        "severity": format!("{:?}", f.severity),
                        "message": f.message,
                    }));
                }
            }
        }
    }

    // Nothing found at all: report a distinct status instead of zero counts.
    if files_checked == 0 {
        if !quiet {
            println!("\nβ οΈ No IaC files found to validate.");
        }
        let output = serde_json::json!({
            "status": "NO_FILES",
            "message": "No IaC files found. Use sync-ctl analyze to check what IaC exists.",
            "files_checked": 0,
            "violations": []
        });
        return Ok(serde_json::to_string_pretty(&output)?);
    }

    // Human-readable summary footer.
    if !quiet {
        println!("\n{}", "β".repeat(60));
        println!(
            "π {} file(s) checked β {} error(s), {} warning(s), {} info",
            files_checked, total_errors, total_warnings, total_info
        );
        if total_errors == 0 && total_warnings == 0 {
            println!("β
All checks passed!");
        }
    }

    let output = serde_json::json!({
        "files_checked": files_checked,
        "total_errors": total_errors,
        "total_warnings": total_warnings,
        "total_info": total_info,
        "violations": all_results,
    });

    Ok(serde_json::to_string_pretty(&output)?)
}
305
306fn find_dockerfiles(root: &std::path::Path) -> Vec<std::path::PathBuf> {
309 let mut files = Vec::new();
310 let names = ["Dockerfile", "dockerfile", "Containerfile"];
311 walk_for_files(root, 0, 4, &mut files, &|name| {
312 names
313 .iter()
314 .any(|n| name == *n || name.starts_with(&format!("{}.", n)))
315 });
316 files
317}
318
319fn find_compose_files(root: &std::path::Path) -> Vec<std::path::PathBuf> {
320 let mut files = Vec::new();
321 walk_for_files(root, 0, 4, &mut files, &|name| {
322 let n = name.to_lowercase();
323 n == "docker-compose.yml"
324 || n == "docker-compose.yaml"
325 || n == "compose.yml"
326 || n == "compose.yaml"
327 });
328 files
329}
330
331fn find_k8s_dirs(root: &std::path::Path) -> Vec<std::path::PathBuf> {
332 let k8s_dir_names = [
334 "k8s",
335 "kubernetes",
336 "manifests",
337 "deploy",
338 "deployments",
339 "kube",
340 ];
341 let mut dirs = Vec::new();
342 if let Ok(entries) = std::fs::read_dir(root) {
343 for entry in entries.flatten() {
344 let p = entry.path();
345 if p.is_dir() {
346 let name = p.file_name().and_then(|n| n.to_str()).unwrap_or("");
347 if k8s_dir_names.contains(&name.to_lowercase().as_str()) {
348 dirs.push(p);
349 }
350 }
351 }
352 }
353 if has_k8s_files(root) && dirs.is_empty() {
355 dirs.push(root.to_path_buf());
356 }
357 dirs
358}
359
360fn has_k8s_files(dir: &std::path::Path) -> bool {
361 if let Ok(entries) = std::fs::read_dir(dir) {
362 for entry in entries.flatten() {
363 let p = entry.path();
364 if let Some(ext) = p.extension().and_then(|e| e.to_str()) {
365 if (ext == "yml" || ext == "yaml") && !is_compose_file(&p) {
366 if let Ok(content) = std::fs::read_to_string(&p) {
367 if content.contains("apiVersion:") && content.contains("kind:") {
368 return true;
369 }
370 }
371 }
372 }
373 }
374 }
375 false
376}
377
/// Returns true when the file name looks like a Docker Compose file
/// (e.g. `docker-compose.yml`, `compose.yaml`, `my-compose.override.yml`).
/// Matching is case-insensitive and based purely on the file name.
fn is_compose_file(p: &std::path::Path) -> bool {
    let name = p
        .file_name()
        .and_then(|n| n.to_str())
        .unwrap_or("")
        .to_lowercase();
    // `contains("docker-compose")` implies `contains("compose")`, so a single
    // substring check covers both spellings the old code tested separately.
    name.contains("compose")
}
386
387fn find_helm_charts_validate(root: &std::path::Path) -> Vec<std::path::PathBuf> {
388 let mut charts = Vec::new();
389 if root.join("Chart.yaml").exists() {
390 charts.push(root.to_path_buf());
391 return charts;
392 }
393 walk_for_dirs(root, 0, 3, &mut charts, &|dir| {
394 dir.join("Chart.yaml").exists()
395 });
396 charts
397}
398
/// Recursively collects into `out` every file below `dir` (to `max_depth`
/// levels) whose name satisfies `matcher`. Hidden directories and common
/// build/dependency directories are skipped; unreadable directories are
/// silently ignored.
fn walk_for_files(
    dir: &std::path::Path,
    depth: usize,
    max_depth: usize,
    out: &mut Vec<std::path::PathBuf>,
    matcher: &dyn Fn(&str) -> bool,
) {
    if depth >= max_depth {
        return;
    }
    // Directories that never contain user-authored IaC files.
    const SKIP_DIRS: [&str; 7] = [
        "node_modules",
        "target",
        ".git",
        "vendor",
        "dist",
        "build",
        "__pycache__",
    ];
    let entries = match std::fs::read_dir(dir) {
        Ok(e) => e,
        Err(_) => return,
    };
    for entry in entries.flatten() {
        let path = entry.path();
        if path.is_file() {
            // Only files with valid UTF-8 names are offered to the matcher.
            if let Some(name) = path.file_name().and_then(|n| n.to_str()) {
                if matcher(name) {
                    out.push(path);
                }
            }
        } else if path.is_dir() {
            let name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
            if !name.starts_with('.') && !SKIP_DIRS.contains(&name) {
                walk_for_files(&path, depth + 1, max_depth, out, matcher);
            }
        }
    }
}
438
/// Recursively collects into `out` every directory below `dir` (to
/// `max_depth` levels) satisfying `matcher`. Hidden and common dependency
/// directories are pruned; matching directories are still descended into.
fn walk_for_dirs(
    dir: &std::path::Path,
    depth: usize,
    max_depth: usize,
    out: &mut Vec<std::path::PathBuf>,
    matcher: &dyn Fn(&std::path::Path) -> bool,
) {
    if depth >= max_depth {
        return;
    }
    // Dependency/VCS directories that are never interesting here.
    const SKIP_DIRS: [&str; 4] = ["node_modules", "target", ".git", "vendor"];
    let entries = match std::fs::read_dir(dir) {
        Ok(e) => e,
        Err(_) => return,
    };
    for entry in entries.flatten() {
        let path = entry.path();
        if !path.is_dir() {
            continue;
        }
        let name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
        if name.starts_with('.') || SKIP_DIRS.contains(&name) {
            continue;
        }
        if matcher(&path) {
            out.push(path.clone());
        }
        walk_for_dirs(&path, depth + 1, max_depth, out, matcher);
    }
}
467
468fn count_severities_hadolint(
471 result: &crate::analyzer::hadolint::LintResult,
472) -> (usize, usize, usize) {
473 use crate::analyzer::hadolint::Severity;
474 let (mut e, mut w, mut i) = (0, 0, 0);
475 for f in &result.failures {
476 match f.severity {
477 Severity::Error => e += 1,
478 Severity::Warning => w += 1,
479 Severity::Info | Severity::Style | Severity::Ignore => i += 1,
480 }
481 }
482 (e, w, i)
483}
484
485fn count_severities_dclint(result: &crate::analyzer::dclint::LintResult) -> (usize, usize, usize) {
486 use crate::analyzer::dclint::Severity;
487 let (mut e, mut w, mut i) = (0, 0, 0);
488 for f in &result.failures {
489 match f.severity {
490 Severity::Error => e += 1,
491 Severity::Warning => w += 1,
492 Severity::Info | Severity::Style => i += 1,
493 }
494 }
495 (e, w, i)
496}
497
498fn count_severities_kubelint(
499 result: &crate::analyzer::kubelint::LintResult,
500) -> (usize, usize, usize) {
501 use crate::analyzer::kubelint::Severity;
502 let (mut e, mut w, mut i) = (0, 0, 0);
503 for f in &result.failures {
504 match f.severity {
505 Severity::Error => e += 1,
506 Severity::Warning => w += 1,
507 Severity::Info => i += 1,
508 }
509 }
510 (e, w, i)
511}
512
513fn count_severities_helmlint(
514 result: &crate::analyzer::helmlint::LintResult,
515) -> (usize, usize, usize) {
516 use crate::analyzer::helmlint::Severity;
517 let (mut e, mut w, mut i) = (0, 0, 0);
518 for f in &result.failures {
519 match f.severity {
520 Severity::Error => e += 1,
521 Severity::Warning => w += 1,
522 Severity::Info | Severity::Style | Severity::Ignore => i += 1,
523 }
524 }
525 (e, w, i)
526}