use rig::completion::ToolDefinition;
use rig::tool::Tool;
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::path::PathBuf;

use super::error::{ErrorCategory, format_error_for_llm};
use crate::analyzer::hadolint::{HadolintConfig, LintResult, Severity, lint, lint_file};
/// Arguments accepted by the `hadolint` tool.
#[derive(Debug, Deserialize)]
pub struct HadolintArgs {
    /// Path to a Dockerfile, relative to the project root.
    #[serde(default)]
    pub dockerfile: Option<String>,

    /// Inline Dockerfile content to lint instead of reading a file.
    #[serde(default)]
    pub content: Option<String>,

    /// Rule codes to ignore globally (e.g., "DL3008").
    #[serde(default)]
    pub ignore: Vec<String>,

    /// Minimum severity to report: "error", "warning", "info", or "style".
    #[serde(default)]
    pub threshold: Option<String>,
}

#[derive(Debug, thiserror::Error)]
#[error("Hadolint error: {0}")]
pub struct HadolintError(String);

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HadolintTool {
    project_path: PathBuf,
}

impl HadolintTool {
    pub fn new(project_path: PathBuf) -> Self {
        Self { project_path }
    }

    fn parse_threshold(threshold: &str) -> Severity {
        match threshold.to_lowercase().as_str() {
            "error" => Severity::Error,
            "warning" => Severity::Warning,
            "info" => Severity::Info,
            "style" => Severity::Style,
            _ => Severity::Warning,
        }
    }

    fn get_rule_category(code: &str) -> &'static str {
        match code {
            "DL3000" | "DL3002" | "DL3004" | "DL3047" => "security",
            "DL3003" | "DL3006" | "DL3007" | "DL3008" | "DL3009" | "DL3013" | "DL3014"
            | "DL3015" | "DL3016" | "DL3018" | "DL3019" | "DL3020" | "DL3025" | "DL3027"
            | "DL3028" | "DL3033" | "DL3042" | "DL3059" => "best-practice",
            "DL3005" | "DL3010" | "DL3021" | "DL3022" | "DL3023" | "DL3024" | "DL3026"
            | "DL3029" | "DL3030" | "DL3032" | "DL3034" | "DL3035" | "DL3036" | "DL3044"
            | "DL3045" | "DL3048" | "DL3049" | "DL3050" | "DL3051" | "DL3052" | "DL3053"
            | "DL3054" | "DL3055" | "DL3056" | "DL3057" | "DL3058" | "DL3060" | "DL3061" => {
                "maintainability"
            }
            "DL3001" | "DL3011" | "DL3017" | "DL3031" | "DL3037" | "DL3038" | "DL3039"
            | "DL3040" | "DL3041" | "DL3046" | "DL3062" => "performance",
            "DL4000" | "DL4001" | "DL4003" | "DL4005" | "DL4006" => "deprecated",
            _ if code.starts_with("SC") => "shell",
            _ => "other",
        }
    }

    fn get_priority(severity: Severity, category: &str) -> &'static str {
        match (severity, category) {
            (Severity::Error, "security") => "critical",
            (Severity::Error, _) => "high",
            (Severity::Warning, "security") => "high",
            (Severity::Warning, "best-practice") => "medium",
            (Severity::Warning, _) => "medium",
            (Severity::Info, _) => "low",
            (Severity::Style, _) => "low",
            (Severity::Ignore, _) => "info",
        }
    }

    fn get_fix_recommendation(code: &str) -> &'static str {
        match code {
            "DL3000" => "Use absolute WORKDIR paths like '/app' instead of relative paths.",
            "DL3001" => "Remove commands that have no effect in Docker (like 'ssh', 'mount').",
            "DL3002" => {
                "Remove the last USER instruction setting root, or add 'USER <non-root>' at the end."
            }
            "DL3003" => "Use WORKDIR to change directories instead of 'cd' in RUN commands.",
            "DL3004" => {
                "Remove 'sudo' from RUN commands. Docker runs as root by default, or use proper USER switching."
            }
            "DL3005" => {
                "Remove 'apt-get upgrade' or 'dist-upgrade'. Pin packages instead for reproducibility."
            }
            "DL3006" => {
                "Add explicit version tag to base image, e.g., 'FROM node:18-alpine' instead of 'FROM node'."
            }
            "DL3007" => "Use specific version tag instead of ':latest', e.g., 'nginx:1.25-alpine'.",
            "DL3008" => {
                "Pin apt package versions: 'apt-get install package=version' or use '--no-install-recommends'."
            }
            "DL3009" => {
                "Add 'rm -rf /var/lib/apt/lists/*' after apt-get install to reduce image size."
            }
            "DL3010" => "Use ADD only for extracting archives. For other files, use COPY.",
            "DL3011" => "Use valid port numbers (0-65535) in EXPOSE.",
            "DL3013" => "Pin pip package versions: 'pip install package==version'.",
            "DL3014" => "Add '-y' flag to apt-get install for non-interactive mode.",
            "DL3015" => "Add '--no-install-recommends' to apt-get install to minimize image size.",
            "DL3016" => "Pin npm package versions: 'npm install package@version'.",
            "DL3017" => "Remove 'apt-get upgrade'. Pin specific package versions instead.",
            "DL3018" => "Pin apk package versions: 'apk add package=version'.",
            "DL3019" => "Add '--no-cache' to apk add instead of separate cache cleanup.",
            "DL3020" => {
                "Use COPY instead of ADD for files from build context. ADD is for URLs and archives."
            }
            "DL3021" => {
                "Use COPY with --from for multi-stage builds instead of COPY from external images."
            }
            "DL3022" => "Use COPY --from=stage instead of --from=image for multi-stage builds.",
            "DL3023" => "Reference build stage by name instead of number in COPY --from.",
            "DL3024" => "Use lowercase for 'as' in multi-stage builds: 'FROM image AS builder'.",
            "DL3025" => "Use JSON array format for CMD/ENTRYPOINT: CMD [\"executable\", \"arg1\"].",
            "DL3026" => {
                "Use official Docker images when possible, or document why unofficial is needed."
            }
            "DL3027" => "Remove 'apt' and use 'apt-get' for scripting in Dockerfiles.",
            "DL3028" => "Pin gem versions: 'gem install package:version'.",
            "DL3029" => "Specify --platform explicitly for multi-arch builds.",
            "DL3030" => "Pin yum/dnf package versions: 'yum install package-version'.",
            "DL3032" => "Replace 'yum clean all' with 'dnf clean all' for newer distros.",
            "DL3033" => "Add 'yum clean all' after yum install to reduce image size.",
            "DL3034" => "Add '--setopt=install_weak_deps=False' to dnf install.",
            "DL3035" => "Add 'dnf clean all' after dnf install to reduce image size.",
            "DL3036" => "Pin zypper package versions: 'zypper install package=version'.",
            "DL3037" => "Add 'zypper clean' after zypper install.",
            "DL3038" => "Add '--no-recommends' to zypper install.",
            "DL3039" => "Add 'zypper clean' after zypper install.",
            "DL3040" => "Add 'dnf clean all && rm -rf /var/cache/dnf' after dnf install.",
            "DL3041" => "Add 'microdnf clean all' after microdnf install.",
            "DL3042" => {
                "Avoid pip cache in builds. Use '--no-cache-dir' or set PIP_NO_CACHE_DIR=1."
            }
            "DL3044" => "Only use 'HEALTHCHECK' once per Dockerfile, or it won't work correctly.",
            "DL3045" => "Use COPY instead of ADD for local files.",
            "DL3046" => "Use 'useradd' instead of 'adduser' for better compatibility.",
            "DL3047" => {
                "Add 'wget --progress=dot:giga' or 'curl --progress-bar' to show progress during download."
            }
            "DL3048" => "Prefer setting flag with 'SHELL' instruction instead of inline in RUN.",
            "DL3049" => "Add a 'LABEL maintainer=\"name\"' for documentation.",
            "DL3050" => "Add 'LABEL version=\"x.y\"' for versioning.",
            "DL3051" => "Add 'LABEL description=\"...\"' for documentation.",
            "DL3052" => "Prefer relative paths with LABEL for better portability.",
            "DL3053" => "Remove unused LABEL instructions.",
            "DL3054" => "Use recommended labels from OCI spec (org.opencontainers.image.*).",
            "DL3055" => "Add 'LABEL org.opencontainers.image.created' with ISO 8601 date.",
            "DL3056" => "Add 'LABEL org.opencontainers.image.description'.",
            "DL3057" => "Add a HEALTHCHECK instruction for container health monitoring.",
            "DL3058" => "Add 'LABEL org.opencontainers.image.title'.",
            "DL3059" => "Combine consecutive RUN instructions with '&&' to reduce layers.",
            "DL3060" => "Pin package versions in yarn add: 'yarn add package@version'.",
            "DL3061" => "Use specific image digest or tag instead of implicit latest.",
            "DL3062" => "Prefer single RUN with '&&' over multiple RUN for related commands.",
            "DL4000" => "Replace MAINTAINER with 'LABEL maintainer=\"name <email>\"'.",
            "DL4001" => "Use wget or curl instead of ADD for downloading from URLs.",
            "DL4003" => "Use 'ENTRYPOINT' and 'CMD' together properly for container startup.",
            "DL4005" => "Prefer JSON notation for SHELL: SHELL [\"/bin/bash\", \"-c\"].",
            "DL4006" => {
                "Add 'SHELL [\"/bin/bash\", \"-o\", \"pipefail\", \"-c\"]' before RUN with pipes."
            }
            _ if code.starts_with("SC") => "See ShellCheck wiki for shell scripting fix.",
            _ => "Review the rule documentation for specific guidance.",
        }
    }

    fn get_rule_url(code: &str) -> String {
        if code.starts_with("SC") {
            format!("https://www.shellcheck.net/wiki/{}", code)
        } else if code.starts_with("DL") {
            format!("https://github.com/hadolint/hadolint/wiki/{}", code)
        } else {
            String::new()
        }
    }

    /// Render a `LintResult` as pretty-printed JSON enriched with priorities,
    /// categories, fix recommendations, and a short decision context.
    fn format_result(result: &LintResult, filename: &str) -> String {
        let enriched_failures: Vec<serde_json::Value> = result
            .failures
            .iter()
            .map(|f| {
                let code = f.code.as_str();
                let category = Self::get_rule_category(code);
                let priority = Self::get_priority(f.severity, category);

                json!({
                    "code": code,
                    "severity": format!("{:?}", f.severity).to_lowercase(),
                    "priority": priority,
                    "category": category,
                    "message": f.message,
                    "line": f.line,
                    "column": f.column,
                    "fix": Self::get_fix_recommendation(code),
                    "docs": Self::get_rule_url(code),
                })
            })
            .collect();

        // Group issues by priority for the action plan.
        let critical: Vec<_> = enriched_failures
            .iter()
            .filter(|f| f["priority"] == "critical")
            .cloned()
            .collect();
        let high: Vec<_> = enriched_failures
            .iter()
            .filter(|f| f["priority"] == "high")
            .cloned()
            .collect();
        let medium: Vec<_> = enriched_failures
            .iter()
            .filter(|f| f["priority"] == "medium")
            .cloned()
            .collect();
        let low: Vec<_> = enriched_failures
            .iter()
            .filter(|f| f["priority"] == "low")
            .cloned()
            .collect();

        let mut by_category: std::collections::HashMap<&str, Vec<_>> =
            std::collections::HashMap::new();
        for f in &enriched_failures {
            let cat = f["category"].as_str().unwrap_or("other");
            by_category.entry(cat).or_default().push(f.clone());
        }

        let decision_context = if critical.is_empty() && high.is_empty() {
            if medium.is_empty() && low.is_empty() {
                "Dockerfile follows best practices. No issues found."
            } else if medium.is_empty() {
                "Minor improvements possible. Low priority issues only."
            } else {
                "Good baseline. Medium priority improvements recommended."
            }
        } else if !critical.is_empty() {
            "Critical issues found. Address security/error issues first before deployment."
        } else {
            "High priority issues found. Review and fix before production use."
        };

        let mut output = json!({
            "file": filename,
            "success": !result.has_errors(),
            "decision_context": decision_context,
            "summary": {
                "total": result.failures.len(),
                "by_priority": {
                    "critical": critical.len(),
                    "high": high.len(),
                    "medium": medium.len(),
                    "low": low.len(),
                },
                "by_severity": {
                    "errors": result.failures.iter().filter(|f| f.severity == Severity::Error).count(),
                    "warnings": result.failures.iter().filter(|f| f.severity == Severity::Warning).count(),
                    "info": result.failures.iter().filter(|f| f.severity == Severity::Info).count(),
                },
                "by_category": by_category.iter().map(|(k, v)| (k.to_string(), v.len())).collect::<std::collections::HashMap<_, _>>(),
            },
            "action_plan": {
                "critical": critical,
                "high": high,
                "medium": medium,
                "low": low,
            },
        });

        if !enriched_failures.is_empty() {
            // Surface up to five critical/high issues as one-line fix summaries.
            let quick_fixes: Vec<String> = enriched_failures
                .iter()
                .filter(|f| f["priority"] == "critical" || f["priority"] == "high")
                .take(5)
                .map(|f| {
                    format!(
                        "Line {}: {} - {}",
                        f["line"],
                        f["code"].as_str().unwrap_or(""),
                        f["fix"].as_str().unwrap_or("")
                    )
                })
                .collect();

            if !quick_fixes.is_empty() {
                output["quick_fixes"] = json!(quick_fixes);
            }
        }

        if !result.parse_errors.is_empty() {
            output["parse_errors"] = json!(result.parse_errors);
        }

        serde_json::to_string_pretty(&output).unwrap_or_else(|_| "{}".to_string())
    }
}

impl Tool for HadolintTool {
    const NAME: &'static str = "hadolint";

    type Error = HadolintError;
    type Args = HadolintArgs;
    type Output = String;

    async fn definition(&self, _prompt: String) -> ToolDefinition {
        ToolDefinition {
            name: Self::NAME.to_string(),
            description: "Native Dockerfile linting with AI-optimized output. No external binary required.

Analyzes Dockerfiles for:
- Security issues (privileged operations, user permissions, sudo usage)
- Best practices (pinned versions, package cleanup, layer optimization)
- Maintainability (instruction ordering, LABEL usage, multi-stage patterns)
- Performance (build caching, combined RUN commands, cache cleanup)
- Deprecated instructions (MAINTAINER, ADD for URLs)

Returns prioritized issues with fix recommendations. Prefer this over shell hadolint for structured output the agent can act on.

Output format:
- 'decision_context': Quick summary for assessment
- 'action_plan': Issues grouped by priority (critical/high/medium/low)
- 'quick_fixes': Top 5 high-priority fixes with line numbers
- 'summary': Counts by priority, severity, and category

Supports inline pragmas for rule ignoring: '# hadolint ignore=DL3008,DL3013'"
                .to_string(),
            parameters: json!({
                "type": "object",
                "properties": {
                    "dockerfile": {
                        "type": "string",
                        "description": "Path to Dockerfile relative to project root (e.g., 'Dockerfile', 'docker/Dockerfile.prod'). If not specified and no content is provided, looks for 'Dockerfile' in the project root."
                    },
                    "content": {
                        "type": "string",
                        "description": "Inline Dockerfile content to lint directly. Use this to validate generated Dockerfile content before writing to disk, or to lint content without a file."
                    },
                    "ignore": {
                        "type": "array",
                        "items": { "type": "string" },
                        "description": "Rule codes to ignore globally (e.g., ['DL3008', 'DL3013']). For file-specific ignores, use inline pragmas instead."
                    },
                    "threshold": {
                        "type": "string",
                        "enum": ["error", "warning", "info", "style"],
                        "description": "Minimum severity to report. 'error' shows only errors, 'style' shows everything. Default: 'warning'."
                    }
                }
            }),
        }
    }

    async fn call(&self, args: Self::Args) -> Result<Self::Output, Self::Error> {
        let mut config = HadolintConfig::default();

        for rule in &args.ignore {
            config = config.ignore(rule.as_str());
        }

        if let Some(threshold) = &args.threshold {
            config = config.with_threshold(Self::parse_threshold(threshold));
        }

        let (result, filename) = if args.content.as_ref().is_some_and(|c| !c.trim().is_empty()) {
            (
                lint(args.content.as_ref().unwrap(), &config),
                "<inline>".to_string(),
            )
        } else if let Some(dockerfile) = &args.dockerfile {
            let path = self.project_path.join(dockerfile);

            if let Ok(canonical) = path.canonicalize() {
                if let Ok(project_canonical) = self.project_path.canonicalize() {
                    if !canonical.starts_with(&project_canonical) {
                        return Ok(format_error_for_llm(
                            "hadolint",
                            ErrorCategory::PathOutsideBoundary,
                            &format!("Path '{}' is outside project boundary", dockerfile),
                            Some(vec![
                                "Provide a path relative to the project root",
                                "Use list_directory to explore valid paths",
                            ]),
                        ));
                    }
                }
            }

            if !path.exists() {
                return Ok(format_error_for_llm(
                    "hadolint",
                    ErrorCategory::FileNotFound,
                    &format!("Dockerfile not found: {}", dockerfile),
                    Some(vec![
                        "Check if the path is correct",
                        "Use list_directory to find Dockerfiles",
                        "Provide content parameter for inline linting",
                    ]),
                ));
            }

            match std::fs::metadata(&path) {
                Ok(meta) => {
                    if !meta.is_file() {
                        return Ok(format_error_for_llm(
                            "hadolint",
                            ErrorCategory::ValidationFailed,
                            &format!("Path '{}' is not a file", dockerfile),
                            Some(vec![
                                "Provide the path to a Dockerfile, not a directory",
                                "Use list_directory to find Dockerfiles in the directory",
                            ]),
                        ));
                    }
                }
                Err(e) if e.kind() == std::io::ErrorKind::PermissionDenied => {
                    return Ok(format_error_for_llm(
                        "hadolint",
                        ErrorCategory::PermissionDenied,
                        &format!("Permission denied reading: {}", dockerfile),
                        Some(vec![
                            "Check file permissions",
                            "Ensure the file is readable",
                        ]),
                    ));
                }
                Err(_) => {}
            }

            (lint_file(&path, &config), dockerfile.clone())
        } else {
            let path = self.project_path.join("Dockerfile");
            if path.exists() {
                (lint_file(&path, &config), "Dockerfile".to_string())
            } else {
                return Ok(format_error_for_llm(
                    "hadolint",
                    ErrorCategory::FileNotFound,
                    "No Dockerfile specified and no Dockerfile found in project root",
                    Some(vec![
                        "Specify a dockerfile path relative to project root",
                        "Use content parameter for inline linting",
                        "Use list_directory to find Dockerfiles in the project",
                    ]),
                ));
            }
        };

        if !result.parse_errors.is_empty() {
            log::warn!("Dockerfile parse errors: {:?}", result.parse_errors);
        }

        Ok(Self::format_result(&result, &filename))
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::env::temp_dir;
    use std::fs;

    fn collect_all_issues(parsed: &serde_json::Value) -> Vec<serde_json::Value> {
        let mut all = Vec::new();
        for priority in ["critical", "high", "medium", "low"] {
            if let Some(arr) = parsed["action_plan"][priority].as_array() {
                all.extend(arr.clone());
            }
        }
        all
    }

    #[tokio::test]
    async fn test_hadolint_inline_content() {
        let tool = HadolintTool::new(temp_dir());
        let args = HadolintArgs {
            dockerfile: None,
            content: Some("FROM ubuntu:latest\nRUN sudo apt-get update".to_string()),
            ignore: vec![],
            threshold: None,
        };

        let result = tool.call(args).await.unwrap();
        let parsed: serde_json::Value = serde_json::from_str(&result).unwrap();

        assert!(!parsed["success"].as_bool().unwrap_or(true));
        assert!(parsed["summary"]["total"].as_u64().unwrap_or(0) >= 2);

        assert!(parsed["decision_context"].is_string());
        assert!(parsed["action_plan"].is_object());

        let issues = collect_all_issues(&parsed);
        assert!(
            issues
                .iter()
                .all(|i| i["fix"].is_string() && !i["fix"].as_str().unwrap().is_empty())
        );
    }

    #[tokio::test]
    async fn test_hadolint_ignore_rules() {
        let tool = HadolintTool::new(temp_dir());
        let args = HadolintArgs {
            dockerfile: None,
            content: Some("FROM ubuntu:latest".to_string()),
            ignore: vec!["DL3007".to_string()],
            threshold: None,
        };

        let result = tool.call(args).await.unwrap();
        let parsed: serde_json::Value = serde_json::from_str(&result).unwrap();

        let all_issues = collect_all_issues(&parsed);
        assert!(!all_issues.iter().any(|f| f["code"] == "DL3007"));
    }
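
    // The tool description advertises inline pragma support
    // ('# hadolint ignore=DL3008'). This is a sketch of a test under that
    // assumption: it expects the underlying `lint` analyzer to honor a pragma
    // placed on the line before the instruction it suppresses. Adjust or drop
    // it if the analyzer handles pragmas differently.
    #[tokio::test]
    async fn test_hadolint_inline_pragma() {
        let tool = HadolintTool::new(temp_dir());
        let args = HadolintArgs {
            dockerfile: None,
            content: Some("# hadolint ignore=DL3007\nFROM ubuntu:latest".to_string()),
            ignore: vec![],
            threshold: None,
        };

        let result = tool.call(args).await.unwrap();
        let parsed: serde_json::Value = serde_json::from_str(&result).unwrap();

        // DL3007 (':latest' tag) should be suppressed by the pragma.
        let all_issues = collect_all_issues(&parsed);
        assert!(!all_issues.iter().any(|f| f["code"] == "DL3007"));
    }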

    #[tokio::test]
    async fn test_hadolint_threshold() {
        let tool = HadolintTool::new(temp_dir());
        let args = HadolintArgs {
            dockerfile: None,
            content: Some("FROM ubuntu\nMAINTAINER test".to_string()),
            ignore: vec![],
            threshold: Some("error".to_string()),
        };

        let result = tool.call(args).await.unwrap();
        let parsed: serde_json::Value = serde_json::from_str(&result).unwrap();

        let all_issues = collect_all_issues(&parsed);
        assert!(all_issues.iter().all(|f| f["severity"] == "error"));
    }

    #[tokio::test]
    async fn test_hadolint_file() {
        let temp = temp_dir().join("hadolint_test");
        fs::create_dir_all(&temp).unwrap();
        let dockerfile = temp.join("Dockerfile");
        fs::write(
            &dockerfile,
            "FROM node:18-alpine\nWORKDIR /app\nCOPY . .\nCMD [\"node\", \"app.js\"]",
        )
        .unwrap();

        let tool = HadolintTool::new(temp.clone());
        let args = HadolintArgs {
            dockerfile: Some("Dockerfile".to_string()),
            content: None,
            ignore: vec![],
            threshold: None,
        };

        let result = tool.call(args).await.unwrap();
        let parsed: serde_json::Value = serde_json::from_str(&result).unwrap();

        assert!(parsed["success"].as_bool().unwrap_or(false));
        assert_eq!(parsed["file"], "Dockerfile");

        fs::remove_dir_all(&temp).ok();
    }

    #[tokio::test]
    async fn test_hadolint_valid_dockerfile() {
        let tool = HadolintTool::new(temp_dir());
        let dockerfile = r#"
FROM node:18-alpine AS builder
WORKDIR /app
COPY package*.json ./
RUN npm ci --only=production
COPY . .
RUN npm run build

FROM node:18-alpine
WORKDIR /app
COPY --from=builder /app/dist ./dist
USER node
EXPOSE 3000
CMD ["node", "dist/index.js"]
"#;

        let args = HadolintArgs {
            dockerfile: None,
            content: Some(dockerfile.to_string()),
            ignore: vec![],
            threshold: None,
        };

        let result = tool.call(args).await.unwrap();
        let parsed: serde_json::Value = serde_json::from_str(&result).unwrap();

        assert!(parsed["success"].as_bool().unwrap_or(false));
        assert!(parsed["decision_context"].is_string());
        assert_eq!(
            parsed["summary"]["by_priority"]["critical"]
                .as_u64()
                .unwrap_or(99),
            0
        );
        assert_eq!(
            parsed["summary"]["by_priority"]["high"]
                .as_u64()
                .unwrap_or(99),
            0
        );
    }

    #[tokio::test]
    async fn test_hadolint_priority_categorization() {
        let tool = HadolintTool::new(temp_dir());
        let args = HadolintArgs {
            dockerfile: None,
            content: Some("FROM ubuntu\nRUN sudo apt-get update\nMAINTAINER test".to_string()),
            ignore: vec![],
            threshold: None,
        };

        let result = tool.call(args).await.unwrap();
        let parsed: serde_json::Value = serde_json::from_str(&result).unwrap();

        assert!(parsed["summary"]["by_priority"]["critical"].is_number());
        assert!(parsed["summary"]["by_priority"]["high"].is_number());
        assert!(parsed["summary"]["by_priority"]["medium"].is_number());

        assert!(parsed["summary"]["by_category"].is_object());

        let all_issues = collect_all_issues(&parsed);
        let sudo_issue = all_issues.iter().find(|i| i["code"] == "DL3004");
        assert!(sudo_issue.is_some());
        assert_eq!(sudo_issue.unwrap()["category"], "security");
    }

    #[tokio::test]
    async fn test_hadolint_quick_fixes() {
        let tool = HadolintTool::new(temp_dir());
        let args = HadolintArgs {
            dockerfile: None,
            content: Some("FROM ubuntu\nRUN sudo rm -rf /".to_string()),
            ignore: vec![],
            threshold: None,
        };

        let result = tool.call(args).await.unwrap();
        let parsed: serde_json::Value = serde_json::from_str(&result).unwrap();

        if parsed["summary"]["by_priority"]["high"]
            .as_u64()
            .unwrap_or(0)
            > 0
            || parsed["summary"]["by_priority"]["critical"]
                .as_u64()
                .unwrap_or(0)
                > 0
        {
            assert!(parsed["quick_fixes"].is_array());
        }
    }

    #[test]
    fn test_parse_threshold() {
        assert_eq!(HadolintTool::parse_threshold("error"), Severity::Error);
        assert_eq!(HadolintTool::parse_threshold("warning"), Severity::Warning);
        assert_eq!(HadolintTool::parse_threshold("info"), Severity::Info);
        assert_eq!(HadolintTool::parse_threshold("style"), Severity::Style);
        assert_eq!(HadolintTool::parse_threshold("ERROR"), Severity::Error);
        assert_eq!(HadolintTool::parse_threshold("Warning"), Severity::Warning);
        assert_eq!(HadolintTool::parse_threshold("invalid"), Severity::Warning);
        assert_eq!(HadolintTool::parse_threshold(""), Severity::Warning);
    }

    #[test]
    fn test_get_rule_category() {
        assert_eq!(HadolintTool::get_rule_category("DL3000"), "security");
        assert_eq!(HadolintTool::get_rule_category("DL3002"), "security");
        assert_eq!(HadolintTool::get_rule_category("DL3004"), "security");
        assert_eq!(HadolintTool::get_rule_category("DL3047"), "security");

        assert_eq!(HadolintTool::get_rule_category("DL3008"), "best-practice");
        assert_eq!(HadolintTool::get_rule_category("DL3013"), "best-practice");
        assert_eq!(HadolintTool::get_rule_category("DL3015"), "best-practice");

        assert_eq!(HadolintTool::get_rule_category("DL3005"), "maintainability");
        assert_eq!(HadolintTool::get_rule_category("DL3010"), "maintainability");

        assert_eq!(HadolintTool::get_rule_category("DL3001"), "performance");
        assert_eq!(HadolintTool::get_rule_category("DL3011"), "performance");

        assert_eq!(HadolintTool::get_rule_category("DL4000"), "deprecated");
        assert_eq!(HadolintTool::get_rule_category("DL4001"), "deprecated");

        assert_eq!(HadolintTool::get_rule_category("SC1000"), "shell");
        assert_eq!(HadolintTool::get_rule_category("SC2086"), "shell");

        assert_eq!(HadolintTool::get_rule_category("XX9999"), "other");
    }

    #[test]
    fn test_get_priority() {
        assert_eq!(
            HadolintTool::get_priority(Severity::Error, "security"),
            "critical"
        );

        assert_eq!(
            HadolintTool::get_priority(Severity::Error, "best-practice"),
            "high"
        );
        assert_eq!(
            HadolintTool::get_priority(Severity::Error, "maintainability"),
            "high"
        );
        assert_eq!(
            HadolintTool::get_priority(Severity::Warning, "security"),
            "high"
        );

        assert_eq!(
            HadolintTool::get_priority(Severity::Warning, "best-practice"),
            "medium"
        );
        assert_eq!(
            HadolintTool::get_priority(Severity::Warning, "maintainability"),
            "medium"
        );
        assert_eq!(
            HadolintTool::get_priority(Severity::Warning, "performance"),
            "medium"
        );

        assert_eq!(
            HadolintTool::get_priority(Severity::Info, "security"),
            "low"
        );
        assert_eq!(
            HadolintTool::get_priority(Severity::Info, "best-practice"),
            "low"
        );
        assert_eq!(HadolintTool::get_priority(Severity::Style, "any"), "low");

        assert_eq!(
            HadolintTool::get_priority(Severity::Ignore, "security"),
            "info"
        );
    }
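
    // Added coverage for the URL helper above: DL rules link to the hadolint
    // wiki, SC rules to the ShellCheck wiki, and anything else yields an
    // empty string.
    #[test]
    fn test_get_rule_url() {
        assert_eq!(
            HadolintTool::get_rule_url("DL3007"),
            "https://github.com/hadolint/hadolint/wiki/DL3007"
        );
        assert_eq!(
            HadolintTool::get_rule_url("SC2086"),
            "https://www.shellcheck.net/wiki/SC2086"
        );
        assert_eq!(HadolintTool::get_rule_url("XX9999"), "");
    }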

    #[tokio::test]
    async fn test_hadolint_file_not_found_error() {
        let tool = HadolintTool::new(temp_dir());
        let args = HadolintArgs {
            dockerfile: Some("nonexistent/Dockerfile".to_string()),
            content: None,
            ignore: vec![],
            threshold: None,
        };

        let result = tool.call(args).await.unwrap();
        let parsed: serde_json::Value = serde_json::from_str(&result).unwrap();

        assert_eq!(parsed["error"], true);
        assert_eq!(parsed["tool"], "hadolint");
        assert_eq!(parsed["code"], "FILE_NOT_FOUND");
        assert!(parsed["suggestions"].is_array());
    }

    #[tokio::test]
    async fn test_hadolint_no_dockerfile_error() {
        let temp = temp_dir().join("hadolint_no_dockerfile_test");
        fs::create_dir_all(&temp).ok();

        let tool = HadolintTool::new(temp.clone());
        let args = HadolintArgs {
            dockerfile: None,
            content: None,
            ignore: vec![],
            threshold: None,
        };

        let result = tool.call(args).await.unwrap();
        let parsed: serde_json::Value = serde_json::from_str(&result).unwrap();

        assert_eq!(parsed["error"], true);
        assert_eq!(parsed["code"], "FILE_NOT_FOUND");
        assert!(
            parsed["message"]
                .as_str()
                .unwrap()
                .contains("No Dockerfile specified")
        );

        fs::remove_dir_all(&temp).ok();
    }

    #[tokio::test]
    async fn test_hadolint_directory_not_file_error() {
        let temp = temp_dir().join("hadolint_dir_test");
        let subdir = temp.join("docker");
        fs::create_dir_all(&subdir).ok();

        let tool = HadolintTool::new(temp.clone());
        let args = HadolintArgs {
            dockerfile: Some("docker".to_string()),
            content: None,
            ignore: vec![],
            threshold: None,
        };

        let result = tool.call(args).await.unwrap();
        let parsed: serde_json::Value = serde_json::from_str(&result).unwrap();

        assert_eq!(parsed["error"], true);
        assert_eq!(parsed["code"], "VALIDATION_FAILED");
        assert!(parsed["message"].as_str().unwrap().contains("not a file"));

        fs::remove_dir_all(&temp).ok();
    }
}