// adk_doc_audit/orchestrator.rs

1//! Audit orchestrator that coordinates all validation components.
2//!
3//! This module provides the main orchestration logic for running documentation audits.
4//! It coordinates between the parser, analyzer, validator, and reporter components to
5//! provide comprehensive documentation validation.
6
7use crate::{
8    AuditConfig, AuditError, AuditIssue, AuditReport, AuditSummary, CodeAnalyzer,
9    DocumentationParser, ExampleValidator, FileAuditResult, IssueCategory, IssueSeverity,
10    ReportGenerator, Result, SuggestionEngine, VersionValidator, reporter::AuditReportConfig,
11};
12use chrono::Utc;
13use sha2::{Digest, Sha256};
14use std::fs;
15use std::path::{Path, PathBuf};
16use std::time::Instant;
17use tracing::{debug, error, info, instrument, warn};
18use walkdir::WalkDir;
19
/// Main orchestrator that coordinates the audit process.
///
/// Owns every validation component and drives them per documentation file:
/// parse -> validate API references -> compile code examples -> check
/// version consistency -> check internal links and feature flags.
pub struct AuditOrchestrator {
    /// Configuration for the audit (workspace/docs paths, exclusion patterns).
    config: AuditConfig,
    /// Documentation parser for extracting information from markdown files.
    parser: DocumentationParser,
    /// Code analyzer for validating API references.
    analyzer: CodeAnalyzer,
    /// Example validator for testing code compilation.
    validator: ExampleValidator,
    /// Version validator for checking version consistency.
    version_validator: VersionValidator,
    /// Suggestion engine for generating fix recommendations.
    // Underscore-prefixed: constructed in `new` but not yet used by any stage.
    _suggestion_engine: SuggestionEngine,
    /// Report generator for creating audit reports.
    // Underscore-prefixed: constructed in `new` but not yet used by any stage.
    _report_generator: ReportGenerator,
}
37
38impl AuditOrchestrator {
    /// Create a new audit orchestrator with the given configuration.
    ///
    /// Validates that the workspace exists, then eagerly constructs every
    /// validation component so later audit runs cannot fail on setup.
    ///
    /// # Errors
    ///
    /// Returns [`AuditError::WorkspaceNotFound`] when `config.workspace_path`
    /// does not exist, or propagates any component-initialization error.
    #[instrument(skip(config))]
    pub async fn new(config: AuditConfig) -> Result<Self> {
        info!("Initializing audit orchestrator");
        debug!("Configuration: {:?}", config);

        // Basic validation - check paths exist
        if !config.workspace_path.exists() {
            return Err(AuditError::WorkspaceNotFound { path: config.workspace_path.clone() });
        }

        // Initialize all components
        info!("Initializing documentation parser");
        // NOTE(review): crate version "0.1.0" and Rust version "1.85.0" are
        // hard-coded here (and in the validator below) — confirm whether they
        // should instead be derived from `config` or the workspace Cargo.toml.
        let parser = DocumentationParser::new("0.1.0".to_string(), "1.85.0".to_string())?;

        info!("Initializing code analyzer");
        let analyzer = CodeAnalyzer::new(config.workspace_path.clone());

        info!("Initializing example validator");
        let validator =
            ExampleValidator::new("0.1.0".to_string(), config.workspace_path.clone()).await?;

        info!("Initializing version validator");
        let version_validator = VersionValidator::new(&config.workspace_path).await?;

        info!("Initializing suggestion engine");
        let suggestion_engine = SuggestionEngine::new_empty();

        info!("Initializing report generator");
        let report_generator = ReportGenerator::new(crate::reporter::OutputFormat::Console);

        info!("Audit orchestrator initialized successfully");

        Ok(Self {
            config,
            parser,
            analyzer,
            validator,
            version_validator,
            _suggestion_engine: suggestion_engine,
            _report_generator: report_generator,
        })
    }
82
83    /// Run a full audit of all documentation files.
84    #[instrument(skip(self))]
85    pub async fn run_full_audit(&mut self) -> Result<AuditReport> {
86        info!("Starting full documentation audit");
87        let start_time = Instant::now();
88
89        // Discover all documentation files
90        let doc_files = self.discover_documentation_files().await?;
91        info!("Found {} documentation files to audit", doc_files.len());
92
93        // Process each documentation file
94        let mut file_results = Vec::new();
95        let mut all_issues = Vec::new();
96        let all_recommendations = Vec::new();
97
98        for doc_file in &doc_files {
99            if self.should_skip_file(doc_file) {
100                debug!("Skipping excluded file: {}", doc_file.display());
101                continue;
102            }
103
104            info!("Processing file: {}", doc_file.display());
105
106            match self.process_documentation_file(doc_file).await {
107                Ok((file_result, mut issues, _recommendations)) => {
108                    file_results.push(file_result);
109                    all_issues.append(&mut issues);
110                }
111                Err(e) => {
112                    error!("Failed to process file {}: {}", doc_file.display(), e);
113
114                    // Create a failed file result
115                    let file_result = FileAuditResult {
116                        file_path: doc_file.clone(),
117                        file_hash: self
118                            .calculate_file_hash(doc_file)
119                            .unwrap_or_else(|_| "error".to_string()),
120                        last_modified: Utc::now(),
121                        issues_count: 1,
122                        issues: vec![self.create_processing_error_issue(doc_file, &e)],
123                        passed: false,
124                        audit_duration_ms: 0,
125                    };
126
127                    file_results.push(file_result);
128                    all_issues.push(self.create_processing_error_issue(doc_file, &e));
129                }
130            }
131        }
132
133        // Create audit summary
134        let summary = self.create_audit_summary(&file_results, &all_issues);
135
136        let total_time = start_time.elapsed();
137        info!("Full audit completed in {:?}", total_time);
138        info!("Found {} total issues across {} files", all_issues.len(), file_results.len());
139
140        // Generate the final report
141        let report = AuditReport {
142            summary,
143            file_results,
144            issues: all_issues,
145            recommendations: all_recommendations,
146            timestamp: Utc::now(),
147            audit_config: AuditReportConfig::default(),
148        };
149
150        Ok(report)
151    }
152
153    /// Run an incremental audit on only the specified changed files.
154    #[instrument(skip(self, changed_files))]
155    pub async fn run_incremental_audit(
156        &mut self,
157        changed_files: &[PathBuf],
158    ) -> Result<AuditReport> {
159        info!("Starting incremental audit on {} files", changed_files.len());
160        let start_time = Instant::now();
161
162        // Filter to only documentation files that exist
163        let mut doc_files = Vec::new();
164        for file in changed_files {
165            if self.is_documentation_file(file) && file.exists() {
166                doc_files.push(file.clone());
167            } else {
168                debug!("Skipping non-documentation file: {}", file.display());
169            }
170        }
171
172        if doc_files.is_empty() {
173            info!("No documentation files to audit in changed files");
174            return Ok(AuditReport {
175                summary: AuditSummary {
176                    total_files: 0,
177                    files_with_issues: 0,
178                    total_issues: 0,
179                    critical_issues: 0,
180                    warning_issues: 0,
181                    info_issues: 0,
182                    coverage_percentage: 100.0,
183                    average_issues_per_file: 0.0,
184                    most_common_issue: None,
185                    problematic_files: Vec::new(),
186                },
187                file_results: Vec::new(),
188                issues: Vec::new(),
189                recommendations: Vec::new(),
190                timestamp: Utc::now(),
191                audit_config: AuditReportConfig::default(),
192            });
193        }
194
195        info!("Processing {} documentation files", doc_files.len());
196
197        // Process each changed documentation file
198        let mut file_results = Vec::new();
199        let mut all_issues = Vec::new();
200        let all_recommendations = Vec::new();
201
202        for doc_file in &doc_files {
203            if self.should_skip_file(doc_file) {
204                debug!("Skipping excluded file: {}", doc_file.display());
205                continue;
206            }
207
208            info!("Processing changed file: {}", doc_file.display());
209
210            match self.process_documentation_file(doc_file).await {
211                Ok((file_result, mut issues, _recommendations)) => {
212                    file_results.push(file_result);
213                    all_issues.append(&mut issues);
214                }
215                Err(e) => {
216                    error!("Failed to process file {}: {}", doc_file.display(), e);
217
218                    // Create a failed file result
219                    let file_result = FileAuditResult {
220                        file_path: doc_file.clone(),
221                        file_hash: self
222                            .calculate_file_hash(doc_file)
223                            .unwrap_or_else(|_| "error".to_string()),
224                        last_modified: Utc::now(),
225                        issues_count: 1,
226                        issues: vec![self.create_processing_error_issue(doc_file, &e)],
227                        passed: false,
228                        audit_duration_ms: 0,
229                    };
230
231                    file_results.push(file_result);
232                    all_issues.push(self.create_processing_error_issue(doc_file, &e));
233                }
234            }
235        }
236
237        // Create summary
238        let summary = self.create_audit_summary(&file_results, &all_issues);
239
240        let total_time = start_time.elapsed();
241        info!("Incremental audit completed in {:?}", total_time);
242
243        Ok(AuditReport {
244            summary,
245            file_results,
246            issues: all_issues,
247            recommendations: all_recommendations,
248            timestamp: Utc::now(),
249            audit_config: AuditReportConfig::default(),
250        })
251    }
252
253    /// Validate a single documentation file.
254    #[instrument(skip(self))]
255    pub async fn validate_file(&mut self, file_path: &Path) -> Result<FileAuditResult> {
256        info!("Validating single file: {}", file_path.display());
257
258        if !file_path.exists() {
259            return Err(AuditError::FileNotFound { path: file_path.to_path_buf() });
260        }
261
262        if !self.is_documentation_file(file_path) {
263            return Err(AuditError::InvalidFileType {
264                path: file_path.to_path_buf(),
265                expected: "markdown documentation file".to_string(),
266            });
267        }
268
269        // Process the single file
270        match self.process_documentation_file(file_path).await {
271            Ok((file_result, _issues, _recommendations)) => Ok(file_result),
272            Err(e) => {
273                error!("Failed to validate file {}: {}", file_path.display(), e);
274
275                // Return a failed file result
276                Ok(FileAuditResult {
277                    file_path: file_path.to_path_buf(),
278                    file_hash: self
279                        .calculate_file_hash(file_path)
280                        .unwrap_or_else(|_| "error".to_string()),
281                    last_modified: Utc::now(),
282                    issues_count: 1,
283                    issues: vec![self.create_processing_error_issue(file_path, &e)],
284                    passed: false,
285                    audit_duration_ms: 0,
286                })
287            }
288        }
289    }
290
    /// Process a single documentation file through all validation stages.
    ///
    /// Stages: (1) API reference validation, (2) code example compilation,
    /// (3) version consistency, (4) internal links, (5) feature flags.
    /// Validator *errors* (as opposed to failed validations) are logged at
    /// debug level and skipped so one flaky check cannot fail the file.
    ///
    /// Returns the per-file result, all issues found, and any generated
    /// recommendations.
    ///
    /// # Errors
    ///
    /// Fails when the file cannot be hashed, stat-ed, or parsed.
    #[instrument(skip(self))]
    async fn process_documentation_file(
        &mut self,
        file_path: &Path,
    ) -> Result<(FileAuditResult, Vec<AuditIssue>, Vec<crate::Recommendation>)> {
        let file_start_time = Instant::now();
        debug!("Processing documentation file: {}", file_path.display());

        // Calculate file hash and metadata (used for change detection).
        let file_hash = self.calculate_file_hash(file_path)?;
        let last_modified = self.get_file_modified_time(file_path)?;

        // Parse the documentation file
        debug!("Parsing documentation file");
        let parsed_doc = self.parser.parse_file(file_path).await?;

        let mut all_issues = Vec::new();
        let mut all_recommendations = Vec::new();

        // Stage 1: API Reference Validation
        debug!("Validating API references");
        for api_ref in &parsed_doc.api_references {
            match self.analyzer.validate_api_reference(api_ref).await {
                Ok(result) => {
                    if !result.success {
                        all_issues.push(AuditIssue {
                            id: format!("api-ref-{}", api_ref.item_path),
                            file_path: file_path.to_path_buf(),
                            line_number: Some(api_ref.line_number),
                            column_number: None,
                            severity: IssueSeverity::Warning,
                            category: IssueCategory::ApiMismatch,
                            message: format!(
                                "API reference '{}' not found in crate",
                                api_ref.item_path
                            ),
                            suggestion: Some(format!(
                                "Check if '{}' is correctly spelled and exported",
                                api_ref.item_path
                            )),
                            context: Some(api_ref.context.clone()),
                            code_snippet: None,
                            related_issues: Vec::new(),
                        });
                    }
                }
                Err(e) => {
                    // Analyzer failure is not a documentation defect; log only.
                    debug!("Error validating API reference '{}': {}", api_ref.item_path, e);
                }
            }
        }

        // Stage 2: Code Example Validation
        debug!("Validating code examples");
        for example in &parsed_doc.code_examples {
            // Only runnable examples are compiled; inert snippets are skipped.
            if example.is_runnable {
                match self.validator.validate_example(example).await {
                    Ok(result) => {
                        if !result.success {
                            all_issues.push(AuditIssue {
                                id: format!("example-{}", example.line_number),
                                file_path: file_path.to_path_buf(),
                                line_number: Some(example.line_number),
                                column_number: None,
                                severity: IssueSeverity::Critical,
                                category: IssueCategory::CompilationError,
                                message: "Code example does not compile".to_string(),
                                suggestion: result.suggestions.first().cloned(),
                                context: Some(example.content.clone()),
                                code_snippet: Some(example.content.clone()),
                                related_issues: Vec::new(),
                            });
                        }

                        // Check for warnings (potential async pattern issues)
                        for warning in &result.warnings {
                            // NOTE(review): all warnings on one example share
                            // the id "async-<line>", so ids may collide.
                            all_issues.push(AuditIssue {
                                id: format!("async-{}", example.line_number),
                                file_path: file_path.to_path_buf(),
                                line_number: Some(example.line_number),
                                column_number: None,
                                severity: IssueSeverity::Warning,
                                category: IssueCategory::AsyncPatternError,
                                message: warning.clone(),
                                suggestion: Some(
                                    "Consider using proper async patterns".to_string(),
                                ),
                                context: Some(example.content.clone()),
                                code_snippet: Some(example.content.clone()),
                                related_issues: Vec::new(),
                            });
                        }
                    }
                    Err(e) => {
                        debug!("Error validating example at line {}: {}", example.line_number, e);
                    }
                }
            }
        }

        // Stage 3: Version Consistency Validation
        debug!("Validating version references");
        let version_config = crate::version::VersionValidationConfig::default();
        for version_ref in &parsed_doc.version_references {
            match self.version_validator.validate_version_reference(version_ref, &version_config) {
                Ok(result) => {
                    if !result.is_valid {
                        all_issues.push(AuditIssue {
                            id: format!("version-{}", version_ref.line_number),
                            file_path: file_path.to_path_buf(),
                            line_number: Some(version_ref.line_number),
                            column_number: None,
                            severity: IssueSeverity::Warning,
                            category: IssueCategory::VersionInconsistency,
                            message: format!(
                                "Version '{}' does not match workspace version",
                                version_ref.version
                            ),
                            suggestion: Some(
                                "Update version to match workspace Cargo.toml".to_string(),
                            ),
                            context: Some(version_ref.context.clone()),
                            code_snippet: None,
                            related_issues: Vec::new(),
                        });
                    }
                }
                Err(e) => {
                    debug!("Error validating version reference '{}': {}", version_ref.version, e);
                }
            }
        }

        // Stage 4: Internal Link Validation
        debug!("Validating internal links");
        for link in &parsed_doc.internal_links {
            if !self.validate_internal_link(link, file_path) {
                // Broken links are only informational — existence checking is
                // heuristic (anchors/sections are not resolved).
                all_issues.push(AuditIssue {
                    id: format!("link-{}", link.line_number),
                    file_path: file_path.to_path_buf(),
                    line_number: Some(link.line_number),
                    column_number: None,
                    severity: IssueSeverity::Info,
                    category: IssueCategory::BrokenLink,
                    message: format!("Internal link '{}' may be broken", link.target),
                    suggestion: Some("Check if the target file or section exists".to_string()),
                    context: Some(link.text.clone()),
                    code_snippet: None,
                    related_issues: Vec::new(),
                });
            }
        }

        // Stage 5: Feature Flag Validation
        debug!("Validating feature flags");
        for feature in &parsed_doc.feature_mentions {
            // An unnamed crate is passed as "" — semantics of that fallback
            // live in the version validator.
            let result = self.version_validator.validate_feature_flag(
                &feature.feature_name,
                feature.crate_name.as_deref().unwrap_or(""),
            );
            if !result.is_valid {
                all_issues.push(AuditIssue {
                    id: format!("feature-{}", feature.line_number),
                    file_path: file_path.to_path_buf(),
                    line_number: Some(feature.line_number),
                    column_number: None,
                    severity: IssueSeverity::Warning,
                    category: IssueCategory::InvalidFeatureFlag,
                    message: format!(
                        "Feature flag '{}' not found in any crate",
                        feature.feature_name
                    ),
                    suggestion: Some(
                        "Check if feature name is correct or add to Cargo.toml".to_string(),
                    ),
                    context: Some(feature.context.clone()),
                    code_snippet: None,
                    related_issues: Vec::new(),
                });
            }
        }

        // Generate suggestions for found issues (simplified for now)
        if !all_issues.is_empty() {
            debug!("Found {} issues, generating basic recommendations", all_issues.len());
            // For now, just create a simple recommendation covering every
            // issue in this file rather than per-issue suggestions.
            all_recommendations.push(crate::Recommendation {
                id: "general-fix".to_string(),
                recommendation_type: crate::RecommendationType::FixIssue,
                priority: 3, // Medium priority
                title: "Fix Documentation Issues".to_string(),
                description: format!(
                    "Fix {} documentation issues found in {}",
                    all_issues.len(),
                    file_path.file_name().unwrap_or_default().to_string_lossy()
                ),
                affected_files: vec![file_path.to_path_buf()],
                estimated_effort_hours: Some(1.0),
                resolves_issues: all_issues.iter().map(|i| i.id.clone()).collect(),
            });
        }

        let processing_time = file_start_time.elapsed();

        // Create file audit result
        let file_result = FileAuditResult {
            file_path: file_path.to_path_buf(),
            file_hash,
            last_modified,
            issues_count: all_issues.len(),
            issues: all_issues.clone(),
            // A file "passes" unless it has at least one Critical issue;
            // warnings and info findings do not fail it.
            passed: all_issues.iter().all(|issue| issue.severity != IssueSeverity::Critical),
            audit_duration_ms: processing_time.as_millis() as u64,
        };

        debug!(
            "Completed processing file {} in {:?} with {} issues",
            file_path.display(),
            processing_time,
            all_issues.len()
        );

        Ok((file_result, all_issues, all_recommendations))
    }
516
517    /// Validate an internal link within the documentation.
518    fn validate_internal_link(&self, link: &crate::InternalLink, current_file: &Path) -> bool {
519        // Simple validation - check if target file exists
520        if link.target.starts_with("http://") || link.target.starts_with("https://") {
521            return true; // External links are not validated here
522        }
523
524        // Handle relative paths
525        let target_path = if link.target.starts_with('/') {
526            // Absolute path from docs root
527            self.config.docs_path.join(&link.target[1..])
528        } else {
529            // Relative path from current file
530            current_file.parent().unwrap_or(&self.config.docs_path).join(&link.target)
531        };
532
533        target_path.exists()
534    }
535
536    /// Create a processing error issue for a file.
537    fn create_processing_error_issue(&self, file_path: &Path, error: &AuditError) -> AuditIssue {
538        AuditIssue {
539            id: format!(
540                "processing-error-{}",
541                file_path.file_name().unwrap_or_default().to_string_lossy()
542            ),
543            file_path: file_path.to_path_buf(),
544            line_number: None,
545            column_number: None,
546            severity: IssueSeverity::Critical,
547            category: IssueCategory::ProcessingError,
548            message: format!("Failed to process file: {}", error),
549            suggestion: None,
550            context: None,
551            code_snippet: None,
552            related_issues: Vec::new(),
553        }
554    }
555
556    /// Discover all documentation files in the docs directory.
557    async fn discover_documentation_files(&self) -> Result<Vec<PathBuf>> {
558        let mut files = Vec::new();
559
560        if !self.config.docs_path.exists() {
561            warn!("Documentation directory does not exist: {}", self.config.docs_path.display());
562            return Ok(files);
563        }
564
565        for entry in WalkDir::new(&self.config.docs_path)
566            .follow_links(true)
567            .into_iter()
568            .filter_map(|e| e.ok())
569        {
570            let path = entry.path();
571            if self.is_documentation_file(path) {
572                files.push(path.to_path_buf());
573            }
574        }
575
576        debug!("Discovered {} documentation files", files.len());
577        Ok(files)
578    }
579
580    /// Check if a file is a documentation file (markdown).
581    fn is_documentation_file(&self, path: &Path) -> bool {
582        path.extension()
583            .and_then(|ext| ext.to_str())
584            .map(|ext| ext.eq_ignore_ascii_case("md") || ext.eq_ignore_ascii_case("markdown"))
585            .unwrap_or(false)
586    }
587
588    /// Check if a file should be skipped based on exclusion patterns.
589    fn should_skip_file(&self, path: &Path) -> bool {
590        let path_str = path.to_string_lossy();
591
592        for pattern in &self.config.excluded_files {
593            if glob_match::glob_match(pattern, &path_str) {
594                return true;
595            }
596        }
597
598        false
599    }
600
601    /// Create audit summary from file results and issues.
602    fn create_audit_summary(
603        &self,
604        file_results: &[FileAuditResult],
605        issues: &[AuditIssue],
606    ) -> AuditSummary {
607        let total_files = file_results.len();
608        let files_with_issues = file_results.iter().filter(|r| !r.issues.is_empty()).count();
609        let total_issues = issues.len();
610
611        let critical_issues =
612            issues.iter().filter(|i| i.severity == IssueSeverity::Critical).count();
613        let warning_issues = issues.iter().filter(|i| i.severity == IssueSeverity::Warning).count();
614        let info_issues = issues.iter().filter(|i| i.severity == IssueSeverity::Info).count();
615
616        let coverage_percentage = if total_files > 0 {
617            ((total_files - files_with_issues) as f64 / total_files as f64) * 100.0
618        } else {
619            100.0
620        };
621
622        let average_issues_per_file =
623            if total_files > 0 { total_issues as f64 / total_files as f64 } else { 0.0 };
624
625        AuditSummary {
626            total_files,
627            files_with_issues,
628            total_issues,
629            critical_issues,
630            warning_issues,
631            info_issues,
632            coverage_percentage,
633            average_issues_per_file,
634            most_common_issue: None,
635            problematic_files: Vec::new(),
636        }
637    }
638
639    /// Calculate SHA256 hash of a file for change detection.
640    fn calculate_file_hash(&self, file_path: &Path) -> Result<String> {
641        let content = fs::read(file_path).map_err(|e| AuditError::IoError {
642            path: file_path.to_path_buf(),
643            details: e.to_string(),
644        })?;
645
646        let mut hasher = Sha256::new();
647        hasher.update(&content);
648        let hash = hasher.finalize();
649        Ok(format!("{:x}", hash))
650    }
651
652    /// Get the last modified time of a file.
653    fn get_file_modified_time(&self, file_path: &Path) -> Result<chrono::DateTime<Utc>> {
654        let metadata = fs::metadata(file_path).map_err(|e| AuditError::IoError {
655            path: file_path.to_path_buf(),
656            details: e.to_string(),
657        })?;
658
659        let modified = metadata.modified().map_err(|e| AuditError::IoError {
660            path: file_path.to_path_buf(),
661            details: e.to_string(),
662        })?;
663
664        Ok(chrono::DateTime::from(modified))
665    }
666}
667
// Simple glob matching implementation
mod glob_match {
    /// Match `text` against a glob `pattern` where `*` matches any (possibly
    /// empty) sequence of bytes, including path separators; `**` behaves the
    /// same as `*` (runs of consecutive stars collapse). Every other
    /// character matches literally. A star-free pattern requires equality.
    ///
    /// Uses the classic two-pointer algorithm with star backtracking. The
    /// previous split-on-`*` approach had no backtracking for patterns with
    /// two or more wildcards and wrongly rejected inputs such as pattern
    /// "x*ab*b" against text "xabb".
    pub fn glob_match(pattern: &str, text: &str) -> bool {
        // Byte-wise matching is UTF-8 safe here: '*' is ASCII and literal
        // multi-byte characters simply match byte-for-byte.
        let p = pattern.as_bytes();
        let t = text.as_bytes();

        let mut pi = 0; // current index into pattern
        let mut ti = 0; // current index into text
        // Most recent star seen: (pattern index just past it, text index it
        // currently absorbs up to). Used as the backtrack point on mismatch.
        let mut star: Option<(usize, usize)> = None;

        while ti < t.len() {
            if pi < p.len() && p[pi] == b'*' {
                // Collapse consecutive stars and record the backtrack point;
                // tentatively match the star against the empty string.
                while pi < p.len() && p[pi] == b'*' {
                    pi += 1;
                }
                star = Some((pi, ti));
            } else if pi < p.len() && p[pi] == t[ti] {
                // Literal byte matches — advance both cursors.
                pi += 1;
                ti += 1;
            } else if let Some((star_pi, star_ti)) = star {
                // Mismatch: backtrack and let the last star absorb one more
                // text byte, then retry from just after that star.
                ti = star_ti + 1;
                star = Some((star_pi, ti));
                pi = star_pi;
            } else {
                // Mismatch with no star to fall back on.
                return false;
            }
        }

        // Trailing stars may match the empty string.
        while pi < p.len() && p[pi] == b'*' {
            pi += 1;
        }
        pi == p.len()
    }
}
732
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use tempfile::TempDir;

    /// Build an orchestrator rooted in a fresh temporary workspace that has
    /// a `docs/` directory and a minimal `Cargo.toml`. The `TempDir` handle
    /// is returned so the directory outlives the test body.
    async fn create_test_orchestrator() -> (AuditOrchestrator, TempDir) {
        let temp_dir = TempDir::new().unwrap();
        let workspace_path = temp_dir.path().to_path_buf();
        let docs_path = workspace_path.join("docs");

        // Create basic directory structure
        fs::create_dir_all(&docs_path).unwrap();

        // Create a simple Cargo.toml
        let cargo_toml = r#"
[package]
name = "test-crate"
version = "0.1.0"
edition = "2021"
"#;
        fs::write(workspace_path.join("Cargo.toml"), cargo_toml).unwrap();

        let config = AuditConfig::builder()
            .workspace_path(&workspace_path)
            .docs_path(&docs_path)
            .build()
            .unwrap();

        let orchestrator = AuditOrchestrator::new(config).await.unwrap();
        (orchestrator, temp_dir)
    }

    /// Smoke test: construction succeeds against a minimal workspace.
    #[tokio::test]
    async fn test_orchestrator_creation() {
        let (_orchestrator, _temp_dir) = create_test_orchestrator().await;
        // If we get here, orchestrator was created successfully
    }

    /// Discovery picks up `.md` and `.markdown` files and ignores others.
    #[tokio::test]
    async fn test_discover_documentation_files() {
        let (orchestrator, temp_dir) = create_test_orchestrator().await;

        // Create some test files
        let docs_path = temp_dir.path().join("docs");
        fs::write(docs_path.join("test1.md"), "# Test 1").unwrap();
        fs::write(docs_path.join("test2.markdown"), "# Test 2").unwrap();
        fs::write(docs_path.join("not_docs.txt"), "Not docs").unwrap();

        let files = orchestrator.discover_documentation_files().await.unwrap();

        assert_eq!(files.len(), 2);
        assert!(files.iter().any(|f| f.file_name().unwrap() == "test1.md"));
        assert!(files.iter().any(|f| f.file_name().unwrap() == "test2.markdown"));
    }

    /// Extension matching is case-insensitive and rejects non-markdown.
    #[tokio::test]
    async fn test_is_documentation_file() {
        let (orchestrator, _temp_dir) = create_test_orchestrator().await;

        assert!(orchestrator.is_documentation_file(Path::new("test.md")));
        assert!(orchestrator.is_documentation_file(Path::new("test.markdown")));
        assert!(orchestrator.is_documentation_file(Path::new("test.MD")));
        assert!(!orchestrator.is_documentation_file(Path::new("test.txt")));
        assert!(!orchestrator.is_documentation_file(Path::new("test.rs")));
    }

    /// Exclusion patterns support `**` directory globs and `*` wildcards.
    #[tokio::test]
    async fn test_should_skip_file() {
        let temp_dir = TempDir::new().unwrap();
        let workspace_path = temp_dir.path().to_path_buf();
        let docs_path = workspace_path.join("docs");
        fs::create_dir_all(&docs_path).unwrap();

        // Create a simple Cargo.toml
        let cargo_toml = r#"
[package]
name = "test-crate"
version = "0.1.0"
edition = "2021"
"#;
        fs::write(workspace_path.join("Cargo.toml"), cargo_toml).unwrap();

        let config = AuditConfig::builder()
            .workspace_path(&workspace_path)
            .docs_path(&docs_path)
            .exclude_files(vec!["**/internal/**".to_string(), "draft_*.md".to_string()])
            .build()
            .unwrap();

        let orchestrator = AuditOrchestrator::new(config).await.unwrap();

        assert!(orchestrator.should_skip_file(Path::new("docs/internal/secret.md")));
        assert!(orchestrator.should_skip_file(Path::new("draft_feature.md")));
        assert!(!orchestrator.should_skip_file(Path::new("docs/public.md")));
    }

    /// An incremental audit with no inputs yields an empty, passing report.
    #[tokio::test]
    async fn test_empty_incremental_audit() {
        let (mut orchestrator, _temp_dir) = create_test_orchestrator().await;

        let result = orchestrator.run_incremental_audit(&[]).await.unwrap();

        assert_eq!(result.summary.total_files, 0);
        assert_eq!(result.summary.total_issues, 0);
        assert_eq!(result.file_results.len(), 0);
    }
}