//! `lingora_core/audit/engine.rs` — the audit engine: source-file discovery
//! and the driver for the Lingora analysis pipeline.
1use std::{ffi::OsStr, path::PathBuf};
2
3use walkdir::WalkDir;
4
5use crate::{
6    audit::{AuditResult, Pipeline, Workspace},
7    config::LingoraToml,
8    error::LingoraError,
9    fluent::FluentFile,
10    rust::RustFile,
11};
12
/// The main engine that drives the Lingora audit process.
///
/// `AuditEngine` is responsible for:
/// 1. Discovering Fluent (`.ftl`) and Rust (`.rs`) files from configured paths
/// 2. Building a `Workspace` model
/// 3. Running the analysis `Pipeline` (parsing → document collection → classification → auditing)
/// 4. Returning a complete `AuditResult` containing issues and classified documents
#[derive(Debug)]
pub struct AuditEngine {
    /// The discovered Fluent/Rust files plus locale configuration the audit
    /// runs over; built once in `TryFrom<&LingoraToml>`.
    workspace: Workspace,
}
24
25impl AuditEngine {
26    /// Executes the full audit pipeline and returns the result.
27    ///
28    /// Steps performed (via `Pipeline`):
29    /// - Parse all Fluent and Rust files
30    /// - Aggregate entries into `FluentDocument`s per locale
31    /// - Classify documents as Canonical / Primary / Variant / Orphan
32    /// - Compare canonical vs targets (missing keys, redundants, signatures, etc.)
33    /// - Validate Rust macro usage against canonical identifiers
34    ///
35    /// Returns `Ok(AuditResult)` on success, even if issues are found (use `AuditResult::is_ok()` to check cleanliness).
36    pub fn run(&self) -> Result<AuditResult, LingoraError> {
37        let workspace = &self.workspace;
38
39        let fluent_files = workspace.fluent_files();
40        let rust_files = workspace.rust_files();
41
42        let canonical_locale = workspace.canonical_locale();
43        let primary_locales = Vec::from_iter(workspace.primary_locales().cloned());
44
45        let audit_result = Pipeline::default()
46            .parse_files(fluent_files, rust_files)?
47            .collect_documents_by_locale()
48            .classify_documents(canonical_locale, &primary_locales)
49            .audit()
50            .get_result(workspace);
51
52        Ok(audit_result)
53    }
54}
55
56impl TryFrom<&LingoraToml> for AuditEngine {
57    type Error = LingoraError;
58
59    fn try_from(settings: &LingoraToml) -> Result<Self, Self::Error> {
60        let fluent_files = collate_fluent_files(&settings.lingora.fluent_sources)?;
61
62        let canonical = settings.lingora.canonical.clone();
63        let primaries = settings.lingora.primaries.clone();
64
65        let rust_files = collate_rust_files(&settings.dioxus_i18n.rust_sources)?;
66
67        let workspace = Workspace::new(fluent_files, canonical, primaries, rust_files);
68
69        Ok(AuditEngine { workspace })
70    }
71}
72
73fn collate_fluent_files(fluent_paths: &[PathBuf]) -> Result<Vec<FluentFile>, LingoraError> {
74    collate_files(fluent_paths, "ftl")
75        .map(|p| FluentFile::try_from(p.as_path()))
76        .collect()
77}
78
79fn collate_rust_files(rust_paths: &[PathBuf]) -> Result<Vec<RustFile>, LingoraError> {
80    collate_files(rust_paths, "rs")
81        .map(|p| RustFile::try_from(p.as_path()))
82        .collect()
83}
84
85fn collate_files(paths: &[PathBuf], ext: &str) -> impl Iterator<Item = PathBuf> {
86    let ext = Some(OsStr::new(ext));
87    paths
88        .iter()
89        .fold(Vec::new(), |mut acc, path| {
90            if path.is_file() && path.extension() == ext {
91                acc.push(path.clone());
92            } else if path.is_dir() {
93                WalkDir::new(path)
94                    .into_iter()
95                    .filter_map(Result::ok)
96                    .filter_map(|e| {
97                        (e.file_type().is_file() && e.path().extension() == ext)
98                            .then_some(e.path().to_path_buf())
99                    })
100                    .for_each(|p| acc.push(p));
101            };
102
103            acc
104        })
105        .into_iter()
106}
107
#[cfg(test)]
mod test {
    use std::path::Path;

    use super::*;

    #[test]
    fn fluent_files_will_be_collated_from_provided_paths() {
        let sources = [Path::new("./tests/data/i18n").to_path_buf()];
        let collated = collate_fluent_files(&sources).unwrap();

        let expected = [
            "./tests/data/i18n/en/en-GB.ftl",
            "./tests/data/i18n/en/en-AU.ftl",
            "./tests/data/i18n/fr/fr-FR.ftl",
            "./tests/data/i18n/it/it-IT.ftl",
            "./tests/data/i18n/sr-Cyrl/sr-Cyrl-RS.ftl",
            "./tests/data/i18n/sr-Cyrl/sr-Cyrl-BA.ftl",
        ]
        .map(|raw| FluentFile::try_from(Path::new(raw)).unwrap());

        // Discovery order is filesystem-dependent, so compare as sets.
        assert_eq!(collated.len(), expected.len());
        for file in &expected {
            assert!(collated.contains(file));
        }
    }

    #[test]
    fn audit_engine_should_produce_a_report() {
        let toml =
            LingoraToml::try_from(Path::new("./tests/data/toml/Lingora_audit_engine.toml"))
                .unwrap();
        let engine = AuditEngine::try_from(&toml).unwrap();
        assert!(engine.run().is_ok());
    }
}
145}