//! provenant/assembly/mod.rs — root of the assembly phase module.
// Assembler registry and shared merge strategies.
mod assemblers;
#[cfg(test)]
mod assembly_golden_test;
#[cfg(test)]
mod assembly_test;
// Ecosystem-specific mergers and resource-assignment passes.
mod cargo_resource_assign;
mod cargo_workspace_merge;
mod composer_resource_assign;
mod conda_rootfs_merge;
pub mod file_ref_resolve;
mod hackage_merge;
mod nested_merge;
mod npm_resource_assign;
mod npm_workspace_merge;
mod nuget_cpm_resolve;
mod ruby_resource_assign;
mod sibling_merge;
mod swift_merge;

use std::collections::{HashMap, HashSet};
use std::path::PathBuf;
use std::sync::LazyLock;

use crate::models::{DatasourceId, FileInfo, Package, TopLevelDependency};

pub use assemblers::ASSEMBLERS;
27
/// Output of one directory-level merge: the merged package (if any were
/// produced), the top-level dependencies it contributes, and the indices
/// (into the `files` slice) of the files that belong to that package.
type DirectoryMergeOutput = (Option<Package>, Vec<TopLevelDependency>, Vec<usize>);
29
30/// Pre-computed lookup: DatasourceId → config key (first DatasourceId in config).
31/// Built once on first use, avoiding HashMap allocation on every `assemble()` call.
32static ASSEMBLER_LOOKUP: LazyLock<HashMap<DatasourceId, DatasourceId>> = LazyLock::new(|| {
33    let mut lookup = HashMap::new();
34    for config in ASSEMBLERS {
35        let key = *config
36            .datasource_ids
37            .first()
38            .expect("assembler must have at least one datasource_id");
39        for &dsid in config.datasource_ids {
40            lookup.insert(dsid, key);
41        }
42    }
43    lookup
44});
45
46static ASSEMBLER_CONFIG_LOOKUP: LazyLock<HashMap<DatasourceId, &'static AssemblerConfig>> =
47    LazyLock::new(|| {
48        let mut lookup = HashMap::new();
49        for config in ASSEMBLERS {
50            let key = *config
51                .datasource_ids
52                .first()
53                .expect("assembler must have at least one datasource_id");
54            lookup.insert(key, config);
55        }
56        lookup
57    });
58
/// Result of the assembly phase: top-level packages and dependencies.
///
/// File-to-package associations are not stored here; `assemble()` writes
/// them directly into each `FileInfo.for_packages`.
#[derive(serde::Serialize)]
pub struct AssemblyResult {
    /// Top-level packages produced by merging related per-file package data.
    pub packages: Vec<Package>,
    /// Hoisted top-level dependencies across all packages.
    pub dependencies: Vec<TopLevelDependency>,
}
66
/// How an assembler groups PackageData into Packages.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AssemblyMode {
    /// Merge related files in the same directory (or nested) into one Package.
    /// Modes of this kind also participate in the nested-pattern merge pass.
    SiblingMerge,
    /// Each PackageData becomes its own independent Package (e.g., database files
    /// containing many installed packages like Alpine DB, RPM DB, Debian status).
    OnePerPackageData,
}
76
/// Static configuration for one assembler: which datasources it owns,
/// which sibling files it merges, and how it groups package data.
pub struct AssemblerConfig {
    /// Datasources handled by this assembler. The first entry serves as the
    /// canonical config key in the lookup tables (must be non-empty).
    pub datasource_ids: &'static [DatasourceId],
    /// File-name patterns considered siblings (e.g., manifest + lockfile pairs).
    pub sibling_file_patterns: &'static [&'static str],
    /// Grouping strategy: sibling merge vs. one package per PackageData.
    pub mode: AssemblyMode,
}
82
/// Run the assembly phase over all scanned files.
///
/// Groups files by parent directory, finds related manifests/lockfiles,
/// merges them into top-level `Package` objects, and hoists dependencies.
/// Updates each `FileInfo.for_packages` with the UIDs of packages it belongs to.
pub fn assemble(files: &mut [FileInfo]) -> AssemblyResult {
    let assembler_lookup = &*ASSEMBLER_LOOKUP;
    let assembler_config_lookup = &*ASSEMBLER_CONFIG_LOOKUP;
    let mut packages = Vec::new();
    let mut dependencies = Vec::new();

    // Phase 1: per-directory assembly. Each directory is processed
    // independently against the assemblers that have matching package data.
    let dir_files = group_files_by_directory(files);

    for file_indices in dir_files.values() {
        // Collect the set of assembler config keys (canonical DatasourceIds)
        // present in this directory's package data.
        let mut groups: HashSet<DatasourceId> = HashSet::new();

        for &idx in file_indices {
            for pkg_data in &files[idx].package_data {
                if let Some(dsid) = pkg_data.datasource_id
                    && let Some(&config_key) = assembler_lookup.get(&dsid)
                {
                    groups.insert(config_key);
                }
            }
        }

        for &config_key in &groups {
            let config = assembler_config_lookup
                .get(&config_key)
                .copied()
                .expect("assembler config must exist");

            // Ecosystem-specific directory mergers take precedence over the
            // generic modes below.
            if let Some(special_merger) = assemblers::special_directory_merger_for(config_key) {
                let results = special_merger.run(files, file_indices);
                apply_directory_merge_results(files, &mut packages, &mut dependencies, results);
                continue;
            }

            match config.mode {
                AssemblyMode::SiblingMerge => {
                    let results = sibling_merge::assemble_siblings(config, files, file_indices)
                        .into_iter()
                        .collect();
                    apply_directory_merge_results(files, &mut packages, &mut dependencies, results);
                }
                AssemblyMode::OnePerPackageData => {
                    // Adapt (pkg, deps, idx) triples to the DirectoryMergeOutput shape.
                    let results = assemble_one_per_package_data(config, files, file_indices)
                        .into_iter()
                        .map(|(pkg, deps, affected_idx)| (Some(pkg), deps, vec![affected_idx]))
                        .collect();
                    apply_directory_merge_results(files, &mut packages, &mut dependencies, results);
                }
            }
        }
    }

    // Phase 2: cross-directory nested-pattern merges (SiblingMerge assemblers
    // only). A nested merge supersedes directory-level packages with the same
    // purl: those packages and their dependencies are removed, and the
    // affected files are re-pointed at the merged package's UID.
    for config in ASSEMBLERS {
        if config.mode != AssemblyMode::SiblingMerge {
            continue;
        }
        if let Some((pkg, deps, affected_indices)) =
            nested_merge::assemble_nested_patterns(files, config)
        {
            let package_uid = pkg.package_uid.clone();
            let purl = pkg.purl.clone();
            let removed_package_uids: Vec<String> = packages
                .iter()
                .filter(|p| p.purl == purl)
                .map(|p| p.package_uid.clone())
                .collect();

            packages.retain(|p| p.purl != purl);
            // Drop dependencies owned by either the new package's UID or any
            // of the superseded packages' UIDs (they are re-added via `deps`).
            dependencies.retain(|d| {
                d.for_package_uid.as_ref() != Some(&package_uid)
                    && !removed_package_uids
                        .iter()
                        .any(|old_uid| d.for_package_uid.as_ref() == Some(old_uid))
            });

            for idx in &affected_indices {
                files[*idx].for_packages.clear();
                files[*idx].for_packages.push(package_uid.clone());
            }

            packages.push(pkg);
            dependencies.extend(deps);
        }
    }

    // Phase 3: ecosystem-specific post-assembly passes.
    assemblers::run_post_assembly_passes(files, &mut packages, &mut dependencies);

    // Normalize per-package metadata so output is deterministic.
    for package in &mut packages {
        package.datafile_paths.sort();
        package.datafile_paths.dedup();
        package.datasource_ids.sort_by_key(|left| left.to_string());
        package.datasource_ids.dedup();
    }

    // Sort file→package associations by the uuid-stripped UID prefix so the
    // order does not depend on freshly generated uuids.
    for file in files.iter_mut() {
        file.for_packages
            .sort_by(|left, right| stable_uid_key(left).cmp(stable_uid_key(right)));
        file.for_packages.dedup();
    }

    // Deterministic final ordering, independent of HashMap iteration order
    // and generated uuids.
    packages
        .sort_by(|left, right| stable_package_sort_key(left).cmp(&stable_package_sort_key(right)));
    dependencies.sort_by(|left, right| {
        left.purl
            .as_deref()
            .cmp(&right.purl.as_deref())
            .then_with(|| {
                left.extracted_requirement
                    .as_deref()
                    .cmp(&right.extracted_requirement.as_deref())
            })
            .then_with(|| left.scope.as_deref().cmp(&right.scope.as_deref()))
            .then_with(|| left.datafile_path.cmp(&right.datafile_path))
            .then_with(|| {
                left.datasource_id
                    .to_string()
                    .cmp(&right.datasource_id.to_string())
            })
            .then_with(|| {
                left.for_package_uid
                    .as_deref()
                    .map(stable_uid_key)
                    .cmp(&right.for_package_uid.as_deref().map(stable_uid_key))
            })
    });

    AssemblyResult {
        packages,
        dependencies,
    }
}
218
219fn apply_directory_merge_results(
220    files: &mut [FileInfo],
221    packages: &mut Vec<Package>,
222    dependencies: &mut Vec<TopLevelDependency>,
223    results: Vec<DirectoryMergeOutput>,
224) {
225    for (package, deps, affected_indices) in results {
226        if let Some(package) = package {
227            let package_uid = package.package_uid.clone();
228            for idx in &affected_indices {
229                if !files[*idx].for_packages.contains(&package_uid) {
230                    files[*idx].for_packages.push(package_uid.clone());
231                }
232            }
233            packages.push(package);
234        }
235        dependencies.extend(deps);
236    }
237}
238
239fn stable_package_sort_key(package: &Package) -> (Option<&str>, Option<&str>, Option<&str>, &str) {
240    (
241        package.purl.as_deref(),
242        package.name.as_deref(),
243        package.version.as_deref(),
244        package
245            .datafile_paths
246            .first()
247            .map(String::as_str)
248            .unwrap_or(""),
249    )
250}
251
/// Strip the trailing `uuid=` qualifier from a package UID so comparisons
/// are stable across runs that generate fresh uuids. The `?uuid=` form
/// (first qualifier) is checked before the `&uuid=` form (later qualifier).
fn stable_uid_key(uid: &str) -> &str {
    for marker in ["?uuid=", "&uuid="] {
        if let Some(pos) = uid.find(marker) {
            return &uid[..pos];
        }
    }
    uid
}
258
259fn assemble_one_per_package_data(
260    config: &AssemblerConfig,
261    files: &[FileInfo],
262    file_indices: &[usize],
263) -> Vec<(Package, Vec<TopLevelDependency>, usize)> {
264    let mut results = Vec::new();
265
266    for &idx in file_indices {
267        let file = &files[idx];
268        for pkg_data in &file.package_data {
269            let dsid_matches = pkg_data
270                .datasource_id
271                .is_some_and(|dsid| config.datasource_ids.contains(&dsid));
272
273            if !dsid_matches || pkg_data.purl.is_none() {
274                continue;
275            }
276
277            let datafile_path = file.path.clone();
278            let datasource_id = pkg_data.datasource_id.expect("datasource_id must be Some");
279            let pkg = Package::from_package_data(pkg_data, datafile_path.clone());
280            let for_package_uid = Some(pkg.package_uid.clone());
281
282            let deps: Vec<TopLevelDependency> = pkg_data
283                .dependencies
284                .iter()
285                .filter(|dep| dep.purl.is_some())
286                .map(|dep| {
287                    TopLevelDependency::from_dependency(
288                        dep,
289                        datafile_path.clone(),
290                        datasource_id,
291                        for_package_uid.clone(),
292                    )
293                })
294                .collect();
295
296            results.push((pkg, deps, idx));
297        }
298    }
299
300    results
301}
302
303/// Group file indices by their parent directory path.
304fn group_files_by_directory(files: &[FileInfo]) -> HashMap<PathBuf, Vec<usize>> {
305    let mut groups: HashMap<PathBuf, Vec<usize>> = HashMap::new();
306    for (idx, file) in files.iter().enumerate() {
307        if let Some(parent) = std::path::Path::new(&file.path).parent() {
308            groups.entry(parent.to_path_buf()).or_default().push(idx);
309        }
310    }
311    groups
312}