aosp_missing_blobs/
lib.rs1use goblin::{self, Object};
2use ignore::WalkBuilder;
3use std::collections::HashMap;
4use std::fs;
5use std::path::{Path, PathBuf};
6
/// Scans directories of prebuilt `.so` blobs and reports shared-library
/// dependencies that are not present among the scanned blobs.
///
/// Construct via [`MissingBlobs::builder`], then call `run` with the
/// directories to scan.
pub struct MissingBlobs {
    // Walk input directories recursively when true; otherwise only the
    // top level of each directory is scanned.
    recursive: bool,
}
11
12impl MissingBlobs {
13 pub fn builder() -> MissingBlobsBuilder {
15 MissingBlobsBuilder::default()
16 }
17
18 pub fn run(&self, paths: &[&str]) {
20 let file_paths: Vec<PathBuf> = if self.recursive {
21 find_files_recursively(&paths)
22 } else {
23 find_files(&paths)
24 };
25
26 let blob_paths: Vec<&PathBuf> = file_paths
27 .iter()
28 .filter(|path| match path.extension() {
29 Some(ext) => ext == "so",
31 None => false,
32 })
33 .collect();
34
35 let blobs_to_dependencies = get_dependencies(&blob_paths);
36 let missing_blobs = identify_missing(&blobs_to_dependencies);
37 display_missing_blobs(&missing_blobs);
38 }
39}
40
/// Builder for [`MissingBlobs`]; obtain one via [`MissingBlobs::builder`]
/// or `MissingBlobsBuilder::default()`.
pub struct MissingBlobsBuilder {
    // Value copied into `MissingBlobs::recursive` by `build`.
    recursive: bool,
}
45
46impl Default for MissingBlobsBuilder {
47 fn default() -> Self {
48 Self { recursive: false }
49 }
50}
51
52impl MissingBlobsBuilder {
53 pub fn build(&self) -> MissingBlobs {
55 MissingBlobs {
56 recursive: self.recursive,
57 }
58 }
59
60 pub fn recursive(mut self, enable: bool) -> Self {
62 self.recursive = enable;
63 self
64 }
65}
66
/// Return the paths of all entries directly inside each directory in
/// `paths` (non-recursive).
///
/// Arguments that are not existing directories are silently skipped.
/// Directories or entries that cannot be read produce a warning on
/// stderr and are skipped, rather than aborting the whole scan.
fn find_files(paths: &[&str]) -> Vec<PathBuf> {
    let dirs: Vec<&Path> = paths
        .iter()
        .map(Path::new)
        .filter(|path| path.is_dir())
        .collect();

    let mut file_paths: Vec<PathBuf> = Vec::new();
    for dir in dirs {
        // Warn-and-skip on I/O errors, matching the warning style used
        // elsewhere in this module, instead of panicking mid-scan.
        let read_dir = match fs::read_dir(dir) {
            Ok(rd) => rd,
            Err(_) => {
                eprintln!("Warning: Could not read directory: {}", dir.display());
                continue;
            }
        };

        for entry in read_dir {
            match entry {
                Ok(e) => file_paths.push(e.path()),
                Err(_) => eprintln!("Warning: Could not read directory entry."),
            }
        }
    }

    file_paths
}
84
85fn find_files_recursively(paths: &[&str]) -> Vec<PathBuf> {
86 let mut walker = WalkBuilder::new(paths[0]);
87 for path in &paths[1..] {
88 walker.add(path);
89 }
90
91 walker
93 .ignore(false)
94 .git_ignore(false)
95 .git_exclude(false)
96 .git_global(false);
97
98 walker
99 .build()
100 .map(|dir_entry| {
101 dir_entry
102 .expect("Could not read directory entry.")
103 .into_path()
104 })
105 .collect()
106}
107
108fn get_dependencies(blob_paths: &[&PathBuf]) -> HashMap<String, Vec<String>> {
109 let mut dependencies: HashMap<String, Vec<String>> = HashMap::new();
110
111 blob_paths.iter().for_each(|path| {
112 let filename = path
113 .file_name()
114 .expect("Could not get file name.")
115 .to_str()
116 .expect("Could not convert to string.")
117 .to_owned();
118
119 let buffer;
120 match fs::read(&path) {
121 Ok(b) => buffer = b,
122 Err(_) => {
123 eprintln!("Warning: Could not read file: {}", path.display());
124 return;
125 },
126 }
127
128 let obj = goblin::Object::parse(&buffer);
129
130 if let Ok(Object::Elf(elf)) = obj {
131 let deps: Vec<String> = elf.libraries.iter().map(|dep| dep.to_string()).collect();
132 dependencies.insert(filename, deps);
133 }
134 });
135
136 dependencies
137}
138
/// Given a map of blob name -> dependency names, return the dependencies
/// that are not themselves among the blobs, mapped to the list of blobs
/// that require each one.
fn identify_missing(
    blobs_to_dependencies: &HashMap<String, Vec<String>>,
) -> HashMap<String, Vec<String>> {
    // Invert the map: dependency name -> blobs that require it. The entry
    // API does a single lookup per dependency instead of get_mut + insert.
    let mut dependencies_to_blobs: HashMap<String, Vec<String>> = HashMap::new();
    for (blob, deps) in blobs_to_dependencies {
        for dependency in deps {
            dependencies_to_blobs
                .entry(dependency.clone())
                .or_default()
                .push(blob.clone());
        }
    }

    // A dependency is "missing" when it is not one of the scanned blobs.
    // Consuming the inverted map moves the dependant lists instead of
    // cloning them.
    dependencies_to_blobs
        .into_iter()
        .filter(|(dependency, _)| !blobs_to_dependencies.contains_key(dependency))
        .collect()
}
169
/// Print each missing dependency and the blobs that require it, one per
/// line, on stdout.
fn display_missing_blobs(missing_blobs: &HashMap<String, Vec<String>>) {
    // Sort by dependency name so output is deterministic; HashMap
    // iteration order is randomized between runs. Iterating pairs also
    // avoids a second lookup per key.
    let mut entries: Vec<(&String, &Vec<String>)> = missing_blobs.iter().collect();
    entries.sort_by(|a, b| a.0.cmp(b.0));

    for (blob, dependants) in entries {
        println!("{} required by: {}", blob, dependants.join("; "));
    }
}
174}