1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
use goblin::{self, Object};
use ignore::WalkBuilder;
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
/// Finds shared-object (`.so`) dependencies that are referenced by ELF blobs
/// but not present among the scanned files themselves.
pub struct MissingBlobs {
// Whether to walk the given paths recursively (true) or only scan
// their immediate directory entries (false).
recursive: bool,
}
impl MissingBlobs {
    /// Creates a new checker. `recursive` controls whether `run` walks
    /// directories recursively or only scans their top-level entries.
    pub fn new(recursive: bool) -> Self {
        Self { recursive }
    }

    /// Scans `paths` for `.so` files, resolves their ELF `DT_NEEDED`
    /// dependencies, and prints any dependency not found among the scanned
    /// blobs to stdout.
    pub fn run(&self, paths: &[&str]) {
        // Collect candidate files either recursively or one level deep.
        let file_paths: Vec<PathBuf> = if self.recursive {
            find_files_recursively(paths)
        } else {
            find_files(paths)
        };

        // Keep only shared objects. NOTE(review): this matches an exact
        // "so" extension, so versioned names like "libfoo.so.1" (extension
        // "1") are skipped — presumably intentional, but worth confirming.
        let blob_paths: Vec<&PathBuf> = file_paths
            .iter()
            .filter(|path| path.extension().map_or(false, |ext| ext == "so"))
            .collect();

        let blobs_to_dependencies = get_dependencies(&blob_paths);
        let missing_blobs = identify_missing(&blobs_to_dependencies);
        display_missing_blobs(&missing_blobs);
    }
}
/// Returns the immediate entries (files and subdirectories) of every path in
/// `paths` that is a directory; non-directory arguments are silently ignored.
///
/// Unreadable directories and entries are skipped rather than panicking, so
/// one bad path cannot abort the whole scan.
fn find_files(paths: &[&str]) -> Vec<PathBuf> {
    paths
        .iter()
        .map(Path::new)
        .filter(|path| path.is_dir())
        // Skip directories we cannot open instead of panicking.
        .filter_map(|dir| fs::read_dir(dir).ok())
        // Skip individual entries that fail to read (e.g. permission races).
        .flat_map(|read_dir| read_dir.filter_map(|entry| entry.ok()))
        .map(|entry| entry.path())
        .collect()
}
/// Recursively walks every path in `paths` and returns all entries found.
///
/// All ignore-file handling (`.ignore`, `.gitignore`, global/exclude files)
/// is disabled so hidden blobs are not filtered out. Returns an empty vector
/// for an empty `paths` slice (the original indexed `paths[0]` and panicked).
/// Unreadable entries are skipped instead of aborting the walk.
fn find_files_recursively(paths: &[&str]) -> Vec<PathBuf> {
    // Guard: WalkBuilder::new needs a first root; with no paths there is
    // nothing to walk.
    if paths.is_empty() {
        return Vec::new();
    }

    let mut walker = WalkBuilder::new(paths[0]);
    for path in &paths[1..] {
        walker.add(path);
    }
    // Disable all ignore-rule sources so every file is visited.
    walker
        .ignore(false)
        .git_ignore(false)
        .git_exclude(false)
        .git_global(false);

    walker
        .build()
        // Skip entries that error out (broken symlinks, permissions, …).
        .filter_map(|dir_entry| dir_entry.ok())
        .map(|dir_entry| dir_entry.into_path())
        .collect()
}
/// Parses each path in `blob_paths` as an ELF object and maps its file name
/// to the list of shared libraries it declares as needed.
///
/// Files that cannot be read or are not valid ELF objects are skipped, so a
/// single corrupt blob does not abort the scan. Non-UTF-8 file names are
/// converted lossily rather than panicking.
fn get_dependencies(blob_paths: &[&PathBuf]) -> HashMap<String, Vec<String>> {
    let mut dependencies: HashMap<String, Vec<String>> = HashMap::new();
    for path in blob_paths {
        let filename = match path.file_name() {
            // Lossy conversion: a non-UTF-8 name becomes a best-effort key
            // instead of a panic.
            Some(name) => name.to_string_lossy().into_owned(),
            None => continue,
        };
        // Skip unreadable files instead of panicking on I/O errors.
        let buffer = match fs::read(path) {
            Ok(bytes) => bytes,
            Err(_) => continue,
        };
        // Only ELF objects carry the dependency info we need; anything else
        // (archives, PE, Mach-O, parse failures) is ignored.
        if let Ok(Object::Elf(elf)) = goblin::Object::parse(&buffer) {
            let deps: Vec<String> = elf.libraries.iter().map(|dep| dep.to_string()).collect();
            dependencies.insert(filename, deps);
        }
    }
    dependencies
}
/// Given a map from blob name to the libraries it depends on, returns the
/// dependencies that are not themselves among the scanned blobs, mapped to
/// the blobs that require them.
fn identify_missing(
    blobs_to_dependencies: &HashMap<String, Vec<String>>,
) -> HashMap<String, Vec<String>> {
    // Invert the map: dependency name -> blobs that require it. The entry
    // API does a single lookup per dependency instead of get_mut + insert.
    let mut dependencies_to_blobs: HashMap<String, Vec<String>> = HashMap::new();
    for (blob, deps) in blobs_to_dependencies {
        for dependency in deps {
            dependencies_to_blobs
                .entry(dependency.clone())
                .or_default()
                .push(blob.clone());
        }
    }

    // A dependency is "missing" when it never appeared as a scanned blob.
    dependencies_to_blobs
        .into_iter()
        .filter(|(dependency, _)| !blobs_to_dependencies.contains_key(dependency))
        .collect()
}
/// Prints each missing dependency and the blobs that require it, one per
/// line, as `dep required by: blob1; blob2`.
///
/// Output is sorted by dependency name so runs are deterministic (HashMap
/// iteration order varies between executions).
fn display_missing_blobs(missing_blobs: &HashMap<String, Vec<String>>) {
    let mut entries: Vec<(&String, &Vec<String>)> = missing_blobs.iter().collect();
    entries.sort_by_key(|(blob, _)| *blob);
    for (blob, dependants) in entries {
        println!("{} required by: {}", blob, dependants.join("; "));
    }
}