1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
use crate::message;
use flate2::{read::GzDecoder, write::GzEncoder, Compression};
use regex::Regex;
use rust_apt::cache::{Cache as AptCache, PackageSort};
use serde::{Deserialize, Serialize};
use std::io::prelude::*;
use std::{collections::HashMap, fs, time::SystemTime};
///////////////////////////
// Stuff for MPR caches. //
///////////////////////////
/// A single package record from the MPR's `packages-meta-ext-v2.json`
/// archive. The `serde(rename)` attributes map the MPR's JSON keys onto
/// Rust-style field names.
///
/// `Debug` and `Clone` are derived so callers can log records and own
/// independent copies (public types should be `Debug`-printable).
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
pub struct MprPackage {
    /// Package name.
    #[serde(rename = "Name")]
    pub pkgname: String,
    /// Name of the package base this package is built from.
    #[serde(rename = "PackageBase")]
    pub pkgbase: String,
    /// Full version string.
    #[serde(rename = "Version")]
    pub version: String,
    /// Short description; `None` when the MPR has none for this package.
    #[serde(rename = "Description")]
    pub pkgdesc: Option<String>,
    /// Current maintainer; `None` when the package is orphaned.
    #[serde(rename = "Maintainer")]
    pub maintainer: Option<String>,
    /// Number of user votes on the MPR.
    #[serde(rename = "NumVotes")]
    pub num_votes: u32,
    /// MPR popularity score.
    #[serde(rename = "Popularity")]
    pub popularity: f32,
    /// Out-of-date marker; presumably a Unix timestamp of when the package
    /// was flagged (NOTE(review): confirm semantics against the MPR API).
    #[serde(rename = "OutOfDate")]
    pub ood: Option<u32>,
}
/// An in-memory cache of the packages published on the MPR, parsed from the
/// MPR's gzipped `packages-meta-ext-v2.json` archive (see `MprCache::new`).
pub struct MprCache {
// All packages from the archive, in the order they were parsed.
pub packages: Vec<MprPackage>,
}
impl MprCache {
    /// Builds an [`MprCache`] from the MPR package archive at `mpr_url`.
    ///
    /// The archive is cached on disk at `<xdg-cache>/mpr-cli/cache.gz` and
    /// re-downloaded when the on-disk copy is older than five minutes. On
    /// unrecoverable errors (no cache directory, network failure, unreadable
    /// or unwritable cache file) this prints an error message and exits the
    /// process via `quit::with_code`.
    pub fn new(mpr_url: &str) -> MprCache {
        // Get the XDG cache directory.
        let cache_dir = match dirs::cache_dir() {
            Some(dir) => dir,
            None => {
                message::error("Unable to find the xdg cache directory.");
                quit::with_code(exitcode::UNAVAILABLE);
            }
        };

        // Make sure '<cache>/mpr-cli' exists and is actually a directory.
        let mut mpr_cache_dir = cache_dir;
        mpr_cache_dir.push("mpr-cli");

        if !mpr_cache_dir.exists() {
            if let Err(err) = fs::create_dir_all(&mpr_cache_dir) {
                message::error(&format!(
                    "Encountered an unknown error while creating the cache directory. [{}]",
                    err
                ));
                quit::with_code(exitcode::UNAVAILABLE);
            }
        } else if !mpr_cache_dir.is_dir() {
            message::error(&format!(
                "Cache path '{}' isn't a directory.",
                mpr_cache_dir.display()
            ));
            quit::with_code(exitcode::OSERR);
        }

        // Try reading the cache file. If it doesn't exist or it's older than
        // five minutes, we have to update the cache file.
        let mut mpr_cache_file = mpr_cache_dir;
        mpr_cache_file.push("cache.gz");

        let mut update_cache = false;

        match fs::metadata(&mpr_cache_file) {
            // The file exists. Make sure it's been updated in the last five minutes.
            Ok(metadata) => {
                let five_minutes = 60 * 5; // The MPR updates package archives every five minutes.
                let current_time = SystemTime::now()
                    .duration_since(SystemTime::UNIX_EPOCH)
                    .unwrap()
                    .as_secs();
                let file_last_modified = metadata
                    .modified()
                    .unwrap()
                    .duration_since(SystemTime::UNIX_EPOCH)
                    .unwrap()
                    .as_secs();

                // saturating_sub: if the system clock moved backwards the
                // plain subtraction would underflow and panic in debug builds.
                if current_time.saturating_sub(file_last_modified) > five_minutes {
                    update_cache = true;
                };
            }

            Err(err) => {
                // BUGFIX: the old code did 'err.raw_os_error().unwrap() != 2',
                // which panics on errors that carry no raw OS code and
                // hard-codes errno 2 (ENOENT). Matching the error kind is
                // panic-free and portable.
                if err.kind() != std::io::ErrorKind::NotFound {
                    message::error(&format!(
                        "Encountered an unknown error while reading cache. [{}]",
                        err
                    ));
                    quit::with_code(exitcode::OSFILE);
                } else {
                    // The file doesn't exist. We need to create it.
                    update_cache = true;

                    if let Err(err) = fs::File::create(&mpr_cache_file) {
                        // BUGFIX: this message previously claimed a *read*
                        // failure even though we were creating the file.
                        message::error(&format!(
                            "Encountered an unknown error while creating the cache file. [{}]",
                            err
                        ));
                        quit::with_code(exitcode::OSFILE);
                    }
                }
            }
        };

        // If we need to, update the cache file.
        if update_cache {
            // Download the archive.
            let resp =
                match reqwest::blocking::get(format!("{}/packages-meta-ext-v2.json.gz", mpr_url)) {
                    Ok(resp) => resp,
                    Err(err) => {
                        message::error(&format!("Unable to make request. [{}]", err));
                        quit::with_code(exitcode::UNAVAILABLE);
                    }
                };

            if !resp.status().is_success() {
                message::error(&format!(
                    "Failed to download package archive from the MPR. [{}]",
                    resp.status()
                ));
                quit::with_code(exitcode::TEMPFAIL);
            }

            // Decompress and parse the archive.
            let cache = match valid_archive(resp) {
                Ok(cache) => cache,
                Err(num) => {
                    if num == 1 {
                        message::error("Failed to decompress package archive from the MPR.");
                    } else {
                        message::error(
                            "Failed to verify integrity of package archive from the MPR.",
                        );
                    }
                    quit::with_code(exitcode::TEMPFAIL);
                }
            };

            // Now that the JSON has been verified, let's write out the archive to the cache file.
            let mut config_compressor = GzEncoder::new(Vec::new(), Compression::default());
            config_compressor
                .write_all(serde_json::to_string(&cache).unwrap().as_bytes())
                .unwrap();
            let config_gz = config_compressor.finish().unwrap();

            if let Err(err) = fs::write(mpr_cache_file, config_gz) {
                message::error(&format!(
                    "Failed to write updated package archive. [{}]",
                    err
                ));
                quit::with_code(exitcode::IOERR);
            }

            // Return the new cache object.
            MprCache { packages: cache }
        } else {
            // The cache is less than 5 minutes old. We still need to validate
            // that the cache is valid though.
            let cache_file = match fs::File::open(&mpr_cache_file) {
                Ok(file) => file,
                Err(err) => {
                    // BUGFIX: this message previously said "Failed to write
                    // updated package archive" even though this is a read.
                    message::error(&format!("Failed to read package archive. [{}]", err));
                    quit::with_code(exitcode::IOERR);
                }
            };

            match valid_archive(cache_file) {
                Ok(packages) => MprCache { packages },
                Err(_) => {
                    // On an error, let's just remove the cache file and
                    // regenerate it by recalling this function.
                    fs::remove_file(mpr_cache_file).unwrap();
                    MprCache::new(mpr_url)
                }
            }
        }
    }
}
/// Decompresses a gzipped MPR package archive and parses it into package
/// records.
///
/// Returns `Err(1)` when decompression fails and `Err(2)` when the
/// decompressed text is not a valid JSON array of packages.
fn valid_archive(file: impl Read) -> Result<Vec<MprPackage>, u32> {
    // Decompress the stream into a string; a gzip error maps to code 1.
    let mut decompressed = String::new();
    GzDecoder::new(file)
        .read_to_string(&mut decompressed)
        .map_err(|_| 1u32)?;

    // Parse the JSON into package records; a parse error maps to code 2.
    serde_json::from_str::<Vec<MprPackage>>(&decompressed).map_err(|_| 2)
}
/////////////////////////////////////////////
// Stuff to handle shared APT/MPR caches. //
/////////////////////////////////////////////
//
// Some of these fields only make sense to one type of package, but this kind of cache allows us to
// combine both types when needed, such as when providing search results.
/// Where a cached package entry originated from.
///
/// `Debug`, `Clone`, `Copy`, and `Eq` are derived: the enum is a plain
/// two-variant tag, so copies are free and equality is total.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum CachePackageSource {
    /// The package comes from an APT repository.
    Apt,
    /// The package comes from the MPR.
    Mpr,
}
/// A package record shared between the APT and MPR caches.
///
/// Some fields only apply to one source: from the construction in
/// `Cache::new`, `arch`, `current_state`, and `is_installed` are set only for
/// APT packages, while `pkgbase`, `maintainer`, `num_votes`, `popularity`,
/// and `ood` are set only for MPR packages; the other source leaves them
/// `None`.
#[derive(PartialEq)]
pub struct CachePackage {
// Package name (APT names have any ':{arch}' suffix stripped).
pub pkgname: String,
// MPR package base; 'None' for APT packages.
pub pkgbase: Option<String>,
// Full version string.
pub version: String,
// Short description, when available.
pub pkgdesc: Option<String>,
// Architecture of the candidate version; 'None' for MPR packages.
pub arch: Option<String>,
// MPR maintainer; 'None' for APT packages or orphaned MPR packages.
pub maintainer: Option<String>,
// MPR vote count; 'None' for APT packages.
pub num_votes: Option<u32>,
// MPR popularity score; 'None' for APT packages.
pub popularity: Option<f32>,
// MPR out-of-date marker; 'None' for APT packages or unflagged packages.
pub ood: Option<u32>,
// APT current-state byte (from 'pkg.current_state()'); 'None' for MPR.
pub current_state: Option<u8>,
// Which cache (APT or MPR) this entry came from.
pub source: CachePackageSource,
// Whether the package is installed, per APT; 'None' for MPR packages.
pub is_installed: Option<bool>,
}
/// A combined cache of APT and MPR packages.
///
/// A package available from both sources appears twice in `packages`, once
/// per source; use `get_unique_pkgnames`/`package_map` to deduplicate.
pub struct Cache {
// All known packages from both sources.
pub packages: Vec<CachePackage>,
// NOTE(review): always set to 'true' by 'Cache::new' and never read within
// this file — presumably a construction guard; confirm before removing.
_initialized: bool,
}
// Create a new cache.
impl Cache {
pub fn new(apt_cache: &AptCache, mpr_cache: &MprCache) -> Self {
let mut packages: Vec<CachePackage> = Vec::new();
// Add APT packages.
let re = Regex::new(r":.*$").unwrap();
for pkg in apt_cache.packages(&PackageSort::default().names()) {
// Foreign architecture have ':{arch}' appended to the package name, but we don't want
// that since pkg.arch() contains that needed information anyway.
let pkgname = re.replace(&pkg.name(), "").to_string();
let version = pkg.candidate().unwrap();
packages.push(CachePackage {
pkgname,
pkgbase: None,
version: version.version(),
pkgdesc: Some(version.summary()),
arch: Some(version.arch()),
maintainer: None,
num_votes: None,
popularity: None,
ood: None,
current_state: Some(pkg.current_state()),
is_installed: Some(pkg.is_installed()),
source: CachePackageSource::Apt,
});
}
// Add MPR packages.
for pkg in &mpr_cache.packages {
packages.push(CachePackage {
pkgname: pkg.pkgname.clone(),
pkgbase: Some(pkg.pkgbase.clone()),
version: pkg.version.clone(),
pkgdesc: pkg.pkgdesc.clone(),
arch: None,
maintainer: pkg.maintainer.clone(),
num_votes: Some(pkg.num_votes),
popularity: Some(pkg.popularity),
ood: pkg.ood,
current_state: None,
is_installed: None,
source: CachePackageSource::Mpr,
});
}
Cache {
packages,
_initialized: true,
}
}
// Get a list of unique pkgnames - if a package exists in both APT repos and the MPR, they'll
// be duplicated in the 'Cache.packages' list otherwise.
pub fn get_unique_pkgnames(&self) -> Vec<&String> {
let mut packages: Vec<&String> = Vec::new();
for pkg in &self.packages {
packages.push(&pkg.pkgname);
}
packages.sort_unstable();
packages.dedup();
packages
}
// Get a list of CachePackage objects that matche a certain pkgname.
pub fn package_map(&self) -> HashMap<&String, Vec<&CachePackage>> {
let mut packages: HashMap<&String, Vec<&CachePackage>> = HashMap::new();
for pkg in &self.packages {
match packages.get_mut(&&pkg.pkgname) {
Some(vec) => vec.push(pkg),
None => {
packages.insert(&pkg.pkgname, vec![pkg]);
}
}
}
packages
}
// See if a package is available via APT.
// package_map is available from the package_map() function above.
pub fn available_apt(
&self,
package_map: &HashMap<&String, Vec<&CachePackage>>,
pkgname: &String,
) -> bool {
match package_map.get(pkgname) {
Some(packages) => {
for pkg in packages {
match pkg.source {
CachePackageSource::Apt => return true,
_ => continue,
}
}
false
}
None => false,
}
}
// See if a package is available on the MPR.
// package_map is available from the package_map() function above.
pub fn available_mpr(
&self,
package_map: &HashMap<&String, Vec<&CachePackage>>,
pkgname: &String,
) -> bool {
match package_map.get(pkgname) {
Some(packages) => {
for pkg in packages {
match pkg.source {
CachePackageSource::Mpr => return true,
_ => continue,
}
}
false
}
None => false,
}
}
}