1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
use std::fs::{create_dir, create_dir_all, File};
use std::io::prelude::*;
use std::io::Read;
use std::thread;

use atom_syndication::Feed;
use hyper::Client;
// Atom feed for the Bound Volumes dataset (dataset=14) on the UK
// Parliament data API; each feed entry links to one downloadable file.
const BOUND_VOL_URL: &'static str = "http://api.data.parliament.uk/resources/files/feed?dataset=14";
// Directory the downloaded files are written into (note: ends with '/').
const BASE: &'static str = "./data/";
/// Spawn a worker thread that downloads `url` and writes the response
/// body into `BASE`, named after the last path segment of the URL.
///
/// All errors (request, read, filesystem) are printed and abort only
/// this one download — they no longer panic the worker thread, which
/// the original `send().unwrap()` / `File::create(..).unwrap()` did.
fn get_save_zip(url: String) -> thread::JoinHandle<()> {
    thread::spawn(move || {
        println!("Getting: {}", url);
        // Handle the request error instead of unwrapping: a dead link or
        // network hiccup should skip this file, not crash the thread.
        let mut response = match Client::new().get(url.as_str()).send() {
            Ok(r) => r,
            Err(e) => {
                println!("Error: {:?}", e);
                return;
            }
        };
        let mut zip_buf = Vec::new();
        if let Err(e) = response.read_to_end(&mut zip_buf) {
            println!("Error: {:?}", e);
            return;
        }
        // Last path segment of the URL names the file; `rsplit` always
        // yields at least one item, so this `unwrap` cannot fail.
        let file_name = url.rsplit('/').next().unwrap();
        println!("Saving:{}", file_name);
        // BASE already ends with '/', so plain concatenation avoids the
        // "./data//name" double slash the old "{}/{}" format produced.
        match File::create(format!("{}{}", BASE, file_name)) {
            Ok(mut file) => {
                if let Err(e) = file.write_all(&zip_buf) {
                    println!("Error: {:?}", e);
                }
            }
            Err(e) => println!("Error: {:?}", e),
        }
    })
}
pub fn retrieve() {
let mut atom_str = String::new();
Client::new()
.get(BOUND_VOL_URL)
.send().unwrap()
.read_to_string(&mut atom_str).unwrap();
let feed = atom_str.parse::<Feed>().unwrap();
if let Err(e) = create_dir(BASE) {
println!("{}", e);
}
let vol_urls = feed.entries.iter()
.map(|e| e.links.first().unwrap().href.clone())
.collect::<Vec<String>>();
let handles = vol_urls.iter()
.map(|url| get_save_zip(url.clone()))
.collect::<Vec<thread::JoinHandle<()>>>();
for h in handles {
h.join().unwrap();
}
}