hrdf_routing_engine/isochrone/externals.rs

use std::env;
use std::error::Error;
use std::fs::{self, File};
use std::io::{BufReader, Cursor};
use std::path::Path;

use bincode::config;
use geo::{BooleanOps, MultiPolygon, Polygon};
use geojson::{FeatureCollection, GeoJson};
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use url::Url;

#[cfg(feature = "hectare")]
use zip::ZipArchive;

use super::utils::lv95_to_wgs84;

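/// GeoJSON outlines of the main Swiss lakes, pinned to a fixed commit of the
/// ZHB/switzerland-geojson repository so the downloaded data is reproducible.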
pub const LAKES_GEOJSON_URLS: [&str; 20] = [
    "https://raw.githubusercontent.com/ZHB/switzerland-geojson/05cc91014860ddd8a6c1704f4a421f1e9b1f0080/lakes/lake-baldegg.geojson",
    "https://raw.githubusercontent.com/ZHB/switzerland-geojson/05cc91014860ddd8a6c1704f4a421f1e9b1f0080/lakes/lake-biel.geojson",
    "https://raw.githubusercontent.com/ZHB/switzerland-geojson/05cc91014860ddd8a6c1704f4a421f1e9b1f0080/lakes/lake-brienz.geojson",
    "https://raw.githubusercontent.com/ZHB/switzerland-geojson/05cc91014860ddd8a6c1704f4a421f1e9b1f0080/lakes/lake-constance.geojson",
    "https://raw.githubusercontent.com/ZHB/switzerland-geojson/05cc91014860ddd8a6c1704f4a421f1e9b1f0080/lakes/lake-geneva.geojson",
    "https://raw.githubusercontent.com/ZHB/switzerland-geojson/05cc91014860ddd8a6c1704f4a421f1e9b1f0080/lakes/lake-hallwil.geojson",
    "https://raw.githubusercontent.com/ZHB/switzerland-geojson/05cc91014860ddd8a6c1704f4a421f1e9b1f0080/lakes/lake-lac-de-joux.geojson",
    "https://raw.githubusercontent.com/ZHB/switzerland-geojson/05cc91014860ddd8a6c1704f4a421f1e9b1f0080/lakes/lake-lucerne.geojson",
    "https://raw.githubusercontent.com/ZHB/switzerland-geojson/05cc91014860ddd8a6c1704f4a421f1e9b1f0080/lakes/lake-lugano.geojson",
    "https://raw.githubusercontent.com/ZHB/switzerland-geojson/05cc91014860ddd8a6c1704f4a421f1e9b1f0080/lakes/lake-maggiore.geojson",
    "https://raw.githubusercontent.com/ZHB/switzerland-geojson/05cc91014860ddd8a6c1704f4a421f1e9b1f0080/lakes/lake-morat.geojson",
    "https://raw.githubusercontent.com/ZHB/switzerland-geojson/05cc91014860ddd8a6c1704f4a421f1e9b1f0080/lakes/lake-neuchatel.geojson",
    "https://raw.githubusercontent.com/ZHB/switzerland-geojson/05cc91014860ddd8a6c1704f4a421f1e9b1f0080/lakes/lake-of-gruyere.geojson",
    "https://raw.githubusercontent.com/ZHB/switzerland-geojson/05cc91014860ddd8a6c1704f4a421f1e9b1f0080/lakes/lake-sarnen.geojson",
    "https://raw.githubusercontent.com/ZHB/switzerland-geojson/05cc91014860ddd8a6c1704f4a421f1e9b1f0080/lakes/lake-sempach.geojson",
    "https://raw.githubusercontent.com/ZHB/switzerland-geojson/05cc91014860ddd8a6c1704f4a421f1e9b1f0080/lakes/lake-sihl.geojson",
    "https://raw.githubusercontent.com/ZHB/switzerland-geojson/05cc91014860ddd8a6c1704f4a421f1e9b1f0080/lakes/lake-thun.geojson",
    "https://raw.githubusercontent.com/ZHB/switzerland-geojson/05cc91014860ddd8a6c1704f4a421f1e9b1f0080/lakes/lake-wagitalersee.geojson",
    "https://raw.githubusercontent.com/ZHB/switzerland-geojson/05cc91014860ddd8a6c1704f4a421f1e9b1f0080/lakes/lake-walensee.geojson",
    "https://raw.githubusercontent.com/ZHB/switzerland-geojson/05cc91014860ddd8a6c1704f4a421f1e9b1f0080/lakes/lake-zurich.geojson",
];

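/// Parses a GeoJSON file and unions all of its `Polygon` features into a
/// single `MultiPolygon`. Non-polygon geometries are skipped, and every ring
/// is treated as an exterior ring (no holes are produced).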
fn parse_geojson_file(path: &str) -> Result<MultiPolygon, Box<dyn Error>> {
    let file = File::open(path)?;
    let reader = BufReader::new(file);

    // Parse the GeoJSON file.
    let geojson: GeoJson = serde_json::from_reader(reader)?;

    let polygons = FeatureCollection::try_from(geojson)?
        .into_iter()
        .filter_map(|feature| {
            feature.geometry.and_then(|geometry| {
                if let geojson::Value::Polygon(exteriors) = geometry.value {
                    let polygons: MultiPolygon = exteriors
                        .into_iter()
                        .map(|exterior| {
                            Polygon::new(
                                exterior
                                    .into_iter()
                                    // GeoJSON coordinates are [longitude, latitude];
                                    // they are deliberately swapped to (latitude, longitude) here.
                                    .map(|coords| (coords[1], coords[0]))
                                    .collect(),
                                vec![],
                            )
                        })
                        .collect();
                    Some(polygons)
                } else {
                    None
                }
            })
        })
        .fold(MultiPolygon::new(vec![]), |res, p| res.union(&p));
    Ok(polygons)
}

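/// Builds the set of polygons (e.g. lake surfaces) that isochrone computation
/// should exclude.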
pub struct ExcludedPolygons;

impl ExcludedPolygons {
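    /// Serializes the polygons with bincode and writes them to `path`.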
    fn build_cache(multis: &MultiPolygon, path: &str) -> Result<(), Box<dyn Error>> {
        let data = bincode::serde::encode_to_vec(multis, config::standard())?;
        fs::write(path, data)?;
        Ok(())
    }

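    /// Reads a previously built cache file back into a `MultiPolygon`.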
    fn load_from_cache(path: &str) -> Result<MultiPolygon, Box<dyn Error>> {
        let data = fs::read(path)?;
        let (multis, _) = bincode::serde::decode_from_slice(&data, config::standard())?;
        Ok(multis)
    }

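    /// Loads GeoJSON data from the given URLs (or local paths), unions all
    /// polygons into a single `MultiPolygon`, and caches the result under a
    /// filename derived from the SHA-256 of the concatenated URLs. The cache
    /// is reused on later calls unless `force_rebuild_cache` is set.
    ///
    /// # Example (sketch; assumes an async runtime such as Tokio)
    /// ```ignore
    /// let lakes = ExcludedPolygons::try_new(&LAKES_GEOJSON_URLS, false, None).await?;
    /// ```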
    pub async fn try_new(
        urls: &[&str],
        force_rebuild_cache: bool,
        cache_prefix: Option<String>,
    ) -> Result<MultiPolygon, Box<dyn Error>> {
        let cache_path = format!(
            "{}/{:x}.cache",
            cache_prefix.unwrap_or_else(|| "./".to_string()),
            Sha256::digest(
                urls.iter()
                    .fold(String::new(), |res, &s| res + s)
                    .as_bytes(),
            )
        )
        .replace("//", "/");

        let multis = if !force_rebuild_cache && Path::new(&cache_path).exists() {
            Self::load_from_cache(&cache_path)?
        } else {
            let mut multis = Vec::new();
            for &url in urls {
                let unique_filename = format!("{:x}", Sha256::digest(url.as_bytes()));

                // Resolve the data source: remote URLs are downloaded to /tmp,
                // local paths are used as-is.
                let data_path = if Url::parse(url).is_ok() {
                    let data_path = format!("/tmp/{unique_filename}");

                    if !Path::new(&data_path).exists() {
                        // The data must be downloaded.
                        log::info!("Downloading GeoJson data to {data_path}...");
                        let response = reqwest::get(url).await?;
                        let mut file = std::fs::File::create(&data_path)?;
                        let mut content = Cursor::new(response.bytes().await?);
                        std::io::copy(&mut content, &mut file)?;
                    }

                    data_path
                } else {
                    url.to_string()
                };

                log::info!("Parsing ExcludedPolygons data from {data_path}...");
                let local = parse_geojson_file(&data_path)?;

                multis.push(local);
            }

            let multis = multis
                .into_iter()
                .fold(MultiPolygon::new(vec![]), |poly, p| poly.union(&p));
            Self::build_cache(&multis, &cache_path)?;
            multis
        };

        Ok(multis)
    }
}

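/// Population-per-hectare records parsed from the Swiss STATPOP dataset.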
#[cfg(feature = "hectare")]
#[derive(Debug, Serialize, Deserialize)]
pub struct HectareData {
    data: Vec<HectareRecord>,
}

#[cfg(feature = "hectare")]
impl HectareData {
    /// Loads and parses the population-per-hectare data.
    /// If a URL is provided, the archive is downloaded automatically.
    /// If a local path is provided, it must point to a valid ZIP archive.
    /// The archive is decompressed into the OS temporary directory.
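    ///
    /// # Example (sketch; the URL is illustrative, not a real endpoint)
    /// ```ignore
    /// let hectare = HectareData::new("https://example.com/statpop.zip", false, None).await?;
    /// ```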
    pub async fn new(
        url_or_path: &str,
        force_rebuild_cache: bool,
        cache_prefix: Option<String>,
    ) -> Result<Self, Box<dyn Error>> {
        let unique_filename = format!("{:x}", Sha256::digest(url_or_path.as_bytes()));
        let cache_path = format!(
            "{}/{unique_filename}.cache",
            cache_prefix.unwrap_or_else(|| String::from("./"))
        )
        .replace("//", "/");

        let hectare = if Path::new(&cache_path).exists() && !force_rebuild_cache {
            // Loading from cache.
            log::info!("Loading HECTARE data from cache ({cache_path})...");

            // If loading from cache fails, None is returned.
            HectareData::load_from_cache(&cache_path).ok()
        } else {
            // The cache is skipped or missing.
            None
        };

        let hectare = if let Some(hectare) = hectare {
            // The cache has been loaded without error.
            hectare
        } else {
            // The cache must be built.
            // If cache loading has failed, the cache must be rebuilt.
            let compressed_data_path = if Url::parse(url_or_path).is_ok() {
                let compressed_data_path = env::temp_dir()
                    .join(format!("{unique_filename}.zip"))
                    .into_os_string()
                    .into_string()
                    .expect("Could not convert to string.");

                if !Path::new(&compressed_data_path).exists() {
                    // The data must be downloaded.
                    log::info!("Downloading HECTARE data to {compressed_data_path}...");
                    let response = reqwest::get(url_or_path).await?;
                    let mut file = std::fs::File::create(&compressed_data_path)?;
                    let mut content = Cursor::new(response.bytes().await?);
                    std::io::copy(&mut content, &mut file)?;
                }

                compressed_data_path
            } else {
                url_or_path.to_string()
            };

            let decompressed_data_path = env::temp_dir()
                .join(unique_filename)
                .into_os_string()
                .into_string()
                .expect("Could not convert to string.");

            if !Path::new(&decompressed_data_path).exists() {
                // The data must be decompressed.
                log::info!("Unzipping HECTARE archive into {decompressed_data_path}...");
                let file = File::open(&compressed_data_path)?;
                let mut archive = ZipArchive::new(BufReader::new(file))?;
                archive.extract(&decompressed_data_path)?;
            }

            log::info!("Parsing HECTARE data from {decompressed_data_path}...");

            let hectare = Self {
                data: Self::parse(&decompressed_data_path)?,
            };

            log::info!("Building cache...");
            hectare.build_cache(&cache_path)?;
            hectare
        };

        Ok(hectare)
    }

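    /// Parses the semicolon-delimited STATPOP CSV from the decompressed
    /// archive, converting each record's LV95 coordinates to WGS84. The
    /// archive is expected to contain `ag-b-00.03-vz2023statpop/STATPOP2023.csv`.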
    fn parse(decompressed_data_path: &str) -> Result<Vec<HectareRecord>, Box<dyn Error>> {
        let path = format!("{decompressed_data_path}/ag-b-00.03-vz2023statpop/STATPOP2023.csv");
        let file = File::open(path)?;

        let mut rdr = csv::ReaderBuilder::new().delimiter(b';').from_reader(file);
        rdr.records()
            .map(|result| {
                let record = result?;

                let reli: u64 = record[2].parse()?;
                let easting: f64 = record[3].parse()?;
                let northing: f64 = record[4].parse()?;
                let population: u64 = record[5].parse()?;

                let (latitude, longitude) = lv95_to_wgs84(easting, northing);
                Ok(HectareRecord {
                    reli,
                    longitude,
                    latitude,
                    population,
                    area: None,
                })
            })
            .collect()
    }

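    /// Consumes the struct and returns the parsed records.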
    pub fn data(self) -> Vec<HectareRecord> {
        self.data
    }

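    /// Serializes the records with bincode and writes them to `path`.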
    fn build_cache(&self, path: &str) -> Result<(), Box<dyn Error>> {
        let data = bincode::serde::encode_to_vec(self, config::standard())?;
        fs::write(path, data)?;
        Ok(())
    }

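    /// Reads a previously built cache file back into a `HectareData`.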
    fn load_from_cache(path: &str) -> Result<Self, Box<dyn Error>> {
        let data = fs::read(path)?;
        let (hectare, _) = bincode::serde::decode_from_slice(&data, config::standard())?;
        Ok(hectare)
    }
}

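/// One populated hectare: the RELI grid cell identifier, its WGS84
/// coordinates, the resident population, and an optional area (left as `None`
/// when parsing).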
#[cfg(feature = "hectare")]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HectareRecord {
    pub reli: u64,
    pub longitude: f64,
    pub latitude: f64,
    pub population: u64,
    pub area: Option<f64>,
}