progscrape_scrapers/backends/legacy/mod.rs

use std::{
    fs::File,
    io::{BufRead, BufReader},
    path::Path,
};

use super::TypedScrape;
use thiserror::Error;

#[derive(Debug, Error)]
pub enum LegacyError {
    #[error("I/O error")]
    IOError(#[from] std::io::Error),
    #[error("UTF8 error")]
    UTF8Error(#[from] std::string::FromUtf8Error),
    #[error("JSON error")]
    JSONError(#[from] serde_json::Error),
    #[error("Field was missing or invalid")]
    MissingField,
    #[error("Field {0} was missing or invalid ({1:?})")]
    InvalidField(&'static str, Option<String>),
    #[error("CBOR error")]
    CBORError(#[from] serde_cbor::Error),
}

/// Import scrapes from a legacy backup file.
///
/// The backup is read as a sequence of JSON objects: lines are accumulated
/// until the buffer ends with a line terminating in `}`, and each completed
/// chunk is parsed as a single `TypedScrape`.
pub fn import_backup(file: &Path) -> Result<Vec<TypedScrape>, LegacyError> {
    let mut f = BufReader::new(File::open(file)?);
    let mut out: Vec<TypedScrape> = vec![];
    'outer: loop {
        // Accumulate lines until the buffer ends with "}\n", which marks the
        // end of one record.
        let mut buf = vec![];
        while !buf.ends_with("}\n".as_bytes()) {
            let read = f.read_until(b'\n', &mut buf)?;
            if read == 0 {
                // EOF: no more records to read.
                break 'outer;
            }
        }
        let json = String::from_utf8(buf)?;
        let scrape = serde_json::from_str(&json)?;
        out.push(scrape);
    }

    Ok(out)
}
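
// A minimal usage sketch (not part of the original module): calling
// `import_backup` on a legacy backup file and checking the result. The path
// "testdata/legacy-backup.json" is a hypothetical example, and the test is
// ignored by default since it needs a real backup file on disk.
#[cfg(test)]
mod tests {
    use super::import_backup;
    use std::path::Path;

    #[test]
    #[ignore = "requires a legacy backup file on disk"]
    fn import_backup_smoke() {
        let scrapes = import_backup(Path::new("testdata/legacy-backup.json"))
            .expect("backup should parse into TypedScrape records");
        assert!(!scrapes.is_empty());
    }
}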