use super::download_file_async;
use super::download_to_string;
use crate::utils::datadir;
use serde_json::Value;
use std::path::PathBuf;
use std::thread::JoinHandle;
use anyhow::{bail, Result};
/// Download, in parallel, every file listed in a JSON manifest.
///
/// The manifest at `json_url` must be a JSON array of string URLs; each file
/// is fetched on its own thread into `basedir`, always overwriting any
/// existing copy.
fn download_from_url_json(json_url: String, basedir: &std::path::Path) -> Result<()> {
    // Fetch and parse the manifest.
    let body = download_to_string(json_url.as_str())?;
    let manifest: Value = serde_json::from_str(body.as_str())?;
    let urls = manifest
        .as_array()
        .ok_or_else(|| anyhow::anyhow!("Expected JSON array of file URLs"))?;

    // Spawn one download thread per entry; a non-string entry aborts here
    // (threads already spawned are left detached, as before).
    let mut handles: Vec<JoinHandle<Result<bool>>> = Vec::with_capacity(urls.len());
    for entry in urls {
        let url = entry
            .as_str()
            .ok_or_else(|| anyhow::anyhow!("Expected string URL"))?;
        handles.push(download_file_async(url.to_string(), basedir, true));
    }

    // Wait for every download and propagate the first failure.
    for handle in handles {
        handle.join().unwrap()?;
    }
    Ok(())
}
/// Recursively walk a JSON manifest, mirroring its structure on disk.
///
/// * Object: each key becomes a subdirectory of `basedir` (and a URL path
///   segment); the value is walked recursively.
/// * Array: each element is walked with the same directory and URL prefix.
/// * String: a leaf file name; an async download is pushed onto `thandles`.
///
/// Every sibling entry is attempted even when an earlier one fails; failures
/// are then reported in bulk with a generic error.
fn download_from_json(
    v: &Value,
    basedir: std::path::PathBuf,
    baseurl: String,
    overwrite: &bool,
    thandles: &mut Vec<JoinHandle<Result<bool>>>,
) -> Result<()> {
    if let Some(obj) = v.as_object() {
        let mut any_failed = false;
        for (key, val) in obj {
            // Per-entry closure so `?` only aborts this entry, not the loop.
            let outcome = (|| -> Result<()> {
                let child_dir = basedir.join(key);
                if !child_dir.is_dir() {
                    std::fs::create_dir_all(&child_dir)?;
                }
                let child_url = format!("{baseurl}/{key}");
                download_from_json(val, child_dir, child_url, overwrite, thandles)
            })();
            any_failed |= outcome.is_err();
        }
        if any_failed {
            bail!("Could not parse entries");
        }
    } else if let Some(arr) = v.as_array() {
        let mut any_failed = false;
        for val in arr {
            any_failed |=
                download_from_json(val, basedir.clone(), baseurl.clone(), overwrite, thandles)
                    .is_err();
        }
        if any_failed {
            bail!("could not parse array entries");
        }
    } else if let Some(s) = v.as_str() {
        // Leaf: queue an async download of "<baseurl>/<s>" into basedir.
        thandles.push(download_file_async(format!("{baseurl}/{s}"), &basedir, *overwrite));
    } else {
        bail!("invalid json for downloading files??!!");
    }
    Ok(())
}
/// Mirror the remote data directory described by `<baseurl>/files.json`
/// into `basedir`, downloading files in parallel and waiting for all of them.
fn download_datadir(basedir: PathBuf, baseurl: String, overwrite: &bool) -> Result<()> {
    // Make sure the destination root exists before any downloads start.
    if !basedir.is_dir() {
        std::fs::create_dir_all(&basedir)?;
    }

    // "files.json" at the base URL describes the directory tree to mirror.
    let manifest_url = format!("{baseurl}/files.json");
    let manifest: Value =
        serde_json::from_str(download_to_string(manifest_url.as_str())?.as_str())?;

    // Walk the manifest (spawns one thread per file), then join every thread,
    // propagating the first download failure.
    let mut handles: Vec<JoinHandle<Result<bool>>> = Vec::new();
    download_from_json(&manifest, basedir, baseurl, overwrite, &mut handles)?;
    for handle in handles {
        handle.join().unwrap()?;
    }
    Ok(())
}
/// Download (or refresh) the satkit data files.
///
/// # Arguments
/// * `dir` - Destination directory; when `None`, falls back to `datadir()`.
/// * `overwrite_if_exists` - Re-download static files that already exist.
///
/// # Errors
/// Fails if the destination cannot be created or is read-only, or if any
/// download or JSON parse fails. A failed solar-cycle-forecast update is
/// reported as a warning only (best-effort).
pub fn update_datafiles(dir: Option<PathBuf>, overwrite_if_exists: bool) -> Result<()> {
    let downloaddir = match dir {
        Some(d) => d,
        None => datadir()?,
    };
    // Fix: create the directory up front. `metadata()` below would otherwise
    // fail with NotFound for a fresh location (e.g. a new SATKIT_DATA dir)
    // that download_datadir would have created anyway.
    if !downloaddir.is_dir() {
        std::fs::create_dir_all(&downloaddir)?;
    }
    if downloaddir.metadata()?.permissions().readonly() {
        bail!(
            r#"
Data directory is read-only.
Try setting SATKIT_DATA environment
variable to a writeable directory and re-starting
"#
        );
    }
    // Fix: `Path::display()` instead of `to_str().unwrap()`, which panics on
    // non-UTF-8 paths; output is identical for valid UTF-8.
    println!("Downloading data files to {}", downloaddir.display());
    // Static files: honor the caller's overwrite preference.
    download_datadir(
        downloaddir.clone(),
        String::from("https://storage.googleapis.com/astrokit-astro-data"),
        &overwrite_if_exists,
    )?;
    // Regularly-updated files are always re-downloaded (overwrite forced on
    // inside download_from_url_json).
    println!("Now downloading files that are regularly updated:");
    println!("  Space Weather & Earth Orientation Parameters");
    download_from_url_json(
        String::from("https://storage.googleapis.com/astrokit-astro-data/files_refresh.json"),
        &downloaddir,
    )?;
    println!("  Solar Cycle Forecast");
    // Best-effort: a forecast failure should not fail the whole update.
    if let Err(e) = crate::solar_cycle_forecast::update() {
        eprintln!("Warning: could not download solar cycle forecast: {e}");
    }
    Ok(())
}