use anyhow::{anyhow, Context, Result};
use atty::Stream;
use chrono::{DateTime, Duration, Local, NaiveDate, NaiveDateTime, TimeZone, Utc};
use clap::Parser;
use human_panic::setup_panic;
use standard_paths::{LocationType, StandardPaths};
use std::{
fs::{self, File},
path::{Path, PathBuf},
};
use usage_tracker::*;
const PATH_CONVERT_ERROR: &str =
"could not convert file name for other error message. WTF have you done?!";
const JSON_FORMAT_ERROR: &str = "could not serialize JSON output";
// Top-level command-line interface, parsed by clap's derive macro.
// (Plain `//` comments are used deliberately: `///` doc comments would be
// picked up by clap as help text and change the CLI's `--help` output.)
#[derive(Debug, Parser)]
#[clap(about, author, version)]
struct Opt {
    #[clap(subcommand)]
    cmd: Commands,
    // Optional path to an alternative data file; when omitted, data is read
    // from / written to the standard application-data location (see
    // `load_from_default_files` / `save_to_default_file`).
    #[clap(parse(from_os_str), verbatim_doc_comment)]
    data_file: Option<PathBuf>,
    // When set, skip moving the existing data file to a `.bak` copy before
    // overwriting it (see `save_to_file`).
    #[clap(long)]
    no_backup: bool,
}
// Subcommands of the CLI.
//
// FIX: the attribute below was `#[structopt(about)]` — a leftover from the
// pre-clap-v3 structopt API. `#[derive(Parser)]` only registers the `clap`
// helper attribute, so `structopt` does not resolve; it must be `clap`.
// (Plain `//` comments are used so clap's generated help text is unchanged.)
#[derive(Debug, Parser)]
#[clap(about)]
enum Commands {
    // Start tracking a new object.
    Add {
        name: String,
    },
    // Delete all tracked objects and their recorded usages.
    Clear {
        // Safety flag: `main` refuses the operation unless `--i-am-sure` is given.
        #[clap(long = "i-am-sure")]
        confirmation: bool,
    },
    // List tracked objects; with `--verbose`, also print every recorded usage.
    List {
        #[clap(long, short)]
        verbose: bool,
    },
    // Remove recorded usages of one object, optionally only those before a date.
    Prune {
        // Cut-off timestamp; accepted formats are defined by `parse_date`.
        #[clap(short, long, parse(try_from_str = parse_date), verbatim_doc_comment)]
        before: Option<DateTime<Utc>>,
        name: String,
    },
    // Stop tracking an object entirely.
    Remove {
        name: String,
    },
    // Print all recorded usages of one object.
    Show {
        name: String,
    },
    // Count usages of an object within the given time span.
    Usage {
        name: String,
        duration: i64,
        // Unit of `duration`: one of y/M/w/d/h/m/s (mapping lives in `main`).
        #[clap(verbatim_doc_comment)]
        duration_type: char,
    },
    // Record a usage now; `--add` starts tracking the object if it is new.
    Use {
        #[clap(long = "add")]
        add_if_new: bool,
        name: String,
    },
}
// Entry point: parse CLI arguments, load the usage data, apply the requested
// subcommand, and persist the data only if it actually changed.
fn main() -> Result<()> {
    // Install human-friendly panic messages with crate metadata.
    setup_panic!(Metadata {
        authors: env!("CARGO_PKG_AUTHORS").into(),
        homepage: env!("CARGO_PKG_HOMEPAGE").into(),
        name: env!("CARGO_PKG_NAME").into(),
        version: env!("CARGO_PKG_VERSION").into(),
    });
    let opt = Opt::parse();
    let sp = StandardPaths::new("usage-tracker", "tfld");
    // Load from an explicitly given data file, or fall back to the default
    // files in the application-data directory.
    let initial_info = match &opt.data_file {
        Some(df) => load_from_file(&df)?,
        None => load_from_default_files(&sp)?,
    };
    // Work on a copy; `initial_info` is kept to detect changes so the data is
    // only written back when a command actually modified it.
    let mut info = initial_info.clone();
    match opt.cmd {
        Commands::Add { name } => info.add(&name)?,
        Commands::Clear { confirmation } => {
            // Destructive: require the explicit `--i-am-sure` flag.
            if confirmation {
                info.clear()
            } else {
                return Err(anyhow!("please confirm operation with `--i-am-sure`"));
            }
        }
        Commands::List { verbose } => {
            if info.list_verbose().len() == 0 {
                return Err(anyhow!("no objects are currently tracked"));
            }
            if !verbose {
                let data = info.list();
                // Human-readable output on a terminal, JSON when piped.
                if atty::is(Stream::Stdout) {
                    for (i, k) in data.iter().enumerate() {
                        println!("{}: {}", i, k);
                    }
                } else {
                    println!(
                        "{}",
                        serde_json::to_string(&data).context(JSON_FORMAT_ERROR)?
                    );
                }
            } else {
                let data = info.list_verbose();
                if atty::is(Stream::Stdout) {
                    // Terminal: object names with their usage timestamps
                    // converted to the local timezone.
                    for (i, (k, v)) in data.iter().enumerate() {
                        println!("{}: {}", i, k);
                        for u in v.list() {
                            println!("  {}", u.with_timezone(&Local));
                        }
                    }
                } else {
                    // Piped: one JSON object per tracked name.
                    let mut output = Vec::new();
                    for (k, v) in data.iter() {
                        output.push(serde_json::json!({"name": k, "usages": v.list()}));
                    }
                    println!(
                        "{}",
                        serde_json::to_string(&output).context(JSON_FORMAT_ERROR)?
                    );
                }
            }
        }
        Commands::Prune { before, name } => info.prune(&name, &before)?,
        Commands::Remove { name } => info.remove(&name),
        Commands::Show { name } => {
            let data = (info.usages(&name)?).list();
            // Same tty/JSON split as `List`.
            if atty::is(Stream::Stdout) {
                for u in data {
                    println!("{}", u.with_timezone(&Local));
                }
            } else {
                println!(
                    "{}",
                    serde_json::to_string(&data).context(JSON_FORMAT_ERROR)?
                );
            }
        }
        Commands::Usage {
            name,
            duration,
            duration_type,
        } => {
            // Map the unit character to a chrono Duration. Years and months
            // are approximated as 365 and 30 days respectively.
            let d = match duration_type {
                'y' => Duration::days(duration * 365),
                'M' => Duration::days(duration * 30),
                'w' => Duration::weeks(duration),
                'd' => Duration::days(duration),
                'h' => Duration::hours(duration),
                'm' => Duration::minutes(duration),
                's' => Duration::seconds(duration),
                _ => {
                    return Err(anyhow!("duration type '{}' doesn't exist", duration_type));
                }
            };
            let data = info.usage(&name, &d)?;
            if atty::is(Stream::Stdout) {
                println!("{}", data);
            } else {
                println!("{}", serde_json::json!({ "value": data }));
            }
        }
        Commands::Use { add_if_new, name } => info.record_use(&name, add_if_new)?,
    }
    // Persist only if a command changed the data, to the same place it was
    // loaded from.
    if info != initial_info {
        match &opt.data_file {
            Some(df) => save_to_file(&info, &df, !opt.no_backup)?,
            None => save_to_default_file(&info, !opt.no_backup, &sp)?,
        }
    }
    Ok(())
}
fn load_from_default_files(sp: &StandardPaths) -> Result<UsageInformation> {
let path_base = sp
.writable_location(LocationType::AppDataLocation)
.context("application data directory not found")?;
let files = vec![("usages", true), ("default", false)];
for (name, is_json) in files {
let mut p = PathBuf::new();
p.push(&path_base);
p.push(name);
p.set_extension(match is_json {
true => "json",
false => "ron",
});
if !p.exists() {
continue;
}
if !p.is_file() {
return Err(anyhow!(
"found directory instead of file: {}",
p.to_str().context(PATH_CONVERT_ERROR)?
));
}
let file = File::open(Path::new(&p)).context(format!(
"could not open file: {}",
p.to_str().context(PATH_CONVERT_ERROR)?
))?;
return match is_json {
true => serde_json::from_reader(file).context(format!(
"could not parse JSON file: {}",
p.to_str().context(PATH_CONVERT_ERROR)?
)),
#[allow(deprecated)]
false => UsageInformation::load_usage_information_from_ron_file(file).context(format!(
"could not load data from RON file: {}",
p.to_str().context(PATH_CONVERT_ERROR)?
)),
};
}
Ok(UsageInformation::new())
}
fn load_from_file(path: &PathBuf) -> Result<UsageInformation> {
let fmt = match path.extension() {
Some(e) => match e.to_str().context("could not parse file name extension")? {
"json" => "JSON",
_ => {
return Err(anyhow!(
"\"{}\" is not a supported file format",
e.to_str().context(PATH_CONVERT_ERROR)?
))
}
},
None => return Err(anyhow!("file format not specified")),
};
if !path.exists() {
return Ok(UsageInformation::new());
}
let file = File::open(Path::new(&path)).context(format!(
"could not open file: {}",
path.to_str().context(PATH_CONVERT_ERROR)?
))?;
match fmt {
"JSON" => serde_json::from_reader(file),
_ => panic!("internal format value changed"),
}
.context(format!(
"could not parse {} file: {}",
fmt,
path.to_str().context(PATH_CONVERT_ERROR)?
))
}
// Parses a date/time string into a UTC timestamp. The format is picked by
// input length:
// - "dd.MM.yyyy"          → midnight (00:00:00) of that day, taken as UTC
// - "yyyy-MM-ddThh:mm:ss" → naive local time, converted to UTC
// - anything else         → parsed directly as a `DateTime<Utc>`
//
// FIXES: the "could not pares" typo in the error message, and the panicking
// `.unwrap()` on `Local.from_local_datetime` — a naive local time can be
// ambiguous or nonexistent around DST transitions, which now produces an
// error instead of a panic.
fn parse_date(src: &str) -> Result<DateTime<Utc>> {
    if src.len() == "dd.MM.yyyy".len() {
        let d = NaiveDate::parse_from_str(src, "%d.%m.%Y")
            .context(format!("could not parse local date: {}", src))?;
        let midnight = d
            .and_hms_opt(0, 0, 0)
            .ok_or(anyhow!("could not convert to utc: {}", d))?;
        // The naive timestamp is interpreted as UTC directly; unlike
        // `from_local_datetime`, this conversion is infallible.
        Ok(Utc.from_utc_datetime(&midnight))
    } else if src.len() == "yyyy-MM-ddThh:mm:ss".len() {
        let dt: NaiveDateTime = src
            .parse()
            .context(format!("could not parse local datetime: {}", src))?;
        let dtu = Local
            .from_local_datetime(&dt)
            .single()
            .ok_or_else(|| anyhow!("ambiguous or invalid local datetime: {}", src))?;
        Ok(dtu.into())
    } else {
        src.parse()
            .context(format!("could not parse datetime: {}", src))
    }
}
// Saves `ui` to `usages.json` in the standard application-data directory,
// delegating to `save_to_file` (which also handles the optional backup).
fn save_to_default_file(ui: &UsageInformation, backup: bool, sp: &StandardPaths) -> Result<()> {
    let dir = sp
        .writable_location(LocationType::AppDataLocation)
        .context("application data directory not found")?;
    save_to_file(ui, &dir.join("usages.json"), backup)
}
fn save_to_file(ui: &UsageInformation, path: &PathBuf, backup: bool) -> Result<()> {
let fmt = match path.extension() {
Some(e) => match e.to_str().context("could not parse file name extension")? {
"json" => "JSON",
_ => {
return Err(anyhow!(
"\"{}\" is not a supported file format",
e.to_str().context(PATH_CONVERT_ERROR)?
))
}
},
None => return Err(anyhow!("file format not specified")),
};
if backup {
let mut backup_path = PathBuf::new();
backup_path.push(&path);
let backup_ext = backup_path
.extension()
.unwrap()
.to_str()
.unwrap()
.to_owned()
+ ".bak";
backup_path.set_extension(backup_ext);
if backup_path.exists() {
fs::remove_file(&backup_path).context("couldn't clear backup file path")?;
}
if path.exists() {
fs::rename(&path, &backup_path)
.context("couldn't move old data file to backup location")?;
}
}
if path.exists() {
fs::remove_file(&path).context("couldn't clear data file path")?;
}
let file = File::create(Path::new(&path)).context(format!(
"could not create file: {}",
path.to_str().context(PATH_CONVERT_ERROR)?
))?;
match fmt {
"JSON" => serde_json::to_writer_pretty(file, ui),
_ => panic!("internal format value changed"),
}
.context(format!(
"could not parse {} file: {}",
fmt,
path.to_str().context(PATH_CONVERT_ERROR)?
))
}