//! On-disk repository of per-project dashboard data.
//!
//! Data is stored as TOML files under a directory tree: a root
//! `brevdash.toml`, one `project.toml` per project subdirectory, and one
//! `datapoint.toml` (plus an `artifacts/` directory) per `YYYY-MM-DD`
//! datapoint directory inside each project.
#![deny(
    missing_docs,
    missing_debug_implementations,
    missing_copy_implementations,
    trivial_casts,
    trivial_numeric_casts,
    unsafe_code,
    unstable_features,
    unused_import_braces,
    unused_qualifications
)]
use anyhow::{bail, Context, Result};
use chrono::naive::NaiveDate;
use glob::glob;
use indexmap::IndexMap;
use log::warn;
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use std::str::FromStr;
/// Handle to an on-disk repository rooted at a directory containing a
/// `brevdash.toml` root description and one subdirectory per project.
#[derive(Debug)]
pub struct Repository {
    // Root directory of the repository; all other paths are derived from it.
    path: PathBuf,
    /// The repository-wide description, loaded from the root `brevdash.toml`.
    pub description: RootDescription,
}
impl Repository {
    /// Opens an existing repository rooted at `path`, loading its root
    /// description from `brevdash.toml`.
    pub fn open<P: AsRef<Path>>(path: P) -> Result<Self> {
        Ok(Repository {
            path: path.as_ref().to_path_buf(),
            description: RootDescription::load_from_file(
                Self::initial_brevdash_toml_path(path),
            )?,
        })
    }

    /// Creates a new repository rooted at `path` and immediately writes
    /// `description` out as its `brevdash.toml`.
    pub fn create<P: AsRef<Path>>(
        path: P,
        description: RootDescription,
    ) -> Result<Self> {
        let r = Repository {
            path: path.as_ref().to_path_buf(),
            description,
        };
        r.store_description()?;
        Ok(r)
    }

    /// Persists the current root description to this repository's
    /// `brevdash.toml`.
    pub fn store_description(&self) -> Result<()> {
        self.description.store_to_file(self.brevdash_toml_path())
    }

    // Path of the root description file under an arbitrary root directory;
    // used before a `Repository` value exists (see `open`).
    fn initial_brevdash_toml_path<P: AsRef<Path>>(p: P) -> PathBuf {
        p.as_ref().join("brevdash.toml")
    }

    // `<root>/<project_id>`
    fn project_dir_path(&self, project_id: &str) -> PathBuf {
        self.path.join(project_id)
    }

    // `<root>/<project_id>/project.toml`
    fn project_toml_file_path(&self, project_id: &str) -> PathBuf {
        self.project_dir_path(project_id).join("project.toml")
    }

    // `<root>/<project_id>/<YYYY-MM-DD>`
    fn project_datapoint_directory_path(
        &self,
        project_id: &str,
        date: NaiveDate,
    ) -> PathBuf {
        self.project_dir_path(project_id)
            .join(date.format("%Y-%m-%d").to_string())
    }

    // `<root>/<project_id>/<YYYY-MM-DD>/datapoint.toml`
    fn project_datapoint_toml_file_path(
        &self,
        project_id: &str,
        date: NaiveDate,
    ) -> PathBuf {
        self.project_datapoint_directory_path(project_id, date)
            .join("datapoint.toml")
    }

    // `<root>/<project_id>/<YYYY-MM-DD>/artifacts`
    fn project_datapoint_artifacts_directory_path(
        &self,
        project_id: &str,
        date: NaiveDate,
    ) -> PathBuf {
        self.project_datapoint_directory_path(project_id, date)
            .join("artifacts")
    }

    /// Directory holding the artifact files recorded for one
    /// characteristic of one datapoint:
    /// `<root>/<project_id>/<YYYY-MM-DD>/artifacts/<characteristic_id>`.
    pub fn project_datapoint_characteristic_artifacts_directory_path(
        &self,
        project_id: &str,
        date: NaiveDate,
        characteristic_id: &str,
    ) -> PathBuf {
        self.project_datapoint_artifacts_directory_path(project_id, date)
            .join(characteristic_id)
    }

    /// Full path of a single artifact file, given its path relative to the
    /// characteristic's artifacts directory.
    pub fn project_datapoint_characteristic_artifact_path(
        &self,
        project_id: &str,
        date: NaiveDate,
        characteristic_id: &str,
        artifact_relative_path: &Path,
    ) -> PathBuf {
        self.project_datapoint_characteristic_artifacts_directory_path(
            project_id,
            date,
            characteristic_id,
        )
        .join(artifact_relative_path)
    }

    // Path of this repository's root description file. Delegates to
    // `initial_brevdash_toml_path` so the file name is defined in one place.
    fn brevdash_toml_path(&self) -> PathBuf {
        Self::initial_brevdash_toml_path(&self.path)
    }

    // Derives a project id from a `.../<project_id>/project.toml` path:
    // the name of the file's parent directory, as valid UTF-8.
    fn extract_project_id(
        project_toml_file_path: &Path,
    ) -> Result<String> {
        let project_path =
            project_toml_file_path.parent().with_context(|| {
                format!(
                    "Couldn't extract parent directory of {:?}",
                    project_toml_file_path
                )
            })?;
        let project_path_name_raw =
            project_path.file_name().with_context(|| {
                format!(
                    "Couldn't extract directory name of {:?}",
                    project_path
                )
            })?;
        Ok(project_path_name_raw
            .to_str()
            .with_context(|| {
                format!(
                    "Couldn't get project directory name, \
                     possibly invalid UTF-8: {:?}",
                    project_path_name_raw
                )
            })?
            .to_string())
    }

    /// Lists the ids of all projects in the repository by globbing for
    /// `<root>/*/project.toml`. Entries that cannot be read or whose id
    /// cannot be extracted are logged via `warn!` and skipped.
    pub fn load_project_ids(&self) -> Result<Vec<String>> {
        let pattern = format!("{}/*/project.toml", self.path.as_str()?);
        let mut ids = Vec::new();
        for entry in glob(&pattern).with_context(|| {
            format!("Failed to read glob pattern {:?}", pattern)
        })? {
            match entry {
                Ok(path) => match Self::extract_project_id(&path) {
                    Ok(id) => ids.push(id),
                    Err(e) => warn!("{:?}", e),
                },
                Err(e) => warn!("{:?}", e),
            }
        }
        Ok(ids)
    }

    /// Returns whether a project with this id exists (i.e. its
    /// `project.toml` file is present).
    pub fn has_project(&self, project_id: &str) -> bool {
        self.project_toml_file_path(project_id).exists()
    }

    /// Writes `description` to the project's `project.toml`, creating the
    /// project directory first if necessary.
    pub fn store_project_description(
        &self,
        project_id: &str,
        description: &ProjectDescription,
    ) -> Result<()> {
        let project_dir_path = self.project_dir_path(project_id);
        std::fs::create_dir_all(&project_dir_path).with_context(|| {
            format!(
                "Couldn't create project directory {:?}",
                project_dir_path
            )
        })?;
        description.store_to_file(self.project_toml_file_path(project_id))
    }

    /// Loads one project's description from its `project.toml`.
    pub fn load_project_description(
        &self,
        project_id: &str,
    ) -> Result<ProjectDescription> {
        ProjectDescription::load_from_file(
            &self.project_toml_file_path(project_id),
        )
    }

    /// Loads the descriptions of all projects, keyed by project id.
    /// Fails on the first description that cannot be loaded.
    pub fn load_project_descriptions(
        &self,
    ) -> Result<BTreeMap<String, ProjectDescription>> {
        let mut descriptions = BTreeMap::new();
        for project_id in self.load_project_ids()?.into_iter() {
            let description =
                self.load_project_description(&project_id)?;
            descriptions.insert(project_id, description);
        }
        Ok(descriptions)
    }

    /// Returns whether the project has a datapoint recorded for `date`
    /// (i.e. that date's `datapoint.toml` file is present).
    pub fn project_has_datapoint_date(
        &self,
        project_id: &str,
        date: NaiveDate,
    ) -> bool {
        self.project_datapoint_toml_file_path(project_id, date)
            .exists()
    }

    /// Lists all dates for which the project has a datapoint, by globbing
    /// for `YYYY-MM-DD/datapoint.toml` directories. Unreadable glob
    /// entries are logged via `warn!` and skipped; a directory name that
    /// matches the glob but fails to parse as a date is an error.
    pub fn load_project_datapoint_dates(
        &self,
        project_id: &str,
    ) -> Result<Vec<NaiveDate>> {
        let pattern = format!(
            "{}/[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]/datapoint.toml",
            self.project_dir_path(project_id).as_str()?
        );
        let mut dates = Vec::new();
        for entry in glob(&pattern).with_context(|| {
            format!("Failed to read glob pattern {:?}", pattern)
        })? {
            match entry {
                Ok(path) => {
                    let full_date_path =
                        path.parent().with_context(|| {
                            format!(
                                "Couldn't get parent of file {:?}",
                                path
                            )
                        })?;
                    let date_raw =
                        full_date_path.file_name().with_context(|| {
                            format!(
                                "Couldn't get file name of {:?}",
                                full_date_path
                            )
                        })?;
                    let date_str = date_raw
                        .to_str()
                        .with_context(|| {
                            format!(
                                "Couldn't get file path string, \
                                 possibly invalid UTF-8: {:?}",
                                path
                            )
                        })?
                        .to_string();
                    let date =
                        NaiveDate::parse_from_str(&date_str, "%Y-%m-%d")?;
                    dates.push(date);
                }
                Err(e) => {
                    warn!("{:?}", e);
                }
            }
        }
        Ok(dates)
    }

    /// Writes `datapoint` to the project's `datapoint.toml` for `date`,
    /// creating the datapoint directory first if necessary.
    ///
    /// Fails if the project does not exist (no `project.toml`), so a typo
    /// in `project_id` cannot silently create a stray directory tree.
    pub fn store_project_datapoint(
        &self,
        project_id: &str,
        date: NaiveDate,
        datapoint: &DataPoint,
    ) -> Result<()> {
        let project_toml_file_path =
            self.project_toml_file_path(project_id);
        if !project_toml_file_path.exists() {
            bail!(
                "Attempting to store datapoint for project \
                 {:?}, but no project.toml file is present",
                project_id
            );
        }
        let project_datapoint_directory_path =
            self.project_datapoint_directory_path(project_id, date);
        std::fs::create_dir_all(&project_datapoint_directory_path)
            .with_context(|| {
                format!(
                    "Couldn't create datapoint directory {:?}",
                    project_datapoint_directory_path
                )
            })?;
        datapoint.store_to_file(
            self.project_datapoint_toml_file_path(project_id, date),
        )
    }

    /// Loads one datapoint of a project from its `datapoint.toml`.
    pub fn load_project_datapoint(
        &self,
        project_id: &str,
        date: NaiveDate,
    ) -> Result<DataPoint> {
        DataPoint::load_from_file(
            &self.project_datapoint_toml_file_path(project_id, date),
        )
    }

    /// Loads all datapoints of a project, keyed by date. Fails on the
    /// first datapoint that cannot be loaded.
    pub fn load_project_datapoints(
        &self,
        project_id: &str,
    ) -> Result<BTreeMap<NaiveDate, DataPoint>> {
        let mut datapoints = BTreeMap::new();
        for date in
            self.load_project_datapoint_dates(project_id)?.into_iter()
        {
            let datapoint =
                self.load_project_datapoint(project_id, date)?;
            datapoints.insert(date, datapoint);
        }
        Ok(datapoints)
    }
}
/// Types that can be loaded by deserializing a TOML file on disk.
trait LoadFromFile: Sized {
    /// Reads `path` and deserializes its contents into `Self`.
    fn load_from_file<P: AsRef<Path>>(path: P) -> Result<Self>;
}
// Blanket implementation: anything serde can deserialize can be loaded
// from a TOML file.
impl<T: serde::de::DeserializeOwned> LoadFromFile for T {
    fn load_from_file<P: AsRef<Path>>(path: P) -> Result<Self> {
        let path = path.as_ref();
        // Read the whole file up front; TOML files here are small.
        let contents =
            std::fs::read_to_string(path).with_context(|| {
                format!("Couldn't open file {:?}", path.to_path_buf())
            })?;
        toml::from_str(&contents).with_context(|| {
            format!("Couldn't read file: {:?}", path.to_path_buf())
        })
    }
}
/// Types that can be stored by serializing to a TOML file on disk.
trait StoreToFile {
    /// Serializes `self` and writes it to `path`.
    fn store_to_file<P: AsRef<Path>>(&self, path: P) -> Result<()>;
}
// Blanket implementation: anything serde can serialize can be stored as a
// TOML file.
impl<T: Serialize> StoreToFile for T {
    fn store_to_file<P: AsRef<Path>>(&self, path: P) -> Result<()> {
        let path = path.as_ref();
        // `self` is already a reference; no extra borrow needed.
        let s = toml::to_string(self).with_context(|| {
            format!(
                "Couldn't serialize data for file {:?}",
                path.to_path_buf()
            )
        })?;
        std::fs::write(path, s).with_context(|| {
            format!("Couldn't write file {:?}", path.to_path_buf())
        })?;
        Ok(())
    }
}
/// Conversion from path-like values to `&str`, failing with a
/// descriptive error (instead of panicking) on non-UTF-8 paths.
trait PathAsStr {
    /// Returns the path as a `&str`, or an error if it is not valid UTF-8.
    fn as_str(&self) -> Result<&str>;
}
// Blanket implementation covering `Path`, `PathBuf`, and anything else
// path-like.
impl<P: AsRef<Path>> PathAsStr for P {
    fn as_str(&self) -> Result<&str> {
        let path = self.as_ref();
        let converted = path.to_str();
        converted.with_context(|| {
            format!("Couldn't get path, possibly invalid UTF-8: {:?}", path)
        })
    }
}
/// All values recorded for a project on one date, keyed by
/// characteristic id (the keys of `RootDescription::characteristics`).
pub type DataPoint = BTreeMap<String, DataEntry>;
/// A single recorded value for one characteristic, together with any
/// artifact files backing it.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct DataEntry {
    /// The recorded value.
    pub value: DataValue,
    /// Artifact file paths associated with this entry; defaults to empty
    /// when absent and is omitted from the serialized TOML when empty.
    // Bare `default` is serde's idiomatic spelling of
    // `default = "Default::default"`.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub artifacts: Vec<PathBuf>,
}
/// A single datapoint value. Serialized untagged, so it appears in TOML
/// as a bare boolean or integer rather than a tagged enum.
#[derive(Copy, Clone, Debug, Serialize, Deserialize)]
#[serde(untagged)]
pub enum DataValue {
    /// A boolean value.
    Boolean(bool),
    /// A signed 64-bit integer value.
    Integer(i64),
}
impl FromStr for DataValue {
    type Err = anyhow::Error;

    /// Parses a value, trying boolean syntax first, then integer syntax.
    /// Note "true"/"false" therefore always parse as `Boolean`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if let Ok(b) = s.parse::<bool>() {
            return Ok(DataValue::Boolean(b));
        }
        if let Ok(i) = s.parse::<i64>() {
            return Ok(DataValue::Integer(i));
        }
        bail!("Couldn't parse argument {:?}", s);
    }
}
impl DataValue {
    /// Returns the contained boolean.
    ///
    /// # Panics
    ///
    /// Panics if the value is not a `Boolean`.
    pub fn unwrap_boolean(&self) -> bool {
        self.boolean().unwrap()
    }

    /// Returns the contained integer.
    ///
    /// # Panics
    ///
    /// Panics if the value is not an `Integer`.
    pub fn unwrap_integer(&self) -> i64 {
        self.integer().unwrap()
    }

    /// Returns the boolean if this is a `Boolean`, `None` otherwise.
    pub fn boolean(&self) -> Option<bool> {
        match self {
            DataValue::Boolean(v) => Some(*v),
            DataValue::Integer(_) => None,
        }
    }

    /// Returns the integer if this is an `Integer`, `None` otherwise.
    pub fn integer(&self) -> Option<i64> {
        match self {
            DataValue::Integer(v) => Some(*v),
            DataValue::Boolean(_) => None,
        }
    }

    /// Reports which `DataType` this value carries.
    pub fn datatype(&self) -> DataType {
        match self {
            DataValue::Boolean(_) => DataType::Boolean,
            DataValue::Integer(_) => DataType::Integer,
        }
    }
}
/// The type of values a characteristic records. Serialized in lowercase
/// (`"boolean"` / `"integer"`).
#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum DataType {
    /// Boolean-valued characteristic.
    Boolean,
    /// Integer-valued characteristic.
    Integer,
}
impl FromStr for DataType {
    type Err = anyhow::Error;

    /// Parses the lowercase names used in serialization
    /// (`"boolean"` / `"integer"`); anything else is an error.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if s == "boolean" {
            return Ok(DataType::Boolean);
        }
        if s == "integer" {
            return Ok(DataType::Integer);
        }
        bail!("Couldn't parse argument {:?}", s);
    }
}
/// Describes one tracked characteristic: the type of its values and its
/// display name.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct CharacteristicDescription {
    /// The type of values recorded for this characteristic.
    pub datatype: DataType,
    /// Name of the characteristic.
    pub name: String,
}
/// Repository-wide description stored in the root `brevdash.toml`.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct RootDescription {
    /// Name of the repository.
    pub name: String,
    /// Tracked characteristics keyed by characteristic id; `IndexMap`
    /// preserves the declaration order from the TOML file.
    pub characteristics: IndexMap<String, CharacteristicDescription>,
}
/// Per-project description stored in the project's `project.toml`.
///
/// The optional string fields default to empty when absent and are
/// omitted from the serialized TOML when empty.
// Bare `#[serde(default)]` is the idiomatic spelling of
// `default = "Default::default"`.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ProjectDescription {
    /// Name of the project.
    pub name: String,
    /// Free-form description of the project; empty when absent.
    #[serde(default, skip_serializing_if = "String::is_empty")]
    pub description: String,
    /// Project website; empty when absent.
    #[serde(default, skip_serializing_if = "String::is_empty")]
    pub website: String,
    /// Version-control location of the project; empty when absent.
    #[serde(default, skip_serializing_if = "String::is_empty")]
    pub vcs: String,
}