//! Config handling for `ptth_relay`, the PTTH relay

// False positive with itertools::process_results
#![allow (clippy::redundant_closure)]

use std::{
	collections::HashMap,
	convert::TryFrom,
	path::Path,
};

use crate::{
	errors::ConfigError,
	key_validity::{
		ScraperKey,
		Valid30Days,
	},
};

/// Config fields as they are loaded from the config file

pub mod file {
	use serde::Deserialize;
	
	use crate::key_validity::{
		BlakeHashWrapper,
		ScraperKey,
		Valid30Days,
	};
	
	#[derive (Deserialize)]
	pub struct Server {
		/// Also used as the key of the runtime servers hashmap, so it's
		/// duplicated there, but that's not a problem
		pub name: String,
		
		pub tripcode: BlakeHashWrapper,
		
		/// This allows a relay-side rename of servers
		pub display_name: Option <String>,
	}
	
	/// Deliberately empty for now. Dev mode is enabled by this struct's
	/// mere presence in the config file.
	
	#[derive (Deserialize)]
	pub struct DevMode {
		
	}
	
	/// Config fields that are identical in the file and at runtime
	
	#[derive (Default, Deserialize)]
	pub struct Isomorphic {
		#[serde (default)]
		pub enable_scraper_api: bool,
		
		/// If this is present (even with no fields set), we are in dev mode
		/// and have to show extra warnings, since auth may be weakened
		pub dev_mode: Option <DevMode>,
	}
	
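	/// The root structure of the config file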
	#[derive (Deserialize)]
	pub struct Config {
		#[serde (flatten)]
		pub iso: Isomorphic,
		
		pub port: Option <u16>,
		pub servers: Option <Vec <Server>>,
		
		// Adding a DB will take a while, so I'm moving these out of dev mode.
		pub scraper_keys: Option <Vec <ScraperKey <Valid30Days>>>,
	}
}

/// Config fields as they are used at runtime

pub struct Config {
	pub iso: file::Isomorphic,
	
	pub port: Option <u16>,
	pub servers: HashMap <String, file::Server>,
	pub scraper_keys: HashMap <String, ScraperKey <Valid30Days>>,
}

impl TryFrom <file::Config> for Config {
	type Error = ConfigError;
	
	fn try_from (f: file::Config) -> Result <Self, Self::Error> {
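		// Index the servers by name; `process_results` collects them into a
		// HashMap, short-circuiting if any entry fails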
		let servers = f.servers.unwrap_or_else (|| vec! []);
		let servers = servers.into_iter ().map (|server| Ok::<_, ConfigError> ((server.name.clone (), server)));
		
		let servers = itertools::process_results (servers, |i| i.collect ())?;
		
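		// Scraper keys are indexed by the base64 of their hash, and kept only
		// if the scraper API is enabled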
		let scraper_keys = f.scraper_keys.unwrap_or_else (|| vec! []);
		let scraper_keys = if f.iso.enable_scraper_api {
			scraper_keys.into_iter ().map (|key| (key.hash.encode_base64 (), key)).collect ()
		}
		else {
			Default::default ()
		};
		
		Ok (Self {
			iso: f.iso,
			port: f.port,
			servers,
			scraper_keys,
		})
	}
}

impl Config {
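	/// Reads a TOML config file from disk and converts it into the runtime
	/// representation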
	pub async fn from_file (path: &Path) -> Result <Self, ConfigError> {
		let config_s = tokio::fs::read_to_string (path).await?;
		let new_config: file::Config = toml::from_str (&config_s)?;
		
		Self::try_from (new_config)
	}
}
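
// A minimal sketch of the loading path above, written as a unit test: it
// parses a hypothetical TOML string into `file::Config` and converts it with
// `TryFrom`. Only fields with defaults are exercised, so no tripcodes or
// scraper keys are needed.
#[cfg (test)]
mod tests {
	use std::convert::TryFrom;
	
	use super::*;
	
	#[test]
	fn minimal_config_round_trip () {
		// Hypothetical minimal config: just a port, no servers, no scraper keys
		let config_s = "port = 4000\n";
		
		let file_config: file::Config = toml::from_str (config_s).expect ("TOML should parse");
		let config = Config::try_from (file_config).expect ("conversion should succeed");
		
		assert_eq! (config.port, Some (4000));
		assert! (config.servers.is_empty ());
		assert! (config.scraper_keys.is_empty ());
		assert! (! config.iso.enable_scraper_api);
		assert! (config.iso.dev_mode.is_none ());
	}
}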