#![allow(clippy::single_char_lifetime_names)]
use std::{
borrow::Cow,
collections::HashMap,
convert::TryFrom,
fs::{self,},
io::{
self,
LineWriter,
Write,
},
ops::Deref,
path::{
Path,
PathBuf,
},
sync::Arc,
};
use parking_lot::Mutex;
use path_absolutize::Absolutize;
use tracing_subscriber::fmt::MakeWriter;
use crate::{
config::{
self,
AppenderId,
Policy,
},
env::expand_env_vars,
error::{
Error,
Result,
},
};
mod rolling;
use rolling::RollingFile;
#[cfg(test)]
mod test;
/// Map from configured appender id to its constructed runtime appender.
type AppenderMap = HashMap<AppenderId, Appender>;
/// Shared, read-only collection of configured appenders, keyed by id.
#[derive(Clone)]
pub struct Appenders {
    // Behind `Arc` so `Clone` is a refcount bump; the map is built once
    // (see `new`) and never mutated afterwards.
    appenders: Arc<AppenderMap>,
}
impl<'a> IntoIterator for &'a Appenders {
    type Item = &'a Appender;
    type IntoIter = std::collections::hash_map::Values<'a, AppenderId, Appender>;

    /// Iterate over the appenders themselves (ids are ignored), in
    /// arbitrary hash-map order.
    fn into_iter(self) -> Self::IntoIter {
        self.appenders.values()
    }
}
impl Appenders {
    /// Wrap a finished appender map for cheap shared cloning.
    pub fn new(m: AppenderMap) -> Self {
        Self {
            appenders: Arc::new(m),
        }
    }

    /// Ask every appender to re-open its file if the path on disk no
    /// longer matches the handle it holds.
    ///
    /// # Errors
    /// Returns the first path-correction failure encountered; remaining
    /// appenders are not visited.
    pub fn correct_paths(&self) -> Result<()> {
        for a in self {
            a.correct_path()?;
        }
        Ok(())
    }

    /// Flush every appender's buffered output.
    ///
    /// # Errors
    /// Returns the first flush failure encountered; remaining appenders
    /// are not visited.
    pub fn flush(&self) -> Result<()> {
        for a in self {
            // Method-call syntax, matching `correct_paths` above (was the
            // inconsistent UFCS form `Appender::flush_io(a)`).
            a.flush_io()?;
        }
        Ok(())
    }
}
impl Deref for Appenders {
    type Target = AppenderMap;

    /// Expose the inner map directly, so callers can use `HashMap`
    /// lookups on `Appenders`.
    fn deref(&self) -> &Self::Target {
        self.appenders.as_ref()
    }
}
impl TryFrom<&HashMap<AppenderId, config::Appender>> for Appenders {
type Error = Error;
fn try_from(m: &HashMap<AppenderId, config::Appender>) -> Result<Self> {
let mut out = HashMap::new();
for (k, v) in m {
out.insert(k.clone(), v.try_into()?);
}
Ok(Self::new(out))
}
}
impl TryFrom<&config::Appender> for Appender {
    type Error = Error;

    /// Build a runtime appender from its configuration counterpart.
    ///
    /// # Errors
    /// Fails when a file or rolling-file appender cannot be created
    /// (bad path, unparsable size limit, I/O failure).
    fn try_from(value: &config::Appender) -> Result<Self> {
        // Consistently use `Self` + the named constructors (the original
        // mixed `crate::Appender::…`, bare variants, and constructors).
        match value {
            config::Appender::Null => Ok(Self::new_null()),
            config::Appender::Console { .. } => Ok(Self::new_console()),
            config::Appender::File { path, .. } => Self::new_file(path),
            config::Appender::RollingFile {
                path,
                policy:
                    Policy {
                        max_size_roll_backups,
                        maximum_file_size,
                        pattern,
                    },
                ..
            } => Self::new_rolling(
                path,
                pattern.as_deref(),
                // NOTE(review): assumed a lossless widening to `usize`;
                // confirm the config field's integer type.
                *max_size_roll_backups as usize,
                maximum_file_size,
            ),
        }
    }
}
/// A runtime log sink. The file-backed variants share their writer via
/// `Arc<Mutex<..>>`, so clones of an appender write to the same file.
#[derive(Clone)]
pub enum Appender {
    /// Writes to the process's standard output.
    Console(Console),
    /// Appends to a single file.
    File(Arc<Mutex<File>>),
    /// Appends to a file that is rolled (rotated or deleted) by policy.
    RollingFile(Arc<Mutex<RollingFile>>),
    /// Discards everything written to it.
    Null,
}
impl Appender {
#[must_use]
pub fn new_null() -> Self {
Self::Null
}
#[must_use]
pub fn new_console() -> Self {
Self::Console(Console::new())
}
pub fn new_file(p: impl AsRef<str>) -> Result<Self> {
Ok(Self::File(Arc::new(Mutex::new(File::new(p)?))))
}
pub fn new_rolling(
path_str: impl AsRef<str>,
pattern_opt: Option<&str>,
count: usize,
size: &str,
) -> Result<Self> {
use rolling::{
Roller,
Trigger,
};
let p = Path::new(path_str.as_ref());
let abs_path = p.absolutize().unwrap_or_else(|_| p.into());
let pattern = RollingFile::make_qualified_pattern(&abs_path, pattern_opt);
let abs_path_str = abs_path.to_string_lossy();
let trigger = Trigger::Size {
limit: config::Policy::calculate_maximum_file_size(size)?,
};
let roller = if count == 0 {
Roller::Delete
} else {
Roller::new_fixed(pattern, count)
};
Ok(Self::RollingFile(Arc::new(Mutex::new(RollingFile::new(
abs_path_str,
trigger,
roller,
)?))))
}
pub fn correct_path(&self) -> Result<()> {
match self {
Self::Null | Self::Console(_) => Ok(()),
Self::File(x) => {
let mut inner = x.lock();
inner
.correct_path()
.map_err(|e| Error::PathCorrectionFail(inner.path_str(), e))
},
Self::RollingFile(x) => {
let mut inner = x.lock();
inner
.correct_path()
.map_err(|e| Error::PathCorrectionFail(inner.path_str(), e))
},
}
}
pub fn flush_io(&self) -> Result<()> {
match self {
Self::Null | Self::Console(_) => Ok(()),
Self::File(x) => {
let mut inner = x.lock();
inner
.flush()
.map_err(|e| Error::FlushFail(inner.path_str(), e))
},
Self::RollingFile(x) => {
let mut inner = x.lock();
inner
.flush()
.map_err(|e| Error::FlushFail(inner.path_str(), e))
},
}
}
}
impl Default for Appender {
fn default() -> Self {
Self::Null
}
}
impl<'a> MakeWriter<'a> for Appender {
    type Writer = Appender;

    /// Hand `tracing-subscriber` a writer by cloning `self`: cheap, since
    /// the file variants only bump an `Arc` refcount and the other
    /// variants are unit-like.
    fn make_writer(&self) -> Self::Writer {
        self.clone()
    }
}
impl io::Write for Appender {
    /// Forward the write to the concrete sink; `Null` reports the whole
    /// buffer as consumed without writing anything.
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        match self {
            Self::Console(c) => c.write(buf),
            // `Arc` auto-derefs — the explicit `.deref()` the original
            // used here was redundant and inconsistent with `flush`.
            Self::File(f) => f.lock().write(buf),
            Self::RollingFile(f) => f.lock().write(buf),
            Self::Null => Ok(buf.len()),
        }
    }

    fn flush(&mut self) -> io::Result<()> {
        match self {
            Self::Console(c) => c.flush(),
            Self::File(f) => f.lock().flush(),
            Self::RollingFile(f) => f.lock().flush(),
            Self::Null => Ok(()),
        }
    }
}
/// Appender that writes to the process's standard output.
#[derive(Clone, Default)]
pub struct Console;

impl Console {
    #[must_use]
    pub fn new() -> Self {
        Self::default()
    }
}

impl io::Write for Console {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        io::stdout().write(buf)
    }

    /// Flush stdout's shared buffer. The previous no-op silently dropped
    /// flush requests even though `write` goes through the globally
    /// buffered `io::stdout()` handle.
    fn flush(&mut self) -> io::Result<()> {
        io::stdout().flush()
    }
}
/// A log sink backed by a single append-mode file on disk.
pub struct File {
    // Path as configured, after environment-variable expansion (see `new`).
    path: PathBuf,
    // `LineWriter` flushes on newlines, so complete log lines reach the
    // OS promptly.
    writer: LineWriter<fs::File>,
}
impl File {
pub fn new(p: impl AsRef<str>) -> Result<Self> {
let path = PathBuf::from(expand_env_vars(p.as_ref()).as_ref());
let parent: Cow<Path> = path
.parent()
.map_or_else(|| Cow::Owned(PathBuf::from("/")), Into::into);
fs::create_dir_all(&*parent).map_err(|source| Error::CreateFailed {
path: parent.deref().to_owned(),
source,
})?;
let writer = Self::new_writer(&path).map_err(|source| Error::CreateFailed {
path: path.clone(),
source,
})?;
Ok(Self { path, writer })
}
pub fn correct_path(&mut self) -> io::Result<()> {
let correct = fs::metadata(&self.path);
let existing = self.writer.get_ref().metadata();
if rolling::needs_remount(Some(existing), correct) {
self.remount()?;
}
Ok(())
}
pub fn path_str(&self) -> String {
self.path.to_string_lossy().to_string()
}
fn remount(&mut self) -> io::Result<()> {
self.writer.flush()?;
self.writer = Self::new_writer(&self.path)?;
Ok(())
}
fn new_writer(path: &Path) -> io::Result<LineWriter<fs::File>> {
let f = fs::File::options().append(true).create(true).open(path)?;
Ok(LineWriter::new(f))
}
}
impl io::Write for File {
    // Plain delegation: buffering (and newline-triggered flushing) is
    // handled by the inner `LineWriter`.
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.writer.write(buf)
    }

    fn flush(&mut self) -> io::Result<()> {
        self.writer.flush()
    }
}