use file_rotate::compression::Compression;
use file_rotate::suffix::{
AppendCount, AppendTimestamp, DateFrom, FileLimit, Representation, SuffixScheme,
};
use file_rotate::SuffixInfo;
use flate2::write::GzEncoder;
use parking_lot::Mutex;
use std::cell::UnsafeCell;
use std::collections::BTreeSet;
use std::fs::{self, File, OpenOptions};
use std::io;
use std::mem::transmute;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::thread;
use std::time::{Duration, SystemTime};
/// Granularity of age-based rotation.
#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq)]
pub enum Age {
    /// Rotate once the log file is older than a day (24 h threshold).
    Day,
    /// Rotate once the log file is older than an hour.
    Hour,
}
/// Configuration for age-based rotation.
#[derive(Hash, Clone, Copy, PartialEq)]
pub struct ByAge {
    // Hourly or daily rotation interval.
    pub age_type: Age,
    // When true, timestamp suffixes are stamped with the period that just
    // ended (DateHourAgo / DateYesterday) instead of the current time.
    pub use_last_time: bool,
}
/// Retention policy for already-rotated files.
#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq)]
pub enum Upkeep {
    /// Delete rotated files older than this delta. Only effective with
    /// timestamp suffixes; numeric suffixes treat it like `All`.
    Age(chrono::TimeDelta),
    /// Keep at most this many rotated files.
    Count(usize),
    /// Never delete rotated files.
    All,
}
/// Declarative rotation policy; turned into runtime state by `build`.
#[derive(Hash)]
pub struct Rotation {
    // Rotate on age boundaries (hourly/daily) when set.
    pub by_age: Option<ByAge>,
    // Rotate when the log exceeds this many bytes when set.
    pub by_size: Option<u64>,
    // Timestamp-suffix format for rotated files; None selects numeric suffixes.
    pub time_fmt: Option<&'static str>,
    // Retention policy for rotated files.
    pub upkeep: Upkeep,
    // Destination directory for rotated files; defaults to the log's directory.
    pub archive_dir: Option<PathBuf>,
    // When set, compress rotated files, keeping this many newest uncompressed.
    pub compress_exclude: Option<usize>,
}
impl Rotation {
    /// Size-based rotation: rotate when the log exceeds `size_limit` bytes.
    /// `max_files` caps how many rotated files are kept (`None` = keep all).
    pub fn by_size(size_limit: u64, max_files: Option<usize>) -> Self {
        Self {
            by_age: None,
            by_size: Some(size_limit),
            time_fmt: None,
            upkeep: max_files.map_or(Upkeep::All, Upkeep::Count),
            archive_dir: None,
            compress_exclude: None,
        }
    }

    /// Age-based rotation.
    ///
    /// * `time_fmt` — format of the timestamp suffix on rotated files.
    /// * `use_last_time` — stamp suffixes with the period that just ended.
    /// * `max_time` — delete rotated files older than this (`None` = keep all).
    pub fn by_age(
        age: Age, use_last_time: bool, time_fmt: &'static str, max_time: Option<chrono::TimeDelta>,
    ) -> Self {
        Self {
            by_age: Some(ByAge { age_type: age, use_last_time }),
            by_size: None,
            time_fmt: Some(time_fmt),
            upkeep: max_time.map_or(Upkeep::All, Upkeep::Age),
            compress_exclude: None,
            archive_dir: None,
        }
    }

    /// Rotate when EITHER the age or the size limit is exceeded.
    /// See `by_age` and `by_size` for the parameter semantics.
    pub fn by_age_and_size(
        age: Age, size_limit: u64, use_last_time: bool, time_fmt: &'static str,
        max_time: Option<chrono::TimeDelta>,
    ) -> Self {
        Self {
            by_age: Some(ByAge { age_type: age, use_last_time }),
            by_size: Some(size_limit),
            time_fmt: Some(time_fmt),
            upkeep: max_time.map_or(Upkeep::All, Upkeep::Age),
            compress_exclude: None,
            archive_dir: None,
        }
    }

    /// Enables compression of rotated files, leaving the newest
    /// `un_compress_files` of them uncompressed.
    pub fn compress_exclude(mut self, un_compress_files: usize) -> Self {
        self.compress_exclude = Some(un_compress_files);
        self
    }

    /// Overrides the directory rotated files are moved to
    /// (default: the directory containing the live log file).
    pub fn archive_dir<P: Into<PathBuf>>(mut self, archive_dir: P) -> Self {
        self.archive_dir = Some(archive_dir.into());
        self
    }

    /// Builds the runtime rotation state for the log at `file_path`.
    ///
    /// # Panics
    /// Panics when neither `by_age` nor `by_size` is configured, or when
    /// `file_path` has no parent directory and no `archive_dir` was set.
    pub(crate) fn build(&self, file_path: &Path) -> LogRotate {
        assert!(
            self.by_age.is_some() || self.by_size.is_some(),
            "by_age and by_size can not be both None"
        );
        // Default archive location: the directory containing the live log.
        let archive_dir = self
            .archive_dir
            .clone()
            .unwrap_or_else(|| file_path.parent().unwrap().to_path_buf());
        let size = self.by_size.map(LimiterSize::new);
        let age = self.by_age.map(|by_age| LimiterAge::new(by_age.age_type));
        // With `use_last_time`, suffixes are stamped with the period that
        // just ended rather than the current time.
        let date_from = match self.by_age {
            Some(ByAge { use_last_time: true, age_type: Age::Hour }) => DateFrom::DateHourAgo,
            Some(ByAge { use_last_time: true, age_type: Age::Day }) => DateFrom::DateYesterday,
            _ => DateFrom::Now,
        };
        let compression = match self.compress_exclude {
            Some(keep_uncompressed) => Compression::OnRotate(keep_uncompressed),
            None => Compression::None,
        };
        // A time format selects timestamp suffixes, otherwise numeric ones.
        let backend = if let Some(time_fmt) = self.time_fmt {
            let file_limit = match self.upkeep {
                Upkeep::Age(d) => FileLimit::Age(d),
                Upkeep::Count(c) => FileLimit::MaxFiles(c),
                Upkeep::All => FileLimit::Unlimited,
            };
            let schema = AppendTimestamp { format: time_fmt, file_limit, date_from };
            Backend::Time(UnsafeCell::new(_Backend::new(
                archive_dir,
                file_path,
                self.upkeep,
                compression,
                schema,
            )))
        } else {
            // Numeric suffixes carry no age information, so an Age policy
            // degrades to unlimited retention (0), same as All.
            let file_limit = match self.upkeep {
                Upkeep::Count(c) => c,
                Upkeep::Age(_) | Upkeep::All => 0,
            };
            let schema = AppendCount::new(file_limit);
            Backend::Num(UnsafeCell::new(_Backend::new(
                archive_dir,
                file_path,
                self.upkeep,
                compression,
                schema,
            )))
        };
        LogRotate {
            size_limit: size,
            age_limit: age,
            backend: Arc::new(backend),
            th: Mutex::new(None),
        }
    }
}
/// Runtime rotation state produced by `Rotation::build`.
pub(crate) struct LogRotate {
    // Size trigger, when configured.
    size_limit: Option<LimiterSize>,
    // Age trigger, when configured.
    age_limit: Option<LimiterAge>,
    // Shared with the background upkeep thread spawned by `rotate`.
    backend: Arc<Backend>,
    // Join handle of the in-flight upkeep thread, if any.
    th: Mutex<Option<thread::JoinHandle<()>>>,
}
impl LogRotate {
    /// Rotates the log if either limiter says it is due.
    ///
    /// Returns `true` when a rotation was performed. Old-file upkeep
    /// (deletion/compression) runs on a background thread.
    pub fn rotate<S: FileSinkTrait>(&self, sink: &S) -> bool {
        // Limiter checks are pure reads of the sink, so short-circuiting is
        // equivalent to evaluating both.
        let need_rotate = self.age_limit.as_ref().map_or(false, |age| age.check(sink))
            || self.size_limit.as_ref().map_or(false, |size| size.check(sink));
        if !need_rotate {
            return false;
        }
        // Ensure the previous upkeep pass finished before renaming files again.
        self.wait();
        self.backend.rename_files();
        // Deletion and compression can be slow; run them off-thread.
        let backend = self.backend.clone();
        let th = thread::spawn(move || {
            let _ = backend.handle_old_files();
        });
        self.th.lock().replace(th);
        true
    }

    /// Blocks until the in-flight upkeep thread (if any) completes.
    pub fn wait(&self) {
        if let Some(th) = self.th.lock().take() {
            let _ = th.join();
        }
    }
}
/// Triggers rotation once the log grows beyond a byte limit.
pub(crate) struct LimiterSize {
    // Maximum size in bytes before rotation is due.
    limit: u64,
}
impl LimiterSize {
    /// `size` is the byte threshold beyond which rotation is due.
    pub fn new(size: u64) -> Self {
        Self { limit: size }
    }

    /// True when the sink has grown strictly beyond the configured limit.
    #[inline]
    pub fn check<S: FileSinkTrait>(&self, sink: &S) -> bool {
        sink.get_size() > self.limit
    }
}
/// Triggers rotation once the log file is older than a fixed duration.
pub(crate) struct LimiterAge {
    // Maximum age (1 h or 24 h, derived from `Age`) before rotation is due.
    limit: Duration,
}
impl LimiterAge {
    /// Converts the age granularity into a concrete duration threshold.
    pub fn new(limit: Age) -> Self {
        Self {
            limit: match limit {
                Age::Hour => Duration::from_secs(60 * 60),
                Age::Day => Duration::from_secs(24 * 60 * 60),
            },
        }
    }

    /// True when the sink's file is older than the limit. If the system
    /// clock moved backwards (`duration_since` fails), rotate anyway —
    /// same policy as the original `Err(_) => true` branch.
    pub fn check<S: FileSinkTrait>(&self, sink: &S) -> bool {
        SystemTime::now()
            .duration_since(sink.get_create_time())
            .map_or(true, |elapsed| elapsed > self.limit)
    }
}
/// Minimal view of a log sink needed to decide whether rotation is due.
pub(crate) trait FileSinkTrait {
    // Creation time of the currently-open log file.
    fn get_create_time(&self) -> SystemTime;
    // Current size of the log; compared against the `by_size` byte limit.
    fn get_size(&self) -> u64;
}
/// Rotation backend specialized by suffix scheme:
/// numeric counters (`AppendCount`) or timestamps (`AppendTimestamp`).
enum Backend {
    Num(UnsafeCell<_Backend<AppendCount>>),
    Time(UnsafeCell<_Backend<AppendTimestamp>>),
}
// SAFETY: NOTE(review) — these impls rely on callers serializing mutation:
// `LogRotate::rotate` joins the previous upkeep thread (`wait()`) before
// touching the backend again, so at most one &mut exists at a time.
// TODO: confirm no other concurrent access path exists.
unsafe impl Send for Backend {}
unsafe impl Sync for Backend {}
impl Backend {
fn rename_files(&self) {
match self {
Self::Num(_inner) => {
let inner: &mut _Backend<AppendCount> = unsafe { transmute(_inner.get()) };
inner.rename_files();
}
Self::Time(_inner) => {
let inner: &mut _Backend<AppendTimestamp> = unsafe { transmute(_inner.get()) };
inner.rename_files();
}
}
}
fn handle_old_files(&self) -> io::Result<()> {
match self {
Self::Num(_inner) => {
let inner: &mut _Backend<AppendCount> = unsafe { transmute(_inner.get()) };
inner.handle_old_files()
}
Self::Time(_inner) => {
let inner: &mut _Backend<AppendTimestamp> = unsafe { transmute(_inner.get()) };
inner.handle_old_files()
}
}
}
}
/// Shared rotation-backend implementation, generic over the file_rotate
/// suffix scheme.
struct _Backend<S: SuffixScheme> {
    // Directory that receives rotated files.
    archive_dir: PathBuf,
    // base_path: archive-dir path suffixes are appended to;
    // log_path: the live log file being rotated;
    // compress: whether/how many rotated files stay uncompressed.
    base_path: PathBuf, log_path: PathBuf, compress: Compression,
    suffix_scheme: S,
    // Known rotated files; iteration starts at the newest suffix
    // (see `_move_file_with_suffix`).
    suffixes: BTreeSet<SuffixInfo<S::Repr>>,
    // Retention policy applied in `handle_old_files`.
    upkeep: Upkeep,
}
/// Gzip-compresses `path` into `path.gz`, then deletes the original.
///
/// # Errors
/// Propagates any I/O failure; on error the original file is left in place.
fn compress(path: &Path) -> io::Result<()> {
    let dest_path = PathBuf::from(format!("{}.gz", path.display()));
    let mut src_file = File::open(path)?;
    // truncate(true): an existing (possibly larger) .gz at this path must
    // not leave stale trailing bytes, which would corrupt the archive.
    let dest_file =
        OpenOptions::new().write(true).create(true).truncate(true).open(&dest_path)?;
    let mut encoder = GzEncoder::new(dest_file, flate2::Compression::default());
    io::copy(&mut src_file, &mut encoder)?;
    // finish() flushes and writes the gzip trailer. Relying on Drop would
    // silently swallow errors and could delete the source after writing a
    // truncated archive.
    encoder.finish()?;
    fs::remove_file(path)?;
    Ok(())
}
impl<S: SuffixScheme> _Backend<S> {
    /// `file` is the live log path; rotated copies land in `archive_dir`
    /// under the same file name plus a scheme-specific suffix.
    fn new(
        archive_dir: PathBuf, file: &Path, upkeep: Upkeep, compress: Compression, schema: S,
    ) -> Self {
        // base_path = archive_dir/<log file name>; suffixes append to this.
        let base_path = archive_dir.as_path().join(Path::new(file.file_name().unwrap()));
        let mut s = Self {
            archive_dir,
            log_path: file.to_path_buf(),
            base_path,
            upkeep,
            compress,
            suffix_scheme: schema,
            suffixes: BTreeSet::new(),
        };
        s.ensure_dir();
        // Pick up rotated files left over from a previous run.
        s.scan_suffixes();
        s
    }
    /// Creates the archive directory if missing; panics on failure.
    #[inline]
    fn ensure_dir(&self) {
        if !self.archive_dir.exists() {
            let _ = fs::create_dir_all(&self.archive_dir).expect("create dir");
        }
    }
    // Rebuild the in-memory suffix set from files on disk.
    #[inline]
    fn scan_suffixes(&mut self) {
        self.suffixes = self.suffix_scheme.scan_suffixes(&self.base_path);
    }
    /// Moves the live log into the archive under a fresh suffix,
    /// displacing existing suffixes as the scheme requires.
    #[inline]
    fn rename_files(&mut self) {
        self.ensure_dir();
        let new_suffix_info = self._move_file_with_suffix(None).expect("move files");
        self.suffixes.insert(new_suffix_info);
    }
    /// Applies the retention policy (deleting too-old rotated files), then
    /// compresses rotated files beyond the configured uncompressed count.
    #[inline]
    fn handle_old_files(&mut self) -> io::Result<()> {
        let mut result = Ok(());
        if let Upkeep::All = &self.upkeep {
            // Unlimited retention: nothing to delete.
        } else {
            // rev() walks from the oldest suffix (the set iterates newest
            // first); stop at the first entry the scheme wants to keep.
            let mut youngest_old = None;
            for (i, suffix) in self.suffixes.iter().enumerate().rev() {
                if self.suffix_scheme.too_old(&suffix.suffix, i) {
                    result = result.and(fs::remove_file(suffix.to_path(&self.base_path)));
                    youngest_old = Some((*suffix).clone());
                } else {
                    break;
                }
            }
            if let Some(youngest_old) = youngest_old {
                // Drop every deleted entry from the in-memory set.
                let _ = self.suffixes.split_off(&youngest_old);
            }
        }
        if let Compression::OnRotate(max_file_n) = self.compress {
            // Compress the oldest files, keeping the `max_file_n` newest
            // uncompressed.
            let n = (self.suffixes.len() as i32 - max_file_n as i32).max(0) as usize;
            let suffixes_to_compress = self
                .suffixes
                .iter()
                .rev()
                .take(n)
                .filter(|info| !info.compressed)
                .cloned()
                .collect::<Vec<_>>();
            for info in suffixes_to_compress {
                let path = info.suffix.to_path(&self.base_path);
                compress(&path)?;
                // Record the new .gz state for this suffix.
                self.suffixes.replace(SuffixInfo { compressed: true, ..info });
            }
        }
        result
    }
    /// Renames `old_suffix_info`'s file (or the live log when `None`) to
    /// the next suffix chosen by the scheme, recursively displacing any
    /// file already holding that suffix. Returns the suffix that ended up
    /// newly created by the chain of moves.
    fn _move_file_with_suffix(
        &mut self, old_suffix_info: Option<SuffixInfo<S::Repr>>,
    ) -> io::Result<SuffixInfo<S::Repr>> {
        // First element of the set is the newest suffix.
        let newest_suffix = self.suffixes.iter().next().map(|info| &info.suffix);
        let new_suffix = self.suffix_scheme.rotate_file(
            &self.base_path,
            newest_suffix,
            &old_suffix_info.clone().map(|i| i.suffix),
        )?;
        // A moved file keeps its compression state; a fresh rotation of the
        // live log starts uncompressed.
        let new_suffix_info = SuffixInfo {
            suffix: new_suffix,
            compressed: old_suffix_info.as_ref().map(|x| x.compressed).unwrap_or(false),
        };
        let new_path = new_suffix_info.to_path(&self.base_path);
        let existing_suffix_info = self.suffixes.get(&new_suffix_info).cloned();
        let newly_created_suffix = if let Some(existing_suffix_info) = existing_suffix_info {
            // Target suffix already occupied: displace that file first.
            self.suffixes.replace(new_suffix_info);
            self._move_file_with_suffix(Some(existing_suffix_info))?
        } else {
            new_suffix_info
        };
        let old_path = match old_suffix_info {
            Some(suffix) => suffix.to_path(&self.base_path),
            None => self.log_path.clone(), };
        // NOTE(review): these asserts panic (rather than error) if the
        // filesystem changed underneath us — confirm that is intended.
        assert!(old_path.exists());
        assert!(!new_path.exists());
        fs::rename(old_path, new_path)?;
        Ok(newly_created_suffix)
    }
}