#[cfg(feature = "with-binn-ir")]
extern crate binn_ir;
extern crate libc;
#[cfg(feature = "default")]
extern crate rusqlite;
#[macro_use]
#[allow(unused_macros)]
mod __;
pub mod version_info;
#[cfg(feature = "with-binn-ir")]
use std::{
fs::OpenOptions,
io::BufWriter,
collections::BTreeMap,
};
use std::{
fs,
io::{self, Error, ErrorKind},
path::{Path, PathBuf},
ptr,
sync::mpsc::{self, SyncSender},
thread,
time::{Duration, Instant, SystemTime},
};
#[cfg(feature = "with-binn-ir")]
use binn_ir::{
value::Value,
};
#[cfg(feature = "default")]
use rusqlite::{
Connection,
types::ToSql,
};
// Code name of the crate; defined as a macro so it can be spliced into
// `concat!` when building `TAG` below.
macro_rules! code_name { () => { "ice-age" }}
// Crate version; must match `Cargo.toml` (verified by `test_crate_version`).
macro_rules! version { () => { "0.9.0" }}
/// Human-readable crate name.
pub const NAME: &'static str = "Ice Age";
/// Machine-friendly code name of this release line.
pub const CODE_NAME: &'static str = code_name!();
/// Crate version string.
pub const VERSION: &'static str = version!();
/// Release date as `(year, month, day)`.
pub const RELEASE_DATE: (u16, u8, u8) = (2019, 3, 28);
/// Unique identifier of this crate.
pub const UUID: &'static str = "a9ab5574-e977-427d-a17b-ea421a1a6b5b";
/// Tag combining the code name, a `UUID` prefix and the version.
pub const TAG: &'static str = concat!(code_name!(), "::a9ab5574::", version!());
/// Verifies that the hard-coded [`VERSION`] constant stays in sync with the
/// version declared in `Cargo.toml`.
#[test]
fn test_crate_version() {
    let cargo_pkg_version = env!("CARGO_PKG_VERSION");
    assert_eq!(VERSION, cargo_pkg_version);
}
// SQL schema used to initialize a fresh log database (SQLite backend).
#[cfg(feature = "default")]
const SQL_CREATOR: &'static str = include_str!("../res/db_creator.sql");
// File extension of log files produced by the SQLite backend.
#[cfg(feature = "default")]
const LOG_FILE_EXT: &'static str = ".sqlite";
// File extension of log files produced by the binn-ir backend.
// NOTE(review): enabling both "default" and "with-binn-ir" would define
// `LOG_FILE_EXT` (and `Logger::flush_to_file`) twice — presumably the two
// features are meant to be mutually exclusive; confirm.
#[cfg(feature = "with-binn-ir")]
const LOG_FILE_EXT: &'static str = ".ice-age";
/// Runtime configuration consumed by [`Logger::make()`].
#[derive(Debug)]
pub struct Config {
    /// Directory in which log files are created (and old ones removed).
    pub work_dir: PathBuf,
    /// Maximum length of one log file, in bytes; when the current output
    /// file reaches this size a new file is started.
    pub max_file_len: u64,
    /// How long old log files are kept before being deleted.
    pub log_files_reserved: Duration,
    /// Capacity of the in-memory log buffer and of the command channel.
    pub buf_len: usize,
    /// Interval between automatic flushes to disk; must be at least 1 second.
    pub disk_flush_interval: Duration,
}
/// One log record describing a single handled request.
#[derive(Debug)]
pub struct Log {
    /// Timestamp of the request. The unit is not enforced here — presumably
    /// seconds since the Unix epoch; confirm against callers.
    pub time: u64,
    /// Remote peer address, as text.
    pub remote_ip: String,
    /// Requested URL.
    pub url: String,
    /// Response size in bytes, if known.
    pub response_size: Option<u64>,
    /// Response status code.
    pub code: u16,
    /// Time spent handling the request.
    pub runtime: Duration,
    /// Optional free-form notes.
    pub notes: Option<String>,
}
#[cfg(feature = "with-binn-ir")]
impl Log {
    // Map keys of the binn-ir encoding. They are part of the on-disk format
    // and must never be renumbered, or old log files become unreadable.
    const KEY_TIME: i32 = 0;
    const KEY_REMOTE_IP: i32 = 1;
    const KEY_URL: i32 = 2;
    const KEY_RESPONSE_SIZE: i32 = 3;
    const KEY_CODE: i32 = 4;
    const KEY_RUNTIME: i32 = 5;
    const KEY_NOTES: i32 = 6;
    const KEY_RUNTIME_SUBSEC_NANOS: i32 = 7;
    /// Consumes the record and encodes it as a binn-ir map value.
    ///
    /// Optional fields (`response_size`, `notes`) are omitted from the map
    /// when `None`. The runtime is split into whole seconds (`KEY_RUNTIME`)
    /// and sub-second nanoseconds (`KEY_RUNTIME_SUBSEC_NANOS`).
    pub fn to_value(self) -> Value {
        Value::Map({
            let mut map = BTreeMap::new();
            map.insert(Self::KEY_TIME, self.time.into());
            map.insert(Self::KEY_REMOTE_IP, self.remote_ip.into());
            map.insert(Self::KEY_URL, self.url.into());
            if let Some(response_size) = self.response_size {
                map.insert(Self::KEY_RESPONSE_SIZE, response_size.into());
            }
            map.insert(Self::KEY_CODE, self.code.into());
            map.insert(Self::KEY_RUNTIME, self.runtime.as_secs().into());
            if let Some(notes) = self.notes {
                map.insert(Self::KEY_NOTES, notes.into());
            }
            map.insert(Self::KEY_RUNTIME_SUBSEC_NANOS, self.runtime.subsec_nanos().into());
            map
        })
    }
    /// Decodes a record previously produced by [`Log::to_value()`].
    ///
    /// # Errors
    ///
    /// Returns an `ErrorKind::InvalidInput` error when `value` is not a map,
    /// when a required key is missing, or when a present value has an
    /// unexpected variant.
    pub fn from_value(value: Value) -> io::Result<Self> {
        match value {
            // `ok_or_else` is used throughout so the error value is only
            // constructed on the failure path.
            Value::Map(mut map) => Ok(Self {
                time: match map.remove(&Self::KEY_TIME).ok_or_else(|| Error::new(ErrorKind::InvalidInput, __!("Missing time")))? {
                    Value::U64(time) => time,
                    other => return Err(Error::new(ErrorKind::InvalidInput, __!("Expected U64 for time, got: {:?}", other))),
                },
                remote_ip: match map.remove(&Self::KEY_REMOTE_IP).ok_or_else(|| Error::new(ErrorKind::InvalidInput, __!("Missing remote IP")))? {
                    Value::Text(remote_ip) => remote_ip,
                    other => return Err(Error::new(ErrorKind::InvalidInput, __!("Expected Text for remote IP, got: {:?}", other))),
                },
                url: match map.remove(&Self::KEY_URL).ok_or_else(|| Error::new(ErrorKind::InvalidInput, __!("Missing URL")))? {
                    Value::Text(url) => url,
                    other => return Err(Error::new(ErrorKind::InvalidInput, __!("Expected Text for URL, got: {:?}", other))),
                },
                response_size: match map.remove(&Self::KEY_RESPONSE_SIZE) {
                    Some(Value::U64(response_size)) => Some(response_size),
                    None => None,
                    other => return Err(Error::new(ErrorKind::InvalidInput, __!("Expected U64 for response size, got: {:?}", other))),
                },
                code: match map.remove(&Self::KEY_CODE).ok_or_else(|| Error::new(ErrorKind::InvalidInput, __!("Missing code")))? {
                    Value::U16(code) => code,
                    other => return Err(Error::new(ErrorKind::InvalidInput, __!("Expected U16 for code, got: {:?}", other))),
                },
                runtime: match map.remove(&Self::KEY_RUNTIME).ok_or_else(|| Error::new(ErrorKind::InvalidInput, __!("Missing runtime")))? {
                    // `KEY_RUNTIME` holds whole seconds (see `to_value()`).
                    // Bug fix: the previous code added the seconds and the
                    // sub-second nanoseconds together and interpreted the sum
                    // as nanoseconds (`Duration::from_nanos`), corrupting
                    // every runtime of 1 second or more on round-trip.
                    // `Duration::new` combines the two components correctly
                    // (and carries nanos >= 1e9 into seconds without panic
                    // for any u32 value).
                    Value::U64(runtime) => match map.remove(&Self::KEY_RUNTIME_SUBSEC_NANOS) {
                        Some(Value::U32(subsec_nanos)) => Duration::new(runtime, subsec_nanos),
                        Some(other) => return Err(Error::new(
                            ErrorKind::InvalidInput, __!("Expected U32 for runtime-subsec-nanos, got: {:?}", other)
                        )),
                        None => Duration::from_secs(runtime),
                    },
                    other => return Err(Error::new(ErrorKind::InvalidInput, __!("Expected U64 for runtime, got: {:?}", other))),
                },
                notes: match map.remove(&Self::KEY_NOTES) {
                    Some(Value::Text(notes)) => Some(notes),
                    None => None,
                    other => return Err(Error::new(ErrorKind::InvalidInput, __!("Expected Text for notes, got: {:?}", other))),
                },
            }),
            other => Err(Error::new(ErrorKind::InvalidInput, __!("Expected a map, got: {:?}", other))),
        }
    }
}
/// Commands accepted by the logger's worker thread (see [`Logger::make()`]).
#[derive(Debug)]
pub enum Cmd {
    /// Buffer one log record in memory.
    StoreLog(Log),
    /// Write all buffered records to disk.
    FlushToDisk,
    /// No-op used to detect whether the worker thread is still alive.
    Ping,
}
/// Sending half of the logger's bounded command channel.
pub type CmdSender = SyncSender<Cmd>;
/// Buffering logger; owned by the worker thread spawned in [`Logger::make()`].
pub struct Logger {
    // In-memory buffer of pending records (capacity = `config.buf_len`).
    logs: Vec<Log>,
    // Runtime configuration, moved in at construction time.
    config: Config,
    // Path of the current output file; `None` until the first flush.
    output: Option<PathBuf>,
}
impl Logger {
    /// Starts the logging service and returns the channel end-point used to
    /// submit [`Cmd`]s to it.
    ///
    /// Two background threads are spawned:
    ///
    /// - a worker that owns the [`Logger`], buffers records from
    ///   [`Cmd::StoreLog`], and writes them out on [`Cmd::FlushToDisk`];
    /// - a ticker that requests a flush every `disk_flush_interval` and
    ///   otherwise sends [`Cmd::Ping`] so a stopped worker is noticed.
    ///
    /// # Errors
    ///
    /// Fails when `config.disk_flush_interval` is shorter than one second.
    pub fn make(config: Config) -> io::Result<CmdSender> {
        if config.disk_flush_interval.as_secs() == 0 {
            return Err(Error::new(ErrorKind::InvalidInput, "Disk flush interval must be larger than zero"));
        }
        let disk_flush_interval = config.disk_flush_interval.clone();
        // Bounded channel: producers block once `buf_len` commands are queued.
        let (sender, receiver) = mpsc::sync_channel(config.buf_len);
        thread::spawn(move || {
            let mut logger = Self {
                logs: Vec::with_capacity(config.buf_len),
                config,
                output: None,
            };
            loop {
                match receiver.recv() {
                    Ok(Cmd::StoreLog(log)) => logger.push(log),
                    // The buffer is cleared only after a successful flush;
                    // a failed batch stays buffered for the next attempt.
                    Ok(Cmd::FlushToDisk) => match logger.flush_to_disk() {
                        Ok(()) => logger.clear_logs(),
                        Err(err) => __e!("{}", err),
                    },
                    // Pings exist solely to probe that this thread is alive.
                    Ok(Cmd::Ping) => (),
                    Err(err) => {
                        // All senders are gone; shut the worker down.
                        __e!("Failed to receive command: {} -> stopping server", err);
                        break;
                    },
                };
            }
        });
        {
            let sender = sender.clone();
            thread::spawn(move || {
                // Wake at least every 10 seconds so a dead worker is detected
                // promptly even when the flush interval is long.
                let sleep_time = Duration::from_secs(disk_flush_interval.as_secs().min(10));
                let mut last_saved = Instant::now();
                loop {
                    let now = Instant::now();
                    match now - last_saved >= disk_flush_interval {
                        true => match sender.send(Cmd::FlushToDisk) {
                            Ok(()) => last_saved = Instant::now(),
                            Err(err) => {
                                __e!("Failed sending {:?} to server -> {}", Cmd::FlushToDisk, err);
                                break;
                            },
                        },
                        false => if let Err(err) = sender.send(Cmd::Ping) {
                            __e!("Failed sending {:?} to server -> {}", Cmd::Ping, err);
                            break;
                        },
                    };
                    thread::sleep(sleep_time);
                }
            });
        }
        Ok(sender)
    }
    /// Appends one record to the in-memory buffer; once the buffer reaches
    /// capacity, it is flushed to disk and old log files are cleaned up.
    ///
    /// NOTE(review): unlike the `FlushToDisk` path in `make()`, the buffer is
    /// cleared here even when the flush failed — presumably deliberate, to
    /// bound memory at the cost of dropping records; confirm.
    pub fn push(&mut self, log: Log) {
        self.logs.push(log);
        if self.logs.len() >= self.logs.capacity() {
            __p!("Buffer full; flushing to disk...");
            if let Err(err) = self.flush_to_disk() {
                __e!("Failed flushing logs to disk: {}", err);
            }
            self.logs.clear();
            self.clean_up_old_log_files();
        }
    }
    // Writes the buffered records to the current output file, first rotating
    // to a fresh timestamp-named file when there is no output file yet or the
    // current one has reached `max_file_len`. Whether `self.logs` is drained
    // depends on the backend (see the two `flush_to_file` variants).
    fn flush_to_disk(&mut self) -> io::Result<()> {
        let need_new_file = match self.output.as_ref() {
            Some(output) => output.metadata()?.len() >= self.config.max_file_len,
            None => true,
        };
        if need_new_file {
            let (year, month, day, hour, min, sec) = unsafe {
                // `libc::localtime` returns a pointer into libc-internal
                // static storage (per POSIX); the fields are copied out
                // immediately below. It is not thread-safe in general —
                // presumably acceptable here because only the single worker
                // thread flushes; confirm.
                let localtime = libc::localtime(&libc::time(ptr::null_mut()));
                match localtime.is_null() {
                    true => return Err(Error::new(ErrorKind::Other, __!("Failed to get local time"))),
                    false => (
                        // `tm_year` counts from 1900 and `tm_mon` from 0.
                        (*localtime).tm_year.saturating_add(1900), (*localtime).tm_mon.saturating_add(1), (*localtime).tm_mday,
                        (*localtime).tm_hour, (*localtime).tm_min, (*localtime).tm_sec,
                    ),
                }
            };
            let path = self.config.work_dir.join(
                format!("{:04}-{:02}-{:02}__{:02}-{:02}-{:02}{}", year, month, day, hour, min, sec, LOG_FILE_EXT)
            );
            match path.exists() {
                true => return Err(Error::new(ErrorKind::Other, __!("Failed to create output file (it existed already): {:?}", path))),
                false => self.output = Some(path),
            }
        }
        if let Some(output) = self.output.clone() {
            self.flush_to_file(output)?;
        }
        Ok(())
    }
    /// SQLite backend: appends all buffered records to the database at `file`
    /// inside a single transaction, creating the schema first when the file
    /// is new. Leaves `self.logs` untouched (the caller clears it).
    #[cfg(feature = "default")]
    fn flush_to_file(&self, file: impl AsRef<Path>) -> io::Result<()> {
        let file = file.as_ref();
        let start_time = Instant::now();
        let file_existed = file.exists();
        let mut conn = Connection::open(file).map_err(|err|
            Error::new(ErrorKind::Other, __!("Failed to open database connection: {}", err))
        )?;
        if file_existed == false {
            conn.execute_batch(SQL_CREATOR).map_err(|err| Error::new(ErrorKind::Other, __!("Failed making new database: {}", err)))?;
        }
        let transaction = conn.transaction().map_err(|err|
            Error::new(ErrorKind::Other, __!("Failed to make new database transaction: {}", err))
        )?;
        {
            let mut statement = transaction.prepare_cached(
                "insert into logs (time, remote_ip, url, response_size, code, runtime, runtime_millis, notes) values (?,?,?,?,?,?,?,?);"
            ).map_err(|err| {
                Error::new(ErrorKind::Other, __!("Internal error: {}", err))
            })?;
            for log in self.logs.iter() {
                // u64 fields are cast to i64 because SQLite integers are signed.
                let params: &[&ToSql] = &[
                    &(log.time as i64), &log.remote_ip, &log.url, &log.response_size.map(|s| s as i64), &log.code,
                    &(log.runtime.as_secs() as i64), &log.runtime.subsec_millis(), &(log.notes),
                ];
                statement.execute(params).map_err(|err| Error::new(ErrorKind::Other, __!("Failed running SQL statement: {}", err)))?;
            }
        }
        transaction.commit().map_err(|err| Error::new(ErrorKind::Other, __!("Failed to commit: {}", err)))?;
        __p!(
            "Flushed {} record{} to disk successfully, in {:?}",
            self.logs.len(), match self.logs.len() { 1 => "", _ => "s" }, Instant::now().duration_since(start_time),
        );
        Ok(())
    }
    /// binn-ir backend: appends all buffered records to `file`, draining
    /// `self.logs` in the process.
    ///
    /// NOTE(review): records are popped from the tail (`remove(i)` with a
    /// descending index is cheap per element), so they are written in reverse
    /// insertion order — the opposite of the SQLite backend; confirm intended.
    #[cfg(feature = "with-binn-ir")]
    fn flush_to_file(&mut self, file: impl AsRef<Path>) -> io::Result<()> {
        let file = file.as_ref();
        let start_time = Instant::now();
        let count = self.logs.len();
        {
            let mut writer = BufWriter::new(OpenOptions::new().append(true).create(true).open(file)?);
            for i in (0..count).rev() {
                self.logs.remove(i).to_value().encode(&mut writer)?;
            }
        }
        __p!(
            "Flushed {} record{} to disk successfully, in {:?}",
            count, match count { 1 => "", _ => "s" }, Instant::now().duration_since(start_time),
        );
        Ok(())
    }
    // Discards all buffered records without writing them.
    fn clear_logs(&mut self) {
        self.logs.clear();
    }
    // Spawns a background thread that deletes files in the work directory
    // which carry the log-file extension and whose modification time is older
    // than `config.log_files_reserved`.
    fn clean_up_old_log_files(&self) {
        let work_dir = self.config.work_dir.clone();
        let log_files_reserved = self.config.log_files_reserved.clone();
        thread::spawn(move || {
            let read_dir = match fs::read_dir(&work_dir) {
                Ok(read_dir) => read_dir,
                Err(err) => {
                    __e!("Cleaning up old log files: failed to read work directory {:?} -> {}", work_dir, err);
                    return;
                },
            };
            for dir_entry in read_dir {
                if let Ok(dir_entry) = dir_entry {
                    // Skip entries that do not look like this crate's log files.
                    match dir_entry.file_name().to_str() {
                        Some(file_name) if file_name.ends_with(LOG_FILE_EXT) => (),
                        _ => continue,
                    };
                    // Nested results: metadata() -> modified() -> duration_since();
                    // any failure, or a file newer than the reserve window, is skipped.
                    match dir_entry.metadata().map(|m| m.modified().map(|m| SystemTime::now().duration_since(m))) {
                        Ok(Ok(Ok(duration))) if duration > log_files_reserved => {
                            let path = dir_entry.path();
                            if path.is_file() == false {
                                continue;
                            }
                            match fs::remove_file(&path) {
                                Ok(()) => __p!("Removed old log file: {:?}", path),
                                Err(err) => __e!("Failed to remove old log file {:?} -> {}", path, err),
                            }
                        },
                        _ => continue,
                    };
                }
            }
        });
    }
}