1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
use crate::{config_handler::Config, Error, Result};
use bytes::Bytes;
use flexi_logger::{DeferredNow, Logger};
use log::{Log, Metadata, Record};
use pickledb::{PickleDb, PickleDbDumpPolicy};
use rand::{distributions::Standard, CryptoRng, Rng};
use serde::{de::DeserializeOwned, Serialize};
use std::io::Write;
use std::{fs, path::Path};
const NODE_MODULE_NAME: &str = "sn_node";
pub(crate) fn new_auto_dump_db<D: AsRef<Path>, N: AsRef<Path>>(
db_dir: D,
db_name: N,
) -> Result<PickleDb> {
let db_path = db_dir.as_ref().join(db_name);
match PickleDb::load_bin(db_path.clone(), PickleDbDumpPolicy::AutoDump) {
Ok(db) => Ok(db),
Err(_) => {
fs::create_dir_all(db_dir)?;
let mut db = PickleDb::new_bin(db_path.clone(), PickleDbDumpPolicy::AutoDump);
db.dump()?;
PickleDb::load_bin(db_path, PickleDbDumpPolicy::AutoDump).map_err(Error::PickleDb)
}
}
}
#[allow(dead_code)]
pub(crate) fn random_vec<R: CryptoRng + Rng>(rng: &mut R, size: usize) -> Vec<u8> {
rng.sample_iter(&Standard).take(size).collect()
}
/// Serialises `data` with bincode into a `Bytes` buffer.
///
/// # Errors
/// Returns `Error::Bincode` if bincode serialisation fails.
pub(crate) fn serialise<T: Serialize>(data: &T) -> Result<Bytes> {
    let serialised_data = bincode::serialize(data).map_err(Error::Bincode)?;
    // `Bytes::from(Vec<u8>)` takes ownership of the buffer without copying,
    // unlike `copy_from_slice`, which would duplicate it.
    Ok(Bytes::from(serialised_data))
}
#[allow(unused)]
pub(crate) fn deserialise<T: DeserializeOwned>(bytes: &[u8]) -> Result<T> {
bincode::deserialize(bytes).map_err(Error::Bincode)
}
/// Initialises logging for the node from `config`.
///
/// The verbosity in `config` sets the level filter for the `sn_node` module
/// (overridable via the environment, per `flexi_logger`); output goes to a
/// file under `config.log_dir()` when set, otherwise to stderr. The logger is
/// wrapped in `async_log` so log calls don't block the caller.
pub fn init_logging(config: &Config) {
    // Custom line format: "[thread] LEVEL timestamp [file:line] message".
    let do_format = move |writer: &mut dyn Write, clock: &mut DeferredNow, record: &Record| {
        let handle = std::thread::current();
        write!(
            writer,
            "[{}] {} {} [{}:{}] {}",
            handle
                .name()
                .unwrap_or(&format!("Thread-{:?}", handle.id())),
            record.level(),
            clock.now().to_rfc3339(),
            record.file().unwrap_or_default(),
            record.line().unwrap_or_default(),
            record.args()
        )
    };
    let level_filter = config.verbose().to_level_filter();
    // `format!` already formats via `Display`; no `to_string()` needed.
    let module_log_filter = format!("{}={}", NODE_MODULE_NAME, level_filter);
    let logger = Logger::with_env_or_str(module_log_filter)
        .format(do_format)
        .suppress_timestamp();
    let logger = if let Some(log_dir) = config.log_dir() {
        logger.log_to_file().directory(log_dir)
    } else {
        logger
    };
    if let Ok((logger, _)) = logger.build() {
        let logger = LoggerWrapper(logger);
        // Best-effort: a failure to install the logger must not abort startup.
        let _ = async_log::Logger::wrap(logger, || 5433).start(level_filter);
    }
}
/// Newtype over a boxed logger so the `Box<dyn Log>` produced by
/// `flexi_logger` can itself be passed where a concrete `Log` impl is
/// required (here, to `async_log::Logger::wrap` in `init_logging`).
struct LoggerWrapper(Box<dyn Log>);
// Pure delegation: every `Log` method forwards to the inner logger unchanged.
impl Log for LoggerWrapper {
    fn enabled(&self, metadata: &Metadata) -> bool {
        self.0.enabled(metadata)
    }
    fn log(&self, record: &Record) {
        self.0.log(record)
    }
    fn flush(&self) {
        self.0.flush();
    }
}
/// Control commands that can be sent to the node.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Command {
    /// Request a graceful shutdown of the node.
    Shutdown,
}