1use crate::ui::style::edim;
2use crate::watch_files::WatchFiles;
3use crate::{Result, env};
4use chrono::{DateTime, Local, NaiveDateTime, TimeZone, Timelike};
5use itertools::Itertools;
6use miette::IntoDiagnostic;
7use notify::RecursiveMode;
8use std::collections::{BTreeMap, HashMap, HashSet};
9use std::fs;
10use std::io::{BufRead, BufReader, Seek, SeekFrom};
11use std::path::PathBuf;
12use std::time::Duration;
13use xx::regex;
14
#[derive(Debug, clap::Args)]
#[clap(
    visible_alias = "l",
    verbatim_doc_comment,
    long_about = "\
Displays logs for daemon(s)

Shows logs from managed daemons. Logs are stored in the pitchfork logs directory
and include timestamps for filtering.

Examples:
  pitchfork logs api                  Show last 100 lines for 'api'
  pitchfork logs api worker           Show logs for multiple daemons
  pitchfork logs                      Show logs for all daemons
  pitchfork logs api -n 50            Show last 50 lines
  pitchfork logs api -n 0             Show all logs (no limit)
  pitchfork logs api --tail           Follow logs in real-time
  pitchfork logs api --from '2024-01-15 10:00:00'
                                      Show logs since a specific time
  pitchfork logs api --to '2024-01-15 12:00:00'
                                      Show logs until a specific time
  pitchfork logs api --clear          Delete logs for 'api'
  pitchfork logs --clear              Delete logs for all daemons"
)]
pub struct Logs {
    /// The ID(s) of the daemon(s) to show logs for; empty means all daemons
    id: Vec<String>,

    #[clap(short, long)]
    /// Delete logs instead of displaying them
    clear: bool,

    #[clap(short, default_value = "100")]
    /// Show N lines of logs
    ///
    /// Set to 0 to show all logs
    n: usize,

    #[clap(short, long)]
    /// Follow the logs in real-time after printing existing lines
    tail: bool,

    #[clap(long)]
    /// Only show logs at or after this time (format: YYYY-MM-DD HH:MM:SS)
    from: Option<String>,

    #[clap(long)]
    /// Only show logs at or before this time (format: YYYY-MM-DD HH:MM:SS)
    to: Option<String>,
}
66
impl Logs {
    /// Entry point for `pitchfork logs`.
    ///
    /// With `--clear`, truncates the log file of each selected daemon (all
    /// daemons when no IDs are given) and returns. Otherwise prints existing
    /// log lines, then follows the files when `--tail` is set.
    pub async fn run(&self) -> Result<()> {
        if self.clear {
            let ids = if self.id.is_empty() {
                get_all_daemon_ids()?
            } else {
                self.id.clone()
            };
            for id in &ids {
                let log_dir = env::PITCHFORK_LOGS_DIR.join(id);
                let path = log_dir.join(format!("{}.log", id));
                if path.exists() {
                    // Recreates the file empty (truncate rather than unlink),
                    // so a running daemon's open handle keeps working.
                    xx::file::create(&path)?;
                }
            }
            return Ok(());
        }

        // Unparseable --from/--to values are silently ignored (treated as
        // unset) rather than reported.
        let from = self.from.as_ref().and_then(|s| parse_datetime(s).ok());
        let to = self.to.as_ref().and_then(|s| parse_datetime(s).ok());

        self.print_existing_logs(from, to)?;
        if self.tail {
            tail_logs(&self.id).await?;
        }

        Ok(())
    }

    /// Prints up to `self.n` of the most recent log entries (0 = unlimited)
    /// across the selected daemons, filtered by the optional time bounds and
    /// sorted by timestamp string.
    fn print_existing_logs(
        &self,
        from: Option<DateTime<Local>>,
        to: Option<DateTime<Local>>,
    ) -> Result<()> {
        let log_files = get_log_file_infos(&self.id)?;
        trace!("log files for: {}", log_files.keys().join(", "));
        let log_lines = log_files
            .iter()
            .flat_map(|(name, lf)| {
                // Read each file newest-first so the per-file cap below keeps
                // the most recent lines without scanning the whole file.
                let rev = match xx::file::open(&lf.path) {
                    Ok(f) => rev_lines::RevLines::new(f),
                    Err(e) => {
                        // Unreadable file: report and skip this daemon.
                        error!("{}: {}", lf.path.display(), e);
                        return vec![];
                    }
                };
                let lines = rev.into_iter().filter_map(Result::ok);
                let lines = if self.n == 0 {
                    lines.collect_vec()
                } else {
                    lines.take(self.n).collect_vec()
                };
                // NOTE(review): merge_log_lines receives lines newest-first
                // here, so continuation (non-timestamped) lines attach to the
                // adjacent newer entry — confirm multi-line entries render in
                // the intended order.
                merge_log_lines(name, lines)
            })
            .filter(|(date, _, _)| {
                if let Ok(dt) = parse_datetime(date) {
                    if let Some(from) = from
                        && dt < from
                    {
                        return false;
                    }
                    if let Some(to) = to
                        && dt > to
                    {
                        return false;
                    }
                    true
                } else {
                    // Entries whose timestamp cannot be parsed are kept.
                    true
                }
            })
            .sorted_by_cached_key(|l| l.0.to_string());

        // The per-file cap can still yield up to n lines per daemon, so apply
        // the global cap to the merged, time-sorted stream (keep the newest n).
        let log_lines = if self.n == 0 {
            log_lines.collect_vec()
        } else {
            log_lines.rev().take(self.n).rev().collect_vec()
        };

        for (date, id, msg) in log_lines {
            // Omit the daemon ID column when exactly one daemon was requested.
            if self.id.len() == 1 {
                println!("{} {}", edim(&date), msg);
            } else {
                println!("{} {} {}", edim(&date), id, msg);
            }
        }
        Ok(())
    }
}
157
158fn merge_log_lines(id: &str, lines: Vec<String>) -> Vec<(String, String, String)> {
159 lines.into_iter().fold(vec![], |mut acc, line| {
160 match regex!(r"^(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}) (\w)+ (.*)$").captures(&line) {
161 Some(caps) => {
162 let (date, msg) = match (caps.get(1), caps.get(3)) {
163 (Some(d), Some(m)) => (d.as_str().to_string(), m.as_str().to_string()),
164 _ => return acc, };
166 acc.push((date, id.to_string(), msg));
167 acc
168 }
169 None => {
170 if let Some(l) = acc.last_mut() {
171 l.2.push_str(&line)
172 }
173 acc
174 }
175 }
176 })
177}
178
179fn get_all_daemon_ids() -> Result<Vec<String>> {
180 Ok(xx::file::ls(&*env::PITCHFORK_LOGS_DIR)?
181 .into_iter()
182 .filter(|d| !d.starts_with("."))
183 .filter(|d| d.is_dir())
184 .filter_map(|d| d.file_name().map(|f| f.to_string_lossy().to_string()))
185 .collect())
186}
187
188fn get_log_file_infos(names: &[String]) -> Result<BTreeMap<String, LogFile>> {
189 let names = names.iter().collect::<HashSet<_>>();
190 xx::file::ls(&*env::PITCHFORK_LOGS_DIR)?
191 .into_iter()
192 .filter(|d| !d.starts_with("."))
193 .filter(|d| d.is_dir())
194 .filter_map(|d| d.file_name().map(|f| f.to_string_lossy().to_string()))
195 .filter(|n| names.is_empty() || names.contains(n))
196 .map(|n| {
197 let path = env::PITCHFORK_LOGS_DIR
198 .join(&n)
199 .join(format!("{n}.log"))
200 .canonicalize()
201 .into_diagnostic()?;
202 let mut file = xx::file::open(&path)?;
203 file.seek(SeekFrom::End(0)).into_diagnostic()?;
206 let cur = file.stream_position().into_diagnostic()?;
207 Ok((
208 n.clone(),
209 LogFile {
210 _name: n,
211 file,
212 cur,
213 path,
214 },
215 ))
216 })
217 .filter_ok(|(_, f)| f.path.exists())
218 .collect::<Result<BTreeMap<_, _>>>()
219}
220
/// Follows the selected daemons' log files and prints new lines as they appear.
///
/// Watches each daemon's log file (all daemons when `names` is empty); on
/// every filesystem change event, reads from the previously recorded offset
/// to EOF, merges multi-line entries, and prints them sorted by timestamp.
/// Runs until the watch channel closes.
pub async fn tail_logs(names: &[String]) -> Result<()> {
    let mut log_files = get_log_file_infos(names)?;
    // 10ms debounce window between filesystem events.
    let mut wf = WatchFiles::new(Duration::from_millis(10))?;

    for lf in log_files.values() {
        wf.watch(&lf.path, RecursiveMode::NonRecursive)?;
    }

    // Reverse map so a changed path can be resolved back to its daemon ID.
    let files_to_name = log_files
        .iter()
        .map(|(n, f)| (f.path.clone(), n.clone()))
        .collect::<HashMap<_, _>>();

    while let Some(paths) = wf.rx.recv().await {
        let mut out = vec![];
        for path in paths {
            let Some(name) = files_to_name.get(&path) else {
                warn!("Unknown log file changed: {}", path.display());
                continue;
            };
            let Some(info) = log_files.get_mut(name) else {
                warn!("No log info for: {name}");
                continue;
            };
            // Resume reading where the previous event left off…
            info.file
                .seek(SeekFrom::Start(info.cur))
                .into_diagnostic()?;
            let reader = BufReader::new(&info.file);
            let lines = reader.lines().map_while(Result::ok).collect_vec();
            // …and remember the new EOF offset for the next event.
            info.cur = info.file.stream_position().into_diagnostic()?;
            out.extend(merge_log_lines(name, lines));
        }
        // Interleave the new lines from different daemons chronologically.
        let out = out
            .into_iter()
            .sorted_by_cached_key(|l| l.0.to_string())
            .collect_vec();
        for (date, name, msg) in out {
            println!("{} {} {}", edim(&date), name, msg);
        }
    }
    Ok(())
}
265
/// An open log file for one daemon, plus the cursor state used by `tail_logs`.
struct LogFile {
    /// Daemon ID (currently unused; kept for debugging).
    _name: String,
    /// Canonicalized path to `<logs_dir>/<id>/<id>.log`.
    path: PathBuf,
    /// Open handle, positioned at EOF when first created.
    file: fs::File,
    /// Byte offset where the next tail read should resume.
    cur: u64,
}
272
273fn parse_datetime(s: &str) -> Result<DateTime<Local>> {
274 let naive_dt = NaiveDateTime::parse_from_str(s, "%Y-%m-%d %H:%M:%S").into_diagnostic()?;
275 Local
276 .from_local_datetime(&naive_dt)
277 .single()
278 .ok_or_else(|| miette::miette!("Invalid or ambiguous datetime: '{}'. ", s))
279}
280
/// Prints a daemon's log entries between `from` and (optionally) `to` to
/// stderr, under an "Error logs" banner.
///
/// Entries whose timestamps cannot be parsed are included rather than dropped.
pub fn print_logs_for_time_range(
    daemon_id: &str,
    from: DateTime<Local>,
    to: Option<DateTime<Local>>,
) -> Result<()> {
    let daemon_ids = vec![daemon_id.to_string()];
    let log_files = get_log_file_infos(&daemon_ids)?;

    // Log timestamps only carry second precision, so truncate both bounds to
    // whole seconds to avoid excluding entries within the same second.
    let from = from
        .with_nanosecond(0)
        .expect("0 is always valid for nanoseconds");
    let to = to.map(|t| {
        t.with_nanosecond(0)
            .expect("0 is always valid for nanoseconds")
    });

    let log_lines = log_files
        .iter()
        .flat_map(|(name, lf)| {
            // Read the whole file newest-first (no line cap here).
            let rev = match xx::file::open(&lf.path) {
                Ok(f) => rev_lines::RevLines::new(f),
                Err(e) => {
                    error!("{}: {}", lf.path.display(), e);
                    return vec![];
                }
            };
            let lines = rev.into_iter().filter_map(Result::ok).collect_vec();
            // NOTE(review): merge_log_lines receives lines newest-first here,
            // so continuation lines may attach to the adjacent newer entry —
            // confirm multi-line entries render in the intended order.
            merge_log_lines(name, lines)
        })
        .filter(|(date, _, _)| {
            if let Ok(dt) = parse_datetime(date) {
                if dt < from {
                    return false;
                }
                if let Some(to) = to
                    && dt > to
                {
                    return false;
                }
                true
            } else {
                // Keep entries whose timestamp cannot be parsed.
                true
            }
        })
        .sorted_by_cached_key(|l| l.0.to_string())
        .collect_vec();

    if log_lines.is_empty() {
        eprintln!(
            "No logs found for daemon '{}' in the specified time range",
            daemon_id
        );
    } else {
        eprintln!("\n{} {} {}", edim("==="), edim("Error logs"), edim("==="));
        for (date, _id, msg) in log_lines {
            eprintln!("{} {}", edim(&date), msg);
        }
        eprintln!("{} {} {}\n", edim("==="), edim("End of logs"), edim("==="));
    }

    Ok(())
}
349
350pub fn print_startup_logs(daemon_id: &str, from: DateTime<Local>) -> Result<()> {
353 let daemon_ids = vec![daemon_id.to_string()];
354 let log_files = get_log_file_infos(&daemon_ids)?;
355
356 let from = from
358 .with_nanosecond(0)
359 .expect("0 is always valid for nanoseconds");
360
361 let log_lines = log_files
362 .iter()
363 .flat_map(|(name, lf)| {
364 let rev = match xx::file::open(&lf.path) {
365 Ok(f) => rev_lines::RevLines::new(f),
366 Err(e) => {
367 error!("{}: {}", lf.path.display(), e);
368 return vec![];
369 }
370 };
371 let lines = rev.into_iter().filter_map(Result::ok).collect_vec();
372 merge_log_lines(name, lines)
373 })
374 .filter(|(date, _, _)| {
375 if let Ok(dt) = parse_datetime(date) {
376 dt >= from
377 } else {
378 true
379 }
380 })
381 .sorted_by_cached_key(|l| l.0.to_string())
382 .collect_vec();
383
384 if !log_lines.is_empty() {
385 eprintln!("\n{} {} {}", edim("==="), edim("Startup logs"), edim("==="));
386 for (date, _id, msg) in log_lines {
387 eprintln!("{} {}", edim(&date), msg);
388 }
389 eprintln!("{} {} {}\n", edim("==="), edim("End of logs"), edim("==="));
390 }
391
392 Ok(())
393}