pitchfork_cli/cli/logs.rs

use crate::ui::style::edim;
use crate::watch_files::WatchFiles;
use crate::{Result, env};
use chrono::{DateTime, Local, NaiveDateTime, TimeZone, Timelike};
use itertools::Itertools;
use miette::IntoDiagnostic;
use notify::RecursiveMode;
use std::collections::{BTreeMap, HashMap, HashSet};
use std::fs;
use std::io::{BufRead, BufReader, Seek, SeekFrom};
use std::path::PathBuf;
use std::time::Duration;
use xx::regex;

/// Displays logs for daemon(s)
#[derive(Debug, clap::Args)]
#[clap(
    visible_alias = "l",
    verbatim_doc_comment,
    long_about = "\
Displays logs for daemon(s)

Shows logs from managed daemons. Logs are stored in the pitchfork logs directory
and include timestamps for filtering.

Examples:
  pitchfork logs api              Show last 100 lines for 'api'
  pitchfork logs api worker       Show logs for multiple daemons
  pitchfork logs                  Show logs for all daemons
  pitchfork logs api -n 50        Show last 50 lines
  pitchfork logs api -n 0         Show all logs (no limit)
  pitchfork logs api --tail       Follow logs in real-time
  pitchfork logs api --from '2024-01-15 10:00:00'
                                  Show logs since a specific time
  pitchfork logs api --to '2024-01-15 12:00:00'
                                  Show logs until a specific time
  pitchfork logs api --clear      Delete logs for 'api'
  pitchfork logs --clear          Delete logs for all daemons"
)]
pub struct Logs {
    /// Show only logs for the specified daemon(s)
    id: Vec<String>,

    /// Delete logs
    #[clap(short, long)]
    clear: bool,

    /// Show N lines of logs
    ///
    /// Set to 0 to show all logs
    #[clap(short, default_value = "100")]
    n: usize,

    /// Show logs in real-time
    #[clap(short, long)]
    tail: bool,

    /// Show logs from this time (format: "YYYY-MM-DD HH:MM:SS")
    #[clap(long)]
    from: Option<String>,

    /// Show logs until this time (format: "YYYY-MM-DD HH:MM:SS")
    #[clap(long)]
    to: Option<String>,
}

impl Logs {
    pub async fn run(&self) -> Result<()> {
        if self.clear {
            let ids = if self.id.is_empty() {
                // Clear all logs when no daemon specified
                get_all_daemon_ids()?
            } else {
                self.id.clone()
            };
            for id in &ids {
                let log_dir = env::PITCHFORK_LOGS_DIR.join(id);
                let path = log_dir.join(format!("{}.log", id));
                if path.exists() {
                    xx::file::create(&path)?;
                }
            }
            return Ok(());
        }

        let from = self.from.as_ref().and_then(|s| parse_datetime(s).ok());
        let to = self.to.as_ref().and_then(|s| parse_datetime(s).ok());

        self.print_existing_logs(from, to)?;
        if self.tail {
            tail_logs(&self.id).await?;
        }

        Ok(())
    }

    fn print_existing_logs(
        &self,
        from: Option<DateTime<Local>>,
        to: Option<DateTime<Local>>,
    ) -> Result<()> {
        let log_files = get_log_file_infos(&self.id)?;
        trace!("log files for: {}", log_files.keys().join(", "));
        let log_lines = log_files
            .iter()
            .flat_map(|(name, lf)| {
                let rev = match xx::file::open(&lf.path) {
                    Ok(f) => rev_lines::RevLines::new(f),
                    Err(e) => {
                        error!("{}: {}", lf.path.display(), e);
                        return vec![];
                    }
                };
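                // rev_lines reads the file backwards, so lines arrive newest-first
                // and take(n) below keeps only the most recent n lines per daemon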
                let lines = rev.into_iter().filter_map(Result::ok);
                let lines = if self.n == 0 {
                    lines.collect_vec()
                } else {
                    lines.take(self.n).collect_vec()
                };
                merge_log_lines(name, lines)
            })
            .filter(|(date, _, _)| {
                if let Ok(dt) = parse_datetime(date) {
                    if let Some(from) = from
                        && dt < from
                    {
                        return false;
                    }
                    if let Some(to) = to
                        && dt > to
                    {
                        return false;
                    }
                    true
                } else {
                    true // Include lines without valid timestamps
                }
            })
            .sorted_by_cached_key(|l| l.0.to_string());

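        // Entries from all daemons are now interleaved in timestamp order;
        // keep only the newest n overall while preserving chronological order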
        let log_lines = if self.n == 0 {
            log_lines.collect_vec()
        } else {
            log_lines.rev().take(self.n).rev().collect_vec()
        };

        for (date, id, msg) in log_lines {
            if self.id.len() == 1 {
                println!("{} {}", edim(&date), msg);
            } else {
                println!("{} {} {}", edim(&date), id, msg);
            }
        }
        Ok(())
    }
}

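/// Parse raw log lines of the form "YYYY-MM-DD HH:MM:SS LEVEL message" into
/// (timestamp, daemon id, message) tuples; lines without a leading timestamp
/// are appended to the previous entry as continuations.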
fn merge_log_lines(id: &str, lines: Vec<String>) -> Vec<(String, String, String)> {
    lines.into_iter().fold(vec![], |mut acc, line| {
        match regex!(r"^(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}) (\w)+ (.*)$").captures(&line) {
            Some(caps) => {
                let (date, msg) = match (caps.get(1), caps.get(3)) {
                    (Some(d), Some(m)) => (d.as_str().to_string(), m.as_str().to_string()),
                    _ => return acc, // Skip malformed lines
                };
                acc.push((date, id.to_string(), msg));
                acc
            }
            None => {
                if let Some(l) = acc.last_mut() {
                    l.2.push_str(&line)
                }
                acc
            }
        }
    })
}

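/// List daemon ids by scanning the subdirectories of the pitchfork logs directory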
fn get_all_daemon_ids() -> Result<Vec<String>> {
    Ok(xx::file::ls(&*env::PITCHFORK_LOGS_DIR)?
        .into_iter()
        .filter(|d| !d.starts_with("."))
        .filter(|d| d.is_dir())
        .filter_map(|d| d.file_name().map(|f| f.to_string_lossy().to_string()))
        .collect())
}

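/// Build a map of daemon id -> LogFile for the requested daemons
/// (or for all daemons when `names` is empty)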
fn get_log_file_infos(names: &[String]) -> Result<BTreeMap<String, LogFile>> {
    let names = names.iter().collect::<HashSet<_>>();
    xx::file::ls(&*env::PITCHFORK_LOGS_DIR)?
        .into_iter()
        .filter(|d| !d.starts_with("."))
        .filter(|d| d.is_dir())
        .filter_map(|d| d.file_name().map(|f| f.to_string_lossy().to_string()))
        .filter(|n| names.is_empty() || names.contains(n))
        .map(|n| {
            let path = env::PITCHFORK_LOGS_DIR
                .join(&n)
                .join(format!("{n}.log"))
                .canonicalize()
                .into_diagnostic()?;
            Ok((
                n.clone(),
                LogFile {
                    _name: n,
                    file: xx::file::open(&path)?,
                    // TODO: might be better to build the length when reading the file so we don't have gaps
                    cur: xx::file::metadata(&path).into_diagnostic()?.len(),
                    path,
                },
            ))
        })
        .filter_ok(|(_, f)| f.path.exists())
        .collect::<Result<BTreeMap<_, _>>>()
}

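/// Follow the given daemons' log files and print new lines as they are appended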
pub async fn tail_logs(names: &[String]) -> Result<()> {
    let mut log_files = get_log_file_infos(names)?;
    let mut wf = WatchFiles::new(Duration::from_millis(10))?;

    for lf in log_files.values() {
        wf.watch(&lf.path, RecursiveMode::NonRecursive)?;
    }

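    // Map watched paths back to daemon names so change events can be attributed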
    let files_to_name = log_files
        .iter()
        .map(|(n, f)| (f.path.clone(), n.clone()))
        .collect::<HashMap<_, _>>();

    while let Some(paths) = wf.rx.recv().await {
        let mut out = vec![];
        for path in paths {
            let Some(name) = files_to_name.get(&path) else {
                warn!("Unknown log file changed: {}", path.display());
                continue;
            };
            let Some(info) = log_files.get_mut(name) else {
                warn!("No log info for: {name}");
                continue;
            };
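            // Seek to the last offset already printed and read only newly appended lines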
            info.file
                .seek(SeekFrom::Start(info.cur))
                .into_diagnostic()?;
            let reader = BufReader::new(&info.file);
            let lines = reader.lines().map_while(Result::ok).collect_vec();
            info.cur += lines.iter().fold(0, |acc, l| acc + l.len() as u64);
            out.extend(merge_log_lines(name, lines));
        }
        let out = out
            .into_iter()
            .sorted_by_cached_key(|l| l.0.to_string())
            .collect_vec();
        for (date, name, msg) in out {
            println!("{} {} {}", edim(&date), name, msg);
        }
    }
    Ok(())
}

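/// An open handle to a daemon's log file plus the byte offset already printed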
struct LogFile {
    _name: String,
    path: PathBuf,
    file: fs::File,
    cur: u64,
}

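/// Parse a local timestamp in "YYYY-MM-DD HH:MM:SS" format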
fn parse_datetime(s: &str) -> Result<DateTime<Local>> {
    let naive_dt = NaiveDateTime::parse_from_str(s, "%Y-%m-%d %H:%M:%S").into_diagnostic()?;
    Local
        .from_local_datetime(&naive_dt)
        .single()
        .ok_or_else(|| miette::miette!("Invalid or ambiguous datetime: '{}'. ", s))
}

/// Print logs for a specific daemon within a time range
/// This is a public API used by other commands (like run/start) to show logs on failure
pub fn print_logs_for_time_range(
    daemon_id: &str,
    from: DateTime<Local>,
    to: Option<DateTime<Local>>,
) -> Result<()> {
    let daemon_ids = vec![daemon_id.to_string()];
    let log_files = get_log_file_infos(&daemon_ids)?;

    // Truncate 'from' to second precision to match log timestamp precision
    // This ensures we include logs that occurred in the same second as the start time
    // Note: with_nanosecond(0) cannot fail since 0 is always valid
    let from = from
        .with_nanosecond(0)
        .expect("0 is always valid for nanoseconds");
    let to = to.map(|t| {
        t.with_nanosecond(0)
            .expect("0 is always valid for nanoseconds")
    });

    let log_lines = log_files
        .iter()
        .flat_map(|(name, lf)| {
            let rev = match xx::file::open(&lf.path) {
                Ok(f) => rev_lines::RevLines::new(f),
                Err(e) => {
                    error!("{}: {}", lf.path.display(), e);
                    return vec![];
                }
            };
            let lines = rev.into_iter().filter_map(Result::ok).collect_vec();
            merge_log_lines(name, lines)
        })
        .filter(|(date, _, _)| {
            if let Ok(dt) = parse_datetime(date) {
                // include logs at the exact start time
                if dt < from {
                    return false;
                }
                if let Some(to) = to
                    && dt > to
                {
                    return false;
                }
                true
            } else {
                true
            }
        })
        .sorted_by_cached_key(|l| l.0.to_string())
        .collect_vec();

    if log_lines.is_empty() {
        eprintln!(
            "No logs found for daemon '{}' in the specified time range",
            daemon_id
        );
    } else {
        eprintln!("\n{} {} {}", edim("==="), edim("Error logs"), edim("==="));
        for (date, _id, msg) in log_lines {
            eprintln!("{} {}", edim(&date), msg);
        }
        eprintln!("{} {} {}\n", edim("==="), edim("End of logs"), edim("==="));
    }

    Ok(())
}

/// Print startup logs for a daemon after successful start
/// Similar to print_logs_for_time_range but with "Startup logs" header
pub fn print_startup_logs(daemon_id: &str, from: DateTime<Local>) -> Result<()> {
    let daemon_ids = vec![daemon_id.to_string()];
    let log_files = get_log_file_infos(&daemon_ids)?;

    // Truncate 'from' to second precision to match log timestamp precision
    let from = from
        .with_nanosecond(0)
        .expect("0 is always valid for nanoseconds");

    let log_lines = log_files
        .iter()
        .flat_map(|(name, lf)| {
            let rev = match xx::file::open(&lf.path) {
                Ok(f) => rev_lines::RevLines::new(f),
                Err(e) => {
                    error!("{}: {}", lf.path.display(), e);
                    return vec![];
                }
            };
            let lines = rev.into_iter().filter_map(Result::ok).collect_vec();
            merge_log_lines(name, lines)
        })
        .filter(|(date, _, _)| {
            if let Ok(dt) = parse_datetime(date) {
                dt >= from
            } else {
                true
            }
        })
        .sorted_by_cached_key(|l| l.0.to_string())
        .collect_vec();

    if !log_lines.is_empty() {
        eprintln!("\n{} {} {}", edim("==="), edim("Startup logs"), edim("==="));
        for (date, _id, msg) in log_lines {
            eprintln!("{} {}", edim(&date), msg);
        }
        eprintln!("{} {} {}\n", edim("==="), edim("End of logs"), edim("==="));
    }

    Ok(())
}