crabtime_internal/
lib.rs

1#![cfg_attr(nightly, feature(proc_macro_span))]
2#![cfg_attr(nightly, feature(proc_macro_diagnostic))]
3
4#![cfg_attr(not(nightly), allow(dead_code))]
5#![cfg_attr(not(nightly), allow(unused_macros))]
6#![cfg_attr(not(nightly), allow(unused_imports))]
7
8mod error;
9mod path;
10
11use error::*;
12
13use std::fmt::Debug;
14use proc_macro2::Delimiter;
15use proc_macro2::LineColumn;
16use proc_macro2::Span;
17use proc_macro2::TokenStream;
18use proc_macro2::TokenTree;
19use quote::ToTokens;
20use quote::quote;
21use std::fs::File;
22use std::fs;
23use std::io::Write;
24use std::path::Path;
25use std::path::PathBuf;
26use std::process::Command;
27use std::default::Default;
28use std::collections::hash_map::DefaultHasher;
29use std::hash::Hash;
30use std::hash::Hasher;
31
32// =================
33// === Constants ===
34// =================
35
/// Set to 'true' to enable debug prints.
const DEBUG: bool = false;

/// Name of this crate; also used as the name of the helper module injected into generated code.
const CRATE: &str = "crabtime";
/// Module with utils functions in the generated project.
const GEN_MOD: &str = CRATE;
/// Rust edition used for the generated project when the host crate does not specify one.
const DEFAULT_EDITION: &str = "2024";
/// Cargo feature-resolver version used for the generated project when not specified.
const DEFAULT_RESOLVER: &str = "3";
/// Prefix marking stdout lines of the generated binary that carry macro output code.
const OUTPUT_PREFIX: &str = "[OUTPUT]";
/// Build-script output directory of this crate; used to locate the `target` directory on disk.
const OUT_DIR: &str = env!("OUT_DIR");
46
/// Rust keywords for special handling. This is not needed for this macro to work, it is only used
/// to make `IntelliJ` / `RustRover` work correctly, as their `TokenStream` spans are incorrect.
/// The list includes both in-use and reserved (`abstract`, `become`, `typeof`, ...) keywords.
const KEYWORDS: &[&str] = &[
    "as", "async", "await", "break", "const", "continue", "crate", "dyn", "else", "enum",
    "extern", "false", "fn", "for", "if", "impl", "in", "let", "loop", "match", "mod", "move",
    "mut", "pub", "ref", "return", "self", "Self", "struct", "super", "trait", "true",
    "type", "unsafe", "use", "where", "while", "abstract", "become", "box", "do", "final", "macro",
    "override", "priv", "typeof", "unsized", "virtual", "yield", "try",
];
56
57// ==================
58// === TokenRange ===
59// ==================
60
/// An inclusive range of tokens, kept as the first and last `TokenTree` so a joined source span
/// can be recovered from it (see `TokenRange::span`).
#[derive(Debug)]
struct TokenRange {
    /// First token of the range.
    start: TokenTree,
    /// Last token of the range.
    end: TokenTree,
}
66
67impl TokenRange {
68    fn new(start: TokenTree, end: TokenTree) -> Self {
69        Self { start, end }
70    }
71
72    #[cfg(nightly)]
73    fn span(&self) -> Span {
74        let first_span = self.start.span();
75        let last_span = self.end.span();
76        first_span.join(last_span).unwrap_or(first_span)
77    }
78}
79
80// ==============================
81// === Generated Code Prelude ===
82// ==============================
83
/// Generate the source of the helper module (`mod crabtime`) injected into every generated
/// evaluation project. It exposes path constants, the `output_str!` / `warning!` / `error!`
/// printing macros (which tag each stdout line with the matching prefix), and the static
/// prelude items.
///
/// `include_token_stream_impl` additionally pulls in the `CodeFromOutput` impl for
/// `proc_macro2::TokenStream` — only valid when the generated project can link `proc-macro2`.
fn gen_prelude(include_token_stream_impl: bool, paths: &Paths) -> String {
    let warning_prefix = Level::WARNING_PREFIX;
    let error_prefix = Level::ERROR_PREFIX;
    let prelude_tok_stream = if include_token_stream_impl { PRELUDE_FOR_TOKEN_STREAM } else { "" };

    // Absolute workspace path, exposed to the generated code as a raw-string constant.
    let workspace_path =
        format!("pub const WORKSPACE_PATH: &str = r#\"{}\"#;", paths.workspace.display());

    // The two constants below require call-site information, which is only available on nightly
    // (see the `Paths` fields gated on `cfg(nightly)`); on stable they are simply omitted.
    #[cfg(nightly)]
    let crate_config_path =
        format!("pub const CRATE_CONFIG_PATH: &str = r#\"{}\"#;", paths.crate_config.display());
    #[cfg(not(nightly))]
    let crate_config_path = "";

    #[cfg(nightly)]
    let call_site_file_path =
        format!("pub const CALL_SITE_FILE_PATH: &str = r#\"{}\"#;", paths.call_site_file.display());
    #[cfg(not(nightly))]
    let call_site_file_path = "";

    format!("
        #[allow(unused_macros)]
        #[allow(unused_imports)]
        #[allow(clippy::all)]
        #[allow(warnings)]
        mod {GEN_MOD} {{
            {workspace_path}
            {crate_config_path}
            {call_site_file_path}

            pub const OUTPUT_PREFIX: &str = \"{OUTPUT_PREFIX}\";
            pub const WARNING_PREFIX: &str = \"{warning_prefix}\";
            pub const ERROR_PREFIX: &str = \"{error_prefix}\";

            macro_rules! output_str {{
                ($($ts:tt)*) => {{
                    println!(\"{{}}\", {GEN_MOD}::prefix_lines_with_output(&format!($($ts)*)));
                }};
            }}
            pub(super) use output_str;

            macro_rules! warning {{
                ($($ts:tt)*) => {{
                    println!(\"{{}}\", {GEN_MOD}::prefix_lines_with_warning(&format!($($ts)*)));
                }};
            }}
            pub(super) use warning;

            macro_rules! error {{
                ($($ts:tt)*) => {{
                    println!(\"{{}}\", {GEN_MOD}::prefix_lines_with_error(&format!($($ts)*)));
                }};
            }}
            pub(super) use error;

            {PRELUDE_STATIC}
            {prelude_tok_stream}
            {PRELUDE_ADDONS}
        }}
    ")
}
145
/// Optional prelude fragment: `CodeFromOutput` impl for `proc_macro2::TokenStream`, emitted only
/// when the generated project depends on `proc-macro2` (see `gen_prelude`).
const PRELUDE_FOR_TOKEN_STREAM: &str = "
    impl CodeFromOutput for proc_macro2::TokenStream {
        fn code_from_output(output: Self) -> String {
            output.to_string()
        }
    }
";
153
/// Static part of the prelude injected into every generated project: the `CodeFromOutput`
/// conversion trait with impls for common output types, the line-prefixing helpers, and the
/// `write_ln!` / `stringify_if_needed!` macros. The `output!` / `quote!` macros defined here are
/// inert placeholders — their real expansion is performed by the `function` attribute macro.
const PRELUDE_STATIC: &str = "
    pub(super) trait CodeFromOutput {
        fn code_from_output(output: Self) -> String;
    }

    impl CodeFromOutput for () {
        fn code_from_output(_output: Self) -> String {
            String::new()
        }
    }

    impl<'t> CodeFromOutput for &'t str {
        fn code_from_output(output: Self) -> String {
            output.to_string()
        }
    }

    impl CodeFromOutput for String {
        fn code_from_output(output: Self) -> String {
            output
        }
    }

    impl CodeFromOutput for usize {
        fn code_from_output(output: Self) -> String {
            format!(\"{output}\")
        }
    }

    impl CodeFromOutput for u8 {
        fn code_from_output(output: Self) -> String {
            format!(\"{output}\")
        }
    }

    impl CodeFromOutput for u16 {
        fn code_from_output(output: Self) -> String {
            format!(\"{output}\")
        }
    }

    impl CodeFromOutput for u32 {
        fn code_from_output(output: Self) -> String {
            format!(\"{output}\")
        }
    }

    impl CodeFromOutput for u64 {
        fn code_from_output(output: Self) -> String {
            format!(\"{output}\")
        }
    }

    impl CodeFromOutput for u128 {
        fn code_from_output(output: Self) -> String {
            format!(\"{output}\")
        }
    }

    impl CodeFromOutput for f32 {
        fn code_from_output(output: Self) -> String {
            format!(\"{output}\")
        }
    }

    impl CodeFromOutput for f64 {
        fn code_from_output(output: Self) -> String {
            format!(\"{output}\")
        }
    }

    pub(super) fn code_from_output<T: CodeFromOutput>(output: T) -> String {
        <T as CodeFromOutput>::code_from_output(output)
    }

    pub(super) fn prefix_lines_with(prefix: &str, input: &str) -> String {
        input
            .lines()
            .map(|line| format!(\"{prefix} {line}\"))
            .collect::<Vec<_>>()
            .join(\"\\n\")
    }

    pub(super) fn prefix_lines_with_output(input: &str) -> String {
        prefix_lines_with(OUTPUT_PREFIX, input)
    }

    pub(super) fn prefix_lines_with_warning(input: &str) -> String {
        prefix_lines_with(WARNING_PREFIX, input)
    }

    pub(super) fn prefix_lines_with_error(input: &str) -> String {
        prefix_lines_with(ERROR_PREFIX, input)
    }

    macro_rules! write_ln {
        ($target:expr, $($ts:tt)*) => {
            $target.push_str(&format!( $($ts)* ));
            $target.push_str(\"\n\");
        };
    }
    pub(super) use write_ln;

    macro_rules! stringify_if_needed {
        ($t:literal) => { $t };
        ($t:expr) => { stringify!($t) };
    }
    pub(super) use stringify_if_needed;

    // This is defined only to prevent compilation errors. The real expansion is done by the
    // `function` attribute macro.
    macro_rules! output {
        ($($ts:tt)*) => {};
    }
    pub(super) use output;

    // This is defined only to prevent compilation errors. The real expansion is done by the
    // `function` attribute macro.
    macro_rules! quote {
        ($($ts:tt)*) => { String::new() };
    }
    pub(super) use quote;
";
277
/// Extra helper functions compiled into every generated project, emitted verbatim into the
/// generated `mod crabtime`. `sum_combinations(n)` returns every ordered sequence of positive
/// integers (of length >= 2) summing to `n`.
///
/// Fix: `#[allow(clippy)]` is not a valid lint name (it triggers `unknown_lints`); the correct
/// tool-lint group is `clippy::all`, matching the attribute already used by `gen_prelude`.
const PRELUDE_ADDONS: &str = "
    #[allow(clippy::all)]
    pub fn sum_combinations(n: usize) -> Vec<Vec<usize>> {
        let mut result = Vec::new();

        fn generate(n: usize, current: Vec<usize>, result: &mut Vec<Vec<usize>>) {
            if n == 0 {
                if current.len() > 1 {
                    result.push(current);
                }
                return;
            }

            for i in 1..=n {
                let mut next = current.clone();
                next.push(i);
                generate(n - i, next, result);
            }
        }

        generate(n, vec![], &mut result);
        result
    }
";
302
303// =============
304// === Paths ===
305// =============
306
/// Filesystem locations used while generating, building, and running the evaluation project.
#[derive(Debug)]
struct Paths {
    /// Root of the workspace the macro is expanded in (the parent of the `target` directory).
    workspace: PathBuf,
    /// Directory the generated evaluation project is written to.
    output_dir: PathBuf,
    /// Path derived from the macro call site (nightly only; requires `proc_macro_span`).
    #[cfg(nightly)]
    call_site_file: PathBuf,
    /// Path of the nearest `Cargo.toml` above the call site (nightly only).
    #[cfg(nightly)]
    crate_config: PathBuf,
    /// Whether we should remove `output_dir` after usage.
    one_shot_output_dir: bool,
    /// None if we are on stable.
    cargo_toml_path: Option<CargoConfigPaths>,
}
320
impl Paths {
    /// Resolve all paths on nightly, where the macro call-site file is known. The project name
    /// is either a hash of the macro input (`content_base_name`) or the macro's own name.
    #[cfg(nightly)]
    fn new(options: MacroOptions, macro_name: &str, input_str: &str) -> Result<Self> {
        let name = if options.content_base_name {
            Self::project_name_from_input(input_str)
        } else {
            macro_name.to_string()
        };
        let call_site_path = Self::get_call_site_rel();
        // Generated project lives under `target/.../build/crabtime/<call-site>/<name>`.
        let output_dir = Self::get_output_root()?.join(&call_site_path).join(&name);
        let target = path::find_parent(&output_dir, "target")?;
        let workspace = path::parent(target)?.to_path_buf();
        let call_site_file = workspace.join(&call_site_path);
        let cargo_toml_path = find_cargo_configs(&call_site_file)?;
        let crate_config = cargo_toml_path.crate_config.clone();
        let cargo_toml_path = Some(cargo_toml_path);
        let one_shot_output_dir = false;
        let out = Self {
            workspace,
            output_dir,
            crate_config,
            call_site_file,
            cargo_toml_path,
            one_shot_output_dir
        }.init(options);
        Ok(out)
    }

    /// Resolve all paths on stable, where no call-site information is available: the project
    /// name is always derived from a hash of the macro input.
    #[cfg(not(nightly))]
    fn new(options: MacroOptions, _macro_name: &str, input_str: &str) -> Result<Self> {
        let name = Self::project_name_from_input(input_str);
        let output_dir = Self::get_output_root()?.join(&name);
        let target = path::find_parent(&output_dir, "target")?;
        let workspace = path::parent(target)?.to_path_buf();
        let cargo_toml_path = None;
        let one_shot_output_dir = false;
        Ok(Self { workspace, output_dir, cargo_toml_path, one_shot_output_dir }.init(options))
    }

    /// Decide whether the generated project directory is one-shot (removed after use) and, if
    /// so, make it unique per process.
    fn init(mut self, options: MacroOptions) -> Self {
        // We cache projects on nightly by default. On stable, the project name is based on the
        // input code.
        self.one_shot_output_dir = cfg!(not(nightly)) || !options.cache;
        // If we are removing projects after usage, it is possible that multiple processes try to
        // expand the same macro in parallel – e.g. user's watch script and IDE checker. In such a
        // case, one of the processes might end while another is still running. This can cause
        // the other process to fail if it still needs project access on disk.
        if self.one_shot_output_dir {
            let pid = std::process::id();
            self.output_dir = self.output_dir.join(format!("pid_{pid}"));
        }
        self
    }

    /// Path of the macro call-site file relative to the project root, with its extension
    /// stripped so it can be used as a directory name.
    #[cfg(nightly)]
    fn get_call_site_rel() -> PathBuf {
        // Sometimes `proc_macro::Span::call_site()` returns a relative path, sometimes an absolute
        // one. In the latter case, we need to discover the relative part from the project root.
        let mut call_site_path = proc_macro::Span::call_site()
            .local_file()
            .unwrap_or_default();
        call_site_path.set_extension("");
        if call_site_path.is_relative() {
            return call_site_path.to_path_buf();
        }

        // We strip the common prefix of `proc_macro::Span::call_site()` and `OUT_DIR`.
        let mut common_prefix_len = 0;
        let mut common_prefix = PathBuf::new();
        let mut components1 = call_site_path.components();
        let mut components2 = Path::new(OUT_DIR).components();
        while let (Some(part1), Some(part2)) = (components1.next(), components2.next()) {
            if part1 == part2 {
                common_prefix_len += 1;
                common_prefix.push(part1);
            } else {
                break;
            }
        }

        // We don't want to strip small prefix, like `/` or `C:\\`. E.g. when running tests, cargo
        // generates projects in `var/folders/wm/...`
        if common_prefix_len <= 3 {
            return call_site_path.to_path_buf();
        }

        match call_site_path.strip_prefix(&common_prefix) {
            Ok(relative_path) => relative_path.to_path_buf(),
            Err(_) => call_site_path.to_path_buf(), // Should not happen
        }
    }

    /// Deterministic project name derived from a 64-bit hash of the macro input, so identical
    /// inputs map to the same cached project.
    fn project_name_from_input(input_str: &str) -> String {
        let mut hasher = DefaultHasher::new();
        input_str.hash(&mut hasher);
        format!("project_{:016x}", hasher.finish())
    }

    /// Root directory for all generated projects: the `crabtime` folder inside the enclosing
    /// `build` directory of `OUT_DIR`.
    fn get_output_root() -> Result<PathBuf> {
        let crate_out_str = OUT_DIR;
        let crate_out = Path::new(&crate_out_str);
        let build_dir = path::find_parent(crate_out, "build")?;
        Ok(build_dir.join(CRATE))
    }

    /// Run `f` with the output directory guaranteed to exist, removing the directory afterwards
    /// when it is one-shot. Removal errors are deliberately ignored (best effort).
    fn with_output_dir<T>(&self, f: impl FnOnce(&PathBuf) -> Result<T>) -> Result<T> {
        if !self.output_dir.exists() {
            fs::create_dir_all(&self.output_dir).context("Failed to create project directory.")?;
        }
        let out = f(&self.output_dir);
        if self.one_shot_output_dir {
            fs::remove_dir_all(&self.output_dir).ok();
        }
        out
    }
}
437
438// ========================
439// === CargoConfigPaths ===
440// ========================
441
/// The `Cargo.toml` manifests relevant to the macro call site.
#[derive(Debug)]
struct CargoConfigPaths {
    /// The nearest `Cargo.toml` above the call-site file.
    crate_config: PathBuf,
    /// An ancestor `Cargo.toml` declaring `[workspace]`, if any.
    workspace_config: Option<PathBuf>,
}
447
/// Walk up from `path`, collecting every `Cargo.toml` found in ancestor directories. The closest
/// one becomes the crate config; a workspace config is selected among the remaining ancestors.
///
/// NOTE(review): despite the comment below, the `rev()` loop overwrites `workspace_config` on
/// every workspace hit, so the final value is the workspace manifest *closest* to the crate,
/// not the top-level one — confirm which behavior is intended.
fn find_cargo_configs(path: &Path) -> Result<CargoConfigPaths> {
    let mut current_path = path.to_path_buf();
    let mut candidates = Vec::new();
    // Candidates are collected nearest-first (from `path` up to the filesystem root).
    loop {
        let candidate = current_path.join("Cargo.toml");
        if candidate.is_file() { candidates.push(candidate) }
        if !current_path.pop() { break }
    }
    let Some((crate_config, other_candidates)) = candidates.split_first() else {
        return err!("No 'Cargo.toml' files found in parent directories of '{}'.", path.display())
    };

    // Cargo uses the top-level workspace only.
    let mut workspace_config = None;
    for candidate in other_candidates.iter().rev() {
        if CargoConfig::is_workspace(candidate)? {
            workspace_config = Some(candidate.clone());
        }
    }
    let crate_config = crate_config.clone();
    Ok(CargoConfigPaths { crate_config, workspace_config })
}
470
471// ===================
472// === CargoConfig ===
473// ===================
474
/// A single dependency entry destined for the generated project's `Cargo.toml`.
#[derive(Debug)]
struct Dependency {
    /// Crate name (the key in `[dependencies]`).
    label: String,
    /// The dependency spec rendered as a string (version or inline table).
    tokens_str: String,
    /// Source tokens the spec came from, when declared inline via an attribute; used for
    /// span-accurate diagnostics on nightly.
    token_range: Option<TokenRange>,
}
481
482impl Dependency {
483    fn new(label: String, tokens_str: String, token_range: Option<TokenRange>) -> Self {
484        Self { label, tokens_str, token_range }
485    }
486
487    fn to_config_string(&self) -> String {
488        format!("{} = {}", self.label, self.tokens_str)
489    }
490
491    #[cfg(nightly)]
492    fn span(&self) -> Span {
493        self.token_range.as_ref().map_or(Span::call_site(), |t| t.span())
494    }
495}
496
/// In-memory model of the generated project's `Cargo.toml`, filled from the host crate's
/// manifest and/or inline macro attributes.
#[derive(Debug, Default)]
struct CargoConfig {
    /// Rust edition; falls back to `DEFAULT_EDITION` when `None`.
    edition: Option<String>,
    /// Feature resolver version; falls back to `DEFAULT_RESOLVER` when `None`.
    resolver: Option<String>,
    /// Dependencies of the generated project.
    dependencies: Vec<Dependency>,
    /// Lint configuration copied from the host crate.
    lints: LintsConfig,
}
504
/// Pre-rendered bodies of the `[lints.clippy]` and `[lints.rust]` tables (as `key = value`
/// lines) for the generated `Cargo.toml`.
#[derive(Debug, Default)]
struct LintsConfig {
    // Rendered body of `[lints.clippy]`.
    clippy: String,
    // Rendered body of `[lints.rust]`.
    rust: String,
}
510
511impl CargoConfig {
512    fn contains_dependency(&self, name: &str) -> bool {
513        self.dependencies.iter().any(|d| d.label == name)
514    }
515
516    fn print(&self) -> String {
517        let edition = self.edition.as_ref().map_or(DEFAULT_EDITION, |t| t.as_str());
518        let resolver = self.resolver.as_ref().map_or(DEFAULT_RESOLVER, |t| t.as_str());
519        let lints_rust = &self.lints.rust;
520        let lints_clippy = &self.lints.clippy;
521        let dependencies = self.dependencies.iter()
522            .map(|t| t.to_config_string())
523            .collect::<Vec<_>>()
524            .join("\n");
525        let out = format!("
526            [workspace]
527            [package]
528            name     = \"eval_project\"
529            version  = \"1.0.0\"
530            edition  = \"{edition}\"
531            resolver = \"{resolver}\"
532
533            [dependencies]
534            {dependencies}
535
536            [lints.rust]
537            {lints_rust}
538
539            [lints.clippy]
540            {lints_clippy}
541        ");
542        out
543    }
544
545    fn is_workspace(path: &Path) -> Result<bool> {
546        let cargo_toml_content = fs::read_to_string(path)?;
547        let parsed: toml::Value = toml::from_str(&cargo_toml_content)?;
548        Ok(parsed.get("workspace").is_some())
549    }
550
551    fn is_workspace_table(value: &toml::Value) -> bool {
552        if let toml::Value::Table(table) = value {
553            if let Some(toml::Value::Boolean(true)) = table.get("workspace") {
554                return true;
555            }
556        }
557        false
558    }
559
560    fn get_package_edition(table: &toml::Table) -> Option<&str> {
561        table.get("package")
562            .and_then(toml::Value::as_table)
563            .and_then(|pkg_table| pkg_table.get("edition"))
564            .and_then(toml::Value::as_str)
565    }
566
567    fn get_package_version<'t>(table: &'t toml::Table, name: &str) -> Option<&'t toml::Value> {
568        table.get("dependencies")
569            .and_then(toml::Value::as_table)
570            .and_then(|pkg_table| pkg_table.get(name))
571            .filter(|v| v.is_str())
572    }
573
574    fn print_lints(lints: &toml::Value) -> String {
575        lints.as_table().map(|t| {
576            t.iter().map(|(k, v)| format!("{k} = {v}")).collect::<Vec<_>>().join("\n")
577        }).unwrap_or_default()
578    }
579
580    fn fill_from_cargo_toml(&mut self, paths: &CargoConfigPaths) -> Result {
581        use toml::Value;
582        let config_str = fs::read_to_string(&paths.crate_config)?;
583        let workspace_str = paths.workspace_config.as_ref().map(fs::read_to_string).transpose()?;
584        let config = toml::from_str::<Value>(&config_str)?;
585        let workspace_config_opt = workspace_str.map(|t| toml::from_str::<Value>(&t)).transpose()?;
586        let workspace_config_table_opt = workspace_config_opt.as_ref()
587            .and_then(|t| t.get("workspace")).and_then(|v| v.as_table());
588        let dependencies = config
589            .get("build-dependencies")
590            .and_then(|v| v.as_table())
591            .map_or(vec![], |t| t.iter().filter_map(|(k, v)|
592                if !Self::is_workspace_table(v) {
593                    Some(Dependency::new(k.clone(), v.to_string(), None))
594                } else {
595                    workspace_config_table_opt
596                        .and_then(|t| Self::get_package_version(t, k))
597                        .map(|t| Dependency::new(k.clone(), t.to_string(), None))
598                }
599            ).collect());
600        let edition = config
601            .get("package")
602            .and_then(|v| v.as_table())
603            .and_then(|table| table.get("edition"))
604            .and_then(|v| if !Self::is_workspace_table(v) { v.as_str() } else {
605                workspace_config_table_opt.and_then(Self::get_package_edition)
606            })
607            .unwrap_or("2024");
608        let lints = config.get("lints").map(|v| {
609            let table_opt = if Self::is_workspace_table(v) {
610                workspace_config_table_opt
611            } else {
612                v.as_table()
613            };
614            let ws_lints = table_opt
615                .and_then(|t| t.get("lints"))
616                .and_then(|t| t.as_table());
617            let clippy = ws_lints.and_then(|t| t.get("clippy"))
618                .map(Self::print_lints).unwrap_or_default();
619            let rust = ws_lints.and_then(|t| t.get("rust"))
620                .map(Self::print_lints).unwrap_or_default();
621            LintsConfig {clippy, rust}
622        });
623        self.dependencies.extend(dependencies);
624        self.edition = Some(edition.to_string());
625        self.lints = lints.unwrap_or_default();
626        Ok(())
627    }
628
629    fn extract_inline_attributes(&mut self, attributes: Vec<syn::Attribute>) -> Result<String> {
630        let mut other_attributes = Vec::with_capacity(attributes.len());
631        let mut new_dependencies = vec![];
632        for attr in attributes {
633            let tokens = attr.parse_args::<TokenStream>().context("Failed to parse attributes")?;
634            let tokens_str = tokens.to_string().replace(" ", "");
635            let token_range = tokens.clone().into_iter().next()
636                .zip(tokens.clone().into_iter().last())
637                .map(|(first, last)| TokenRange::new(first, last));
638            if attr.path().is_ident("dependency") {
639                let (key, value) = tokens_str.split_once('=').context(||
640                    error!("Incorrect dependency '{tokens_str}'")
641                )?;
642                let key = key.to_string();
643                let value = value.to_string();
644                new_dependencies.push(Dependency::new(key, value, token_range));
645            } else if attr.path().is_ident("edition") {
646                self.edition = Some(tokens_str);
647            } else {
648                other_attributes.push(attr.to_token_stream().to_string());
649            }
650        }
651        #[cfg(nightly)]
652        for dependency in &new_dependencies {
653            warning!(dependency.span(),
654                "When using the nightly Rust channel, dependencies should be specified in the \
655                [build-dependencies] section of your Cargo.toml file."
656            ).emit();
657        }
658        self.dependencies.extend(new_dependencies);
659        Ok(other_attributes.join("\n"))
660    }
661}
662
663fn create_project_skeleton(project_dir: &Path, cfg: CargoConfig, main: &str) -> Result<bool> {
664    let src_dir = project_dir.join("src");
665    let existed = src_dir.exists();
666    if !existed {
667        fs::create_dir_all(&src_dir).context("Failed to create src directory.")?;
668    }
669
670    let cargo_toml = project_dir.join("Cargo.toml");
671    let cargo_toml_content = cfg.print();
672    fs::write(&cargo_toml, cargo_toml_content).context("Failed to write Cargo.toml.")?;
673
674    let main_rs = src_dir.join("main.rs");
675    let mut file = File::create(&main_rs).context("Failed to create main.rs")?;
676    file.write_all(main.as_bytes()).context("Failed to write main.rs")?;
677    Ok(existed)
678}
679
680fn get_host_target() -> Result<String> {
681    let output = Command::new("rustc")
682        .arg("-vV")
683        .stdout(std::process::Stdio::piped())
684        .output()
685        .context("Failed to run rustc")?;
686
687    let stdout = String::from_utf8_lossy(&output.stdout);
688    for line in stdout.lines() {
689        if let Some(stripped) = line.strip_prefix("host:") {
690            return Ok(stripped.trim().to_string())
691        }
692    }
693    err!("Could not determine host target from rustc")
694}
695
/// Build and run the generated project with `cargo run`, returning its captured stdout.
///
/// On failure, the compiler's stderr is forwarded to this process's stderr; if the generated
/// binary panicked, the panic message is re-raised as a panic of this proc macro so it surfaces
/// at the macro call site.
fn run_cargo_project(project_dir: &PathBuf) -> Result<String> {
    // In case the project uses .cargo/config.toml, we need to explicitly revert target to native.
    let host_target = get_host_target()?;
    let output = Command::new("cargo")
        .arg("run")
        .arg("--target")
        .arg(&host_target)
        .current_dir(project_dir)
        .output()
        .context("Failed to execute cargo run")?;

    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        // TODO: Parse it and map gen code spans to call site spans.
        eprintln!("{stderr}");
        // Forward a runtime panic of the generated binary as our own panic, preserving its
        // message from the `thread 'main' panicked` marker onwards.
        #[allow(clippy::panic)]
        if let Some(index) = stderr.find("thread 'main' panicked") {
            panic!("{}", &stderr[index..]);
        }
        err!("Compilation of the generated code failed.")
    } else {
        Ok(String::from_utf8_lossy(&output.stdout).to_string())
    }
}
720
721// ====================
722// === Output Macro ===
723// ====================
724
725/// Find and expand the `output!` macro in the input `TokenStream`. After this lib stabilizes, this
726/// should be rewritten to standard macro and imported by the generated code.
727fn expand_expand_macro(input: TokenStream) -> TokenStream {
728    let tokens: Vec<TokenTree> = input.into_iter().collect();
729    let mut output = TokenStream::new();
730    let mut i = 0;
731    while i < tokens.len() {
732        if let TokenTree::Ident(ref ident) = tokens[i] {
733            if *ident == "expand" && i + 1 < tokens.len() {
734                if let TokenTree::Punct(ref excl) = tokens[i + 1] {
735                    if excl.as_char() == '!' && i + 2 < tokens.len() {
736                        if let TokenTree::Group(ref group) = tokens[i + 2] {
737                            output.extend(group.stream());
738                            i += 3;
739                            continue;
740                        }
741                    }
742                }
743            }
744        }
745        match &tokens[i] {
746            TokenTree::Group(group) => {
747                let new_stream = expand_expand_macro(group.stream());
748                let new_group = TokenTree::Group(
749                    proc_macro2::Group::new(group.delimiter(), new_stream)
750                );
751                output.extend(std::iter::once(new_group));
752            }
753            _ => {
754                output.extend(std::iter::once(tokens[i].clone()));
755            }
756        }
757        i += 1;
758    }
759    output
760}
761
/// Find every `crabtime::<name>! ( ... )` invocation in the input token stream and replace it
/// with `f` applied to the (recursively rewritten) macro body. All other tokens pass through
/// unchanged; nested groups are traversed.
fn expand_builtin_macro(
    name: &str,
    input: TokenStream,
    f: &impl Fn(TokenStream) -> TokenStream
) -> TokenStream {
    let tokens: Vec<TokenTree> = input.into_iter().collect();
    let mut output = TokenStream::new();
    let len = tokens.len();
    let mut i = 0;

    while i < len {
        // Check for the pattern: crabtime :: output ! ( group )
        // Six tokens total: ident, ':', ':', ident, '!', group — hence the `i + 5 < len` guard.
        if i + 5 < len {
            if let TokenTree::Ident(ref ident) = tokens[i] {
                if ident == GEN_MOD {
                    if let TokenTree::Punct(ref colon1) = tokens[i + 1] {
                        if colon1.as_char() == ':' {
                            if let TokenTree::Punct(ref colon2) = tokens[i + 2] {
                                if colon2.as_char() == ':' {
                                    if let TokenTree::Ident(ref out_ident) = tokens[i + 3] {
                                        if out_ident == name {
                                            if let TokenTree::Punct(ref excl) = tokens[i + 4] {
                                                if excl.as_char() == '!' {
                                                    if let TokenTree::Group(ref group) = tokens[i + 5] {
                                                        // Rewrite nested invocations inside the body
                                                        // first, then apply the caller's transform.
                                                        let inner_rewritten = expand_builtin_macro(name, group.stream(), f);
                                                        let new_tokens = f(inner_rewritten);
                                                        output.extend(new_tokens);
                                                        i += 6;
                                                        continue;
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }

        // Recurse into groups or pass through token.
        match &tokens[i] {
            TokenTree::Group(group) => {
                let new_stream = expand_builtin_macro(name, group.stream(), f);
                // Rebuild group with same delimiter.
                let new_group = TokenTree::Group(proc_macro2::Group::new(group.delimiter(), new_stream));
                output.extend(std::iter::once(new_group));
            }
            token => output.extend(std::iter::once(token.clone())),
        }
        i += 1;
    }
    output
}
818
819fn expand_output_macro(input: TokenStream) -> TokenStream {
820    let gen_mod = syn::Ident::new(GEN_MOD, Span::call_site());
821    expand_builtin_macro("output", input, &|inner_rewritten| {
822        let content_str = print_tokens(&inner_rewritten);
823        let lit = syn::LitStr::new(&content_str, Span::call_site());
824        quote! {
825            #gen_mod::write_ln!(__output_buffer__, #lit);
826        }
827    })
828}
829
830fn expand_quote_macro(input: TokenStream) -> TokenStream {
831    expand_builtin_macro("quote", input, &|inner_rewritten| {
832        let content_str = print_tokens(&inner_rewritten);
833        let lit = syn::LitStr::new(&content_str, Span::call_site());
834        quote! { format!(#lit) }
835    })
836}
837
838// =============
839// === Print ===
840// =============
841
/// Result of `print_tokens_internal`: the rendered source text plus the source positions of the
/// first and last token (when known), used by the caller to decide spacing.
#[derive(Debug)]
struct PrintOutput {
    /// The rendered token text.
    output: String,
    /// Start position of the first token, if any tokens were present.
    start_token: Option<LineColumn>,
    /// End position of the last token, if any tokens were present.
    end_token: Option<LineColumn>,
}
848
849/// Prints the token stream as a string ready to be used by the format macro. It is very careful
850/// where spaces are inserted. In particular, spaces are not inserted around `{` and `}` tokens if
851/// they were not present in the original token stream. It is fine-tuned to work in different IDEs,
852/// such as `RustRover`.
853fn print_tokens(tokens: &TokenStream) -> String {
854    // Replaces `{` with `{{` and vice versa.
855    print_tokens_internal(tokens).output
856        .replace("{", "{{")
857        .replace("}", "}}")
858        .replace("{{%%%{{%%%{{", "{{ {")
859        .replace("}}%%%}}%%%}}", "} }}")
860        .replace("{{%%%{{", "{")
861        .replace("}}%%%}}", "}")
862}
863
/// Recursive worker for [`print_tokens`]. Renders a token stream to text, appending a space
/// after each token unless suppressed, while tracking the source positions of the first and
/// last tokens so callers can tell whether adjacent tokens were originally adjacent in the
/// source.
fn print_tokens_internal(tokens: &TokenStream) -> PrintOutput {
    let token_vec: Vec<TokenTree> = tokens.clone().into_iter().collect();
    let mut output = String::new();
    let mut first_token_start = None;
    let mut prev_token_end: Option<LineColumn> = None;
    let mut prev_token_was_brace = false;
    for (i, token) in token_vec.iter().enumerate() {
        let mut add_space = true;
        let mut token_start = token.span().start();
        let mut token_end = token.span().end();
        let mut is_brace = false;
        let mut is_keyword = false;
        let token_str = match token {
            TokenTree::Group(g) => {
                // Render the group's content recursively, then drop the trailing space that
                // was appended after its last token.
                let content = print_tokens_internal(&g.stream());
                let mut content_str = content.output;
                content_str.pop();
                let (open, close) = match g.delimiter() {
                    Delimiter::Brace => {
                        is_brace = true;
                        // Mark brace groups whose content itself begins and ends with braces;
                        // the `%%%` markers are resolved by `print_tokens`' replace chain.
                        if content_str.starts_with("{") && content_str.ends_with("}") {
                            ("{%%%", "%%%}")
                        } else {
                            ("{", "}")
                        }
                    },
                    Delimiter::Parenthesis => ("(", ")"),
                    Delimiter::Bracket => ("[", "]"),
                    _ => ("", ""),
                };

                // Derive the group's span from its first/last inner tokens, widened by one
                // column on each side to cover the delimiters.
                // NOTE(review): presumably compensates for unreliable group spans in
                // IntelliJ/RustRover — see the `KEYWORDS` comment at the top of the file.
                if let Some(content_first_token_start) = content.start_token {
                    token_start.line = content_first_token_start.line;
                    if content_first_token_start.column > 0 {
                        token_start.column = content_first_token_start.column - 1;
                    }
                }
                if let Some(content_end) = content.end_token {
                    token_end.line = content_end.line;
                    token_end.column = content_end.column + 1;
                }

                format!("{open}{content_str}{close}")
            }
            TokenTree::Ident(ident) => {
                let str = ident.to_string();
                is_keyword = KEYWORDS.contains(&str.as_str());
                str
            },
            TokenTree::Literal(lit) => lit.to_string(),
            TokenTree::Punct(punct) => {
                // Jointly-spaced punctuation (e.g. the first char of `::`) must not be
                // followed by a space.
                if punct.spacing() == proc_macro2::Spacing::Joint {
                    add_space = false;
                }
                punct.as_char().to_string()
            },
        };
        debug!("{i}: [{token_start:?}-{token_end:?}] [{prev_token_end:?}]: {token}");

        // check if the punct has set flags to have no spaces
        // If this or the previous token is a brace group and the recorded spans overlap on
        // the same line, the tokens were glued in the source — undo the inserted space.
        if is_brace || prev_token_was_brace {
            if let Some(prev_token_end) = prev_token_end {
                if prev_token_end.line == token_start.line
                && prev_token_end.column >= token_start.column
                && output.ends_with(" ") {
                    output.pop();
                }
            }
        }
        prev_token_was_brace = is_brace;

        // Pushing a space before and after keywords is for IntelliJ only.
        // Their token spans are invalid.
        if is_keyword { output.push(' '); }
        output.push_str(&token_str);
        if add_space {
            output.push(' ');
        }
        if is_keyword { output.push(' '); }

        first_token_start.get_or_insert(token_start);
        prev_token_end = Some(token_end);
    }
    PrintOutput {
        output,
        start_token: first_token_start,
        end_token: prev_token_end,
    }
}
953
954// ==================
955// === Eval Macro ===
956// ==================
957
/// How the eval function's input is captured by the generated `macro_rules!` arm.
enum Args {
    // A single `input: TokenStream` parameter — captures everything as `$($ident:tt)*`.
    TokenStream { ident: syn::Ident },
    // An explicit `macro_rules!`-style pattern (possibly empty).
    Pattern { str: TokenStream }
}
962
963impl Args {
964    fn pattern(&self) -> TokenStream {
965        match self {
966            Self::TokenStream { ident } => quote! { $($#ident:tt)* },
967            Self::Pattern { str } => str.clone(),
968        }
969    }
970
971    fn setup(&self) -> TokenStream {
972        if let Self::TokenStream { ident } = self {
973            quote! {
974                use proc_macro2::TokenStream;
975                let #ident: TokenStream = stringify!($($#ident)*).parse().unwrap();
976            }
977        } else {
978            Default::default()
979        }
980    }
981}
982
/// Derives the `macro_rules!` capture pattern and the argument-decoding code for the eval
/// function's parameter list. Zero arguments yield an empty pattern; a single
/// `pattern!(..)`/`TokenStream` argument is handled by the specialized parsers; otherwise
/// every typed `name: Type` parameter contributes a fragment via [`parse_arg_type`].
fn parse_args(
    args: &syn::punctuated::Punctuated<syn::FnArg, syn::token::Comma>
) -> Option<(Args, TokenStream)> {
    let Some(arg) = args.first() else {
        return Some((Args::Pattern { str: Default::default() }, TokenStream::new()))
    };

    // First try the specialized parsers, then fallback to our generic type handling.
    parse_args_for_pattern(arg)
        .or_else(|| parse_args_for_token_stream(arg))
        .map(|t| (t, TokenStream::new()))
        .or_else(|| {
            let mut is_first = true;
            let mut pat = quote!{};
            let mut code = TokenStream::new();

            for arg in args {
                if !is_first {
                    pat = quote! {#pat, };
                }
                is_first = false;
                if let syn::FnArg::Typed(pat_type) = arg {

                    if let syn::Pat::Ident(name) = &*pat_type.pat {
                        let name_str = name.ident.to_string();
                        let ty = &*pat_type.ty;
                        // Each parameter becomes a `let name: ty = <decoded value>;` binding
                        // in the generated body.
                        code = quote! {
                            #code
                            let #name: #ty =
                        };
                        if let Some((param_pat, param_code)) = parse_arg_type(&name_str, ty) {
                            pat = quote! {#pat #param_pat};
                            code = quote! {#code #param_code};
                        }
                        // NOTE(review): if `parse_arg_type` returned `None` the binding is
                        // left without an initializer — presumably surfaces as a compile
                        // error in the generated project; confirm intended.
                        code = quote! {#code;};
                    }
                }
            }
            // Allow an optional trailing comma at the macro call site.
            pat = quote! {#pat $(,)?};
            Some((Args::Pattern { str: pat }, code))
        })
}
1025
1026/// Returns (pattern, code) for a given type. It supports both vector types and non‑vector types.
1027#[inline(always)]
1028fn parse_arg_type(pfx: &str, ty: &syn::Type) -> Option<(TokenStream, TokenStream)> {
1029    if let syn::Type::Path(type_path) = ty {
1030        let last_segment = type_path.path.segments.last()?;
1031        if last_segment.ident == "Vec" {
1032            if let syn::PathArguments::AngleBracketed(angle_bracketed) = &last_segment.arguments {
1033                let generic_arg = angle_bracketed.args.first()?;
1034                if let syn::GenericArgument::Type(inner_ty) = generic_arg {
1035                    if let Some((inner_pat, inner_code)) = parse_inner_type(pfx, inner_ty) {
1036                        let pat = quote! {[$(#inner_pat),*$(,)?]};
1037                        let code = quote! { [$(#inner_code),*].into_iter().collect() };
1038                        return Some((pat, code));
1039                    }
1040                }
1041            }
1042        } else {
1043            return parse_inner_type(pfx, ty);
1044        }
1045    }
1046    None
1047}
1048
1049#[inline(always)]
1050fn parse_inner_type(pfx: &str, ty: &syn::Type) -> Option<(TokenStream, TokenStream)> {
1051    let arg_str = format!("{pfx}_arg");
1052    let arg_ident = syn::Ident::new(&arg_str, Span::call_site());
1053    let arg = quote! {$#arg_ident};
1054    match ty {
1055        syn::Type::Reference(ty_ref) => {
1056            if let syn::Type::Path(inner_path) = &*ty_ref.elem {
1057                if let Some(inner_seg) = inner_path.path.segments.last() {
1058                    if inner_seg.ident == "str" {
1059                        let pat = quote!{#arg:expr};
1060                        let code = quote!{crabtime::stringify_if_needed!{#arg}};
1061                        return Some((pat, code));
1062                    }
1063                }
1064            }
1065        },
1066        syn::Type::Path(inner_type_path) => {
1067            if let Some(inner_seg) = inner_type_path.path.segments.last() {
1068                let ident_str = inner_seg.ident.to_string();
1069                if ident_str == "String" {
1070                    let pat = quote!{#arg:expr};
1071                    let code = quote!{crabtime::stringify_if_needed!(#arg).to_string()};
1072                    return Some((pat, code));
1073                } else if matches!(ident_str.as_str(),
1074                    "usize" | "u8" | "u16" | "u32" | "u64" | "u128" |
1075                    "isize" | "i8" | "i16" | "i32" | "i64" | "i128"
1076                ) {
1077                    return Some((quote!{#arg:literal}, quote!{#arg}));
1078                }
1079            }
1080        },
1081        _ => {}
1082    }
1083    None
1084}
1085
1086fn parse_args_for_pattern(arg: &syn::FnArg) -> Option<Args> {
1087    let syn::FnArg::Typed(pat) = arg else { return None };
1088    let syn::Pat::Macro(m) = &*pat.pat else { return None };
1089    Some(Args::Pattern {str: m.mac.tokens.clone() })
1090}
1091
1092fn parse_args_for_token_stream(arg: &syn::FnArg) -> Option<Args> {
1093    let syn::FnArg::Typed(pat) = arg else { return None };
1094    let syn::Pat::Ident(pat_ident) = &*pat.pat else { return None };
1095    let tp = &pat.ty;
1096    let tp_str = quote! { #tp }.to_string();
1097    if tp_str != "TokenStream" { return None }
1098    let ident = pat_ident.ident.clone();
1099    Some(Args::TokenStream { ident })
1100}
1101
/// Error message shown when the eval function's signature matches no supported form.
/// NOTE(review): `parse_args` also accepts multiple plain typed arguments through its
/// generic fallback, so this message may understate the supported forms — confirm.
const WRONG_ARGS: &str = "Function should have zero or one argument, one of:
    - `pattern!(<pattern>): _`, where <pattern> is a `macro_rules!` pattern
    - `input: TokenStream`
";
1106
/// Assembles the full `main.rs` source of the generated project: the extracted attributes,
/// the prelude, the original body both as an escaped `SOURCE_CODE` string constant and as the
/// executable body of `main`, whose result is converted to code and printed with the
/// `OUTPUT_PREFIX` tagging handled by the `{GEN_MOD}` helpers.
fn prepare_input_code(
    attributes:&str,
    body: &str,
    output_tp: &str,
    include_token_stream_impl: bool,
    paths: &Paths
) -> String {
    // Escape the body so it can be embedded in a string literal verbatim.
    let body_esc: String = body.chars().flat_map(|c| c.escape_default()).collect();
    let prelude = gen_prelude(include_token_stream_impl, paths);
    format!("
        {attributes}
        {prelude}

        const SOURCE_CODE: &str = \"{body_esc}\";

        fn main() {{
            let mut __output_buffer__ = String::new();
            let result: {output_tp} = {{
                {body}
            }};
            __output_buffer__.push_str(&{GEN_MOD}::code_from_output(result));
            println!(\"{{}}\", {GEN_MOD}::prefix_lines_with_output(&__output_buffer__));
        }}",
    )
}
1132
1133fn parse_output(output: &str) -> String {
1134    let mut code = String::new();
1135    for line in output.split('\n') {
1136        let line_trimmed = line.trim();
1137        if let Some(stripped) = line_trimmed.strip_prefix(OUTPUT_PREFIX) {
1138            code.push_str(stripped);
1139            code.push('\n');
1140        } else if let Some(stripped) = line_trimmed.strip_prefix(Level::WARNING_PREFIX) {
1141            print_warning!("{}", stripped);
1142        } else if let Some(stripped) = line_trimmed.strip_prefix(Level::ERROR_PREFIX) {
1143            print_error!("{}", stripped);
1144        } else if !line_trimmed.is_empty() {
1145            println!("{line}");
1146        }
1147    }
1148    code
1149}
1150
/// Options parsed from the attribute argument list, e.g. `#[crabtime::function(cache = false)]`.
#[derive(Clone, Copy, Debug)]
struct MacroOptions {
    // Defaults to `true`; consumed by `Paths::new` — exact caching semantics defined there.
    pub cache: bool,
    // Defaults to `false`; consumed by `Paths::new` — presumably names the generated
    // project after a hash of its content; confirm against `Paths`.
    pub content_base_name: bool,
}
1156
1157impl Default for MacroOptions {
1158    fn default() -> Self {
1159        Self {
1160            cache: true,
1161            content_base_name: false,
1162        }
1163    }
1164}
1165
1166impl syn::parse::Parse for MacroOptions {
1167    fn parse(input: syn::parse::ParseStream) -> Result<Self, syn::Error> {
1168        let mut options = MacroOptions::default();
1169        while !input.is_empty() {
1170            let ident: syn::Ident = input.parse()?;
1171            let _eq_token: syn::Token![=] = input.parse()?;
1172            if ident == "cache" {
1173                let bool_lit: syn::LitBool = input.parse()?;
1174                options.cache = bool_lit.value;
1175            } else if ident == "content_base_name" {
1176                let bool_lit: syn::LitBool = input.parse()?;
1177                options.content_base_name = bool_lit.value;
1178            } else {
1179                return Err(syn::Error::new(ident.span(), "unknown attribute"));
1180            }
1181            if input.peek(syn::Token![,]) {
1182                let _comma: syn::Token![,] = input.parse()?;
1183            }
1184        }
1185        Ok(options)
1186    }
1187}
1188
1189// =====================
1190// === Eval Function ===
1191// =====================
1192
/// Attribute macro backend: builds and runs the annotated function as a standalone Cargo
/// project at expansion time and splices its printed output into the call site (see
/// [`eval_function_impl`]). Invoked by the code that `function`/`statement`/`expression`
/// generate.
#[proc_macro_attribute]
pub fn eval_function(
    attr: proc_macro::TokenStream,
    item: proc_macro::TokenStream
) -> proc_macro::TokenStream {
    // SAFETY: Used to panic in case of error.
    #[allow(clippy::unwrap_used)]
    eval_function_impl(attr, item).unwrap_or_compile_error().into()
}
1202
1203
/// Implementation of [`eval_function`]: expands the built-in macros in the function body,
/// generates a Cargo project from it, runs the project, and parses its stdout into the
/// token stream to splice in, prefixed with a doc comment carrying compilation stats.
fn eval_function_impl(
    attr: proc_macro::TokenStream,
    item: proc_macro::TokenStream
) -> Result<TokenStream> {
    let options = syn::parse::<MacroOptions>(attr)?;
    let start_time = get_current_time();
    let timer = std::time::Instant::now();

    let input_fn_ast = syn::parse::<syn::ItemFn>(item)?;
    let name = &input_fn_ast.sig.ident.to_string();
    let body_ast = &input_fn_ast.block.stmts;
    let output_tp = &input_fn_ast.sig.output;
    // Rewrite the built-in `quote!`/`output!` macros before stringifying the body.
    let input_str = expand_output_macro(expand_quote_macro(quote!{ #(#body_ast)* })).to_string();
    let paths = Paths::new(options, name, &input_str)?;

    let mut cfg = CargoConfig::default();
    if let Some(path) = &paths.cargo_toml_path {
        cfg.fill_from_cargo_toml(path)?;
    }
    let attributes = cfg.extract_inline_attributes(input_fn_ast.attrs)?;
    // Whether the host crate depends on proc-macro2; forwarded to the generated prelude.
    let include_token_stream_impl = cfg.contains_dependency("proc-macro2");
    // The declared return type as text; defaults to `()` when omitted.
    let output_tp_str = match output_tp {
        syn::ReturnType::Default => "()".to_string(),
        syn::ReturnType::Type(_, tp) => quote!{#tp}.to_string(),
    };
    let input_code = prepare_input_code(
        &attributes,
        &input_str,
        &output_tp_str,
        include_token_stream_impl,
        &paths
    );
    debug!("INPUT CODE: {input_code}");
    let mut output_dir_str = String::new();
    // Materialize the project skeleton and run it; `was_cached` reports skeleton reuse.
    let (output, was_cached) = paths.with_output_dir(|output_dir| {
        debug!("OUTPUT_DIR: {:?}", output_dir);
        output_dir_str = output_dir.to_string_lossy().to_string();
        let was_cached = create_project_skeleton(output_dir, cfg, &input_code)?;
        let output = run_cargo_project(output_dir)?;
        Ok((output, was_cached))
    })?;
    let output_code = parse_output(&output);
    let duration = format_duration(timer.elapsed());
    let options_doc = format!("{options:#?}").replace("\n", "\n/// ");
    // The stats block is emitted under `#[cfg(any())]` (never compiled) so it only serves
    // as human-readable documentation in the expansion.
    let macro_code = format!("
        /// # Compilation Stats
        /// Start: {start_time}
        /// Duration: {duration}
        /// Cached: {was_cached}
        /// Output Dir: {output_dir_str}
        /// Macro Options: {options_doc}
        #[cfg(any())]
        const _: () = ();
        {output_code}
    ");

    debug!("BODY: {macro_code}");
    let out: TokenStream = macro_code.parse()
        .map_err(|err| error!("{err:?}"))
        .context("Failed to parse generated code.")?;
    debug!("OUTPUT: {out} ");
    Ok(out)
}
1267
1268// ================
1269// === Function ===
1270// ================
1271
/// Attribute macro turning the annotated function into a `macro_rules!` macro whose body is
/// evaluated at build time via [`eval_function`] (see [`function_impl`]); no extra braces
/// are added around the expansion.
#[proc_macro_attribute]
pub fn function(
    attr: proc_macro::TokenStream,
    item: proc_macro::TokenStream
) -> proc_macro::TokenStream {
    // SAFETY: Used to panic in case of error.
    #[allow(clippy::unwrap_used)]
    function_impl(attr, item, false).unwrap_or_compile_error().into()
}
1281
/// Statement-position variant of [`function`].
/// NOTE(review): currently identical to `function` (`extra_braces = false`); if statements
/// were meant to expand differently, confirm the flag.
#[proc_macro_attribute]
pub fn statement(
    attr: proc_macro::TokenStream,
    item: proc_macro::TokenStream
) -> proc_macro::TokenStream {
    // SAFETY: Used to panic in case of error.
    #[allow(clippy::unwrap_used)]
    function_impl(attr, item, false).unwrap_or_compile_error().into()
}
1291
/// Expression-position variant of [`function`]: wraps the expansion in an extra pair of
/// braces (`extra_braces = true`) so it forms a block expression.
#[proc_macro_attribute]
pub fn expression(
    attr: proc_macro::TokenStream,
    item: proc_macro::TokenStream
) -> proc_macro::TokenStream {
    // SAFETY: Used to panic in case of error.
    #[allow(clippy::unwrap_used)]
    function_impl(attr, item, true).unwrap_or_compile_error().into()
}
1301
1302fn split_attrs(attrs: Vec<syn::Attribute>) -> (Vec<syn::Attribute>, Vec<syn::Attribute>) {
1303    let (outer, inner): (Vec<_>, Vec<_>) = attrs.into_iter().partition(|attr| {
1304        matches!(attr.style, syn::AttrStyle::Outer)
1305    });
1306    (outer, inner)
1307}
1308
/// Shared implementation of the `function`/`statement`/`expression` attributes: wraps the
/// annotated function in a `macro_rules!` definition whose single arm re-emits the function
/// under `#[crabtime::eval_function]`, so the body runs at the call site's expansion time.
/// `extra_braces` adds one more brace level around the expansion (expression position).
fn function_impl(
    attr_in: proc_macro::TokenStream,
    item: proc_macro::TokenStream,
    extra_braces: bool,
) -> Result<TokenStream> {
    let attr: TokenStream = attr_in.into();
    let input_fn_ast = syn::parse::<syn::ItemFn>(item)?;
    let name = &input_fn_ast.sig.ident;
    let args_ast = &input_fn_ast.sig.inputs;
    let body_ast = &input_fn_ast.block.stmts;
    let output_tp = &input_fn_ast.sig.output;

    // Derive the macro_rules! capture pattern and argument-decoding code from the signature.
    let (args, args_code) = parse_args(args_ast).context(|| error!(WRONG_ARGS))?;
    let args_pattern = args.pattern();
    let args_setup = args.setup();
    let body = quote!{ #(#body_ast)* };
    let input_str = expand_expand_macro(quote!{ #(#body_ast)* });

    // Check if the expansion engine is Rust Analyzer. If so, we need to generate
    // a code which looks like a function to enable type hints.
    let program_name = std::env::current_exe()?
        .file_name()
        .map_or_else(|| "unknown".into(), |s| s.to_string_lossy().into_owned());
    let rust_analyzer_hints = if program_name.contains("rust-analyzer") {
        quote! {
            mod __rust_analyzer_hints__ {
                #[test]
                #[ignore]
                fn mytest() {
                    #body
                }
            }
        }
    } else {
        quote! {}
    };

    // Outer attributes go on the macro_rules! item; inner ones move into the generated fn.
    let attrs_vec = input_fn_ast.attrs;
    let (outer_attrs_vec, inner_attrs_vec) = split_attrs(attrs_vec);

    let outer_attrs = quote!{ #(#outer_attrs_vec)* };
    let inner_attrs = quote!{ #(#inner_attrs_vec)* };
    let mut out = quote! {
        {
            #[crabtime::eval_function(#attr)]
            fn #name() #output_tp {
                #inner_attrs
                #args_setup
                #args_code
                #input_str
            }
        }
    };
    if extra_braces {
        out = quote! {
            { #out }
        };
    }
    out = quote! {
        #rust_analyzer_hints

        #outer_attrs
        macro_rules! #name {
            (#args_pattern) => #out;
        }
    };
    debug!("OUT: {out}");
    Ok(out)
}
1378
/// Formats a duration for the compilation-stats doc comment: `XmYs` for durations of one
/// minute or more, otherwise seconds with two decimal places (e.g. `1.50 s`).
fn format_duration(duration: std::time::Duration) -> String {
    let total_seconds = duration.as_secs();
    if total_seconds >= 60 {
        let minutes = total_seconds / 60;
        let seconds = total_seconds % 60;
        format!("{minutes}m {seconds}s")
    } else {
        // Truncate (not round) to hundredths: with the previous `{:.2}` rounding, e.g.
        // 59.999s displayed as the misleading "60.00 s" in the sub-minute branch.
        let hundredths = duration.subsec_millis() / 10;
        format!("{total_seconds}.{hundredths:02} s")
    }
}
1391
/// Current wall-clock time of day (derived from the Unix epoch, so effectively UTC) in the
/// form `HH:MM:SS (mmm)`.
fn get_current_time() -> String {
    // SAFETY-equivalent note: `duration_since(UNIX_EPOCH)` only fails if the clock is set
    // before 1970; treated as a bug.
    #[allow(clippy::unwrap_used)]
    let since_epoch = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .unwrap();
    let secs = since_epoch.as_secs();
    let millis = (since_epoch.as_millis() % 1000) as u32;
    let (hours, minutes, seconds) = ((secs / 3600) % 24, (secs / 60) % 60, secs % 60);
    format!("{hours:02}:{minutes:02}:{seconds:02} ({millis:03})")
}
1402}