prek 0.3.11

A fast Git hook manager written in Rust, designed as a drop-in alternative to pre-commit, reimagined.
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
use std::ops::Deref;
use std::path::Path;
use std::str::FromStr;

use anyhow::Result;
use bstr::ByteSlice;
use clap::Parser;

use crate::hook::Hook;
use crate::hooks::run_concurrent_file_checks;
use crate::run::CONCURRENCY;

// In Markdown, a line ending in two (or more) trailing spaces is a hard line
// break; such runs are reduced to exactly these two spaces instead of being
// stripped entirely.
const MARKDOWN_LINE_BREAK: &[u8] = b"  ";

/// A list of characters parsed from a single CLI string value.
///
/// `clap` cannot parse an argument like `--chars= \t` into `Vec<char>`
/// directly, so this newtype supplies a `FromStr` impl that splits the raw
/// string into its individual characters.
#[derive(Clone)]
struct Chars(Vec<char>);

impl FromStr for Chars {
    type Err = String;

    /// Infallible in practice: any string decomposes into its characters.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(Self(s.chars().collect()))
    }
}

impl Deref for Chars {
    type Target = Vec<char>;

    fn deref(&self) -> &Self::Target {
        let Self(inner) = self;
        inner
    }
}

/// CLI arguments accepted by the `trailing-whitespace` hook entry.
///
/// All of clap's built-in help/version machinery is disabled: this parser
/// only ever sees arguments forwarded from a hook definition, never an
/// interactive user.
#[derive(Parser)]
#[command(
    disable_help_subcommand = true,
    disable_version_flag = true,
    disable_help_flag = true
)]
struct Args {
    // Repeatable; each value may itself be a comma-separated list of
    // extensions to treat as Markdown.
    #[arg(long)]
    markdown_linebreak_ext: Vec<String>,
    // Parsed through the `Chars` newtype because `clap` cannot parse a value
    // like `--chars= \t` into `Vec<char>` correctly on its own.
    #[arg(long)]
    chars: Option<Chars>,
}

impl Args {
    /// Normalize every `--markdown-linebreak-ext` value into a lowercase,
    /// dot-prefixed extension (e.g. `md,MKD` -> `.md`, `.mkd`).
    ///
    /// # Errors
    ///
    /// Rejects any extension that still contains `.`, `/`, `\` or `:` after
    /// the leading dot, since those would never match a real file extension.
    fn markdown_exts(&self) -> Result<Vec<String>> {
        let mut markdown_exts = Vec::new();
        for raw in self
            .markdown_linebreak_ext
            .iter()
            .flat_map(|ext| ext.split(','))
        {
            let ext = format!(".{}", raw.trim_start_matches('.')).to_ascii_lowercase();
            if ext[1..]
                .chars()
                .any(|c| matches!(c, '.' | '/' | '\\' | ':'))
            {
                anyhow::bail!("bad `--markdown-linebreak-ext` argument '{ext}' (has . / \\ :)");
            }
            markdown_exts.push(ext);
        }
        Ok(markdown_exts)
    }

    /// True when `*` was passed, i.e. every file is treated as Markdown.
    fn force_markdown(&self) -> bool {
        self.markdown_linebreak_ext
            .iter()
            .any(|ext| ext == "*")
    }
}

/// Entry point for the `trailing-whitespace` hook.
///
/// Parses the hook's entry/args as CLI options, then runs `fix_file` over
/// every filename concurrently (bounded by `CONCURRENCY`), aggregating the
/// per-file exit codes and output.
///
/// # Errors
///
/// Fails if the hook entry cannot be split, the arguments do not parse, or
/// any file check fails.
pub(crate) async fn fix_trailing_whitespace(
    hook: &Hook,
    filenames: &[&Path],
) -> Result<(i32, Vec<u8>)> {
    let args = Args::try_parse_from(hook.entry.expect_direct().split()?.iter().chain(&hook.args))?;

    let force_markdown = args.force_markdown();
    let markdown_exts = args.markdown_exts()?;
    // Move the parsed character list out of `args` instead of cloning it
    // (the previous `chars.deref().to_owned()` copied the whole Vec).
    let chars = args.chars.map(|c| c.0).unwrap_or_default();

    run_concurrent_file_checks(filenames.iter().copied(), *CONCURRENCY, |filename| {
        fix_file(
            hook.project().relative_path(),
            filename,
            &chars,
            force_markdown,
            &markdown_exts,
        )
    })
    .await
}

/// Trim trailing whitespace from every line of `filename` (resolved against
/// `file_base`), rewriting the file in place only when something changed.
///
/// When the file counts as Markdown (`force_markdown`, or its extension is in
/// `markdown_exts`), a trailing run of two-or-more whitespace characters on a
/// non-blank line is reduced to exactly two spaces — a Markdown hard line
/// break — rather than removed.
///
/// `chars` is the set of characters to strip from line ends; when empty,
/// ASCII whitespace is trimmed instead.
///
/// Returns `(1, "Fixing <name>\n")` when the file was modified and
/// `(0, [])` otherwise.
async fn fix_file(
    file_base: &Path,
    filename: &Path,
    chars: &[char],
    force_markdown: bool,
    markdown_exts: &[String],
) -> Result<(i32, Vec<u8>)> {
    // `filename` is already a `&Path`, so no `Path::new` re-wrap is needed.
    // Extension comparison is case-insensitive: `markdown_exts` entries are
    // lowercase and dot-prefixed (see `Args::markdown_exts`).
    let is_markdown = force_markdown
        || filename
            .extension()
            .and_then(|e| e.to_str())
            .map(|e| format!(".{}", e.to_ascii_lowercase()))
            .is_some_and(|e| markdown_exts.contains(&e));

    let file_path = file_base.join(filename);
    let content = fs_err::tokio::read(&file_path).await?;

    let mut output = Vec::with_capacity(content.len());
    let mut modified = false;
    // `split_inclusive` keeps each `\n`, so LF and CRLF endings pass through
    // unchanged; a final line without a newline is also yielded.
    for line in content.split_inclusive(|&b| b == b'\n') {
        let line_ending = detect_line_ending(line);
        let mut trimmed = &line[..line.len() - line_ending.len()];

        // Detect the Markdown hard break *before* trimming, then strip it so
        // any extra whitespace behind it gets normalized too.
        let markdown_end = needs_markdown_break(is_markdown, trimmed);
        if markdown_end {
            trimmed = &trimmed[..trimmed.len() - MARKDOWN_LINE_BREAK.len()];
        }

        if chars.is_empty() {
            trimmed = trimmed.trim_ascii_end();
        } else {
            trimmed = trimmed.trim_end_with(|c| chars.contains(&c));
        }

        output.extend_from_slice(trimmed);
        if markdown_end {
            // Re-append exactly two spaces to preserve the hard break.
            output.extend_from_slice(MARKDOWN_LINE_BREAK);
            modified |= trimmed.len() + MARKDOWN_LINE_BREAK.len() + line_ending.len() != line.len();
        } else {
            modified |= trimmed.len() + line_ending.len() != line.len();
        }
        output.extend_from_slice(line_ending);
    }

    if modified {
        fs_err::tokio::write(&file_path, &output).await?;
        Ok((1, format!("Fixing {}\n", filename.display()).into_bytes()))
    } else {
        Ok((0, Vec::new()))
    }
}

/// Return the line-ending suffix of `line`: `\r\n`, `\n`, `\r`, or empty
/// when the line has no terminator (e.g. the last line of a file without a
/// trailing newline).
fn detect_line_ending(line: &[u8]) -> &[u8] {
    match line {
        // CRLF must be matched before the single-byte endings.
        [.., b'\r', b'\n'] => b"\r\n",
        [.., b'\n'] => b"\n",
        [.., b'\r'] => b"\r",
        _ => b"",
    }
}

fn needs_markdown_break(is_markdown: bool, trimmed: &[u8]) -> bool {
    is_markdown
        && !trimmed.chars().all(|b| b.is_ascii_whitespace())
        && trimmed.ends_with(MARKDOWN_LINE_BREAK)
}

#[cfg(test)]
mod tests {
    use super::*;

    use std::path::PathBuf;
    use tempfile::TempDir;

    /// Write `content` to a file named `name` inside `dir` and return its
    /// full path.
    async fn create_test_file(dir: &TempDir, name: &str, content: &[u8]) -> Result<PathBuf> {
        let file_path = dir.path().join(name);
        fs_err::tokio::write(&file_path, content).await?;
        Ok(file_path)
    }

    #[tokio::test]
    async fn test_trim_non_markdown_trims_spaces() -> Result<()> {
        let dir = TempDir::new()?;
        let file_path =
            create_test_file(&dir, "file.txt", b"keep this line\ntrim trailing    \n").await?;

        let chars = vec![' ', '\t'];
        let md_exts = vec![".md".to_string()];

        let (code, msg) = fix_file(Path::new(""), &file_path, &chars, false, &md_exts).await?;

        // modified
        assert_eq!(code, 1);
        let msg_str = String::from_utf8_lossy(&msg);
        assert!(msg_str.contains("file.txt"));

        // file content updated: trailing spaces removed
        let content = fs_err::tokio::read_to_string(&file_path).await?;
        let expected = "keep this line\ntrim trailing\n";
        assert_eq!(content, expected);

        Ok(())
    }

    #[tokio::test]
    async fn test_markdown_preserve_two_spaces_and_reduce_extra() -> Result<()> {
        let dir = TempDir::new()?;
        let file_path = create_test_file(
            &dir,
            "doc.md",
            b"line_keep_two  \nline_reduce_three   \nother_line\n",
        )
        .await?;

        let chars = vec![' ', '\t'];
        let md_exts = vec![".md".to_string()];

        let (code, _msg) = fix_file(Path::new(""), &file_path, &chars, false, &md_exts).await?;

        // second line changed 3 -> 2 spaces, so modified
        assert_eq!(code, 1);

        let content = fs_err::tokio::read_to_string(&file_path).await?;
        let expected = "line_keep_two  \nline_reduce_three  \nother_line\n";
        assert_eq!(content, expected);

        Ok(())
    }

    #[tokio::test]
    async fn test_force_markdown_obeys_markdown_rules() -> Result<()> {
        let dir = TempDir::new()?;
        // .txt normally not markdown, but we force markdown=true
        let file_path = create_test_file(
            &dir,
            "forced.txt",
            b"keep_two_spaces  \nthree_spaces_line   \n",
        )
        .await?;

        let chars = vec![' ', '\t'];
        let md_exts: Vec<String> = vec![]; // irrelevant because force_markdown = true

        let (code, _msg) = fix_file(Path::new(""), &file_path, &chars, true, &md_exts).await?;

        // modified because one line had 3 spaces -> reduced to 2
        assert_eq!(code, 1);

        let content = fs_err::tokio::read_to_string(&file_path).await?;
        let expected = "keep_two_spaces  \nthree_spaces_line  \n";
        assert_eq!(content, expected);

        Ok(())
    }

    #[tokio::test]
    async fn test_no_changes_returns_zero_and_no_write() -> Result<()> {
        let dir = TempDir::new()?;
        let path = create_test_file(&dir, "ok.txt", b"already_trimmed\nline_two\n").await?;
        let chars = vec![' ', '\t'];
        let md_exts = vec![".md".to_string()];

        // file already trimmed -> no changes
        let (code, msg) = fix_file(Path::new(""), &path, &chars, false, &md_exts).await?;
        assert_eq!(code, 0);
        assert!(msg.is_empty());

        let content = fs_err::tokio::read_to_string(&path).await?;
        assert_eq!(content, "already_trimmed\nline_two\n");

        Ok(())
    }

    #[tokio::test]
    async fn test_empty_file_no_change() -> Result<()> {
        let dir = TempDir::new()?;
        let path = create_test_file(&dir, "empty.txt", b"").await?;
        let chars = vec![' ', '\t'];
        let md_exts = vec![];

        // an empty file has no lines, so nothing can be modified
        let (code, msg) = fix_file(Path::new(""), &path, &chars, false, &md_exts).await?;
        assert_eq!(code, 0);
        assert!(msg.is_empty());
        let content = fs_err::tokio::read_to_string(&path).await?;
        assert_eq!(content, "");

        Ok(())
    }

    #[tokio::test]
    async fn test_only_whitespace_lines_are_handled_not_markdown_end() -> Result<()> {
        let dir = TempDir::new()?;
        // lines are only whitespace; markdown_end_flag should NOT trigger
        let path = create_test_file(&dir, "ws.txt", b"   \n\t\n  \n").await?;
        let chars = vec![' ', '\t'];
        let md_exts = vec![".md".to_string()];

        let (code, _msg) = fix_file(Path::new(""), &path, &chars, false, &md_exts).await?;
        // trimming whitespace-only lines will change them to empty lines -> modified true
        assert_eq!(code, 1);

        let content = fs_err::tokio::read_to_string(&path).await?;
        // Expect empty lines (newline preserved per implementation)
        assert_eq!(content, "\n\n\n");

        Ok(())
    }

    #[tokio::test]
    async fn test_chars_empty_uses_trim_ascii_end() -> Result<()> {
        let dir = TempDir::new()?;
        // trailing ascii spaces should be removed by trim_ascii_end when chars is empty
        let path = create_test_file(&dir, "ascii.txt", b"foo   \nbar \t\n").await?;
        let chars = vec![]; // will hit trim_ascii_end()
        let md_exts = vec![];

        let (code, _msg) = fix_file(Path::new(""), &path, &chars, false, &md_exts).await?;
        assert_eq!(code, 1);

        let content = fs_err::tokio::read_to_string(&path).await?;
        let expected = "foo\nbar\n";
        assert_eq!(content, expected);

        Ok(())
    }

    #[tokio::test]
    async fn test_crlf_lines_handling() -> Result<()> {
        let dir = TempDir::new()?;
        // CRLF content (use \r\n). Ensure trimming still works.
        let path = create_test_file(&dir, "crlf.txt", b"one  \r\ntwo   \r\n").await?;
        let chars = vec![' ', '\t'];
        let md_exts = vec![".txt".to_string()]; // treat as markdown for this test

        let (code, _msg) = fix_file(Path::new(""), &path, &chars, false, &md_exts).await?;
        assert_eq!(code, 1);

        // read file and check logical lines presence (line endings may be normalized by lines())
        let content = fs_err::tokio::read_to_string(&path).await?;
        assert!(content.contains("one"));
        assert!(content.contains("two"));

        Ok(())
    }

    #[tokio::test]
    async fn test_no_newline_at_eof() -> Result<()> {
        let dir = TempDir::new()?;
        // no trailing newline on last line
        let path = create_test_file(&dir, "no_nl.txt", b"lastline   ").await?;
        let chars = vec![' ', '\t'];
        let md_exts = vec![];

        let (code, _msg) = fix_file(Path::new(""), &path, &chars, false, &md_exts).await?;
        assert_eq!(code, 1);

        let content = fs_err::tokio::read_to_string(&path).await?;
        // Expect trailing spaces removed
        assert_eq!(content, "lastline");

        Ok(())
    }

    #[tokio::test]
    async fn test_unicode_trim_char() -> Result<()> {
        let dir = TempDir::new()?;
        // use a unicode char '。' and ideographic space ' ' to trim
        let path = create_test_file(&dir, "uni.txt", "hello。 \n".as_bytes()).await?;
        let chars = vec!['。', ' '];
        let md_exts = vec![];

        let (code, _msg) = fix_file(Path::new(""), &path, &chars, false, &md_exts).await?;
        assert_eq!(code, 1);

        let content = fs_err::tokio::read_to_string(&path).await?;
        assert_eq!(content, "hello\n");

        Ok(())
    }

    #[tokio::test]
    async fn test_extension_case_insensitive_matching() -> Result<()> {
        let dir = TempDir::new()?;
        // capital extension .MD should match .md in markdown_exts
        let path = create_test_file(&dir, "Doc.MD", b"hi   \n").await?;
        let chars = vec![' ', '\t'];
        let md_exts = vec![".md".to_string()];

        let (code, _msg) = fix_file(Path::new(""), &path, &chars, false, &md_exts).await?;
        assert_eq!(code, 1);

        let content = fs_err::tokio::read_to_string(&path).await?;
        // markdown rules: trailing >2 -> reduce to two spaces
        assert!(content.contains("hi"));

        Ok(())
    }

    #[tokio::test]
    async fn test_mixed_lines_modified_flag_true_if_any_changed() -> Result<()> {
        let dir = TempDir::new()?;
        let path = create_test_file(&dir, "mix.txt", b"ok\nneedtrim   \nalso_ok\n").await?;
        let chars = vec![' ', '\t'];
        let md_exts = vec![];

        let (code, _msg) = fix_file(Path::new(""), &path, &chars, false, &md_exts).await?;
        assert_eq!(code, 1);

        let content = fs_err::tokio::read_to_string(&path).await?;
        let expected = "ok\nneedtrim\nalso_ok\n";
        assert_eq!(content, expected);

        Ok(())
    }

    #[tokio::test]
    async fn test_no_change_no_newline_at_eof() -> Result<()> {
        let dir = TempDir::new()?;
        let path = create_test_file(&dir, "ok_no_nl.txt", b"foo\nbar").await?;

        let chars = vec![' ', '\t'];
        let md_exts = vec![];

        // already clean and missing the final newline: must stay byte-identical
        let (code, msg) = fix_file(Path::new(""), &path, &chars, false, &md_exts).await?;
        assert_eq!(code, 0);
        assert!(msg.is_empty());

        let content = fs_err::tokio::read_to_string(&path).await?;
        assert_eq!(content, "foo\nbar");

        Ok(())
    }

    #[tokio::test]
    async fn test_markdown_wildcard_ext_and_eof_whitespace_removed() -> Result<()> {
        let dir = TempDir::new()?;
        let content = b"foo  \nbar \nbaz    \n\t\n\n  ";
        let path = create_test_file(&dir, "wild.md", content).await?;
        let chars = vec![' ', '\t'];
        let md_exts = vec!["*".to_string()];

        let (code, _msg) = fix_file(Path::new(""), &path, &chars, true, &md_exts).await?;
        assert_eq!(code, 1);

        // exactly-two-space breaks kept; one-space trimmed; 4-space reduced
        // to 2; whitespace-only lines (incl. the unterminated last one) emptied
        let expected = "foo  \nbar\nbaz  \n\n\n";
        let new_content = fs_err::tokio::read_to_string(&path).await?;
        assert_eq!(new_content, expected);

        Ok(())
    }

    #[tokio::test]
    async fn test_markdown_with_custom_charset() -> Result<()> {
        let dir = TempDir::new()?;
        let path = create_test_file(&dir, "custom_charset.md", b"\ta \t   \n").await?;
        let chars = vec![' '];
        let md_exts = vec!["*".to_string()];

        // only spaces are trimmed, so the inner tab survives; the trailing
        // run still collapses to the two-space markdown break
        let (code, _msg) = fix_file(Path::new(""), &path, &chars, true, &md_exts).await?;
        assert_eq!(code, 1);

        let expected = "\ta \t  \n";
        let content = fs_err::tokio::read_to_string(&path).await?;
        assert_eq!(content, expected);

        Ok(())
    }

    #[tokio::test]
    async fn test_eol_trim() -> Result<()> {
        let dir = TempDir::new()?;
        let path = create_test_file(&dir, "trim_eol.md", b"a\nb\r\r\r\n").await?;
        let chars = vec!['x'];
        let md_exts = vec![];

        // bare '\r' bytes before the CRLF are part of the line body, and 'x'
        // is the only trim character, so nothing changes
        let (code, _msg) = fix_file(Path::new(""), &path, &chars, true, &md_exts).await?;
        assert_eq!(code, 0);

        let expected = "a\nb\r\r\r\n";
        let content = fs_err::tokio::read_to_string(&path).await?;
        assert_eq!(content, expected);

        Ok(())
    }

    #[tokio::test]
    async fn test_markdown_trim() -> Result<()> {
        let dir = TempDir::new()?;
        let path = create_test_file(&dir, "trim_markdown.md", b"axxx  \n").await?;
        let chars = vec!['x'];
        let md_exts = vec!["md".to_string()];

        // the two-space break is lifted off first, the 'x' run trimmed, and
        // the break re-appended
        let (code, _msg) = fix_file(Path::new(""), &path, &chars, true, &md_exts).await?;
        assert_eq!(code, 1);

        let expected = "a  \n";
        let content = fs_err::tokio::read_to_string(&path).await?;
        assert_eq!(content, expected);

        Ok(())
    }

    #[tokio::test]
    async fn test_invalid_utf8_file_is_handled() -> Result<()> {
        let dir = TempDir::new()?;
        // This is valid ASCII followed by invalid UTF-8 (0xFF)
        let content = b"valid line\ninvalid utf8 here:\xff\n";
        let path = create_test_file(&dir, "invalid_utf8.txt", content).await?;
        let chars = vec![' ', '\t'];
        let md_exts: Vec<String> = vec![];

        let (code, _msg) = fix_file(Path::new(""), &path, &chars, false, &md_exts).await?;
        assert_eq!(code, 0);

        let new_content = fs_err::tokio::read(&path).await?;
        // The invalid byte should still be present, but trailing whitespace should be trimmed
        assert!(new_content.starts_with(b"valid line\ninvalid utf8 here:\xff\n"));

        Ok(())
    }
}