wdl-engine 0.13.2

Execution engine for Workflow Description Language (WDL) documents.
Documentation
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
//! Implements calculation of file and directory content digests.
//!
//! This is used by the call cache and for uploading inputs for remote backends.

use std::collections::HashMap;
use std::fs;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::LazyLock;
use std::sync::Mutex;
use std::time::UNIX_EPOCH;

use anyhow::Context;
use anyhow::Result;
use anyhow::bail;
use arrayvec::ArrayString;
use blake3::Hash;
use blake3::Hasher;
use cloud_copy::ContentDigest;
use cloud_copy::UrlExt;
use futures::FutureExt;
use tokio::sync::OnceCell;
use tokio::task::spawn_blocking;
use tracing::debug;
use url::Url;

use crate::ContentKind;
use crate::cache::Hashable;
use crate::config::ContentDigestMode;
use crate::http::Transferer;

/// Represents a calculated [Blake3](https://github.com/BLAKE3-team/BLAKE3) digest of a file or directory.
///
/// The variant records whether the digest was calculated for a file or for a
/// directory; the two kinds are hashed with different layouts and must not be
/// conflated.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Digest {
    /// The digest is for a file.
    File(Hash),
    /// The digest is for a directory.
    Directory(Hash),
}

impl Digest {
    /// Returns the digest as a lowercase hex string.
    ///
    /// The underlying hash bytes are formatted the same way regardless of
    /// whether the digest is for a file or a directory.
    pub fn to_hex(self) -> ArrayString<64> {
        // Both variants wrap a `blake3::Hash`; bind it with an or-pattern.
        let (Self::File(hash) | Self::Directory(hash)) = self;
        hash.to_hex()
    }
}

/// Represents a map of (digest mode, local path) to digest.
///
/// The value is an `Arc<OnceCell<_>>` so concurrent requests for the same
/// path await a single in-flight calculation instead of duplicating work.
type LocalDigestMap = HashMap<(ContentDigestMode, PathBuf), Arc<OnceCell<Digest>>>;

/// Represents a map of remote URL to digest.
type RemoteDigestMap = HashMap<Url, Arc<OnceCell<Digest>>>;

/// Keeps track of previously calculated local digests.
///
/// As WDL evaluation cannot write to existing files, it is assumed that files
/// and directories are not modified during evaluation.
///
/// NOTE(review): entries are memoized for the lifetime of the process and the
/// lookup in `calculate_local_digest` does not re-validate the source on a
/// cache hit — confirm that modification checking happens at the call sites
/// (e.g. the call cache) if it is relied upon.
static LOCAL_DIGESTS: LazyLock<Mutex<LocalDigestMap>> = LazyLock::new(Mutex::default);

/// Keeps track of previously calculated remote digests.
static REMOTE_DIGESTS: LazyLock<Mutex<RemoteDigestMap>> = LazyLock::new(Mutex::default);

/// An extension trait for joining a digest to a URL.
pub trait UrlDigestExt: Sized {
    /// Joins the given digest to the URL.
    ///
    /// If the digest is for a file, a `file` path segment is pushed first.
    ///
    /// If the digest is for a directory, a `directory` path segment is pushed
    /// first.
    ///
    /// A path segment is then pushed for the digest as a hex string.
    ///
    /// # Panics
    ///
    /// Implementations may panic if the URL cannot be a base (see
    /// [`Url::cannot_be_a_base`]).
    fn join_digest(&self, digest: Digest) -> Self;
}

impl UrlDigestExt for Url {
    fn join_digest(&self, digest: Digest) -> Self {
        assert!(
            !self.cannot_be_a_base(),
            "invalid URL: URL is required to be a base"
        );

        // Pick the kind segment and extract the hash in one step.
        let (kind, hash) = match digest {
            Digest::File(hash) => ("file", hash),
            Digest::Directory(hash) => ("directory", hash),
        };

        let mut joined = self.clone();

        {
            // SAFETY: `path_segments_mut` only fails for cannot-be-a-base
            // URLs, which the assertion above rules out.
            let mut segments = joined.path_segments_mut().unwrap();
            // Drop a trailing empty segment so pushing doesn't produce `//`.
            segments.pop_if_empty();
            segments.push(kind);
            segments.push(hash.to_hex().as_str());
        }

        joined
    }
}

/// Helper for retrieving the content digest of a URL.
///
/// Fails if the transferer errors or if it reports no known digest for the
/// URL.
async fn get_content_digest(transferer: &dyn Transferer, url: &Url) -> Result<Arc<ContentDigest>> {
    let digest = transferer.digest(url).await.with_context(|| {
        format!(
            "failed to get content digest of URL `{url}`",
            url = url.display()
        )
    })?;

    if let Some(digest) = digest {
        return Ok(digest);
    }

    bail!("URL `{url}` does not have a known content digest")
}

/// Calculates the digest of a local file.
///
/// In [`ContentDigestMode::Strong`], the file's entire contents are hashed
/// with Blake3 (memory-mapped and parallelized) on a blocking task so the
/// async runtime is not stalled.
///
/// In [`ContentDigestMode::Weak`], only the file's metadata (length and last
/// modified time) is hashed; this is fast but will miss modifications that
/// preserve both size and mtime.
async fn calculate_file_digest(path: &Path, mode: ContentDigestMode) -> Result<Digest> {
    match mode {
        ContentDigestMode::Strong => {
            // Calculate a Blake3 digest for the file's contents; hashing is
            // CPU and I/O bound, so move it off the async executor.
            let path = path.to_path_buf();
            spawn_blocking(move || {
                let mut hasher = Hasher::new();
                hasher.update_mmap_rayon(&path).with_context(|| {
                    format!(
                        "failed to calculate digest of `{path}`",
                        path = path.display()
                    )
                })?;

                anyhow::Ok(Digest::File(hasher.finalize()))
            })
            .await
            .context("file digest task panicked")?
        }
        ContentDigestMode::Weak => {
            // Calculate a digest solely off of file metadata
            let metadata = path.metadata().with_context(|| {
                format!("failed to read metadata of `{path}`", path = path.display())
            })?;
            let mtime = metadata
                .modified()
                .with_context(|| {
                    format!(
                        "failed to determine last modified time of `{path}`",
                        path = path.display()
                    )
                })?
                .duration_since(UNIX_EPOCH)
                .with_context(|| {
                    format!(
                        "last modified time of `{path}` is before the UNIX epoch",
                        path = path.display()
                    )
                })?;

            // NOTE: the secs/millis/micros components are redundant with
            // nanos, but the layout is kept as-is so previously calculated
            // weak digests remain stable.
            let mut hasher = Hasher::new();
            hasher.update(&metadata.len().to_le_bytes());
            hasher.update(&mtime.as_secs().to_le_bytes());
            hasher.update(&mtime.as_millis().to_le_bytes());
            hasher.update(&mtime.as_micros().to_le_bytes());
            hasher.update(&mtime.as_nanos().to_le_bytes());
            Ok(Digest::File(hasher.finalize()))
        }
    }
}

/// Calculates the digest of a local directory.
///
/// This is a recursive operation where every file and directory recursively
/// contained in the directory will have their content digests calculated.
///
/// Entries are hashed in lexicographical file-name order and the number of
/// hashed entries is folded in last, so the resulting digest is stable for a
/// given directory tree. Do not change the order or layout of the hashed
/// bytes: doing so invalidates previously recorded digests.
///
/// Broken symlinks are skipped and contribute nothing to the digest.
///
/// Returns a boxed future to break the type recursion.
fn calculate_directory_digest(
    path: &Path,
    mode: ContentDigestMode,
) -> impl Future<Output = Result<Digest>> + Send {
    async move {
        let mut dir = tokio::fs::read_dir(&path)
            .await
            .with_context(|| format!("failed to read directory `{path}`", path = path.display()))?;

        // Collect all entries first so they can be sorted below.
        let mut entries = Vec::new();
        while let Some(entry) = dir
            .next_entry()
            .await
            .with_context(|| format!("failed to read directory `{path}`", path = path.display()))?
        {
            entries.push(entry);
        }

        // Sort the entries by name so that the digest order is consistent
        drop(dir);
        entries.sort_by_key(|e| e.file_name());

        // Count only the entries that actually contribute to the digest
        // (broken symlinks are excluded below).
        let mut count: u32 = 0;
        let mut hasher = Hasher::new();
        for entry in &entries {
            let entry_path = entry.path();
            let mut metadata = entry.metadata().await.with_context(|| {
                format!(
                    "failed to read metadata for path `{path}`",
                    path = entry_path.display()
                )
            })?;

            // For symlink entries, ensure the link isn't broken by retrieving the target's
            // metadata; if it is broken, ignore it by not including it
            if metadata.is_symlink() {
                match fs::metadata(&entry_path) {
                    Ok(m) => metadata = m,
                    Err(_) => continue,
                }
            }

            let kind = if metadata.is_file() {
                ContentKind::File
            } else {
                ContentKind::Directory
            };

            // Hash the relative path to the entry
            let entry_rel_path = entry_path
                .strip_prefix(path)
                .expect("entry path should be relative")
                .to_str()
                .with_context(|| {
                    format!("path `{path}` is not UTF-8", path = entry_path.display())
                })?;
            entry_rel_path.hash(&mut hasher);

            // Recursively calculate the entry's digest
            let digest = calculate_local_digest(&entry_path, kind, mode).await?;
            digest.hash(&mut hasher);
            count += 1;
        }

        hasher.update(&count.to_le_bytes());
        Ok(Digest::Directory(hasher.finalize()))
    }
    .boxed()
}

/// Calculates the content digest of a local path.
///
/// If the path is a file, a [blake3](blake3) digest is calculated for the
/// file's content.
///
/// If the path is a directory, a consistent, recursive walk of the directory is
/// performed and a digest calculated based on the directory's entries.
///
/// The hash of a directory entry consists of:
///
/// * The relative path to the entry.
/// * Whether or not the entry is a file or a directory.
/// * If the entry is a file, the hash of the file's contents as noted above.
///
/// Results are memoized per `(mode, path)` for the lifetime of the process.
///
/// [blake3]: https://github.com/BLAKE3-team/BLAKE3
pub async fn calculate_local_digest(
    path: &Path,
    kind: ContentKind,
    mode: ContentDigestMode,
) -> Result<Digest> {
    // Look up (or insert) the memoization cell for this (mode, path) pair,
    // holding the map lock only long enough to clone the `Arc`.
    let cell = {
        let mut digests = LOCAL_DIGESTS.lock().expect("failed to lock digests");
        digests
            .entry((mode, path.to_path_buf()))
            .or_default()
            .clone()
    };

    // The initializer runs at most once per cell; concurrent callers await
    // the same computation.
    let digest = cell
        .get_or_try_init(|| async move {
            let metadata = path.metadata().with_context(|| {
                format!("failed to read metadata of `{path}`", path = path.display())
            })?;

            debug!(
                "calculating content digest of `{path}`",
                path = path.display()
            );

            // Dispatch on (what the caller expects, what's actually on disk).
            let want_file = kind == ContentKind::File;
            match (want_file, metadata.is_file()) {
                (true, false) => {
                    bail!("expected path `{path}` to be a file", path = path.display())
                }
                (false, true) => {
                    bail!(
                        "expected path `{path}` to be a directory",
                        path = path.display()
                    )
                }
                (true, true) => calculate_file_digest(path, mode).await,
                (false, false) => calculate_directory_digest(path, mode).await,
            }
        })
        .await?;

    Ok(*digest)
}

/// Calculates the content digest of a remote URL.
///
/// If the URL is to a remote file, a `HEAD` request is made and the response
/// must have an associated content digest header; the header's value is hashed
/// to produce the content digest of the file.
///
/// If the URL is a "directory", a consistent, recursive walk of the directory
/// is performed and a digest calculated based on the directory's entries.
///
/// The hash of a directory entry consists of:
///
/// * The relative path to the entry.
/// * The content digest of the entry.
///
/// Results are memoized per URL for the lifetime of the process.
pub async fn calculate_remote_digest(
    transferer: &dyn Transferer,
    url: &Url,
    kind: ContentKind,
) -> Result<Digest> {
    // Look up (or insert) the memoization cell for this URL, holding the map
    // lock only long enough to clone the `Arc`.
    let digest = {
        let mut digests = REMOTE_DIGESTS.lock().expect("failed to lock digests");
        digests.entry(url.clone()).or_default().clone()
    };

    // Get an existing result or initialize a new one exactly once
    Ok(*digest
        .get_or_try_init(|| async {
            debug!("calculating content digest of `{url}`", url = url.display());

            // For a file URL, hash the transferer's reported content digest
            if kind == ContentKind::File {
                let digest = get_content_digest(transferer, url).await?;
                let mut hasher = Hasher::new();
                digest.hash(&mut hasher);
                return anyhow::Ok(Digest::File(hasher.finalize()));
            }

            // Walk the URL; the returned entries are in lexicographical order
            let entries = transferer
                .walk(url)
                .await
                .with_context(|| format!("failed to walk URL `{url}`", url = url.display()))?;

            let mut hasher = Hasher::new();
            for entry in entries.iter() {
                let mut url = url.clone();

                {
                    // Append the entry to the url; we must pop the last segment if it is empty as
                    // otherwise `push` will append another empty segment
                    let mut segments = url.path_segments_mut().expect("URL should have a path");
                    segments.pop_if_empty();
                    for segment in entry.split('/') {
                        segments.push(segment);
                    }
                }

                let digest = get_content_digest(transferer, &url).await?;
                entry.hash(&mut hasher);
                digest.hash(&mut hasher);
            }

            // Fold in the entry count so an empty "directory" has a
            // well-defined digest and prefix walks cannot collide.
            hasher.update(&(entries.len() as u32).to_le_bytes());
            Ok(Digest::Directory(hasher.finalize()))
        })
        .await?)
}

#[cfg(test)]
pub(crate) mod test {
    use std::fs;
    use std::io::Write;
    use std::time::Duration;
    use std::time::SystemTime;

    use anyhow::anyhow;
    use futures::FutureExt;
    use futures::future::BoxFuture;
    use pretty_assertions::assert_eq;
    use tempfile::NamedTempFile;
    use tempfile::tempdir;

    use super::*;
    use crate::ContentKind;
    use crate::http::Location;

    /// Helper for clearing the cached digests for tests
    ///
    /// Digests are memoized in process-wide statics, so tests that modify a
    /// file and re-digest it must clear the cache in between.
    pub fn clear_digest_cache() {
        LOCAL_DIGESTS
            .lock()
            .expect("failed to lock digests")
            .clear();
        REMOTE_DIGESTS
            .lock()
            .expect("failed to lock digests")
            .clear();
    }

    /// A test `Transferer` that serves canned content digests from a map of
    /// URL string to optional digest.
    pub struct DigestTransferer(HashMap<&'static str, Option<Arc<ContentDigest>>>);

    impl DigestTransferer {
        /// Creates a transferer from an iterator of `(url, digest)` pairs.
        pub fn new<C>(c: C) -> Self
        where
            C: IntoIterator<Item = (&'static str, Option<ContentDigest>)>,
        {
            Self(HashMap::from_iter(
                c.into_iter().map(|(k, v)| (k, v.map(Into::into))),
            ))
        }
    }

    // Only `walk` and `digest` are exercised by these tests; the rest of the
    // trait surface is intentionally unimplemented.
    impl Transferer for DigestTransferer {
        fn download<'a>(&'a self, _source: &'a Url) -> BoxFuture<'a, Result<Location>> {
            unimplemented!()
        }

        fn upload<'a>(
            &'a self,
            _source: &'a Path,
            _destination: &'a Url,
        ) -> BoxFuture<'a, Result<()>> {
            unimplemented!()
        }

        fn size<'a>(&'a self, _url: &'a Url) -> BoxFuture<'a, Result<Option<u64>>> {
            unimplemented!()
        }

        fn walk<'a>(&'a self, url: &'a Url) -> BoxFuture<'a, Result<Arc<[String]>>> {
            async {
                // Treat every key prefixed by `url` as an entry "under" the
                // directory, relative to the URL.
                let mut entries = Vec::new();
                for k in self.0.keys() {
                    if let Some(path) = k.strip_prefix(url.as_str()) {
                        let path = path.strip_prefix("/").unwrap_or(path);
                        entries.push(path.to_string());
                    }
                }

                // The real implementation yields lexicographical order.
                entries.sort();
                Ok(entries.into())
            }
            .boxed()
        }

        fn exists<'a>(&'a self, _url: &'a Url) -> BoxFuture<'a, Result<bool>> {
            unimplemented!()
        }

        fn digest<'a>(&'a self, url: &'a Url) -> BoxFuture<'a, Result<Option<Arc<ContentDigest>>>> {
            async {
                // Missing key => error ("404"); present key with `None`
                // value => URL exists but has no known digest.
                Ok(self
                    .0
                    .get(url.as_str())
                    .ok_or_else(|| anyhow!("does not exist"))?
                    .clone())
            }
            .boxed()
        }
    }

    /// Strong mode hashes the file's contents with Blake3.
    #[tokio::test]
    async fn local_file_digest_strong() {
        let mut file = NamedTempFile::new().unwrap();
        file.write_all(b"hello world!").unwrap();

        let digest =
            calculate_local_digest(file.path(), ContentKind::File, ContentDigestMode::Strong)
                .await
                .unwrap();
        // Digest of `hello world!` from https://emn178.github.io/online-tools/blake3/
        assert_eq!(
            *digest.to_hex(),
            *"3aa61c409fd7717c9d9c639202af2fae470c0ef669be7ba2caea5779cb534e9d"
        );
    }

    /// Weak mode hashes only metadata; the digest must react to size and
    /// mtime changes.
    #[tokio::test]
    async fn local_file_digest_weak() {
        let mut file = NamedTempFile::new().unwrap();
        file.write_all(b"hello world!").unwrap();

        let digest =
            calculate_local_digest(file.path(), ContentKind::File, ContentDigestMode::Weak)
                .await
                .unwrap();

        // It should match the digest returned by `calculate_file_digest`
        assert_eq!(
            digest,
            calculate_file_digest(file.path(), ContentDigestMode::Weak)
                .await
                .unwrap()
        );

        // The digest should change if we modify its size
        file.write_all(b"!").unwrap();
        file.flush().unwrap();

        clear_digest_cache();

        let changed =
            calculate_local_digest(file.path(), ContentKind::File, ContentDigestMode::Weak)
                .await
                .unwrap();

        assert!(digest != changed, "expected digest to change");

        let digest = changed;

        // The digest should change if we modify the mtime
        file.as_file()
            .set_modified(
                SystemTime::now()
                    .checked_sub(Duration::from_hours(1))
                    .unwrap(),
            )
            .unwrap();

        clear_digest_cache();

        let changed =
            calculate_local_digest(file.path(), ContentKind::File, ContentDigestMode::Weak)
                .await
                .unwrap();

        assert!(digest != changed, "expected digest to change");
    }

    /// Recreates the exact byte layout of the directory digest by hand and
    /// checks it against `calculate_local_digest`. The `hasher.update` calls
    /// below pin the wire format produced by `Hashable::hash`.
    #[tokio::test]
    async fn local_directory_digest() {
        let dir = tempdir().unwrap();
        fs::write(dir.path().join("a"), b"a").unwrap();
        fs::write(dir.path().join("b"), b"b").unwrap();
        fs::write(dir.path().join("c"), b"c").unwrap();

        let subdir = dir.path().join("subdir");
        fs::create_dir(&subdir).unwrap();
        fs::write(subdir.join("z"), b"z").unwrap();
        fs::write(subdir.join("y"), b"y").unwrap();
        fs::write(subdir.join("x"), b"x").unwrap();

        let digest = calculate_local_digest(
            dir.path(),
            ContentKind::Directory,
            ContentDigestMode::Strong,
        )
        .await
        .unwrap();

        // Calculate the digest of the `subdir`
        let mut hasher = Hasher::new();
        hasher.update(&1u32.to_le_bytes()); // Path length
        hasher.update("x".as_bytes()); // Path
        hasher.update(&[0]); // File digest tag
        hasher.update(&32u32.to_le_bytes()); // File digest length
        hasher.update(
            // Digest of `x` from https://emn178.github.io/online-tools/blake3/
            Hash::from_hex("3ae7d805f6789a6402acb70ad4096a85a56bf6804eaf25c0493ac697548d30b5")
                .unwrap()
                .as_bytes(),
        ); // File digest
        hasher.update(&1u32.to_le_bytes()); // Path length
        hasher.update("y".as_bytes()); // Path
        hasher.update(&[0]); // File digest tag
        hasher.update(&32u32.to_le_bytes()); // File digest length
        hasher.update(
            // Digest of `y` from https://emn178.github.io/online-tools/blake3/
            Hash::from_hex("08112a9e334ce73042b531c25668cf5cb12a1ee040a4326afeac065461079a06")
                .unwrap()
                .as_bytes(),
        ); // File digest
        hasher.update(&1u32.to_le_bytes()); // Path length
        hasher.update("z".as_bytes()); // Path
        hasher.update(&[0]); // File digest tag
        hasher.update(&32u32.to_le_bytes()); // File digest length
        hasher.update(
            // Digest of `z` from https://emn178.github.io/online-tools/blake3/
            Hash::from_hex("1104908ab930e671002c7cd7f3fc921570b1bf64ecfa12fe363585c630eaca6b")
                .unwrap()
                .as_bytes(),
        ); // File digest
        hasher.update(&3u32.to_le_bytes()); // Number of entries
        let subdir_digest = hasher.finalize();

        // Calculate the digest of the parent directory
        let mut hasher = Hasher::new();
        hasher.update(&1u32.to_le_bytes()); // Path length
        hasher.update("a".as_bytes()); // Path
        hasher.update(&[0]); // File digest tag
        hasher.update(&32u32.to_le_bytes()); // File digest length
        hasher.update(
            // Digest of `a` from https://emn178.github.io/online-tools/blake3/
            Hash::from_hex("17762fddd969a453925d65717ac3eea21320b66b54342fde15128d6caf21215f")
                .unwrap()
                .as_bytes(),
        ); // File digest
        hasher.update(&1u32.to_le_bytes()); // Path length
        hasher.update("b".as_bytes()); // Path
        hasher.update(&[0]); // File digest tag
        hasher.update(&32u32.to_le_bytes()); // File digest length
        hasher.update(
            // Digest of `b` from https://emn178.github.io/online-tools/blake3/
            Hash::from_hex("10e5cf3d3c8a4f9f3468c8cc58eea84892a22fdadbc1acb22410190044c1d553")
                .unwrap()
                .as_bytes(),
        ); // File digest
        hasher.update(&1u32.to_le_bytes()); // Path length
        hasher.update("c".as_bytes()); // Path
        hasher.update(&[0]); // File digest tag
        hasher.update(&32u32.to_le_bytes()); // File digest length
        hasher.update(
            // Digest of `c` from https://emn178.github.io/online-tools/blake3/
            Hash::from_hex("ea7aa1fc9efdbe106dbb70369a75e9671fa29d52bd55536711bf197477b8f021")
                .unwrap()
                .as_bytes(),
        ); // File digest
        hasher.update(&6u32.to_le_bytes()); // Path length
        hasher.update("subdir".as_bytes()); // Path
        hasher.update(&[1]); // Directory digest tag
        hasher.update(&32u32.to_le_bytes()); // Directory digest length
        hasher.update(subdir_digest.as_bytes()); // Directory digest
        hasher.update(&4u32.to_le_bytes()); // Number of entries
        assert_eq!(digest.to_hex(), hasher.finalize().to_hex());
    }

    /// Remote file digests hash the transferer-reported `ContentDigest`
    /// (either a hash or an ETag); missing digests and unknown URLs error.
    #[tokio::test]
    async fn remote_file_digest() {
        // SHA-256 of `hello world!`
        let content_digest =
            Hash::from_hex("7509e5bda0c762d2bac7f90d758b5b2263fa01ccbc542ab5e3df163be08e6ca9")
                .unwrap();

        let transferer = DigestTransferer::new([
            (
                "http://example.com/foo",
                Some(ContentDigest::Hash {
                    algorithm: "sha256".to_string(),
                    digest: content_digest.as_bytes().into(),
                }),
            ),
            (
                "http://example.com/bar",
                Some(ContentDigest::ETag("etag".into())),
            ),
            ("http://example.com/baz", None),
        ]);

        // URL with Content-Digest header
        let digest = calculate_remote_digest(
            &transferer,
            &"http://example.com/foo".parse().unwrap(),
            ContentKind::File,
        )
        .await
        .unwrap();

        let mut hasher = Hasher::new();
        hasher.update(&[0]); // Hash tag
        hasher.update(&6u32.to_le_bytes()); // Algorithm length
        hasher.update("sha256".as_bytes()); // Algorithm
        hasher.update(&32u32.to_le_bytes()); // Digest length
        hasher.update(content_digest.as_bytes()); // Digest bytes
        assert_eq!(digest.to_hex(), hasher.finalize().to_hex());

        // URL with ETag header
        let digest = calculate_remote_digest(
            &transferer,
            &"http://example.com/bar".parse().unwrap(),
            ContentKind::File,
        )
        .await
        .unwrap();

        let mut hasher = Hasher::new();
        hasher.update(&[1]); // ETag tag
        hasher.update(&4u32.to_le_bytes()); // ETag length
        hasher.update("etag".as_bytes()); // ETag
        assert_eq!(digest.to_hex(), hasher.finalize().to_hex());

        // URL with no digest
        assert_eq!(
            calculate_remote_digest(
                &transferer,
                &"http://example.com/baz".parse().unwrap(),
                ContentKind::File,
            )
            .await
            .unwrap_err()
            .to_string(),
            "URL `http://example.com/baz` does not have a known content digest"
        );

        // 404
        assert_eq!(
            format!(
                "{:#}",
                calculate_remote_digest(
                    &transferer,
                    &"http://example.com/nope".parse().unwrap(),
                    ContentKind::File,
                )
                .await
                .unwrap_err()
            ),
            "failed to get content digest of URL `http://example.com/nope`: does not exist"
        );
    }

    /// Remote directory digests fold in each entry's relative path and
    /// content digest, plus a trailing entry count.
    #[tokio::test]
    async fn remote_directory_digest() {
        // SHA-256 of `hello world!`
        let content_digest =
            Hash::from_hex("7509e5bda0c762d2bac7f90d758b5b2263fa01ccbc542ab5e3df163be08e6ca9")
                .unwrap();

        let transferer = DigestTransferer::new([
            (
                "http://example.com/dir/foo",
                Some(ContentDigest::Hash {
                    algorithm: "sha256".to_string(),
                    digest: content_digest.as_bytes().into(),
                }),
            ),
            (
                "http://example.com/dir/bar/baz",
                Some(ContentDigest::ETag("etag".into())),
            ),
            ("http://example.com/missing/baz", None),
        ]);

        // Digest of a remote "directory"
        let digest = calculate_remote_digest(
            &transferer,
            &"http://example.com/dir".parse().unwrap(),
            ContentKind::Directory,
        )
        .await
        .unwrap();

        let mut hasher = Hasher::new();
        hasher.update(&7u32.to_le_bytes()); // Path length
        hasher.update("bar/baz".as_bytes()); // Path
        hasher.update(&[1]); // ETag tag
        hasher.update(&4u32.to_le_bytes()); // ETag length
        hasher.update("etag".as_bytes()); // ETag
        hasher.update(&3u32.to_le_bytes()); // Path length
        hasher.update("foo".as_bytes()); // Path
        hasher.update(&[0]); // Hash tag
        hasher.update(&6u32.to_le_bytes()); // Algorithm length
        hasher.update("sha256".as_bytes()); // Algorithm
        hasher.update(&32u32.to_le_bytes()); // Digest length
        hasher.update(content_digest.as_bytes()); // Digest bytes
        hasher.update(&2u32.to_le_bytes()); // Number of entries
        assert_eq!(digest.to_hex(), hasher.finalize().to_hex());

        // Digest of a remote "directory" with a trailing slash
        let trailing_digest = calculate_remote_digest(
            &transferer,
            &"http://example.com/dir/".parse().unwrap(),
            ContentKind::Directory,
        )
        .await
        .unwrap();
        assert_eq!(digest, trailing_digest);

        // Digest of a remote "directory" that is "empty"
        // We can't distinguish between a non-existent directory and an empty one
        let digest = calculate_remote_digest(
            &transferer,
            &"http://example.com/empty".parse().unwrap(),
            ContentKind::Directory,
        )
        .await
        .unwrap();

        let mut hasher = Hasher::new();
        hasher.update(&0u32.to_le_bytes()); // Number of entries
        assert_eq!(digest.to_hex(), hasher.finalize().to_hex());

        // Digest of a remote "directory" containing a file with a missing content
        // digest
        assert_eq!(
            format!(
                "{:#}",
                calculate_remote_digest(
                    &transferer,
                    &"http://example.com/missing".parse().unwrap(),
                    ContentKind::Directory,
                )
                .await
                .unwrap_err()
            ),
            "URL `http://example.com/missing/baz` does not have a known content digest"
        );
    }

    /// Broken symlinks are silently excluded from directory digests and the
    /// digest is restored when the link target reappears.
    #[cfg(unix)]
    #[tokio::test]
    async fn ignore_broken_symlink() {
        use std::os::unix::fs::symlink;

        // Create a temp file as the target of the symlink
        let target = NamedTempFile::new()
            .expect("failed to create temporary file")
            .into_temp_path();
        fs::write(&target, b"hello world!").expect("failed to write temporary file");

        // Symlink the file
        let dir = tempdir().expect("failed to create temp directory");
        let link = dir.path().join("b");
        symlink(&target, &link).expect("failed to create symlink");

        // Digest the directory with the file
        let digest = calculate_directory_digest(dir.path(), ContentDigestMode::Strong)
            .await
            .expect("failed to calculate digest");

        // Delete the file to break the link
        fs::remove_file(&target).expect("failed to delete file");

        // Digest again; the link should be ignored and the digest changed
        let modified = calculate_directory_digest(dir.path(), ContentDigestMode::Strong)
            .await
            .expect("failed to calculate digest");
        assert!(digest != modified);

        // Restore the file
        fs::write(&target, b"hello world!").expect("failed to create temporary file");

        // Digest again; the digest should match the original
        let modified = calculate_directory_digest(dir.path(), ContentDigestMode::Strong)
            .await
            .expect("failed to calculate digest");
        assert_eq!(digest, modified);
    }
}