tor-netdoc 0.41.0

Network document formats used with the Tor protocols.
Documentation
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
//! Support for encoding the network document meta-format
//!
//! Implements writing documents according to
//! [dir-spec.txt](https://spec.torproject.org/dir-spec).
//! sections 1.2 and 1.3.
//!
//! This facility produces output that complies with the meta-document format,
//! (`dir-spec.txt` section 1.2) -
//! unless `raw` methods are called with improper input.
//!
//! However, no checks are done on keyword presence/absence, multiplicity, or ordering,
//! so the output may not necessarily conform to the format of the particular intended document.
//! It is the caller's responsibility to call `.item()` in the right order,
//! with the right keywords and arguments.

// TODO Plan for encoding signed documents:
//
//  * Derive an encoder function for Foo; the encoder gives you Encoded<Foo>.
//  * Write code ad-hoc to construct FooSignatures.
//  * Call encoder-core-provided method on Encoded to add the signatures
//
// Method(s) on Encoded<Foo> are provided centrally to let you get the &str to hash it.
//
// Nothing cooked is provided to help with the signature encoding layering violation:
// the central encoding derives do not provide any way to obtain a partly-encoded
// signature item so that it can be added to the hash.
//
// So the signing code must recapitulate some of the item encoding.  This will generally
// be simply a const str (or similar) with the encoded item name and any parameters,
// in precisely the form that needs to be appended to the hash.
//
// This does leave us open to bugs where the hashed data doesn't match what ends up
// being encoded, but since it's a fixed string, such a bug couldn't survive a smoke test.
//
// If there are items where the layering violation involves encoding
// of variable parameters, this would need further work, either ad-hoc,
// or additional traits/macrology/etc. if there's enough cases where it's needed.

mod multiplicity;
#[macro_use]
mod derive;

use std::cmp;
use std::collections::BTreeSet;
use std::fmt::Write;
use std::iter;
use std::marker::PhantomData;

use base64ct::{Base64, Base64Unpadded, Encoding};
use educe::Educe;
use itertools::Itertools;
use paste::paste;
use rand::{CryptoRng, RngCore};
use tor_bytes::EncodeError;
use tor_error::internal;
use void::Void;

use crate::KeywordEncodable;
use crate::parse::tokenize::tag_keywords_ok;
use crate::types::misc::Iso8601TimeSp;

// Exports used by macros, which treat this module as a prelude
#[doc(hidden)]
pub use {
    derive::{DisplayHelper, RestMustComeLastMarker},
    multiplicity::{
        MultiplicityMethods, MultiplicitySelector, OptionalityMethods,
        SingletonMultiplicitySelector,
    },
    std::fmt::{self, Display},
    std::result::Result,
    tor_error::{Bug, into_internal},
};

/// Encoder, representing a partially-built document.
///
/// For example usage, see the tests in this module, or a descriptor building
/// function in tor-netdoc (such as `hsdesc::build::inner::HsDescInner::build_sign`).
#[derive(Debug, Clone)]
pub struct NetdocEncoder {
    /// The being-built document, with everything accumulated so far
    ///
    /// If an [`ItemEncoder`] exists, it will add a newline when it's dropped.
    ///
    /// `Err` means bad values passed to some builder function.
    /// Such errors are accumulated here for the benefit of handwritten document encoders:
    /// subsequent building calls become no-ops, and the first error
    /// is eventually reported by [`NetdocEncoder::finish`].
    built: Result<String, Bug>,
}

/// Encoder for an individual item within a being-built document
///
/// Returned by [`NetdocEncoder::item()`].
#[derive(Debug)]
pub struct ItemEncoder<'n> {
    /// The document including the partial item that we're building
    ///
    /// We will always add a newline when we're dropped,
    /// terminating the item's keyword line (or its object's final tag line).
    doc: &'n mut NetdocEncoder,
}

/// Position within a (perhaps partially-) built document
///
/// This is provided mainly to allow the caller to perform signature operations
/// on the part of the document that is to be signed.
/// (Sometimes this is only part of it.)
///
/// There is no enforced linkage between this and the document it refers to.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub struct Cursor {
    /// The offset (in bytes, as for `&str`)
    ///
    /// Can be out of range if the corresponding `NetdocEncoder` contains an `Err`
    /// (in that case, `NetdocEncoder::cursor` returns `usize::MAX` here).
    offset: usize,
}

/// Types that can be added as argument(s) to item keyword lines
///
/// Implemented for strings, and various other types.
///
/// This is a separate trait so we can control the formatting of (eg) [`Iso8601TimeSp`],
/// without having a method on `ItemEncoder` for each argument type.
//
// TODO consider renaming this to ItemArgumentEncodable to mirror all the other related traits.
pub trait ItemArgument {
    /// Format as a string suitable for including as a netdoc keyword line argument
    ///
    /// The implementation is responsible for checking that the syntax is legal.
    /// For example, if `self` is a string, it must check that the string
    /// is legal as a single argument.
    ///
    /// Some netdoc values (eg times) turn into several arguments; in that case,
    /// one `ItemArgument` may format into multiple arguments, and this method
    /// is responsible for writing them all, with the necessary spaces.
    fn write_arg_onto(&self, out: &mut ItemEncoder<'_>) -> Result<(), Bug>;
}

impl NetdocEncoder {
    /// Start encoding a document
    pub fn new() -> Self {
        Self {
            built: Ok(String::new()),
        }
    }

    /// Adds an item to the being-built document
    ///
    /// The item can be further extended with arguments or an object,
    /// using the returned `ItemEncoder`.
    pub fn item(&mut self, keyword: impl KeywordEncodable) -> ItemEncoder {
        self.raw(&keyword.to_str());
        ItemEncoder { doc: self }
    }

    /// Internal name for `push_raw_string()`
    fn raw(&mut self, s: &dyn Display) {
        self.write_with(|out| {
            write!(out, "{}", s).expect("write! failed on String");
            Ok(())
        });
    }

    /// Extend the being-built document with a fallible function `f`
    ///
    /// Doesn't call `f` if the building has already failed,
    /// and handles the error if `f` fails.
    fn write_with(&mut self, f: impl FnOnce(&mut String) -> Result<(), Bug>) {
        // If we've already failed, leave the first error in place.
        if let Ok(partial) = &mut self.built {
            if let Err(e) = f(partial) {
                self.built = Err(e);
            }
        }
    }

    /// Adds raw text to the being-built document
    ///
    /// `s` is added as raw text, after the newline ending the previous item.
    /// If `item` is subsequently called, the start of that item
    /// will immediately follow `s`.
    ///
    /// It is the responsibility of the caller to obey the metadocument syntax.
    /// In particular, `s` should end with a newline.
    /// No checks are performed.
    /// Incorrect use might lead to malformed documents, or later errors.
    pub fn push_raw_string(&mut self, s: &dyn Display) {
        self.raw(s);
    }

    /// Return a cursor, pointing to just after the last item (if any)
    pub fn cursor(&self) -> Cursor {
        // If building has failed, yield a deliberately out-of-range offset.
        let offset = self.built.as_ref().map(|b| b.len()).unwrap_or(usize::MAX);
        Cursor { offset }
    }

    /// Obtain the text of a section of the document
    ///
    /// Useful for making a signature.
    pub fn slice(&self, begin: Cursor, end: Cursor) -> Result<&str, Bug> {
        let whole = self.built.as_ref().map_err(Clone::clone)?;
        whole
            .get(begin.offset..end.offset)
            .ok_or_else(|| internal!("NetdocEncoder::slice out of bounds, Cursor mismanaged"))
    }

    /// Build the document into textual form
    pub fn finish(self) -> Result<String, Bug> {
        self.built
    }
}

impl Default for NetdocEncoder {
    fn default() -> Self {
        // We must open-code this because the actual encoder contains Result, which isn't Default
        NetdocEncoder::new()
    }
}

impl ItemArgument for str {
    fn write_arg_onto(&self, out: &mut ItemEncoder<'_>) -> Result<(), Bug> {
        // Implements this
        // https://gitlab.torproject.org/tpo/core/torspec/-/merge_requests/106
        //
        // A legal single argument is nonempty and consists solely of
        // ASCII graphic characters (so no spaces or newlines).
        let is_legal = !self.is_empty() && self.chars().all(|c| c.is_ascii_graphic());
        if !is_legal {
            return Err(internal!(
                "invalid netdoc keyword line argument syntax {:?}",
                self
            ));
        }
        out.args_raw_nonempty(&self);
        Ok(())
    }
}

impl ItemArgument for &str {
    fn write_arg_onto(&self, out: &mut ItemEncoder<'_>) -> Result<(), Bug> {
        // Delegate to the unsized `str` impl, which does the syntax checking
        (**self).write_arg_onto(out)
    }
}

impl<T: crate::NormalItemArgument> ItemArgument for T {
    fn write_arg_onto(&self, out: &mut ItemEncoder<'_>) -> Result<(), Bug> {
        // Render via `Display`, then reuse the `str` impl's syntax checking
        let rendered = self.to_string();
        <str as ItemArgument>::write_arg_onto(&rendered, out)
    }
}

impl ItemArgument for Iso8601TimeSp {
    // Unlike the macro'd formats, contains a space while still being one argument
    fn write_arg_onto(&self, out: &mut ItemEncoder<'_>) -> Result<(), Bug> {
        // Bypass the `str` syntax check: this argument legitimately contains a space
        let formatted = self.to_string();
        out.args_raw_nonempty(&formatted);
        Ok(())
    }
}

#[cfg(feature = "hs-pow-full")]
impl ItemArgument for tor_hscrypto::pow::v1::Seed {
    /// Encode the proof-of-work seed as a single unpadded-base64 argument
    fn write_arg_onto(&self, out: &mut ItemEncoder<'_>) -> Result<(), Bug> {
        // Serialize the seed to bytes first, then base64 the result
        let mut seed_bytes = vec![];
        tor_bytes::Writer::write(&mut seed_bytes, &self)?;
        out.add_arg(&Base64Unpadded::encode_string(&seed_bytes));
        Ok(())
    }
}

#[cfg(feature = "hs-pow-full")]
impl ItemArgument for tor_hscrypto::pow::v1::Effort {
    fn write_arg_onto(&self, out: &mut ItemEncoder<'_>) -> Result<(), Bug> {
        // Encoded as the underlying `u32` value
        let value: u32 = (*self).into();
        out.add_arg(&value);
        Ok(())
    }
}

impl<'n> ItemEncoder<'n> {
    /// Add a single argument.
    ///
    /// Convenience method that defers error handling, for use in infallible contexts.
    /// Consider whether to use `ItemArgument::write_arg_onto` directly, instead.
    ///
    /// If the argument is not in the correct syntax, a `Bug`
    /// error will be reported (later).
    //
    // This is not a hot path.  `dyn` for smaller code size.
    pub fn arg(mut self, arg: &dyn ItemArgument) -> Self {
        self.add_arg(arg);
        self
    }

    /// Add a single argument, to a borrowed `ItemEncoder`
    ///
    /// If the argument is not in the correct syntax, a `Bug`
    /// error will be reported (later).
    //
    // Needed for implementing `ItemArgument`
    pub(crate) fn add_arg(&mut self, arg: &dyn ItemArgument) {
        let () = arg
            .write_arg_onto(self)
            .unwrap_or_else(|err| self.doc.built = Err(err));
    }

    /// Add zero or more arguments, supplied as a single string.
    ///
    /// `args` should zero or more valid argument strings,
    /// separated by (single) spaces.
    /// This is not (properly) checked.
    /// Incorrect use might lead to malformed documents, or later errors.
    pub fn args_raw_string(&mut self, args: &dyn Display) {
        let args = args.to_string();
        if !args.is_empty() {
            self.args_raw_nonempty(&args);
        }
    }

    /// Add one or more arguments, supplied as a single string, without any checking
    fn args_raw_nonempty(&mut self, args: &dyn Display) {
        self.doc.raw(&format_args!(" {}", args));
    }

    /// Add an object to the item
    ///
    /// Checks that `keywords` is in the correct syntax.
    /// Doesn't check that it makes semantic sense for the position of the document.
    /// `data` will be PEM (base64) encoded.
    //
    // If keyword is not in the correct syntax, a `Bug` is stored in self.doc.
    pub fn object(
        self,
        keywords: &str,
        // Writeable isn't dyn-compatible
        data: impl tor_bytes::WriteableOnce,
    ) {
        use crate::parse::tokenize::object::*;

        self.doc.write_with(|out| {
            if keywords.is_empty() || !tag_keywords_ok(keywords) {
                return Err(internal!("bad object keywords string {:?}", keywords));
            }
            let data = {
                let mut bytes = vec![];
                data.write_into(&mut bytes)?;
                Base64::encode_string(&bytes)
            };
            let mut data = &data[..];
            writeln!(out, "\n{BEGIN_STR}{keywords}{TAG_END}").expect("write!");
            while !data.is_empty() {
                let (l, r) = if data.len() > BASE64_PEM_MAX_LINE {
                    data.split_at(BASE64_PEM_MAX_LINE)
                } else {
                    (data, "")
                };
                writeln!(out, "{l}").expect("write!");
                data = r;
            }
            // final newline will be written by Drop impl
            write!(out, "{END_STR}{keywords}{TAG_END}").expect("write!");
            Ok(())
        });
    }

    /// Finish encoding this item
    ///
    /// The item will also automatically be finished if the `ItemEncoder` is dropped.
    pub fn finish(self) {}
}

impl Drop for ItemEncoder<'_> {
    fn drop(&mut self) {
        // Every item line (or its object's final tag line) ends with a newline
        self.doc.raw(&"\n");
    }
}

/// Ordering, to be used when encoding network documents
///
/// Implemented for anything `Ord` (via a blanket impl).
///
/// Can also be implemented manually, for if a type cannot be `Ord`
/// (perhaps for trait coherence reasons).
pub trait EncodeOrd {
    /// Compare `self` and `other`
    ///
    /// As `Ord::cmp`.
    fn encode_cmp(&self, other: &Self) -> cmp::Ordering;
}
impl<T: Ord> EncodeOrd for T {
    fn encode_cmp(&self, other: &Self) -> cmp::Ordering {
        // Simply defer to the type's own total ordering
        Ord::cmp(self, other)
    }
}

/// Documents (or sub-documents) that can be encoded in the netdoc metaformat
pub trait NetdocEncodable {
    /// Append the document onto `out`
    ///
    /// "Unsigned": any signature items are the caller's responsibility.
    fn encode_unsigned(&self, out: &mut NetdocEncoder) -> Result<(), Bug>;
}

/// Collections of fields that can be encoded in the netdoc metaformat
///
/// Whole documents have structure; a `NetdocEncodableFields` does not.
pub trait NetdocEncodableFields {
    /// Append the fields onto `out`
    fn encode_fields(&self, out: &mut NetdocEncoder) -> Result<(), Bug>;
}

/// Items that can be encoded in network documents
pub trait ItemValueEncodable {
    /// Write the item's arguments, and any object, onto `out`
    ///
    /// `out` will have been freshly returned from [`NetdocEncoder::item`],
    /// i.e. the keyword has been written but nothing else.
    fn write_item_value_onto(&self, out: ItemEncoder) -> Result<(), Bug>;
}

/// An Object value that can be encoded into a netdoc
pub trait ItemObjectEncodable {
    /// The label (keyword(s) in `BEGIN` and `END`)
    fn label(&self) -> &str;

    /// Represent the actual value as bytes.
    ///
    /// The caller, not the object, is responsible for base64 encoding.
    //
    // This is not a tor_bytes::Writeable supertrait because tor_bytes's writer argument
    // is generic, which prevents many desirable manipulations of an `impl Writeable`.
    fn write_object_onto(&self, b: &mut Vec<u8>) -> Result<(), Bug>;
}

/// Builders for network documents.
///
/// This trait is a bit weird, because its `Self` type must contain the *private* keys
/// necessary to sign the document!
///
/// So it is implemented for "builders", not for documents themselves.
/// Some existing documents can be constructed only via these builders.
/// The newer approach is for documents to be transparent data, at the Rust level,
/// and to derive an encoder.
/// TODO this derive approach is not yet implemented!
///
/// Actual document types, which only contain the information in the document,
/// don't implement this trait.
pub trait NetdocBuilder {
    /// Build the document into textual form.
    ///
    /// `rng` supplies any randomness needed during signing.
    fn build_sign<R: RngCore + CryptoRng>(self, rng: &mut R) -> Result<String, EncodeError>;
}

impl ItemValueEncodable for Void {
    fn write_item_value_onto(&self, _out: ItemEncoder) -> Result<(), Bug> {
        // `Void` is uninhabited, so this method can never actually be called
        match *self {}
    }
}

impl ItemObjectEncodable for Void {
    fn label(&self) -> &str {
        // `Void` is uninhabited, so this method can never actually be called
        match *self {}
    }
    fn write_object_onto(&self, _: &mut Vec<u8>) -> Result<(), Bug> {
        match *self {}
    }
}

/// implement [`ItemValueEncodable`] for a particular tuple size
///
/// `$i` is the list of tuple indices (`0 1 2 ...`).
/// Each tuple element type must itself be an [`ItemArgument`],
/// and the elements are written out in order.
macro_rules! item_value_encodable_for_tuple {
    { $($i:literal)* } => { paste! {
        impl< $( [<T$i>]: ItemArgument, )* > ItemValueEncodable for ( $( [<T$i>], )* ) {
            fn write_item_value_onto(
                &self,
                // `allow(unused)`: the 0-tuple case writes no arguments at all
                #[allow(unused)]
                mut out: ItemEncoder,
            ) -> Result<(), Bug> {
                $(
                    <[<T$i>] as ItemArgument>::write_arg_onto(&self.$i, &mut out)?;
                )*
                Ok(())
            }
        }
    } }
}

// Implement `ItemValueEncodable` for tuples of 0 to 10 `ItemArgument`s
item_value_encodable_for_tuple! {}
item_value_encodable_for_tuple! { 0 }
item_value_encodable_for_tuple! { 0 1 }
item_value_encodable_for_tuple! { 0 1 2 }
item_value_encodable_for_tuple! { 0 1 2 3 }
item_value_encodable_for_tuple! { 0 1 2 3 4 }
item_value_encodable_for_tuple! { 0 1 2 3 4 5 }
item_value_encodable_for_tuple! { 0 1 2 3 4 5 6 }
item_value_encodable_for_tuple! { 0 1 2 3 4 5 6 7 }
item_value_encodable_for_tuple! { 0 1 2 3 4 5 6 7 8 }
item_value_encodable_for_tuple! { 0 1 2 3 4 5 6 7 8 9 }

#[cfg(test)]
mod test {
    // @@ begin test lint list maintained by maint/add_warning @@
    #![allow(clippy::bool_assert_comparison)]
    #![allow(clippy::clone_on_copy)]
    #![allow(clippy::dbg_macro)]
    #![allow(clippy::mixed_attributes_style)]
    #![allow(clippy::print_stderr)]
    #![allow(clippy::print_stdout)]
    #![allow(clippy::single_char_pattern)]
    #![allow(clippy::unwrap_used)]
    #![allow(clippy::unchecked_time_subtraction)]
    #![allow(clippy::useless_vec)]
    #![allow(clippy::needless_pass_by_value)]
    //! <!-- @@ end test lint list maintained by maint/add_warning @@ -->
    use super::*;
    use std::str::FromStr;

    use crate::types::misc::Iso8601TimeNoSp;
    use base64ct::{Base64Unpadded, Encoding};

    /// Check that the two time-argument types format as expected:
    /// `Iso8601TimeSp` with an internal space, `Iso8601TimeNoSp` without one.
    #[test]
    fn time_formats_as_args() {
        use crate::doc::authcert::AuthCertKwd as ACK;
        use crate::doc::netstatus::NetstatusKwd as NK;

        let t_sp = Iso8601TimeSp::from_str("2020-04-18 08:36:57").unwrap();
        let t_no_sp = Iso8601TimeNoSp::from_str("2021-04-18T08:36:57").unwrap();

        let mut encode = NetdocEncoder::new();
        encode.item(ACK::DIR_KEY_EXPIRES).arg(&t_sp);
        encode
            .item(NK::SHARED_RAND_PREVIOUS_VALUE)
            .arg(&"3")
            .arg(&"bMZR5Q6kBadzApPjd5dZ1tyLt1ckv1LfNCP/oyGhCXs=")
            .arg(&t_no_sp);

        let doc = encode.finish().unwrap();
        println!("{}", doc);
        assert_eq!(
            doc,
            r"dir-key-expires 2020-04-18 08:36:57
shared-rand-previous-value 3 bMZR5Q6kBadzApPjd5dZ1tyLt1ckv1LfNCP/oyGhCXs= 2021-04-18T08:36:57
"
        );
    }

    /// Build a whole authority certificate with the encoder
    /// (including PEM-wrapped objects and empty objects),
    /// check the exact text, and round-trip it through the parser.
    #[test]
    fn authcert() {
        use crate::doc::authcert::AuthCertKwd as ACK;
        use crate::doc::authcert::{AuthCert, UncheckedAuthCert};

        // c&p from crates/tor-llcrypto/tests/testvec.rs
        let pk_rsa = {
            let pem = "
MIGJAoGBANUntsY9boHTnDKKlM4VfczcBE6xrYwhDJyeIkh7TPrebUBBvRBGmmV+
PYK8AM9irDtqmSR+VztUwQxH9dyEmwrM2gMeym9uXchWd/dt7En/JNL8srWIf7El
qiBHRBGbtkF/Re5pb438HC/CGyuujp43oZ3CUYosJOfY/X+sD0aVAgMBAAE";
            Base64Unpadded::decode_vec(&pem.replace('\n', "")).unwrap()
        };

        let mut encode = NetdocEncoder::new();
        encode.item(ACK::DIR_KEY_CERTIFICATE_VERSION).arg(&3);
        encode
            .item(ACK::FINGERPRINT)
            .arg(&"9367f9781da8eabbf96b691175f0e701b43c602e");
        encode
            .item(ACK::DIR_KEY_PUBLISHED)
            .arg(&Iso8601TimeSp::from_str("2020-04-18 08:36:57").unwrap());
        encode
            .item(ACK::DIR_KEY_EXPIRES)
            .arg(&Iso8601TimeSp::from_str("2021-04-18 08:36:57").unwrap());
        encode
            .item(ACK::DIR_IDENTITY_KEY)
            .object("RSA PUBLIC KEY", &*pk_rsa);
        encode
            .item(ACK::DIR_SIGNING_KEY)
            .object("RSA PUBLIC KEY", &*pk_rsa);
        // Empty objects: just the BEGIN/END lines, no base64 body
        encode
            .item(ACK::DIR_KEY_CROSSCERT)
            .object("ID SIGNATURE", []);
        encode
            .item(ACK::DIR_KEY_CERTIFICATION)
            .object("SIGNATURE", []);

        let doc = encode.finish().unwrap();
        eprintln!("{}", doc);
        assert_eq!(
            doc,
            r"dir-key-certificate-version 3
fingerprint 9367f9781da8eabbf96b691175f0e701b43c602e
dir-key-published 2020-04-18 08:36:57
dir-key-expires 2021-04-18 08:36:57
dir-identity-key
-----BEGIN RSA PUBLIC KEY-----
MIGJAoGBANUntsY9boHTnDKKlM4VfczcBE6xrYwhDJyeIkh7TPrebUBBvRBGmmV+
PYK8AM9irDtqmSR+VztUwQxH9dyEmwrM2gMeym9uXchWd/dt7En/JNL8srWIf7El
qiBHRBGbtkF/Re5pb438HC/CGyuujp43oZ3CUYosJOfY/X+sD0aVAgMBAAE=
-----END RSA PUBLIC KEY-----
dir-signing-key
-----BEGIN RSA PUBLIC KEY-----
MIGJAoGBANUntsY9boHTnDKKlM4VfczcBE6xrYwhDJyeIkh7TPrebUBBvRBGmmV+
PYK8AM9irDtqmSR+VztUwQxH9dyEmwrM2gMeym9uXchWd/dt7En/JNL8srWIf7El
qiBHRBGbtkF/Re5pb438HC/CGyuujp43oZ3CUYosJOfY/X+sD0aVAgMBAAE=
-----END RSA PUBLIC KEY-----
dir-key-crosscert
-----BEGIN ID SIGNATURE-----
-----END ID SIGNATURE-----
dir-key-certification
-----BEGIN SIGNATURE-----
-----END SIGNATURE-----
"
        );

        // Round-trip: the encoder's output must be parseable as an AuthCert
        let _: UncheckedAuthCert = AuthCert::parse(&doc).unwrap();
    }
}