tor_netdoc/
parse2.rs

1//! New netdoc parsing arrangements, with `derive`
2//!
3//! # Parsing principles
4//!
5//! A parseable network document is a type implementing [`NetdocParseable`],
6//! usually via the
7//! [`NetdocParseable` derive-deftly macro](crate::derive_deftly_template_NetdocParseable).
8//!
9//! A document type is responsible for recognising its own heading item.
10//! Its parser will also be told of the other structural items that it should not consume.
11//! The structural lines can then be used to pass control to the appropriate parser.
12//!
13//! A "structural item" is a netdoc item that defines the structure of the document.
14//! This includes the intro items for whole documents,
15//! the items that introduce document sections
16//! (which we model by treating the section as a sub-document)
17//! and signature items (which introduce the signatures at the end of the document,
18//! and after which no non-signature items may appear).
19//!
20//! # Ordering
21//!
22//! We don't always parse things into a sorted order.
23//! Sorting will be done when assembling documents, before outputting.
24// TODO we don't implement deriving output yet.
25//!
26//! # Types, and signature handling
27//!
28//! Most top-level network documents are signed somehow.
29//! In this case there are three types:
30//!
31//!   * **`FooSigned`**: a signed `Foo`, with its signatures, not yet verified.
32//!     Implements [`NetdocSigned`],
33//!     typically by invoking the
34//!     [`NetdocSigned` derive macro](crate::derive_deftly_template_NetdocSigned)
35//!     on `Foo`.
36//!
37//!     Type-specific methods are provided for verification,
38//!     to obtain a `Foo`.
39//!
40//!   * **`Foo`**: the body data for the document.
41//!     This doesn't contain any signatures.
42//!     Having one of these to play with means signatures have already been validated.
43//!     Implement `NetdocParseable`, via
44//!     [derive](crate::derive_deftly_template_NetdocParseable).
45//!
46//!   * **`FooSignatures`**: the signatures for a `Foo`.
47//!     Implement `NetdocParseable`, via
48//!     [derive](crate::derive_deftly_template_NetdocParseable),
49//!     with `#[deftly(netdoc(signatures))]`.
50//!
51//! # Relationship to tor_netdoc::parse
52//!
53//! This is a completely new parsing approach, based on different principles.
54//! The key principle is the recognition of "structural keywords",
55//! recursively within a parsing stack, via the [`NetdocParseable`] trait.
56//!
57//! This allows the parser to be derived.  We have type-driven parsing
58//! of whole Documents, Items, and their Arguments and Objects,
59//! including of their multiplicity.
60//!
61//! The different keyword handling means we can't use most of the existing lexer,
62//! and need new item parsing API:
63//!
64//!  * [`NetdocParseable`] trait.
65//!  * [`KeywordRef`] type.
66//!  * [`ItemStream`], [`UnparsedItem`], [`ArgumentStream`], [`UnparsedObject`].
67//!
68//! The different error handling means we have our own error types.
69//! (The crate's existing parse errors have information that we don't track,
70//! and is also a portmanteau error for parsing, writing, and other functions.)
71//!
72//! Document signing is handled in a more abstract way.
73//!
74//! Some old netdoc constructs are not supported.
75//! For example, the obsolete `opt` prefix on safe-to-ignore Items.
76//! The parser may make different decisions about netdocs with anomalous item ordering.
77
78#[doc(hidden)]
79#[macro_use]
80pub mod internal_prelude;
81
82#[macro_use]
83mod structural;
84
85#[macro_use]
86mod derive;
87
88mod error;
89mod impls;
90pub mod keyword;
91mod lex;
92mod lines;
93pub mod multiplicity;
94mod signatures;
95mod traits;
96
97#[cfg(feature = "plain-consensus")]
98pub mod poc;
99
100use internal_prelude::*;
101
102pub use error::{ArgumentError, ErrorProblem, ParseError, UnexpectedArgument, VerifyFailed};
103pub use impls::raw_data_object;
104pub use impls::times::NdaSystemTimeDeprecatedSyntax;
105pub use keyword::KeywordRef;
106pub use lex::{ArgumentStream, ItemStream, NoFurtherArguments, UnparsedItem, UnparsedObject};
107pub use lines::{Lines, Peeked, StrExt};
108pub use signatures::{
109    SignatureHashInputs, SignatureItemParseable, check_validity_time,
110    check_validity_time_tolerance, sig_hash_methods,
111};
112pub use structural::{StopAt, StopPredicate};
113pub use traits::{
114    ItemArgumentParseable, ItemObjectParseable, ItemValueParseable, NetdocParseable,
115    NetdocParseableFields, NetdocSigned,
116};
117
118#[doc(hidden)]
119pub use derive::netdoc_parseable_derive_debug;
120
121pub(crate) use internal_prelude::EP;
122
123//---------- input ----------
124
125/// Options for parsing
126///
127/// Specific document and type parsing methods may use these parameters
128/// to control their parsing behaviour at run-time.
//
// NOTE(review): `Default` is derived via `educe` so that the
// `retain_unknown_values` field can have a non-trivial default expression.
129#[derive(educe::Educe, Debug, Clone)]
// We use `_private_non_exhaustive` instead of `#[non_exhaustive]`, so
// clippy's lint about manually emulating `#[non_exhaustive]` must be allowed.
130#[allow(clippy::manual_non_exhaustive)]
131#[educe(Default)]
132pub struct ParseOptions {
133    /// Retain unknown values?
134    ///
135    /// Some field types, especially for flags fields, have the capability to retain
136    /// unknown flags.  But, whereas known flags can be represented as single bits,
137    /// representing unknown flags involves allocating and copying strings.
138    /// Unless the document is to be reproduced, this is a waste of effort.
139    ///
140    /// Each document field type affected by this option should store the unknowns
141    /// as `Unknown<HashSet<String>>` or similar.
142    ///
143    /// This feature should only be used where performance is important.
144    /// For example, it is useful for types that appear in md consensus routerdescs,
145    /// but less useful for types that appear only in a netstatus preamble.
146    ///
147    /// This is currently used for router flags.
148    #[educe(Default(expression = "Unknown::new_discard()"))]
149    pub retain_unknown_values: Unknown<()>,
150
151    // Like `#[non_exhaustive]`, but doesn't prevent use of struct display syntax with `..`
152    #[doc(hidden)]
153    _private_non_exhaustive: (),
154}
155
156/// Input to a network document top-level parsing operation
///
/// Bundles the document text with the context (filename, options)
/// needed by the top-level `parse_netdoc*` entrypoints.
/// Construct one with [`ParseInput::new`].
157pub struct ParseInput<'s> {
158    /// The actual document text
159    input: &'s str,
160    /// Filename (for error reporting)
///
/// Only ever copied into `ParseError::file`; not used to read anything.
161    file: &'s str,
162    /// Parsing options
163    options: ParseOptions,
164}
165
166impl<'s> ParseInput<'s> {
167    /// Prepare to parse an input string
168    ///
169    /// `file` is used only for error reporting.
170    /// Parsing options are set to their defaults.
171    pub fn new(input: &'s str, file: &'s str) -> Self {
172        let options = ParseOptions::default();
173        Self {
174            input,
175            file,
176            options,
177        }
178    }
179}
176
177//---------- parser ----------
178
179/// Common code for `parse_netdoc` and `parse_netdoc_multiple`
180///
181/// Creates the `ItemStream`, calls `parse_completely`, and handles errors:
182/// any `ErrorProblem` is decorated with the document type, filename,
183/// and the stream's current line number, to make a full `ParseError`.
184fn parse_internal<T, D: NetdocParseable>(
185    input: &ParseInput<'_>,
186    parse_completely: impl FnOnce(&mut ItemStream) -> Result<T, ErrorProblem>,
187) -> Result<T, ParseError> {
188    let mut items = ItemStream::new(input)?;
189    parse_completely(&mut items).map_err(|problem| {
190        // Struct expression fields are evaluated in the order written, so we must
191        // obtain the column *before* `problem` is moved into the struct.
192        let column = problem.column();
193        ParseError {
194            problem,
195            doctype: D::doctype_for_error(),
196            file: input.file.to_owned(),
197            lno: items.lno_for_error(),
198            column,
199        }
200    })
201}
195
196/// Parse a network document - **toplevel entrypoint**
197///
198/// Parses exactly one document; trailing material after it is an error.
199pub fn parse_netdoc<D: NetdocParseable>(input: &ParseInput<'_>) -> Result<D, ParseError> {
200    parse_internal::<_, D>(input, |items| {
201        let doc = D::from_items(items, StopAt(false))?;
202        // The input must contain exactly one document: anything left over is an error.
203        match items.peek_keyword()? {
204            None => Ok(doc),
205            Some(_) => Err(EP::MultipleDocuments),
206        }
207    })
}
206
207/// Parse multiple concatenated network documents - **toplevel entrypoint**
208///
209/// Consumes documents one after another until the input is exhausted.
210pub fn parse_netdoc_multiple<D: NetdocParseable>(
211    input: &ParseInput<'_>,
212) -> Result<Vec<D>, ParseError> {
213    parse_internal::<_, D>(input, |items| {
214        let mut docs = Vec::new();
215        loop {
216            // Stop when no further items remain in the stream.
217            if items.peek_keyword()?.is_none() {
218                break;
219            }
220            docs.push(D::from_items(items, StopAt(false))?);
221        }
222        Ok(docs)
223    })
}
220
221/// Parse multiple network documents, also returning their offsets  - **toplevel entrypoint**
222///
223/// Each returned document is accompanied by the byte offsets of its start and end.
224///
225/// (The netdoc metaformat does not allow anything in between subsequent documents in a file,
226/// so the end of one document is the start of the next.)
227//
228// This returns byte offsets rather than string slices,
229// because the caller can always convert the offsets into string slices,
230// but it is not straightforward to convert string slices borrowed from some input string
231// into offsets, in a way that is obviously correct without nightly `str::substr_range`.
232pub fn parse_netdoc_multiple_with_offsets<D: NetdocParseable>(
233    input: &ParseInput<'_>,
234) -> Result<Vec<(D, usize, usize)>, ParseError> {
235    parse_internal::<_, D>(input, |items| {
236        let mut docs = Vec::new();
237        // Each iteration consumes one whole document from the stream;
238        // its extent is the stream position before and after parsing it.
239        while items.peek_keyword()?.is_some() {
240            let start = items.byte_position();
241            let doc = D::from_items(items, StopAt(false))?;
242            let end = items.byte_position();
243            docs.push((doc, start, end));
244        }
245        Ok(docs)
246    })
}