// dcbor_parse/parse.rs

1use bc_ur::prelude::*;
2use known_values::KnownValue;
3use logos::{Lexer, Logos, Span};
4
5use crate::{
6    Token,
7    error::{Error, Result},
8};
9
10/// Parses a dCBOR item from a string input.
11///
12/// This function takes a string slice containing a dCBOR diagnostic notation
13/// encoded value and attempts to parse it into a `CBOR` object. If the input
14/// contains extra tokens after a valid item, an error is returned.
15///
16/// # Arguments
17///
18/// * `src` - A string slice containing the dCBOR-encoded data.
19///
20/// # Returns
21///
22/// * `Ok(CBOR)` if parsing is successful and the input contains exactly one
23///   valid dCBOR item, which itself might be an atomic value like a number or
24///   string, or a complex value like an array or map.
25/// * `Err(Error)` if parsing fails or if extra tokens are found after the item.
26///
27/// # Errors
28///
29/// Returns an error if the input is invalid, contains extra tokens, or if any
30/// token cannot be parsed as expected.
31///
32/// # Example
33///
34/// ```rust
35/// # use dcbor_parse::parse_dcbor_item;
36/// let cbor = parse_dcbor_item("[1, 2, 3]").unwrap();
37/// assert_eq!(cbor.diagnostic(), "[1, 2, 3]");
38/// ```
39pub fn parse_dcbor_item(src: &str) -> Result<CBOR> {
40    let mut lexer = Token::lexer(src);
41    let first_token = expect_token(&mut lexer);
42    match first_token {
43        Ok(token) => parse_item_token(&token, &mut lexer).and_then(|cbor| {
44            if lexer.next().is_some() {
45                Err(Error::ExtraData(lexer.span()))
46            } else {
47                Ok(cbor)
48            }
49        }),
50        Err(e) => {
51            if e == Error::UnexpectedEndOfInput {
52                return Err(Error::EmptyInput);
53            }
54            Err(e)
55        }
56    }
57}
58
59/// Parses a dCBOR item from the beginning of a string and returns the parsed
60/// [`CBOR`] along with the number of bytes consumed.
61///
62/// Unlike [`parse_dcbor_item`], this function succeeds even if additional
63/// characters follow the first item. The returned index points to the first
64/// unparsed character after skipping any trailing whitespace or comments.
65///
66/// # Example
67///
68/// ```rust
69/// # use dcbor_parse::parse_dcbor_item_partial;
70/// # use dcbor::prelude::*;
71/// let (cbor, used) = parse_dcbor_item_partial("true )").unwrap();
72/// assert_eq!(cbor, CBOR::from(true));
73/// assert_eq!(used, 5);
74/// ```
75pub fn parse_dcbor_item_partial(src: &str) -> Result<(CBOR, usize)> {
76    let mut lexer = Token::lexer(src);
77    let first_token = expect_token(&mut lexer);
78    match first_token {
79        Ok(token) => parse_item_token(&token, &mut lexer).map(|cbor| {
80            let consumed = match lexer.next() {
81                Some(_) => lexer.span().start,
82                None => src.len(),
83            };
84            (cbor, consumed)
85        }),
86        Err(e) => {
87            if e == Error::UnexpectedEndOfInput {
88                Err(Error::EmptyInput)
89            } else {
90                Err(e)
91            }
92        }
93    }
94}
95
96//
97// === Private Functions ===
98//
99
100fn parse_item(lexer: &mut Lexer<'_, Token>) -> Result<CBOR> {
101    let token = expect_token(lexer)?;
102    parse_item_token(&token, lexer)
103}
104
105fn expect_token(lexer: &mut Lexer<'_, Token>) -> Result<Token> {
106    let span = lexer.span();
107    match lexer.next() {
108        Some(token_or_err) => match token_or_err {
109            Ok(token) => Ok(token),
110            Err(e) => {
111                if e.is_default() {
112                    Err(Error::UnrecognizedToken(span))
113                } else {
114                    Err(e)
115                }
116            }
117        },
118        None => Err(Error::UnexpectedEndOfInput),
119    }
120}
121
122fn parse_item_token(
123    token: &Token,
124    lexer: &mut Lexer<'_, Token>,
125) -> Result<CBOR> {
126    // Handle embedded lexing errors in token payloads
127    if let Token::ByteStringHex(Err(e)) = token {
128        return Err(e.clone());
129    }
130    if let Token::ByteStringBase64(Err(e)) = token {
131        return Err(e.clone());
132    }
133    if let Token::TagValue(Err(e)) = token {
134        return Err(e.clone());
135    }
136    if let Token::UR(Err(e)) = token {
137        return Err(e.clone());
138    }
139    if let Token::KnownValueNumber(Err(e)) = token {
140        return Err(e.clone());
141    }
142
143    match token {
144        Token::Bool(b) => Ok((*b).into()),
145        Token::Null => Ok(CBOR::null()),
146        Token::ByteStringHex(Ok(bytes)) => Ok(CBOR::to_byte_string(bytes)),
147        Token::ByteStringBase64(Ok(bytes)) => Ok(CBOR::to_byte_string(bytes)),
148        Token::Number(num) => Ok((*num).into()),
149        Token::NaN => Ok(f64::NAN.into()),
150        Token::Infinity => Ok(f64::INFINITY.into()),
151        Token::NegInfinity => Ok(f64::NEG_INFINITY.into()),
152        Token::String(s) => parse_string(s, lexer.span()),
153        Token::UR(Ok(ur)) => parse_ur(ur, lexer.span()),
154        Token::TagValue(Ok(tag_value)) => parse_number_tag(*tag_value, lexer),
155        Token::TagName(name) => parse_name_tag(name, lexer),
156        Token::KnownValueNumber(Ok(value)) => {
157            Ok(KnownValue::new(*value).into())
158        }
159        Token::KnownValueName(name) => {
160            if let Some(known_value) = known_value_for_name(name) {
161                Ok(known_value.into())
162            } else {
163                let span = lexer.span().start + 1..lexer.span().end - 1;
164                Err(Error::UnknownKnownValueName(name.clone(), span))
165            }
166        }
167        Token::Unit => Ok(KnownValue::new(0).into()),
168        Token::BracketOpen => parse_array(lexer),
169        Token::BraceOpen => parse_map(lexer),
170        _ => Err(Error::UnexpectedToken(
171            Box::new(token.clone()),
172            lexer.span(),
173        )),
174    }
175}
176
177fn parse_string(s: &str, span: Span) -> Result<CBOR> {
178    if s.starts_with('"') && s.ends_with('"') {
179        let s = &s[1..s.len() - 1];
180        Ok(s.into())
181    } else {
182        Err(Error::UnrecognizedToken(span))
183    }
184}
185
/// Looks up a registered CBOR tag by name in the global tags store.
/// Returns `None` when no tag with that name is registered.
fn tag_for_name(name: &str) -> Option<Tag> {
    with_tags!(|tags: &TagsStore| tags.tag_for_name(name))
}
189
/// Looks up a known value by name in the global known-values registry.
/// Returns `None` when the name is not registered.
fn known_value_for_name(name: &str) -> Option<KnownValue> {
    // NOTE(review): the `unwrap` assumes the KNOWN_VALUES registry is
    // always initialized before any parsing happens — confirm this
    // invariant holds for every entry point, or parsing a known-value
    // name will panic.
    let binding = known_values::KNOWN_VALUES.get();
    let known_values = binding.as_ref().unwrap();
    known_values.known_value_named(name).cloned()
}
195
196fn parse_ur(ur: &UR, span: Span) -> Result<CBOR> {
197    let ur_type = ur.ur_type_str();
198    if let Some(tag) = tag_for_name(ur_type) {
199        Ok(CBOR::to_tagged_value(tag, ur.cbor()))
200    } else {
201        Err(Error::UnknownUrType(
202            ur_type.to_string(),
203            span.start + 3..span.start + 3 + ur_type.len(),
204        ))
205    }
206}
207
208fn parse_number_tag(
209    tag_value: TagValue,
210    lexer: &mut Lexer<'_, Token>,
211) -> Result<CBOR> {
212    let item = parse_item(lexer)?;
213    match expect_token(lexer) {
214        Ok(Token::ParenthesisClose) => {
215            Ok(CBOR::to_tagged_value(tag_value, item))
216        }
217        Ok(_) => Err(Error::UnmatchedParentheses(lexer.span())),
218        Err(e) => {
219            if e == Error::UnexpectedEndOfInput {
220                return Err(Error::UnmatchedParentheses(lexer.span()));
221            }
222            Err(e)
223        }
224    }
225}
226
227fn parse_name_tag(name: &str, lexer: &mut Lexer<'_, Token>) -> Result<CBOR> {
228    let span = lexer.span().start..lexer.span().end - 1;
229    let item = parse_item(lexer)?;
230    match expect_token(lexer)? {
231        Token::ParenthesisClose => {
232            if let Some(tag) = tag_for_name(name) {
233                Ok(CBOR::to_tagged_value(tag, item))
234            } else {
235                Err(Error::UnknownTagName(name.to_string(), span))
236            }
237        }
238        _ => Err(Error::UnmatchedParentheses(lexer.span())),
239    }
240}
241
/// Parses the body of an array after its opening `[` has been consumed.
///
/// Two flags drive the `item , item , … ]` grammar:
/// * `awaits_comma` — true right after an item: only `,` or `]` is legal.
/// * `awaits_item` — true right after a `,`: a closing `]` is rejected,
///   so trailing commas are an error.
fn parse_array(lexer: &mut Lexer<'_, Token>) -> Result<CBOR> {
    let mut items = Vec::new();
    let mut awaits_comma = false;
    let mut awaits_item = false;

    loop {
        match expect_token(lexer)? {
            // The atomic-token arms below mirror `parse_item_token`; they
            // are duplicated here so each can carry the `!awaits_comma`
            // guard.
            Token::Bool(b) if !awaits_comma => {
                items.push(b.into());
                awaits_item = false;
            }
            Token::Null if !awaits_comma => {
                items.push(CBOR::null());
                awaits_item = false;
            }
            Token::ByteStringHex(Ok(bytes)) if !awaits_comma => {
                items.push(CBOR::to_byte_string(bytes));
                awaits_item = false;
            }
            Token::ByteStringBase64(Ok(bytes)) if !awaits_comma => {
                items.push(CBOR::to_byte_string(bytes));
                awaits_item = false;
            }
            Token::Number(num) if !awaits_comma => {
                items.push(num.into());
                awaits_item = false;
            }
            Token::NaN if !awaits_comma => {
                items.push(f64::NAN.into());
                awaits_item = false;
            }
            Token::Infinity if !awaits_comma => {
                items.push(f64::INFINITY.into());
                awaits_item = false;
            }
            Token::NegInfinity if !awaits_comma => {
                items.push(f64::NEG_INFINITY.into());
                awaits_item = false;
            }
            Token::String(s) if !awaits_comma => {
                items.push(parse_string(&s, lexer.span())?);
                awaits_item = false;
            }
            Token::UR(Ok(ur)) if !awaits_comma => {
                items.push(parse_ur(&ur, lexer.span())?);
                awaits_item = false;
            }
            // Composite items recurse; each sub-parser consumes through
            // its own closing delimiter.
            Token::TagValue(Ok(tag_value)) if !awaits_comma => {
                items.push(parse_number_tag(tag_value, lexer)?);
                awaits_item = false;
            }
            Token::TagName(name) if !awaits_comma => {
                items.push(parse_name_tag(&name, lexer)?);
                awaits_item = false;
            }
            Token::KnownValueNumber(Ok(value)) if !awaits_comma => {
                items.push(KnownValue::new(value).into());
                awaits_item = false;
            }
            Token::KnownValueName(name) if !awaits_comma => {
                if let Some(known_value) = known_value_for_name(&name) {
                    items.push(known_value.into());
                } else {
                    // NOTE(review): this reports the full token span,
                    // whereas `parse_item_token` trims one character from
                    // each end for the same error — confirm which span is
                    // intended.
                    return Err(Error::UnknownKnownValueName(
                        name,
                        lexer.span(),
                    ));
                }
                awaits_item = false;
            }
            Token::BracketOpen if !awaits_comma => {
                items.push(parse_array(lexer)?);
                awaits_item = false;
            }
            Token::BraceOpen if !awaits_comma => {
                items.push(parse_map(lexer)?);
                awaits_item = false;
            }
            Token::Comma if awaits_comma => {
                awaits_item = true;
            }
            // `]` closes the array, but not directly after a comma.
            Token::BracketClose if !awaits_item => {
                return Ok(items.into());
            }
            // A guard above failed: either an item appeared where a comma
            // was required, or the token cannot start an item at all.
            token => {
                if awaits_comma {
                    return Err(Error::ExpectedComma(lexer.span()));
                }
                return Err(Error::UnexpectedToken(
                    Box::new(token),
                    lexer.span(),
                ));
            }
        }
        // After an item the next legal token is `,` or `]`; after a comma
        // another item is required.
        awaits_comma = !awaits_item;
    }
}
339
/// Parses the body of a map after its opening `{` has been consumed.
///
/// Entries have the form `key: value`, separated by commas. `awaits_comma`
/// and `awaits_key` enforce the separator grammar the same way the flags
/// in `parse_array` do (no trailing comma allowed).
fn parse_map(lexer: &mut Lexer<'_, Token>) -> Result<CBOR> {
    let mut map = Map::new();
    let mut awaits_comma = false;
    let mut awaits_key = false;

    loop {
        // Running out of input inside a map is an unmatched-brace error.
        let token = match expect_token(lexer) {
            Ok(tok) => tok,
            Err(Error::UnexpectedEndOfInput) => {
                return Err(Error::UnmatchedBraces(lexer.span()));
            }
            Err(e) => {
                return Err(e);
            }
        };
        match token {
            // `}` closes the map, but not directly after a comma.
            Token::BraceClose if !awaits_key => {
                return Ok(map.into());
            }
            Token::Comma if awaits_comma => {
                awaits_key = true;
            }
            // Anything else must start a `key: value` entry.
            _ => {
                if awaits_comma {
                    return Err(Error::ExpectedComma(lexer.span()));
                }
                let key = parse_item_token(&token, lexer)?;
                // NOTE(review): a lexing error or end-of-input at this
                // point is collapsed into `ExpectedColon` — confirm that
                // discarding the underlying error is intended.
                if let Ok(Token::Colon) = expect_token(lexer) {
                    let value = match parse_item(lexer) {
                        // A `}` in value position (e.g. `{1:}`) is
                        // remapped to `ExpectedMapKey`.
                        Err(Error::UnexpectedToken(token, span))
                            if *token == Token::BraceClose =>
                        {
                            return Err(Error::ExpectedMapKey(span));
                        }
                        other => other?,
                    };
                    map.insert(key, value);
                    awaits_key = false;
                } else {
                    return Err(Error::ExpectedColon(lexer.span()));
                }
            }
        }
        awaits_comma = !awaits_key;
    }
}
385}