sdp_rs/tokenizers/key_value.rs

use crate::TResult;

/// Tokenizer for an SDP line of the form `C=key:value` (for example `b=CT:128`),
/// where the const generic `C` is the line-type character.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Tokenizer<'a, const C: char> {
    pub key: &'a str,
    pub value: &'a str,
}

impl<'a, const C: char> Tokenizer<'a, C> {
    pub fn tokenize(part: &'a str) -> TResult<'a, Self> {
        use crate::parser_utils::*;
        use nom::{bytes::complete::tag, sequence::preceded};

        // Strip the `C=` prefix and take the rest of the line.
        let (rem, key_with_value) = preceded(tag(Self::prefix().as_str()), until_newline)(part)?;
        // Split at the `:` separator: the part before it is the key, the remainder is the value.
        let (value, key) = until_stopbreak_of(":")(key_with_value)?;

        Ok((rem, (key, value).into()))
    }

    //TODO: this should be generated by a concat-related macro, but that is not currently
    //possible on stable; revisit once const generics support on stable expands
    fn prefix() -> String {
        format!("{}=", C)
    }
}

impl<'a, const C: char> From<(&'a str, &'a str)> for Tokenizer<'a, C> {
    fn from((key, value): (&'a str, &'a str)) -> Self {
        Self { key, value }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn tokenizer() {
        let key_value = "b=CT:128\r\nsomething";

        assert_eq!(
            Tokenizer::<'b'>::tokenize(key_value),
            Ok((
                "something",
                Tokenizer {
                    key: "CT",
                    value: "128",
                }
            )),
        );
    }
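
    // Illustrative sketch of the same tokenizer applied to an attribute-style line; it
    // assumes `a=` lines share the `key:value` shape handled above and that `until_newline`
    // consumes the trailing CRLF, as the previous test suggests.
    #[test]
    fn tokenizer_attribute_line() {
        let key_value = "a=rtpmap:96 VP8/90000\r\n";

        assert_eq!(
            Tokenizer::<'a'>::tokenize(key_value),
            Ok((
                "",
                Tokenizer {
                    key: "rtpmap",
                    value: "96 VP8/90000",
                }
            )),
        );
    }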
}