sdp_rs/tokenizers/time/zone.rs

use super::zone_part;
use crate::TResult;

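/// Tokenizer for the SDP time zone adjustment line (`z=`), e.g.
/// `z=3730928400 -1h 3749680800 0`: one or more `<adjustment time> <offset>`
/// pairs, each handled by `zone_part::Tokenizer`.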
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Tokenizer<'a> {
    pub parts: Vec<zone_part::Tokenizer<'a>>,
}

impl<'a> Tokenizer<'a> {
    /// Tokenizes a `z=` line: strips the `z=` prefix, takes the rest of the
    /// line up to the line ending, and parses one or more zone parts from it.
    /// Whatever follows the line ending is returned as the remainder.
    pub fn tokenize(part: &'a str) -> TResult<'a, Self> {
        use crate::parser_utils::*;
        use nom::{bytes::complete::tag, multi::many1, sequence::preceded};

        let (rem, line) = preceded(tag("z="), until_newline)(part)?;
        let (_, parts) = many1(zone_part::Tokenizer::tokenize)(line)?;

        Ok((rem, Self { parts }))
    }
}

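// Convenience conversion from a single (adjustment, offset) pair, as used in
// the tests below.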
impl<'a> From<(&'a str, &'a str)> for Tokenizer<'a> {
    fn from((adjustment, offset): (&'a str, &'a str)) -> Self {
        Self {
            parts: vec![(adjustment, offset).into()],
        }
    }
}

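// Conversion from a list of zone parts, or of anything convertible into one,
// e.g. a Vec of (adjustment, offset) tuples.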
impl<'a, T: Into<zone_part::Tokenizer<'a>>> From<Vec<T>> for Tokenizer<'a> {
    fn from(parts: Vec<T>) -> Self {
        Self {
            parts: parts.into_iter().map(Into::into).collect(),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn tokenizer1() {
        let zone = concat!("z=3730928400 -1h\r\nsomething");

        assert_eq!(
            Tokenizer::tokenize(zone),
            Ok(("something", ("3730928400", "-1h").into())),
        );
    }

    #[test]
    fn tokenizer2() {
        let zone = concat!("z=3730928400 -1h 3749680800 0\r\nsomething");

        assert_eq!(
            Tokenizer::tokenize(zone),
            Ok((
                "something",
                vec![("3730928400", "-1h"), ("3749680800", "0")].into()
            )),
        );
    }

    #[test]
    fn tokenizer3() {
        let zone = concat!("z=3730928400 -1h 3749680800 0h\r\nsomething");

        assert_eq!(
            Tokenizer::tokenize(zone),
            Ok((
                "something",
                vec![("3730928400", "-1h"), ("3749680800", "0h")].into()
            )),
        );
    }
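
    // Sketch of an additional negative check (an assumption, not part of the
    // original suite; assumes TResult is a Result-like nom type so `is_err()`
    // applies): a line without the "z=" prefix should fail to tokenize.
    #[test]
    fn tokenizer_missing_prefix() {
        assert!(Tokenizer::tokenize("t=0 0\r\nsomething").is_err());
    }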
}