// sdp_rs/tokenizers/time/repeat.rs
use crate::TResult;

/// Tokenized form of an SDP repeat-times line (`r=`), holding raw,
/// unparsed slices of the original input rather than parsed values.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Tokenizer<'a> {
    /// Repeat interval field, e.g. `"604800"` or `"7d"`.
    pub interval: &'a str,
    /// Active duration field, e.g. `"3600"` or `"1h"`.
    pub duration: &'a str,
    /// Offsets from start-time; the tokenizer always produces at least one.
    pub offsets: Vec<&'a str>,
}
9
10impl<'a> Tokenizer<'a> {
11 pub fn tokenize(part: &'a str) -> TResult<'a, Self> {
12 use crate::parser_utils::*;
13 use nom::{bytes::complete::tag, multi::many0, sequence::preceded};
14
15 let (rem, line) = preceded(tag("r="), until_newline)(part)?;
16 let (line_rem, interval) = until_space(line)?;
17 let (offsets, duration) = until_space(line_rem)?;
18 let (offset, mut offsets) = many0(until_space)(offsets)?;
19
20 offsets.push(offset);
21
22 Ok((rem, (interval, duration, offsets).into()))
23 }
24}
25
26impl<'a> From<(&'a str, &'a str, Vec<&'a str>)> for Tokenizer<'a> {
27 fn from((interval, duration, offsets): (&'a str, &'a str, Vec<&'a str>)) -> Self {
28 Self {
29 interval,
30 duration,
31 offsets,
32 }
33 }
34}
35
36impl<'a> From<(&'a str, &'a str)> for Tokenizer<'a> {
37 fn from((interval, duration): (&'a str, &'a str)) -> Self {
38 Self {
39 interval,
40 duration,
41 offsets: vec![],
42 }
43 }
44}
45
#[cfg(test)]
mod tests {
    use super::*;

    // Single offset. (Plain string literals: single-argument `concat!` was
    // redundant. All tests use the assert_eq!(actual, expected) order.)
    #[test]
    fn tokenizer1() {
        let repeat = "r=604800 3600 0\r\nsomething";

        assert_eq!(
            Tokenizer::tokenize(repeat),
            Ok(("something", ("604800", "3600", vec!["0"]).into())),
        );
    }

    // Multiple offsets.
    #[test]
    fn tokenizer2() {
        let repeat = "r=604800 3600 0 90000\r\nsomething";

        assert_eq!(
            Tokenizer::tokenize(repeat),
            Ok(("something", ("604800", "3600", vec!["0", "90000"]).into())),
        );
    }

    // Typed-time shorthand units (RFC 4566, e.g. "7d", "1h") are kept verbatim.
    #[test]
    fn tokenizer3() {
        let repeat = "r=7d 1h 0 25h\r\nsomething";

        assert_eq!(
            Tokenizer::tokenize(repeat),
            Ok(("something", ("7d", "1h", vec!["0", "25h"]).into())),
        );
    }
}