sdp_rs/tokenizers/time/active.rs

use crate::TResult;

/// Tokenizer for the SDP time ("t=") line: it holds the raw start and stop
/// fields as string slices, without interpreting them as timestamps.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Tokenizer<'a> {
    pub start: &'a str,
    pub stop: &'a str,
}

impl<'a> Tokenizer<'a> {
    /// Consumes a leading "t=<start> <stop>\r\n" line and returns the
    /// remaining input together with the tokenized start/stop pair.
    pub fn tokenize(part: &'a str) -> TResult<'a, Self> {
        use crate::parser_utils::*;
        use nom::{bytes::complete::tag, sequence::preceded};

        // Strip the "t=" prefix and take the rest of the line up to CRLF.
        let (rem, line) = preceded(tag("t="), until_newline)(part)?;
        // Split the line at the first space: before it is start, after it is stop.
        let (stop, start) = until_space(line)?;

        Ok((rem, (start, stop).into()))
    }
}

impl<'a> From<(&'a str, &'a str)> for Tokenizer<'a> {
    fn from((start, stop): (&'a str, &'a str)) -> Self {
        Self { start, stop }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn tokenizer() {
        let time = concat!("t=3724394400 3724398000\r\nsomething");

        assert_eq!(
            Tokenizer::tokenize(time),
            Ok(("something", ("3724394400", "3724398000").into())),
        );
    }
}
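
This file leans on two helpers from `crate::parser_utils` (`until_newline`, `until_space`) and on the crate-wide `TResult` alias, none of which are shown above. The sketch below is a plausible reconstruction, assuming they are thin wrappers over nom combinators with `&str` input; the actual definitions in sdp_rs may differ.

// Hypothetical reconstruction of crate::parser_utils; the real helpers in
// sdp_rs may be defined differently.
use nom::{
    bytes::complete::{tag, take_until},
    sequence::terminated,
    IResult,
};

// Assumed: the crate-wide alias is just nom's IResult over &str input.
pub type TResult<'a, T> = IResult<&'a str, T>;

// Everything up to the next CRLF, consuming the CRLF itself.
pub fn until_newline(input: &str) -> TResult<'_, &str> {
    terminated(take_until("\r\n"), tag("\r\n"))(input)
}

// Everything up to the next space, consuming the space itself.
pub fn until_space(input: &str) -> TResult<'_, &str> {
    terminated(take_until(" "), tag(" "))(input)
}

Under these assumptions the test above holds: `until_newline` leaves "something" as the remainder, and `until_space("3724394400 3724398000")` returns `("3724398000", "3724394400")`, i.e. `(stop, start)`.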