pub trait StrUtils {
    /// Splits at the first whitespace character, returning the text before it
    /// and the text after it. If there is no whitespace, or the string starts
    /// with whitespace, the whole string is returned with an empty remainder.
    fn split_first_whitespace(&self) -> (&str, &str);

    /// Returns true when the string consists only of whitespace characters.
    fn is_only_whitespace(&self) -> bool;
}

impl StrUtils for str {
    fn split_first_whitespace(&self) -> (&str, &str) {
        // Scan by characters (not bytes) so multi-byte UTF-8 input is never
        // sliced off a char boundary.
        match self.char_indices().find(|&(_, c)| c.is_whitespace()) {
            // Only split when the whitespace is not the very first character.
            Some((i, c)) if i > 0 => (&self[..i], &self[i + c.len_utf8()..]),
            _ => (self, ""),
        }
    }

    fn is_only_whitespace(&self) -> bool {
        // True when every character is whitespace; an empty string passes
        // trivially.
        self.chars().all(char::is_whitespace)
    }
}

/// Embeds a test fixture from `resources/tests/` into the binary at compile time.
#[macro_export]
macro_rules! include_test_file {
    ($filename:expr) => {
        include_str!(concat!("../resources/tests/", $filename))
    };
}

/// Tokenizes `$tail` with `Lexer::tokenize`. With a second argument, any
/// tokens produced are appended to `$ret`; tokenization errors are ignored.
#[macro_export]
macro_rules! recursive_tokenize {
    ($tail:expr) => {
        Lexer::tokenize($tail)
    };
    ($tail:expr, $ret:expr) => {
        if !$tail.is_empty() {
            if let Ok(tail_tokens) = Lexer::tokenize($tail) {
                for token in tail_tokens {
                    $ret.push(token);
                }
            }
        }
    };
}

/// Seeds a token vector with `$init`, tokenizes `$tail` into it, and then
/// `return`s `Ok(ret)` from the enclosing function (the `return` escapes the
/// caller, not just this block).
#[macro_export]
macro_rules! recursive_tokenize_with_init {
    ($init:expr, $tail:expr) => {{
        let mut ret = vec![$init];
        recursive_tokenize!($tail, ret);
        return Ok(ret);
    }};
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test_split_first_whitespace() {
        let text = r"\b I'm a bold string";
        let split = text.split_first_whitespace();
        assert_eq!(split, (r"\b", r"I'm a bold string"));
    }
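
    // Extra checks for the helpers completed above; they assume the semantics
    // implemented in this file (no split when there is no whitespace, and
    // `is_only_whitespace` inspecting every character).
    #[test]
    fn test_split_first_whitespace_without_whitespace() {
        let text = "unbroken";
        assert_eq!(text.split_first_whitespace(), ("unbroken", ""));
    }

    #[test]
    fn test_is_only_whitespace() {
        assert!(" \t\n".is_only_whitespace());
        assert!(!"not whitespace".is_only_whitespace());
    }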
}