1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
use std::{fmt::Debug, fs, path::Path};
use regex::Regex;
/// Splits one line of Rail source into tokens.
///
/// A token is either a double-quoted string (which may contain spaces
/// and `#`) or a maximal run of non-whitespace characters. Everything
/// from the first unquoted token that starts with `#` to the end of the
/// line is discarded as a comment, and literal `\n` escape sequences
/// inside tokens are expanded to real newlines.
fn tokenize(line: &str) -> Vec<String> {
    // NOTE(review): the regex is recompiled on every call; if this shows
    // up in profiles, hoist it into a std::sync::LazyLock.
    let re: Regex = Regex::new(r#"(".*?"|\S*)"#).unwrap();
    // Defensive: treat any embedded newline as plain whitespace so a
    // multi-line argument still tokenizes as a single logical line.
    let line = line.replace('\n', " ");
    // find_iter yields the overall match directly — equivalent to (but
    // simpler than) captures_iter + taking capture group 0.
    re.find_iter(&line)
        .map(|mat| mat.as_str())
        .take_while(|s| !s.starts_with('#'))
        .filter(|s| !s.is_empty())
        .map(|s| s.replace("\\n", "\n"))
        .collect()
}
pub fn from_rail_source(source: String) -> Vec<String> {
source.split('\n').flat_map(tokenize).collect()
}
/// Reads a Rail source file and tokenizes its contents.
///
/// # Panics
/// Panics if the file at `path` cannot be read.
pub fn from_rail_source_file<P>(path: P) -> Vec<String>
where
    P: AsRef<Path> + Debug,
{
    // Build the panic message lazily so no format!/allocation happens on
    // the success path (clippy: expect_fun_call). Borrow `path` for the
    // read so it is still available for the message.
    let source =
        fs::read_to_string(&path).unwrap_or_else(|_| panic!("Error reading file {:?}", path));
    from_rail_source(source)
}
/// Loads a library list file: each non-empty, non-`#` line names a file
/// relative to the list's own directory. `.rail` entries are tokenized
/// as Rail source; `.txt` entries are loaded recursively as nested
/// library lists; any other extension is silently skipped.
///
/// # Panics
/// Panics if the list file cannot be read, or if `path` has no parent
/// directory (e.g. a bare root path).
pub fn from_lib_list<P>(path: P) -> Vec<String>
where
    P: AsRef<Path> + Debug,
{
    let path: &Path = path.as_ref();
    // Entries are resolved relative to the directory containing the list.
    let base_dir = path.parent().unwrap();
    fs::read_to_string(path)
        .unwrap_or_else(|_| panic!("Unable to load library list file {:?}", path))
        .split('\n')
        .filter(|s| !s.is_empty() && !s.starts_with('#'))
        .map(|filepath| base_dir.join(filepath).to_string_lossy().to_string())
        // flat_map replaces the original Option + filter(is_some) +
        // unwrap dance: unknown extensions contribute nothing.
        .flat_map(|file| {
            if file.ends_with(".rail") {
                from_rail_source_file(file)
            } else if file.ends_with(".txt") {
                from_lib_list(file)
            } else {
                Vec::new()
            }
        })
        .collect::<Vec<_>>()
}
#[test]
pub fn token_test() {
    // Plain whitespace-separated tokens.
    assert_eq!(vec!["1", "1", "+"], tokenize("1 1 +"));
}
#[test]
pub fn token_test_2() {
    // Quoted tokens keep their surrounding quotes.
    assert_eq!(vec!["\"hello\"", "\"there\""], tokenize("\"hello\" \"there\""));
}
#[test]
pub fn token_test_3() {
    // A quoted string with an interior space stays one token.
    assert_eq!(vec!["\"hello there\""], tokenize("\"hello there\""));
}
#[test]
pub fn token_test_4() {
    // Leading/trailing spaces inside the quotes are preserved.
    assert_eq!(vec!["\" hello there \""], tokenize("\" hello there \""));
}
#[test]
pub fn token_test_5() {
    // Quoted and bare tokens can be freely mixed on one line.
    assert_eq!(
        vec!["1", "2", "\" hello three \"", "4", "5"],
        tokenize("1 2 \" hello three \" 4 5")
    );
}
#[test]
pub fn token_test_6() {
    // `#` inside a quoted string is literal; an unquoted `#` starts a
    // comment that discards the rest of the line.
    assert_eq!(
        vec!["1", "2", "\"a # in a string is fine\""],
        tokenize("1 2 \"a # in a string is fine\" #but at the end is ignored")
    );
}
#[test]
pub fn token_test_7() {
    // Brackets and words are ordinary bare tokens.
    assert_eq!(
        vec!["1", "1", "[", "+", "]", "call", ".s"],
        tokenize("1 1 [ + ] call .s")
    );
}