use crate::lexer::{LexingError, Token};
use logos::Logos;

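/// Strips ignored characters (whitespace, commas, and comments) from a
/// GraphQL document, emitting a single space only where two adjacent tokens
/// would otherwise lex as one.
///
/// The input does not have to be a fully parsable document; lexing failures
/// such as an unterminated string surface as a [`LexingError`].
///
/// A minimal usage sketch (the crate path is assumed here; adjust it to the
/// actual crate name):
///
/// ```ignore
/// use graphql_minify::minify; // hypothetical crate name
///
/// assert_eq!(minify("query { a, b }").unwrap(), "query{a b}");
/// ```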
pub fn minify<T: AsRef<str>>(value: T) -> Result<String, LexingError> {
    let mut lexer = Token::lexer(value.as_ref());
    let mut result = String::new();
    let mut last_token = None;

    while let Some(token) = lexer.next() {
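        // With logos, `lexer.next()` yields `Result<Token, LexingError>`;
        // propagate lexing errors (e.g. an unterminated string) to the caller.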
        let token = token?;

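        // Emit a single space only where dropping the ignored characters
        // would otherwise fuse the previous token and this one together.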
        if needs_space(&token, &last_token) {
            result.push(' ');
        }

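        // Block strings are re-emitted by `parse_block_string`, which
        // normalizes line endings and strips common indentation (see the
        // block string tests below); all other tokens are copied through
        // verbatim from the source.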
        match token {
            Token::BlockStringDelimiter => result.push_str(&token.parse_block_string(&mut lexer)),
            _ => result.push_str(lexer.slice()),
        }
        last_token = Some(token);
    }

    Ok(result)
}

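/// Returns `true` for tokens that carry free-form content (names, numbers,
/// strings, and the like); two of these in a row must stay separated by a
/// space. Variables and directives are grouped with the punctuators because
/// their leading `$` / `@` already separates them from a preceding token.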
fn is_non_punctuator(token: &Token) -> bool {
    !matches!(
        token,
        Token::BraceOpen
            | Token::BraceClose
            | Token::ParenOpen
            | Token::ParenClose
            | Token::BracketOpen
            | Token::BracketClose
            | Token::Colon
            | Token::Equals
            | Token::Exclamation
            | Token::Question
            | Token::Ellipsis
            | Token::Ampersand
            | Token::Pipe
            | Token::Variable(_)
            | Token::Directive(_)
    )
}

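/// Tokens that may still require a separating space even when the
/// non-punctuator rule does not apply, e.g. a variable followed by an
/// identifier; paired with `needs_space_before_token`.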
fn needs_space_after_token(token: &Token) -> bool {
    matches!(
        token,
        Token::Variable(_) | Token::String(_) | Token::Identifier(_) | Token::Directive(_)
    )
}

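/// Tokens that must not be glued onto one of the `needs_space_after_token`
/// tokens directly preceding them.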
fn needs_space_before_token(token: &Token) -> bool {
    matches!(
        token,
        Token::Identifier(_) | Token::BlockStringDelimiter | Token::Ellipsis
    )
}

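/// Decides whether a space must be kept between the previously emitted token
/// and the current one: two adjacent non-punctuators (or a non-punctuator
/// followed by a spread `...`) always need one; otherwise the
/// `needs_space_after_token` / `needs_space_before_token` pair decides.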
fn needs_space(curr_token: &Token, last_token: &Option<Token>) -> bool {
    match last_token {
        Some(last) if is_non_punctuator(last) => {
            is_non_punctuator(curr_token) || *curr_token == Token::Ellipsis
        }
        Some(last) if needs_space_after_token(last) => needs_space_before_token(curr_token),
        _ => false,
    }
}

#[cfg(test)]
mod test {
    use crate::lexer::LexingError;
    use super::minify;
    use indoc::indoc;

    #[test]
    fn strips_ignored_characters_from_graphql_query_document() {
        let query = indoc! {r#"
            query SomeQuery($foo: String!, $bar: String) {
                someField(foo: $foo, bar: $bar) {
                    a
                    b {
                        c
                        d
                    }
                }
            }
        "#};

        let expected =
            "query SomeQuery($foo:String!$bar:String){someField(foo:$foo bar:$bar){a b{c d}}}";

        assert_eq!(minify(query).unwrap(), expected);
    }

    #[test]
    fn strips_ignored_characters_from_graphql_sdl_document() {
        let sdl = indoc! {r#"
            """
            Type description
            """
            type Foo {
                """
                Field description
                """
                bar: String
            }
        "#};

        let expected = r#""""Type description""" type Foo{"""Field description""" bar:String}"#;

        assert_eq!(minify(sdl).unwrap(), expected);
    }

    #[test]
    fn errs_on_invalid_token() {
        let query = "{ foo(arg: \"\n\"";

        assert!(matches!(
            minify(query),
            Err(LexingError::UnterminatedString(_))
        ));
    }

    #[test]
    fn strips_non_parsable_document() {
        let query = r#"{ foo(arg: "str""#;
        let expected = r#"{foo(arg:"str""#;

        assert_eq!(minify(query).unwrap(), expected);
    }

    #[test]
    fn strips_documents_with_only_ignored_characters() {
        assert_eq!(minify("\n").unwrap(), "");
        assert_eq!(minify(",").unwrap(), "");
        assert_eq!(minify(",,").unwrap(), "");
        assert_eq!(minify("#comment\n, \n").unwrap(), "");
    }

    #[test]
    fn strips_leading_and_trailing_ignored_tokens() {
        assert_eq!(minify("\n1").unwrap(), "1");
        assert_eq!(minify(",1").unwrap(), "1");
        assert_eq!(minify(",,1").unwrap(), "1");
        assert_eq!(minify("#comment\n, \n1").unwrap(), "1");

        assert_eq!(minify("1\n").unwrap(), "1");
        assert_eq!(minify("1,").unwrap(), "1");
        assert_eq!(minify("1,,").unwrap(), "1");
        assert_eq!(minify("1#comment\n, \n").unwrap(), "1");
    }

    #[test]
    fn strips_ignored_tokens_between_punctuator_tokens() {
        assert_eq!(minify("[,)").unwrap(), "[)");
        assert_eq!(minify("[\r)").unwrap(), "[)");
        assert_eq!(minify("[\r\r)").unwrap(), "[)");
        assert_eq!(minify("[\r,)").unwrap(), "[)");
        assert_eq!(minify("[,\n)").unwrap(), "[)");
    }

    #[test]
    fn strips_ignored_tokens_between_punctuator_and_non_punctuator_tokens() {
        assert_eq!(minify("[,1").unwrap(), "[1");
        assert_eq!(minify("[\r1").unwrap(), "[1");
        assert_eq!(minify("[\r\r1").unwrap(), "[1");
        assert_eq!(minify("[\r,1").unwrap(), "[1");
        assert_eq!(minify("[,\n1").unwrap(), "[1");
    }

    #[test]
    fn replaces_ignored_tokens_between_non_punctuator_tokens_and_spread_with_space() {
        assert_eq!(minify("a ...").unwrap(), "a ...");
        assert_eq!(minify("1 ...").unwrap(), "1 ...");
        assert_eq!(minify("1 ... ...").unwrap(), "1 ......");
    }

    #[test]
    fn replaces_ignored_tokens_between_non_punctuator_tokens_with_space() {
        assert_eq!(minify("1 2").unwrap(), "1 2");
        assert_eq!(minify("\"\" \"\"").unwrap(), "\"\" \"\"");
        assert_eq!(minify("a b").unwrap(), "a b");

        assert_eq!(minify("a,1").unwrap(), "a 1");
        assert_eq!(minify("a,,1").unwrap(), "a 1");
        assert_eq!(minify("a 1").unwrap(), "a 1");
        assert_eq!(minify("a \t 1").unwrap(), "a 1");
    }

    #[test]
    fn does_not_strip_ignored_tokens_embedded_in_the_string() {
        assert_eq!(minify("\" \"").unwrap(), "\" \"");
        assert_eq!(minify("\",\"").unwrap(), "\",\"");
        assert_eq!(minify("\",,\"").unwrap(), "\",,\"");
        assert_eq!(minify("\",|\"").unwrap(), "\",|\"");
    }

    #[test]
    fn does_not_strip_ignored_tokens_embedded_in_the_block_string() {
        assert_eq!(minify("\"\"\",\"\"\"").unwrap(), "\"\"\",\"\"\"");
        assert_eq!(minify("\"\"\",,\"\"\"").unwrap(), "\"\"\",,\"\"\"");
        assert_eq!(minify("\"\"\",|\"\"\"").unwrap(), "\"\"\",|\"\"\"");
    }

    #[test]
    fn strips_ignored_characters_inside_block_strings() {
        assert_eq!(minify(r#""""""""#).unwrap(), r#""""""""#);
        assert_eq!(minify(r#"""" """"#).unwrap(), r#""""""""#);

        assert_eq!(minify(r#""""a""""#).unwrap(), r#""""a""""#);
        assert_eq!(minify(r#"""" a""""#).unwrap(), r#"""" a""""#);
        assert_eq!(minify(r#"""" a """"#).unwrap(), r#"""" a """"#);

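        // Carriage returns (`\r`, `\r\n`) inside block strings are
        // normalized to `\n`.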
        assert_eq!(minify("\"\"\"\n\"\"\"").unwrap(), r#""""""""#);
        assert_eq!(minify("\"\"\"a\nb\"\"\"").unwrap(), "\"\"\"a\nb\"\"\"");
        assert_eq!(minify("\"\"\"a\rb\"\"\"").unwrap(), "\"\"\"a\nb\"\"\"");
        assert_eq!(minify("\"\"\"a\r\nb\"\"\"").unwrap(), "\"\"\"a\nb\"\"\"");
        assert_eq!(
            minify("\"\"\"a\r\n\nb\"\"\"").unwrap(),
            "\"\"\"a\n\nb\"\"\""
        );

        assert_eq!(minify("\"\"\"\\\n\"\"\"").unwrap(), "\"\"\"\\\n\"\"\"");
        assert_eq!(minify("\"\"\"\"\n\"\"\"").unwrap(), "\"\"\"\"\n\"\"\"");
        assert_eq!(
            minify("\"\"\"\\\"\"\"\n\"\"\"").unwrap(),
            "\"\"\"\\\"\"\"\"\"\""
        );

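        // Common indentation is stripped from block string content; a
        // leading newline is kept where stripping it would change the value
        // on a re-parse.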
        assert_eq!(
            minify("\"\"\"\na\n b\"\"\"").unwrap(),
            "\"\"\"\na\n b\"\"\""
        );
        assert_eq!(minify("\"\"\"\n a\n b\"\"\"").unwrap(), "\"\"\"a\nb\"\"\"");
        assert_eq!(
            minify("\"\"\"\na\n b\nc\"\"\"").unwrap(),
            "\"\"\"a\n b\nc\"\"\""
        );
    }

    #[test]
    fn test_kitchen_sink_query() {
        let query = include_str!("../data/kitchen_sink_query.gql");
        let expected = include_str!("../data/kitchen_sink_query_expected.gql");

        assert_eq!(minify(query).unwrap(), expected);
    }

    #[test]
    fn test_kitchen_sink_schema() {
        let schema = include_str!("../data/kitchen_sink_schema.gql");
        let expected = include_str!("../data/kitchen_sink_schema_expected.gql");

        assert_eq!(minify(schema).unwrap(), expected);
    }
}