use crate::js::token::{self, Keyword, ReservedChar, Token, Tokens};
use crate::js::utils::{get_array, get_variable_name_and_value_positions, VariableNameGenerator};
use std::collections::{HashMap, HashSet};
use std::{fmt, io};
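/// Minifies the given JavaScript source: the input is tokenized and comments and
/// superfluous whitespace are stripped (license comments starting with `/*!` are kept).
/// The returned [`Minified`] value can be written to an `io::Write` sink or turned into
/// a `String` through `Display`.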
#[inline]
pub fn minify(source: &str) -> Minified<'_> {
Minified(token::tokenize(source).apply(crate::js::clean_tokens))
}
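/// Minified token stream produced by [`minify`].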
pub struct Minified<'a>(token::Tokens<'a>);
impl<'a> Minified<'a> {
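/// Writes the minified output into `w`.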
pub fn write<W: io::Write>(self, w: W) -> io::Result<()> {
self.0.write(w)
}
}
impl<'a> fmt::Display for Minified<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
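/// Collects the variable names already using the generated `r_` prefix (so that
/// [`aggregate_strings`] can avoid colliding with them) and maps every string literal
/// already bound to such a variable to the positions of its name and value tokens.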
fn get_variables_name<'a>(
tokens: &'a Tokens<'a>,
) -> (HashSet<&'a str>, HashMap<&'a str, (usize, usize)>) {
let mut ret = HashSet::new();
let mut variables = HashMap::new();
let mut pos = 0;
while pos < tokens.len() {
if tokens[pos].is_keyword() || tokens[pos].is_other() {
if let Some((var_pos, Some(value_pos))) =
get_variable_name_and_value_positions(tokens, pos)
{
pos = value_pos;
if let Some(var_name) = tokens[var_pos].get_other() {
if !var_name.starts_with("r_") {
pos += 1;
continue;
}
ret.insert(var_name);
}
if let Some(s) = tokens[value_pos].get_string() {
variables.insert(s, (var_pos, value_pos));
}
}
}
pos += 1;
}
(ret, variables)
}
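/// Shared implementation of [`aggregate_strings`] and [`aggregate_strings_with_separation`]:
/// string literals that appear several times are bound to generated `r_`-prefixed variables
/// declared in a leading `var` statement whenever that shrinks the output. Literals already
/// bound to an existing `r_` variable are simply replaced by that variable, and
/// `separation_token`, if any, is inserted between the declarations and the original tokens.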
fn aggregate_strings_inner<'a, 'b: 'a>(
mut tokens: Tokens<'a>,
separation_token: Option<Token<'b>>,
) -> Tokens<'a> {
let mut new_vars = Vec::with_capacity(50);
let mut to_replace: Vec<(usize, usize)> = Vec::new();
for (var_name, positions) in {
let mut strs: HashMap<&Token<'_>, Vec<usize>> = HashMap::with_capacity(1000);
let mut validated: HashMap<&Token<'_>, String> = HashMap::with_capacity(100);
let mut var_gen = VariableNameGenerator::new(Some("r_"), 2);
let mut next_name = var_gen.to_string();
let (all_variables, values) = get_variables_name(&tokens);
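// Make sure the first generated name does not collide with an `r_` variable
// already present in the source.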
while all_variables.contains(&next_name.as_str()) {
var_gen.next();
next_name = var_gen.to_string();
}
for pos in 0..tokens.len() {
let token = &tokens[pos];
if let Some(str_token) = token.get_string() {
if let Some((var_pos, string_pos)) = values.get(&str_token) {
if pos != *string_pos {
to_replace.push((pos, *var_pos));
}
continue;
}
let x = strs.entry(token).or_insert_with(|| Vec::with_capacity(1));
x.push(pos);
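// Promote the literal to a shared variable only once keeping every occurrence inline
// (left-hand side) is estimated to cost more bytes than declaring `var <name>=<literal>`
// and referencing `<name>` everywhere (right-hand side); the constants approximate the
// surrounding punctuation.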
if x.len() > 1 && !validated.contains_key(token) {
let len = str_token.len();
if (x.len() + 2) * len
> next_name.len() + len + 6 + x.len() * next_name.len()
{
validated.insert(token, next_name.clone());
var_gen.next();
next_name = var_gen.to_string();
while all_variables.contains(&next_name.as_str()) {
var_gen.next();
next_name = var_gen.to_string();
}
}
}
}
}
let mut ret = Vec::with_capacity(validated.len());
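// Sort by generated variable name so the declarations are emitted in a deterministic
// order (plain `HashMap` iteration order is not).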
macro_rules! inner_loop {
($x:ident) => {{
let mut $x = $x.into_iter().collect::<Vec<_>>();
$x.sort_unstable_by(|a, b| a.1.cmp(&b.1));
$x
}};
}
for (token, var_name) in inner_loop!(validated) {
ret.push((var_name, strs.remove(&token).unwrap()));
var_gen.next();
}
ret
} {
if new_vars.is_empty() {
new_vars.push(Token::Keyword(Keyword::Var));
} else {
new_vars.push(Token::Char(ReservedChar::Comma));
}
new_vars.push(Token::CreatedVarDecl(format!(
"{}={}",
var_name, tokens[positions[0]]
)));
for pos in positions {
tokens.0[pos] = Token::CreatedVar(var_name.clone());
}
}
if !new_vars.is_empty() {
new_vars.push(Token::Char(ReservedChar::SemiColon));
}
for (to_replace_pos, variable_pos) in to_replace {
tokens.0[to_replace_pos] = tokens.0[variable_pos].clone();
}
if let Some(token) = separation_token {
new_vars.push(token);
}
new_vars.append(&mut tokens.0);
Tokens(new_vars)
}
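/// Aggregates duplicated string literals: when binding a literal to a generated variable
/// makes the output smaller, a `var` declaration is prepended and every occurrence of the
/// literal is replaced by that variable.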
#[inline]
pub fn aggregate_strings(tokens: Tokens<'_>) -> Tokens<'_> {
aggregate_strings_inner(tokens, None)
}
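/// Same as [`aggregate_strings`] except that `separation_token` is inserted between the
/// generated declarations and the rest of the tokens.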
#[inline]
pub fn aggregate_strings_with_separation<'a, 'b: 'a>(
tokens: Tokens<'a>,
separation_token: Token<'b>,
) -> Tokens<'a> {
aggregate_strings_inner(tokens, Some(separation_token))
}
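/// Shared implementation of the `aggregate_strings_into_array*` functions: duplicated
/// string literals accepted by `filter` are stored in the array named `array_name`
/// (created if missing, extended if it already exists) and their occurrences are replaced
/// by `array_name[index]` accesses when that is shorter.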
fn aggregate_strings_into_array_inner<'a, 'b: 'a, T: Fn(&Tokens<'a>, usize) -> bool>(
mut tokens: Tokens<'a>,
array_name: &str,
separation_token: Option<Token<'b>>,
filter: T,
) -> Tokens<'a> {
let mut to_insert = Vec::with_capacity(100);
let mut to_replace = Vec::with_capacity(100);
{
let mut to_ignore = HashSet::new();
let mut strs: HashMap<&str, (usize, Vec<usize>, bool)> = HashMap::with_capacity(1000);
let (current_array_values, need_recreate, mut end_bracket) =
match get_array(&tokens, array_name) {
Some((s, p)) => (s, false, p),
None => (Vec::new(), true, 0),
};
let mut validated: HashSet<&str> = HashSet::new();
let mut array_pos = 0;
for s in current_array_values.iter() {
if let Some(st) = tokens.0[*s].get_string() {
strs.insert(&st[1..st.len() - 1], (array_pos, vec![], false));
array_pos += 1;
validated.insert(&st[1..st.len() - 1]);
to_ignore.insert(*s);
}
}
let mut array_pos_str = array_pos.to_string();
for pos in 0..tokens.len() {
if to_ignore.contains(&pos) {
continue;
}
let token = &tokens[pos];
if let Some(str_token) = token.get_string() {
if !filter(&tokens, pos) {
continue;
}
let s = &str_token[1..str_token.len() - 1];
let x = strs
.entry(s)
.or_insert_with(|| (0, Vec::with_capacity(1), true));
x.1.push(pos);
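// Move the literal into the array only when its inline copies (left-hand side) are
// estimated to take more room than the `array_name[index]` references plus the new
// array slot (right-hand side); both sides are rough byte counts.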
if x.1.len() > 1 && !validated.contains(s) {
let len = s.len();
if len * x.1.len()
> (array_name.len() + array_pos_str.len() + 2) * x.1.len()
+ array_pos_str.len()
+ 2
{
validated.insert(s);
x.0 = array_pos;
array_pos += 1;
array_pos_str = array_pos.to_string();
}
}
}
}
let mut validated = validated.iter().map(|v| (strs[v].0, v)).collect::<Vec<_>>();
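// Every insertion below goes through `Vec::insert` at a fixed index (see the end of
// the function), so the item inserted last ends up first; iterating in descending
// array-index order therefore leaves the array entries in ascending order.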
validated.sort_unstable_by(|(p1, _), (p2, _)| p2.cmp(p1));
if need_recreate && !validated.is_empty() {
if let Some(token) = separation_token {
to_insert.push((0, token));
}
to_insert.push((0, Token::Char(ReservedChar::SemiColon)));
to_insert.push((0, Token::Char(ReservedChar::CloseBracket)));
to_insert.push((0, Token::Char(ReservedChar::OpenBracket)));
to_insert.push((0, Token::CreatedVarDecl(format!("var {}=", array_name))));
end_bracket = 2;
}
let mut iter = validated.iter().peekable();
while let Some((array_pos, s)) = iter.next() {
let (_, ref tokens_pos, create_array_entry) = strs[*s];
let array_index = Token::CreatedVar(format!("{}[{}]", array_name, array_pos));
for token in tokens_pos.iter() {
to_replace.push((*token, array_index.clone()));
}
if !create_array_entry {
continue;
}
to_insert.push((end_bracket, Token::CreatedVar(format!("\"{}\"", *s))));
if iter.peek().is_none() && current_array_values.is_empty() {
continue;
}
to_insert.push((end_bracket, Token::Char(ReservedChar::Comma)));
}
}
for (pos, rep) in to_replace.into_iter() {
tokens.0[pos] = rep;
}
for (pos, rep) in to_insert.into_iter() {
tokens.0.insert(pos, rep);
}
tokens
}
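/// Same as [`aggregate_strings_into_array`] except that `separation_token` is inserted
/// between the generated array and the rest of the tokens.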
#[inline]
pub fn aggregate_strings_into_array_with_separation<'a, 'b: 'a>(
tokens: Tokens<'a>,
array_name: &str,
separation_token: Token<'b>,
) -> Tokens<'a> {
aggregate_strings_into_array_inner(tokens, array_name, Some(separation_token), |_, _| true)
}
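/// Same as [`aggregate_strings_into_array_with_separation`] except that `filter` decides,
/// for each string token position, whether it may be moved into the array.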
#[inline]
pub fn aggregate_strings_into_array_with_separation_filter<'a, 'b: 'a, T>(
tokens: Tokens<'a>,
array_name: &str,
separation_token: Token<'b>,
filter: T,
) -> Tokens<'a>
where
T: Fn(&Tokens<'a>, usize) -> bool,
{
aggregate_strings_into_array_inner(tokens, array_name, Some(separation_token), filter)
}
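/// Aggregates duplicated string literals into an array named `array_name` and replaces
/// their occurrences with indexed accesses such as `array_name[0]`.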
#[inline]
pub fn aggregate_strings_into_array<'a>(tokens: Tokens<'a>, array_name: &str) -> Tokens<'a> {
aggregate_strings_into_array_inner(tokens, array_name, None, |_, _| true)
}
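/// Same as [`aggregate_strings_into_array`] except that `filter` decides, for each string
/// token position, whether it may be moved into the array.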
#[inline]
pub fn aggregate_strings_into_array_filter<'a, T>(
tokens: Tokens<'a>,
array_name: &str,
filter: T,
) -> Tokens<'a>
where
T: Fn(&Tokens<'a>, usize) -> bool,
{
aggregate_strings_into_array_inner(tokens, array_name, None, filter)
}
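/// Only tokenizes `source` without applying any cleanup pass, so that passes such as
/// `crate::js::clean_tokens` or [`aggregate_strings`] can be chained manually through
/// `Tokens::apply`, as done in the tests below.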
#[inline]
pub fn simple_minify(source: &str) -> Tokens<'_> {
token::tokenize(source)
}
#[test]
fn aggregate_strings_in_array() {
let source = r#"var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
let expected_result = "var R=[\"a nice string\",\"cake!\"];var x=[R[0],R[0],\
\"another nice string\",R[1],R[1],R[0],R[1],R[1],R[1]]";
let result = simple_minify(source)
.apply(crate::js::clean_tokens)
.apply(|c| aggregate_strings_into_array(c, "R"))
.to_string();
assert_eq!(result, expected_result);
let source = r#"var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
let expected_result = "var R=[\"a nice string\",\"cake!\"];\nvar x=[R[0],R[0],\
\"another nice string\",R[1],R[1],R[0],R[1],R[1],R[1]]";
let result = simple_minify(source)
.apply(crate::js::clean_tokens)
.apply(|c| {
aggregate_strings_into_array_with_separation(
c,
"R",
Token::Char(ReservedChar::Backline),
)
})
.to_string();
assert_eq!(result, expected_result);
let source = r#"var x = ["a nice string", "a nice string", "another nice string", "another nice string", "another nice string", "another nice string","cake!","cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
let expected_result = "var R=[\"a nice string\",\"another nice string\",\"cake!\"];\n\
var x=[R[0],R[0],R[1],R[1],R[1],R[1],R[2],R[2],R[0],R[2],\
R[2],R[2]]";
let result = simple_minify(source)
.apply(crate::js::clean_tokens)
.apply(|c| {
aggregate_strings_into_array_with_separation(
c,
"R",
Token::Char(ReservedChar::Backline),
)
})
.to_string();
assert_eq!(result, expected_result);
}
#[test]
fn aggregate_strings_in_array_filter() {
let source = r#"var searchIndex = {};searchIndex['duplicate_paths'] = {'aaaaaaaa': 'bbbbbbbb', 'bbbbbbbb': 'aaaaaaaa', 'duplicate_paths': 'aaaaaaaa'};"#;
let expected_result = "var R=[\"bbbbbbbb\",\"aaaaaaaa\"];\nvar searchIndex={};searchIndex['duplicate_paths']={R[1]:R[0],R[0]:R[1],'duplicate_paths':R[1]}";
let result = simple_minify(source)
.apply(crate::js::clean_tokens)
.apply(|c| {
aggregate_strings_into_array_with_separation_filter(
c,
"R",
Token::Char(ReservedChar::Backline),
|tokens, pos| {
pos < 2
|| !tokens[pos - 1].eq_char(ReservedChar::OpenBracket)
|| tokens[pos - 2].get_other() != Some("searchIndex")
},
)
})
.to_string();
assert_eq!(result, expected_result);
let source = r#"var searchIndex = {};searchIndex['duplicate_paths'] = {'aaaaaaaa': 'bbbbbbbb', 'bbbbbbbb': 'aaaaaaaa', 'duplicate_paths': 'aaaaaaaa', 'x': 'duplicate_paths'};"#;
let expected_result = "var R=[\"bbbbbbbb\",\"aaaaaaaa\",\"duplicate_paths\"];\nvar searchIndex={};searchIndex['duplicate_paths']={R[1]:R[0],R[0]:R[1],R[2]:R[1],'x':R[2]}";
let result = simple_minify(source)
.apply(crate::js::clean_tokens)
.apply(|c| {
aggregate_strings_into_array_with_separation_filter(
c,
"R",
Token::Char(ReservedChar::Backline),
|tokens, pos| {
pos < 2
|| !tokens[pos - 1].eq_char(ReservedChar::OpenBracket)
|| tokens[pos - 2].get_other() != Some("searchIndex")
},
)
})
.to_string();
assert_eq!(result, expected_result);
}
#[test]
fn aggregate_strings_in_array_existing() {
let source = r#"var R=[];var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
let expected_result = "var R=[\"a nice string\",\"cake!\"];var x=[R[0],R[0],\
\"another nice string\",R[1],R[1],R[0],R[1],R[1],R[1]]";
let result = simple_minify(source)
.apply(crate::js::clean_tokens)
.apply(|c| aggregate_strings_into_array(c, "R"))
.to_string();
assert_eq!(result, expected_result);
let source = r#"var R=["a nice string"];var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
let expected_result = "var R=[\"a nice string\",\"cake!\"];var x=[R[0],R[0],\
\"another nice string\",R[1],R[1],R[0],R[1],R[1],R[1]]";
let result = simple_minify(source)
.apply(crate::js::clean_tokens)
.apply(|c| aggregate_strings_into_array(c, "R"))
.to_string();
assert_eq!(result, expected_result);
let source = r#"var y = 12;var R=["a nice string"];var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
let expected_result = "var y=12;var R=[\"a nice string\",\"cake!\"];var x=[R[0],R[0],\
\"another nice string\",R[1],R[1],R[0],R[1],R[1],R[1]]";
let result = simple_minify(source)
.apply(crate::js::clean_tokens)
.apply(|c| aggregate_strings_into_array(c, "R"))
.to_string();
assert_eq!(result, expected_result);
let source = r#"var R=["osef1", "o2", "damn"];
var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
let expected_result = "var R=[\"osef1\",\"o2\",\"damn\",\"a nice string\",\"cake!\"];\
var x=[R[3],R[3],\"another nice string\",R[4],R[4],R[3],R[4],R[4],R[4]]";
let result = simple_minify(source)
.apply(crate::js::clean_tokens)
.apply(|c| aggregate_strings_into_array(c, "R"))
.to_string();
assert_eq!(result, expected_result);
}
#[test]
fn string_duplicates() {
let source = r#"var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
let expected_result = "var r_aa=\"a nice string\",r_ba=\"cake!\";var x=[r_aa,r_aa,\
\"another nice string\",r_ba,r_ba,r_aa,r_ba,r_ba,r_ba]";
let result = simple_minify(source)
.apply(aggregate_strings)
.apply(crate::js::clean_tokens)
.to_string();
assert_eq!(result, expected_result);
}
#[test]
fn already_existing_var() {
let source = r#"var r_aa = "a nice string"; var x = ["a nice string", "a nice string",
"another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
let expected_result = "var r_ba=\"cake!\";var r_aa=\"a nice string\";var x=[r_aa,r_aa,\
\"another nice string\",r_ba,r_ba,r_aa,r_ba,r_ba,r_ba]";
let result = simple_minify(source)
.apply(aggregate_strings)
.apply(crate::js::clean_tokens)
.to_string();
assert_eq!(result, expected_result);
}
#[test]
fn string_duplicates_variables_already_exist() {
let source = r#"var r_aa=1;var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
let expected_result = "var r_ba=\"a nice string\",r_ca=\"cake!\";\
var r_aa=1;var x=[r_ba,r_ba,\
\"another nice string\",r_ca,r_ca,r_ba,r_ca,r_ca,r_ca]";
let result = simple_minify(source)
.apply(aggregate_strings)
.apply(crate::js::clean_tokens)
.to_string();
assert_eq!(result, expected_result);
}
#[test]
fn string_duplicates_with_separator() {
use self::token::ReservedChar;
let source = r#"var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
let expected_result = "var r_aa=\"a nice string\",r_ba=\"cake!\";\nvar x=[r_aa,r_aa,\
\"another nice string\",r_ba,r_ba,r_aa,r_ba,r_ba,r_ba]";
let result = simple_minify(source)
.apply(crate::js::clean_tokens)
.apply(|f| aggregate_strings_with_separation(f, Token::Char(ReservedChar::Backline)))
.to_string();
assert_eq!(result, expected_result);
}
#[test]
fn clean_except() {
use self::token::ReservedChar;
let source = r#"var x = [1, 2, 3];
var y = "salut";
var z = "ok!";"#;
let expected = r#"var x=[1,2,3];
var y="salut";
var z="ok!""#;
let result = simple_minify(source)
.apply(|f| {
crate::js::clean_tokens_except(f, |c| c.get_char() != Some(ReservedChar::Backline))
})
.to_string();
assert_eq!(result, expected);
}
#[test]
fn clean_except2() {
use self::token::ReservedChar;
let source = "let x = [ 1, 2, \t3];";
let expected = "let x = [ 1, 2, 3];";
let result = simple_minify(source)
.apply(|f| {
crate::js::clean_tokens_except(f, |c| {
c.get_char() != Some(ReservedChar::Space)
&& c.get_char() != Some(ReservedChar::SemiColon)
})
})
.to_string();
assert_eq!(result, expected);
}
#[test]
fn clean_except3() {
use self::token::ReservedChar;
let source = "let x = [ 1, 2, \t3];";
let expected = "let x=[1,2,\t3];";
let result = simple_minify(source)
.apply(|f| {
crate::js::clean_tokens_except(f, |c| {
c.get_char() != Some(ReservedChar::Tab)
&& c.get_char() != Some(ReservedChar::SemiColon)
})
})
.to_string();
assert_eq!(result, expected);
}
#[test]
fn name_generator() {
let s = "a".repeat(36);
let s = std::iter::repeat(s)
.take(20000)
.enumerate()
.map(|(pos, s)| format!("{}{}", s, pos))
.collect::<Vec<_>>();
let source = format!(
"var x = [{}];",
s.iter()
.map(|s| format!("\"{0}\",\"{0}\"", s))
.collect::<Vec<_>>()
.join(",")
);
let result = simple_minify(&source)
.apply(crate::js::clean_tokens)
.apply(aggregate_strings)
.to_string();
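// With 20000 duplicated strings the generator has to go past two-character names
// (hence `r_aaa`), and the declarations must stay in generation order.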
assert!(result.find(",r_aaa=").is_some());
assert!(result.find(",r_ab=").unwrap() < result.find(",r_ba=").unwrap());
}
#[test]
fn simple_quote() {
let source = r#"var x = "\\";"#;
let expected_result = r#"var x="\\""#;
assert_eq!(minify(source).to_string(), expected_result);
}
#[test]
fn js_minify_test() {
let source = r##"
var foo = "something";
var another_var = 2348323;
// who doesn't like comments?
/* and even longer comments?
like
on
a
lot
of
lines!
Fun!
*/
function far_away(x, y) {
var x2 = x + 4;
return x * x2 + y;
}
// this call is useless
far_away(another_var, 12);
// this call is useless too
far_away(another_var, 12);
"##;
let expected_result = "var foo=\"something\";var another_var=2348323;function far_away(x,y){\
var x2=x+4;return x*x2+y}far_away(another_var,12);far_away(another_var,\
12)";
assert_eq!(minify(source).to_string(), expected_result);
}
#[test]
fn another_js_test() {
let source = r#"
/*! let's keep this license
*
* because everyone likes licenses!
*
* right?
*/
function forEach(data, func) {
for (var i = 0; i < data.length; ++i) {
func(data[i]);
}
}
forEach([0, 1, 2, 3, 4,
5, 6, 7, 8, 9], function (x) {
console.log(x);
});
// I think we're done?
console.log('done!');
"#;
let expected_result = r#"/*! let's keep this license
*
* because everyone likes licenses!
*
* right?
*/function forEach(data,func){for(var i=0;i<data.length;++i){func(data[i])}}forEach([0,1,2,3,4,5,6,7,8,9],function(x){console.log(x)});console.log('done!')"#;
assert_eq!(minify(source).to_string(), expected_result);
}
#[test]
fn comment_issue() {
let source = r#"
search_input.onchange = function(e) {
// Do NOT e.preventDefault() here. It will prevent pasting.
clearTimeout(searchTimeout);
// zero-timeout necessary here because at the time of event handler execution the
// pasted content is not in the input field yet. Shouldn’t make any difference for
// change, though.
setTimeout(search, 0);
};
"#;
let expected_result = "search_input.onchange=function(e){clearTimeout(searchTimeout);\
setTimeout(search,0)}";
assert_eq!(minify(source).to_string(), expected_result);
}
#[test]
fn missing_whitespace() {
let source = r#"
for (var entry in results) {
if (results.hasOwnProperty(entry)) {
ar.push(results[entry]);
}
}"#;
let expected_result = "for(var entry in results){if(results.hasOwnProperty(entry)){\
ar.push(results[entry])}}";
assert_eq!(minify(source).to_string(), expected_result);
}
#[test]
fn weird_regex_issue() {
let source = r#"
val = val.replace(/\_/g, "");
var valGenerics = extractGenerics(val);"#;
let expected_result = "val=val.replace(/\\_/g,\"\");var valGenerics=extractGenerics(val)";
assert_eq!(minify(source).to_string(), expected_result);
}
#[test]
fn keep_space() {
fn inner_double_checks(source: &str, expected: &str) {
assert_eq!(minify(source).to_string(), expected);
let s = minify(source);
let mut out: Vec<u8> = Vec::new();
s.write(&mut out).unwrap();
assert_eq!(String::from_utf8(out).unwrap(), expected);
}
inner_double_checks("return 12;return x;", "return 12;return x");
inner_double_checks("t in e", "t in e");
inner_double_checks("t + 1 in e", "t+1 in e");
inner_double_checks("t - 1 in e", "t-1 in e");
inner_double_checks("'a' in e", "'a'in e");
inner_double_checks("/a/g in e", "/a/g in e");
inner_double_checks("/a/i in e", "/a/i in e");
inner_double_checks("t instanceof e", "t instanceof e");
inner_double_checks("t + 1 instanceof e", "t+1 instanceof e");
inner_double_checks("t - 1 instanceof e", "t-1 instanceof e");
inner_double_checks("'a' instanceof e", "'a'instanceof e");
inner_double_checks("/a/g instanceof e", "/a/g instanceof e");
inner_double_checks("/a/i instanceof e", "/a/i instanceof e");
inner_double_checks("function foo() { let x = 12; }", "function foo(){let x=12}");
inner_double_checks(
r#""use strict";
(function() {
const itemTypes = [
"mod",
"externcrate",
"import",
"struct",
];
const TY_PRIMITIVE = itemTypes;
function hasOwnPropertyRustdoc() {}
})();"#,
"\"use strict\";(function(){const itemTypes=[\"mod\",\"externcrate\",\"import\",\"struct\"\
,];const TY_PRIMITIVE=itemTypes;function hasOwnPropertyRustdoc(){}})()",
);
}
#[test]
fn test_remove_extra_whitespace_before_typeof() {
let source = "var x = typeof 'foo';var y = typeof x;case typeof 'foo': 'bla'";
let expected_result = "var x=typeof'foo';var y=typeof x;case typeof'foo':'bla'";
assert_eq!(minify(source).to_string(), expected_result);
}
#[test]
fn test_remove_extra_whitespace_before_in() {
let source = r#"if ("key" in ev && typeof ev) { return true; }
if (x in ev && typeof ev) { return true; }
if (true in ev) { return true; }"#;
let expected_result = r#"if("key"in ev&&typeof ev){return true}if(x in ev&&typeof ev){return true}if(true in ev){return true}"#;
assert_eq!(minify(source).to_string(), expected_result);
}
#[test]
fn test_remove_extra_whitespace_before_operator() {
let source = "( x ) / 2; x / y;x /= y";
let expected_result = "(x)/2;x/y;x/=y";
assert_eq!(minify(source).to_string(), expected_result);
}
#[test]
fn check_regex_syntax() {
let source = "console.log(/MSIE|Trident|Edge/.test(window.navigator.userAgent));";
let expected = "console.log(/MSIE|Trident|Edge/.test(window.navigator.userAgent))";
assert_eq!(minify(source).to_string(), expected);
}
#[test]
fn minify_minified() {
let source = "function (i, n, a) { i[n].type.replace(/ *;(.|\\s)*/,\"\")===t&&a.push(i[n].MathJax.elementJax);return a}";
let expected = "function(i,n,a){i[n].type.replace(/ *;(.|\\s)*/,\"\")===t&&a.push(i[n].MathJax.elementJax);return a}";
assert_eq!(minify(source).to_string(), expected);
}
#[test]
fn check_string() {
let source = r###"
const a = 123;
const b = "123";
const c = `the number is ${a} <-- note the spaces here`;
const d = ` ${a} ${b} `;
"###;
let expected = "const a=123;const b=\"123\";const c=`the number is ${a} <-- note the spaces \
here`;const d=` ${a} ${b} `";
assert_eq!(minify(source).to_string(), expected);
}