use js::token::{self, Keyword, ReservedChar, Token, Tokens};
use js::utils::{
get_array,
get_variable_name_and_value_positions,
VariableNameGenerator,
};
use std::collections::{HashMap, HashSet};
/// Minifies the given JS source: tokenizes it, strips useless tokens
/// (comments, redundant whitespace) via `::js::clean_tokens`, and renders
/// the remaining tokens back to a `String`.
#[inline]
pub fn minify(source: &str) -> String {
    let tokens = token::tokenize(source);
    let cleaned = tokens.apply(::js::clean_tokens);
    cleaned.to_string()
}
// Scans `tokens` for variable declarations and returns:
//
// * the set of variable names starting with the generator prefix `"r_"`,
//   so freshly generated names can avoid colliding with them;
// * a map from a string-literal token to the `(name_pos, value_pos)` of the
//   `r_*` variable it is assigned to, so later occurrences of the same
//   literal can be replaced by that variable.
//
// Note: only `r_`-prefixed variables have their string value recorded in the
// map — the `continue` below skips the insert for any other name.
fn get_variables_name<'a>(
tokens: &'a Tokens<'a>,
) -> (HashSet<&'a str>, HashMap<&'a str, (usize, usize)>) {
let mut ret = HashSet::new();
let mut variables = HashMap::new();
let mut pos = 0;
while pos < tokens.len() {
// A declaration can start with a keyword (`var x = ...`) or a bare
// identifier (`x = ...`), hence the two checks.
if tokens[pos].is_keyword() || tokens[pos].is_other() {
if let Some((var_pos, Some(value_pos))) = get_variable_name_and_value_positions(tokens, pos) {
// Jump ahead to the value token; everything in between was already
// consumed by the helper above.
pos = value_pos;
if let Some(var_name) = tokens[var_pos].get_other() {
if !var_name.starts_with("r_") {
// Only `r_*` names can collide with generated ones; skip the rest.
pos += 1;
continue;
}
ret.insert(var_name);
}
if let Some(s) = tokens[value_pos].get_string() {
// Remember which variable already holds this string literal.
variables.insert(s, (var_pos, value_pos));
}
}
}
pos += 1;
}
(ret, variables)
}
// Replaces string literals that occur several times with generated variables
// (`r_aa`, `r_ab`, ...) and prepends the matching `var r_aa=...,r_ab=...;`
// declarations to the token stream. When `separation_token` is provided it is
// inserted between the declarations and the original code.
#[inline]
fn aggregate_strings_inner<'a, 'b: 'a>(
mut tokens: Tokens<'a>,
separation_token: Option<Token<'b>>,
) -> Tokens<'a> {
let mut new_vars = Vec::with_capacity(50);
let mut to_replace: Vec<(usize, usize)> = Vec::new();
// The block expression below only borrows `tokens` immutably; its result
// (name -> occurrence positions) drives the loop body, which then mutates
// `tokens`. The `for ... in { ... } { ... }` shape keeps the borrows apart.
for (var_name, positions) in {
let mut strs: HashMap<&Token, Vec<usize>> = HashMap::with_capacity(1000);
let mut validated: HashMap<&Token, String> = HashMap::with_capacity(100);
let mut var_gen = VariableNameGenerator::new(Some("r_"), 2);
let mut next_name = var_gen.to_string();
let (all_variables, values) = get_variables_name(&tokens);
// Skip candidate names the source already uses.
while all_variables.contains(&next_name.as_str()) {
var_gen.next();
next_name = var_gen.to_string();
}
for pos in 0..tokens.len() {
let token = &tokens[pos];
if let Some(str_token) = token.get_string() {
if let Some((var_pos, string_pos)) = values.get(&str_token) {
// An existing `r_*` variable already holds this literal: reuse it
// everywhere except at its own declaration site.
if pos != *string_pos {
to_replace.push((pos, *var_pos));
}
continue;
}
let x = strs.entry(token).or_insert_with(|| Vec::with_capacity(1));
x.push(pos);
if x.len() > 1 && validated.get(token).is_none() {
let len = str_token.len();
// Rough size heuristic: only introduce a variable when the repeated
// literal costs more than one `var NAME="...";` declaration (~name +
// literal + 6 punctuation chars) plus one NAME per occurrence.
if (x.len() + 2 ) * len > next_name.len() + str_token.len() + 6 + x.len() * next_name.len() {
validated.insert(token, next_name.clone());
var_gen.next();
next_name = var_gen.to_string();
while all_variables.contains(&next_name.as_str()) {
var_gen.next();
next_name = var_gen.to_string();
}
}
}
}
}
let mut ret = Vec::with_capacity(validated.len());
// Sorting by the generated variable name makes the emitted declarations
// deterministic despite HashMap iteration order.
macro_rules! inner_loop {
($x:ident) => {{
let mut $x = $x.into_iter().collect::<Vec<_>>();
$x.sort_unstable_by(|a, b| a.1.cmp(&b.1));
$x
}}
}
for (token, var_name) in inner_loop!(validated) {
ret.push((var_name, strs.remove(&token).unwrap()));
var_gen.next();
}
ret
} {
// First declaration opens the `var`; later ones are comma-separated.
if new_vars.is_empty() {
new_vars.push(Token::Keyword(Keyword::Var));
} else {
new_vars.push(Token::Char(ReservedChar::Comma));
}
new_vars.push(Token::CreatedVarDecl(format!("{}={}", var_name, tokens[positions[0]])));
// Replace every occurrence of the literal with the new variable.
for pos in positions {
tokens.0[pos] = Token::CreatedVar(var_name.clone());
}
}
if !new_vars.is_empty() {
new_vars.push(Token::Char(ReservedChar::SemiColon));
}
// Literals covered by pre-existing `r_*` variables: copy the variable token
// over the literal token.
for (to_replace_pos, variable_pos) in to_replace {
tokens.0[to_replace_pos] = tokens.0[variable_pos].clone();
}
if let Some(token) = separation_token {
new_vars.push(token);
}
// Declarations (and optional separator) go in front of the original tokens.
new_vars.append(&mut tokens.0);
Tokens(new_vars)
}
/// Aggregates repeated string literals into generated `r_*` variables, with
/// nothing inserted between the generated declarations and the original code.
#[inline]
pub fn aggregate_strings(tokens: Tokens<'_>) -> Tokens<'_> {
    aggregate_strings_inner(tokens, None)
}
/// Aggregates repeated string literals into generated `r_*` variables and
/// inserts `separation_token` (e.g. a backline) between the generated
/// declarations and the original token stream.
#[inline]
pub fn aggregate_strings_with_separation<'a, 'b: 'a>(
    tokens: Tokens<'a>,
    separation_token: Token<'b>,
) -> Tokens<'a> {
    let sep = Some(separation_token);
    aggregate_strings_inner(tokens, sep)
}
// Aggregates duplicated string literals into a shared array `array_name`,
// replacing each occurrence with an index access (`NAME[i]`). If the array
// already exists in `tokens`, its current entries are kept and reused;
// otherwise a `var NAME=[...];` declaration is created in front of the
// stream, followed by `separation_token` when provided.
//
// Fixes versus the previous revision (behavior unchanged):
// * `!iter.peek().is_some()` rewritten as `iter.peek().is_none()`;
// * the needless nested `if` around the trailing-comma check collapsed
//   into a single `&&` condition.
#[inline]
fn aggregate_strings_into_array_inner<'a, 'b: 'a>(
    mut tokens: Tokens<'a>,
    array_name: &str,
    separation_token: Option<Token<'b>>,
) -> Tokens<'a> {
    let mut to_insert = Vec::with_capacity(100);
    let mut to_replace = Vec::with_capacity(100);
    {
        let mut to_ignore = HashSet::new();
        // string content -> (array index, occurrence positions, needs a new array entry)
        let mut strs: HashMap<&str, (usize, Vec<usize>, bool)> = HashMap::with_capacity(1000);
        let (current_array_values, need_recreate, mut end_bracket) =
            match get_array(&tokens, array_name) {
                Some((s, p)) => (s, false, p),
                None => (Vec::new(), true, 0),
            };
        let mut validated: HashSet<&str> = HashSet::new();
        let mut array_pos = 0;
        let mut array_pos_str;
        // Entries already present in the array are pre-validated and must not
        // be rewritten, hence `to_ignore`. `[1..len - 1]` strips the quotes.
        for s in current_array_values.iter() {
            if let Some(st) = tokens.0[*s].get_string() {
                strs.insert(&st[1..st.len() - 1], (array_pos, vec![], false));
                array_pos += 1;
                validated.insert(&st[1..st.len() - 1]);
                to_ignore.insert(*s);
            }
        }
        array_pos_str = array_pos.to_string();
        for pos in 0..tokens.len() {
            if to_ignore.contains(&pos) {
                continue;
            }
            let token = &tokens[pos];
            if let Some(str_token) = token.get_string() {
                let s = &str_token[1..str_token.len() - 1];
                let x = strs
                    .entry(s)
                    .or_insert_with(|| (0, Vec::with_capacity(1), true));
                x.1.push(pos);
                if x.1.len() > 1 && !validated.contains(s) {
                    let len = s.len();
                    // Size heuristic: only replace when the repeated literal is
                    // bigger than the `NAME[i]` accesses plus the cost of the
                    // new array entry (`"s",`).
                    if len * x.1.len()
                        > (array_name.len() + array_pos_str.len() + 2) * x.1.len()
                            + array_pos_str.len()
                            + 2
                    {
                        validated.insert(&str_token[1..str_token.len() - 1]);
                        x.0 = array_pos;
                        array_pos += 1;
                        array_pos_str = array_pos.to_string();
                    }
                }
            }
        }
        let mut validated = validated.iter().map(|v| (strs[v].0, v)).collect::<Vec<_>>();
        // Sort descending by array index: entries are inserted at a fixed
        // position below, which reverses them back into ascending order.
        validated.sort_unstable_by(|(p1, _), (p2, _)| p2.cmp(p1));
        if need_recreate {
            // Pushed in reverse: repeated insertion at position 0 reverses them
            // again, yielding `var NAME=` `[` `]` `;` (+ separator) in order.
            if let Some(token) = separation_token {
                to_insert.push((0, token));
            }
            to_insert.push((0, Token::Char(ReservedChar::SemiColon)));
            to_insert.push((0, Token::Char(ReservedChar::CloseBracket)));
            to_insert.push((0, Token::Char(ReservedChar::OpenBracket)));
            to_insert.push((0, Token::CreatedVarDecl(format!("var {}=", array_name))));
            // New array entries go right after the opening bracket.
            end_bracket = 2;
        }
        let mut iter = validated.iter().peekable();
        while let Some((array_pos, s)) = iter.next() {
            let (_, ref tokens_pos, create_array_entry) = strs[*s];
            let array_index = Token::CreatedVar(format!("{}[{}]", array_name, array_pos));
            for token in tokens_pos.iter() {
                to_replace.push((*token, array_index.clone()));
            }
            if !create_array_entry {
                continue;
            }
            to_insert.push((end_bracket, Token::CreatedVar(format!("\"{}\"", *s))));
            // No comma after the last new entry of a freshly created array.
            if iter.peek().is_none() && current_array_values.is_empty() {
                continue;
            }
            to_insert.push((end_bracket, Token::Char(ReservedChar::Comma)));
        }
    }
    for (pos, rep) in to_replace.into_iter() {
        tokens.0[pos] = rep;
    }
    for (pos, rep) in to_insert.into_iter() {
        tokens.0.insert(pos, rep);
    }
    tokens
}
/// Aggregates duplicated string literals into an array named `array_name` and
/// inserts `separation_token` between the array declaration and the original
/// token stream.
#[inline]
pub fn aggregate_strings_into_array_with_separation<'a, 'b: 'a>(
    tokens: Tokens<'a>,
    array_name: &str,
    separation_token: Token<'b>,
) -> Tokens<'a> {
    let sep = Some(separation_token);
    aggregate_strings_into_array_inner(tokens, array_name, sep)
}
/// Aggregates duplicated string literals into an array named `array_name`,
/// with nothing inserted between the array declaration and the original code.
#[inline]
pub fn aggregate_strings_into_array<'a>(tokens: Tokens<'a>, array_name: &str) -> Tokens<'a> {
    aggregate_strings_into_array_inner(tokens, array_name, None)
}
/// Tokenizes `source` without any cleaning pass. Combine the returned tokens
/// with `apply` to build a custom minification pipeline.
#[inline]
pub fn simple_minify(source: &str) -> Tokens<'_> {
    token::tokenize(source)
}
// Duplicated literals are hoisted into a created array `R` while a literal
// occurring only once ("another nice string") stays inline. Also checks the
// `_with_separation` variant, which puts a backline after the declaration.
#[test]
fn aggregate_strings_in_array() {
let source = r#"var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
let expected_result = "var R=[\"a nice string\",\"cake!\"];var x=[R[0],R[0],\
\"another nice string\",R[1],R[1],R[0],R[1],R[1],R[1]];";
let result = simple_minify(source).apply(::js::clean_tokens)
.apply(|c| aggregate_strings_into_array(c, "R"))
.to_string();
assert_eq!(result, expected_result);
// Same input, but a `\n` separation token follows the array declaration.
let source = r#"var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
let expected_result = "var R=[\"a nice string\",\"cake!\"];\nvar x=[R[0],R[0],\
\"another nice string\",R[1],R[1],R[0],R[1],R[1],R[1]];";
let result = simple_minify(source).apply(::js::clean_tokens)
.apply(|c| aggregate_strings_into_array_with_separation(c, "R", Token::Char(ReservedChar::Backline)))
.to_string();
assert_eq!(result, expected_result);
// Three distinct duplicated literals get three array slots, in first-seen order.
let source = r#"var x = ["a nice string", "a nice string", "another nice string", "another nice string", "another nice string", "another nice string","cake!","cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
let expected_result = "var R=[\"a nice string\",\"another nice string\",\"cake!\"];\n\
var x=[R[0],R[0],R[1],R[1],R[1],R[1],R[2],R[2],R[0],R[2],\
R[2],R[2]];";
let result = simple_minify(source).apply(::js::clean_tokens)
.apply(|c| aggregate_strings_into_array_with_separation(c, "R", Token::Char(ReservedChar::Backline)))
.to_string();
assert_eq!(result, expected_result);
}
// When an array named `R` already exists in the source, it is extended in
// place instead of recreated, and its existing entries keep their indexes.
#[test]
fn aggregate_strings_in_array_existing() {
// Empty pre-existing array: behaves like creating it from scratch.
let source = r#"var R=[];var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
let expected_result = "var R=[\"a nice string\",\"cake!\"];var x=[R[0],R[0],\
\"another nice string\",R[1],R[1],R[0],R[1],R[1],R[1]];";
let result = simple_minify(source).apply(::js::clean_tokens)
.apply(|c| aggregate_strings_into_array(c, "R"))
.to_string();
assert_eq!(result, expected_result);
// Pre-existing entry "a nice string" is reused as R[0]; only "cake!" is appended.
let source = r#"var R=["a nice string"];var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
let expected_result = "var R=[\"a nice string\",\"cake!\"];var x=[R[0],R[0],\
\"another nice string\",R[1],R[1],R[0],R[1],R[1],R[1]];";
let result = simple_minify(source).apply(::js::clean_tokens)
.apply(|c| aggregate_strings_into_array(c, "R"))
.to_string();
assert_eq!(result, expected_result);
// The array does not have to be the first statement of the file.
let source = r#"var y = 12;var R=["a nice string"];var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
let expected_result = "var y=12;var R=[\"a nice string\",\"cake!\"];var x=[R[0],R[0],\
\"another nice string\",R[1],R[1],R[0],R[1],R[1],R[1]];";
let result = simple_minify(source).apply(::js::clean_tokens)
.apply(|c| aggregate_strings_into_array(c, "R"))
.to_string();
assert_eq!(result, expected_result);
// Unrelated existing entries are preserved; new entries start after them (R[3], R[4]).
let source = r#"var R=["osef1", "o2", "damn"];
var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
let expected_result = "var R=[\"osef1\",\"o2\",\"damn\",\"a nice string\",\"cake!\"];\
var x=[R[3],R[3],\"another nice string\",R[4],R[4],R[3],R[4],R[4],R[4]];";
let result = simple_minify(source).apply(::js::clean_tokens)
.apply(|c| aggregate_strings_into_array(c, "R"))
.to_string();
assert_eq!(result, expected_result);
}
// Duplicated literals become generated variables `r_aa`/`r_ba`, declared once
// at the top; a literal used only once is left inline.
#[test]
fn string_duplicates() {
let source = r#"var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
let expected_result = "var r_aa=\"a nice string\",r_ba=\"cake!\";var x=[r_aa,r_aa,\
\"another nice string\",r_ba,r_ba,r_aa,r_ba,r_ba,r_ba];";
let result = simple_minify(source).apply(aggregate_strings)
.apply(::js::clean_tokens)
.to_string();
assert_eq!(result, expected_result);
}
// A pre-existing `r_aa` variable already holding a literal is reused for that
// literal; the generator skips the taken name and creates `r_ba` for "cake!".
#[test]
fn already_existing_var() {
let source = r#"var r_aa = "a nice string"; var x = ["a nice string", "a nice string",
"another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
let expected_result = "var r_ba=\"cake!\";var r_aa=\"a nice string\";var x=[r_aa,r_aa,\
\"another nice string\",r_ba,r_ba,r_aa,r_ba,r_ba,r_ba];";
let result = simple_minify(source).apply(aggregate_strings)
.apply(::js::clean_tokens)
.to_string();
assert_eq!(result, expected_result);
}
// `r_aa` is taken by the source (with a non-string value), so generated names
// start at `r_ba` — collision avoidance with user variables.
#[test]
fn string_duplicates_variables_already_exist() {
let source = r#"var r_aa=1;var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
let expected_result = "var r_ba=\"a nice string\",r_ca=\"cake!\";\
var r_aa=1;var x=[r_ba,r_ba,\
\"another nice string\",r_ca,r_ca,r_ba,r_ca,r_ca,r_ca];";
let result = simple_minify(source).apply(aggregate_strings)
.apply(::js::clean_tokens)
.to_string();
assert_eq!(result, expected_result);
}
// The `_with_separation` variant puts the given token (here a backline)
// between the generated `var ...;` declarations and the original code.
#[test]
fn string_duplicates_with_separator() {
use self::token::ReservedChar;
let source = r#"var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
let expected_result = "var r_aa=\"a nice string\",r_ba=\"cake!\";\nvar x=[r_aa,r_aa,\
\"another nice string\",r_ba,r_ba,r_aa,r_ba,r_ba,r_ba];";
let result = simple_minify(source).apply(::js::clean_tokens)
.apply(|f| {
aggregate_strings_with_separation(f, Token::Char(ReservedChar::Backline))
}).to_string();
assert_eq!(result, expected_result);
}
// `clean_tokens_except` takes a filter: tokens for which the closure applies
// are exempt from removal — here backlines survive the cleaning pass while
// other whitespace is still stripped.
#[test]
fn clean_except() {
use self::token::ReservedChar;
let source = r#"var x = [1, 2, 3];
var y = "salut";
var z = "ok!";"#;
let expected = r#"var x=[1,2,3];
var y="salut";
var z="ok!";"#;
let result = simple_minify(source).apply(|f| {
::js::clean_tokens_except(f, |c| {
c.get_char() != Some(ReservedChar::Backline)
})
}).to_string();
assert_eq!(result, expected);
}
// Stresses `VariableNameGenerator`: enough distinct duplicated strings to
// exhaust the two-letter `r_aa`..`r_zz` range and force three-letter names,
// while keeping the generation order monotonic.
#[test]
fn name_generator() {
    // 20000 distinct 37+-char strings, each duplicated once in the source so
    // every one of them qualifies for variable aggregation.
    let s = "a".repeat(36);
    let s = ::std::iter::repeat(s)
        .take(20000)
        .enumerate()
        .map(|(pos, s)| format!("{}{}", s, pos))
        .collect::<Vec<_>>();
    let source = format!("var x = [{}];",
        s.iter()
            .map(|s| format!("\"{0}\",\"{0}\"", s))
            .collect::<Vec<_>>()
            .join(","));
    let result = simple_minify(&source).apply(::js::clean_tokens)
        .apply(aggregate_strings)
        .to_string();
    // The generator reached three-letter names...
    assert!(result.contains(",r_aaa="));
    // ...and names were assigned in generation order (r_ab before r_ba).
    assert!(result.find(",r_ab=").unwrap() < result.find(",r_ba=").unwrap());
}
// A string literal containing an escaped backslash must survive minification
// unchanged (the tokenizer must not treat `\\"` as an escaped quote).
#[test]
fn simple_quote() {
    assert_eq!(minify(r#"var x = "\\";"#), r#"var x="\\";"#);
}
// End-to-end `minify`: line comments and multi-line block comments are
// stripped, whitespace is collapsed, statements are preserved in order.
#[test]
fn js_minify_test() {
let source = r##"
var foo = "something";
var another_var = 2348323;
// who doesn't like comments?
/* and even longer comments?
like
on
a
lot
of
lines!
Fun!
*/
function far_away(x, y) {
var x2 = x + 4;
return x * x2 + y;
}
// this call is useless
far_away(another_var, 12);
// this call is useless too
far_away(another_var, 12);
"##;
let expected_result = "var foo=\"something\";var another_var=2348323;function far_away(x,y){\
var x2=x+4;return x*x2+y;}far_away(another_var,12);far_away(another_var,\
12);";
assert_eq!(minify(source), expected_result);
}
#[test]
fn another_js_test() {
let source = r#"
/*! let's keep this license
*
* because everyone likes licenses!
*
* right?
*/
function forEach(data, func) {
for (var i = 0; i < data.length; ++i) {
func(data[i]);
}
}
forEach([0, 1, 2, 3, 4,
5, 6, 7, 8, 9], function (x) {
console.log(x);
});
// I think we're done?
console.log('done!');
"#;
let expected_result = r#"/*! let's keep this license
*
* because everyone likes licenses!
*
* right?
*/function forEach(data,func){for(var i=0;i<data.length;++i){func(data[i]);}}forEach([0,1,2,3,4,5,6,7,8,9],function(x){console.log(x);});console.log('done!');"#;
assert_eq!(minify(source), expected_result);
}
// Regression test: comments containing non-ASCII characters (the `’` below)
// must be removed cleanly without corrupting the surrounding code.
#[test]
fn comment_issue() {
let source = r#"
search_input.onchange = function(e) {
// Do NOT e.preventDefault() here. It will prevent pasting.
clearTimeout(searchTimeout);
// zero-timeout necessary here because at the time of event handler execution the
// pasted content is not in the input field yet. Shouldn’t make any difference for
// change, though.
setTimeout(search, 0);
};
"#;
let expected_result = "search_input.onchange=function(e){clearTimeout(searchTimeout);\
setTimeout(search,0);};";
assert_eq!(minify(source), expected_result);
}
// Regression test: the spaces in `var entry in results` are significant and
// must survive whitespace collapsing.
#[test]
fn missing_whitespace() {
let source = r#"
for (var entry in results) {
if (results.hasOwnProperty(entry)) {
ar.push(results[entry]);
}
}"#;
let expected_result = "for(var entry in results){if(results.hasOwnProperty(entry)){\
ar.push(results[entry]);}}";
assert_eq!(minify(source), expected_result);
}
// Regression test: a regex literal (`/\_/g`) must not be confused with a
// division operator or the start of a comment by the tokenizer.
#[test]
fn weird_regex_issue() {
let source = r#"
val = val.replace(/\_/g, "");
var valGenerics = extractGenerics(val);"#;
let expected_result = "val=val.replace(/\\_/g,\"\");var valGenerics=extractGenerics(val);";
assert_eq!(minify(source), expected_result);
}
// The space in `return 12;` separates a keyword from its operand and must be
// preserved by minification.
#[test]
fn keep_space() {
    assert_eq!(minify("return 12;return x;"), "return 12;return x;");
}