//! rs-jsonpath 0.1.0
//!
//! JsonPath library: tokenizing and parsing JsonPath expressions and
//! evaluating their filter comparisons.
use regex::Regex;

/// A single parsed JsonPath token.
///
/// `op` names the operation ("root", "child", "scan", "filter", "range",
/// "idx", or "key" — see `parse_token`), `key` holds the key name (empty
/// for bracket operations), and `args` carries the operation's arguments
/// (filter expression, range bounds, or index list).
#[derive(Debug, Clone, PartialEq)]
pub struct ParsedJsonPath {
    /// Operation kind produced by `parse_token`.
    pub op: String,
    /// Key name this token addresses; empty for bracket operations.
    pub key: String,
    /// Operation arguments: filter expression, range bounds, or indices.
    pub args: Vec<String>,
}

/// A filter comparison split into its three parts by `parse_filter`,
/// e.g. `"@.price < $.expensive"` becomes
/// `left = "@.price"`, `op = "<"`, `right = "$.expensive"`.
#[derive(Debug, Clone, PartialEq)]
pub struct ParsedFilter {
    /// Left operand (trimmed).
    pub left: String,
    /// Comparison operator: one of `<=`, `<`, `==`, `>=`, `>`.
    pub op: String,
    /// Right operand (trimmed).
    pub right: String,
}

/// Shorthand helper: converts a string slice into an owned `String`.
pub fn s(s: &str) -> String {
    String::from(s)
}

/// Splits a JsonPath expression into tokens.
///
/// `"$.store..book[0]"` becomes `["$", "store", "*book", "[0]"]`:
/// * `$` / `@` roots are emitted as their own tokens (only valid at index 0),
/// * `..` (deep scan) is folded into the following key as a `*` prefix,
/// * bracket expressions (`[...]`) are emitted verbatim as single tokens.
///
/// Returns an `Err` when `$` or `@` appears after the first character.
///
/// NOTE: slicing is byte-indexed; multi-byte (non-ASCII) keys are not
/// supported — TODO confirm whether such inputs need to be handled.
pub fn tokenize(query: String) -> Result<Vec<String>, String> {
    let mut tokens: Vec<String> = vec![];
    let mut token = String::new();
    for (i, c) in query.chars().enumerate() {
        token.push(c);
        match token.as_str() {
            "$" | "@" => {
                if i == 0 {
                    tokens.push(token);
                    token = String::new();
                } else {
                    return Err("JsonPath must start with $ (or @ for filters)".to_string());
                }
            }
            // Lone "." / "*" may still grow into "..", "*key", etc.
            "." | "*" => continue,
            // ".." marks a deep scan; represent it as a "*" prefix on the next key.
            ".." => token = "*".to_string(),
            _ => {
                if let Some(bracket_idx) = token.find('[') {
                    // Inside a bracket expression: wait for the closing ']',
                    // then emit the key (if any) and the bracket part as
                    // separate tokens.
                    if c == ']' {
                        let key_start = if token.starts_with('.') { 1 } else { 0 };
                        tokens.push(token[key_start..bracket_idx].to_string());
                        tokens.push(token[bracket_idx..].to_string());
                        token = String::new();
                    }
                } else if c == '.' {
                    // End of a plain key: strip the leading "." separator (if
                    // present) and the "." we just appended.
                    let key_start = if token.starts_with('.') { 1 } else { 0 };
                    tokens.push(token[key_start..token.len() - 1].to_string());
                    token = ".".to_string();
                }
            }
        }
    }
    if !token.is_empty() {
        if token.starts_with('.') {
            token = token[1..].to_string();
        }
        // Skip a trailing bare "*" only when it directly follows a scan token.
        // (The previous code indexed tokens[tokens.len() - 1], which panicked
        // via index underflow on inputs like "*" where tokens is still empty.)
        if token != "*" || tokens.last().map_or(true, |last| last != "*") {
            tokens.push(token);
        }
    }
    Ok(tokens)
}

/// Parses a single token produced by [`tokenize`] into a `ParsedJsonPath`.
///
/// Token forms and the resulting ops:
/// * `$` -> `root`, `@` -> `child`
/// * `*key` -> `scan` (deep scan for `key`)
/// * `[?(expr)]` -> `filter` with `args == [expr]`
/// * `[from:to]` -> `range` with `args == [from, to]`
/// * `[*]` -> `range` with empty args (full range)
/// * `[i, j, ...]` -> `idx` with one arg per index
/// * anything else -> `key`
///
/// Returns `Err` for malformed bracket contents (too short, a filter that is
/// not wrapped in `?( … )`, or a range with more than one `:`).
pub fn parse_token(token: String) -> Result<ParsedJsonPath, String> {
    if token == "$" {
        return Ok(ParsedJsonPath { op: "root".to_string(), key: "$".to_string(), args: vec![] });
    }
    if token == "@" {
        return Ok(ParsedJsonPath { op: "child".to_string(), key: "@".to_string(), args: vec![] });
    }
    if let Some(key) = token.strip_prefix('*') {
        return Ok(ParsedJsonPath { op: "scan".to_string(), key: key.to_string(), args: vec![] });
    }
    if let Some(bracket_idx) = token.find('[') {
        let tail = &token[bracket_idx..];
        // Shortest meaningful bracket expression is "[x]" — three bytes.
        if tail.len() < 3 {
            return Err(format!("content of [] in jsonPath expression should be >=3, {:?}", tail));
        }
        // Drop the surrounding brackets.
        let tail = &tail[1..tail.len() - 1];
        let (op, args) = if tail.contains('?') {
            // filter: must be exactly "?( expr )". Previously a malformed
            // filter fell through silently with empty args; now it errors.
            let inner = tail
                .strip_prefix("?(")
                .and_then(|rest| rest.strip_suffix(')'))
                .ok_or_else(|| format!("invalid filter expression: {:?}", tail))?;
            ("filter", vec![inner.trim().to_string()])
        } else if tail.contains(':') {
            // range: [from:to]
            let args: Vec<String> = tail.split(':').map(|arg| arg.trim().to_string()).collect();
            if args.len() != 2 {
                return Err(format!("only support one range(from, to): {:?}", args));
            }
            ("range", args)
        } else if tail == "*" {
            // wildcard index: a full range with no bounds
            ("range", vec![])
        } else {
            // idx: one or more comma-separated indices
            ("idx", tail.split(',').map(|arg| arg.trim().to_string()).collect())
        };
        return Ok(ParsedJsonPath { op: op.to_string(), key: String::new(), args });
    }
    Ok(ParsedJsonPath { op: "key".to_string(), key: token, args: vec![] })
}

pub fn parse_filter(filter: String) -> Result<ParsedFilter, String> {
    Regex::new("^(.*?)(<=|<|==|>=|>)(.*?)$").map_err(|_| s("Regex error")).and_then(|re| {
        re.captures(filter.as_str()).and_then(|captures| {
            if captures.len() < 4 {
                None
            } else {
                let parsed = ParsedFilter{left:s(&captures[1].trim()), op:s(&captures[2].trim()), right:s(&captures[3].trim())};
                Some(parsed)
            }
        }) 
        .ok_or(s("Invalid filter expression"))
    })
}

/// Evaluates a binary comparison between two already-resolved operand strings.
///
/// `==` compares the operands as raw strings. The ordering operators
/// (`<`, `<=`, `>`, `>=`) require both operands to parse as `i64`; the
/// function returns `false` when either operand is not an integer or the
/// operator is unknown.
///
/// (Leftover debug `println!` calls were removed — a library function
/// should not write to stdout.)
pub fn eval_filter(left: &str, op: &str, right: &str) -> bool {
    if op == "==" {
        return left == right;
    }
    // Ordering comparisons are only defined for integer operands.
    if let (Ok(l), Ok(r)) = (left.parse::<i64>(), right.parse::<i64>()) {
        return match op {
            "<" => l < r,
            "<=" => l <= r,
            ">" => l > r,
            ">=" => l >= r,
            _ => false,
        };
    }
    false
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn it_tokenize_simple_expression() {
        let result = tokenize(s("$.store"));
        assert_eq!(result.unwrap(), vec!["$", "store"]);
    }

    #[test]
    fn it_tokenize_wild_expression() {
        let result = tokenize(s("$..store"));
        assert_eq!(result.unwrap(), vec!["$", "*store"]);
    }

    #[test]
    fn it_tokenize_bracket_expression() {
        let result = tokenize(s("$.store[?(@.price < $.expensive)]"));
        assert_eq!(result.unwrap(), vec!["$", "store", "[?(@.price < $.expensive)]"]);
    }

    #[test]
    fn it_tokenize_complex_expression() {
        let result = tokenize(s("$.store..book[?(@.price < $.expensive)].price"));
        assert_eq!(
            result.unwrap(),
            vec!["$", "store", "*book", "[?(@.price < $.expensive)]", "price"]
        );
    }

    #[test]
    fn it_parse_complex_token() {
        // One token of every kind the parser recognizes.
        let tokens = vec![
            "$", "store", "*book", "[?(@.price < $.expensive)]", "price",
            "[1:5]", "author", "[1,5,6]", "name", "[*]", "date", "[1]",
        ];
        let parsed: Vec<ParsedJsonPath> =
            tokens.iter().map(|t| parse_token(s(t)).unwrap()).collect();
        let expected = vec![
            ParsedJsonPath { op: s("root"), key: s("$"), args: vec![] },
            ParsedJsonPath { op: s("key"), key: s("store"), args: vec![] },
            ParsedJsonPath { op: s("scan"), key: s("book"), args: vec![] },
            ParsedJsonPath { op: s("filter"), key: s(""), args: vec![s("@.price < $.expensive")] },
            ParsedJsonPath { op: s("key"), key: s("price"), args: vec![] },
            ParsedJsonPath { op: s("range"), key: s(""), args: vec![s("1"), s("5")] },
            ParsedJsonPath { op: s("key"), key: s("author"), args: vec![] },
            ParsedJsonPath { op: s("idx"), key: s(""), args: vec![s("1"), s("5"), s("6")] },
            ParsedJsonPath { op: s("key"), key: s("name"), args: vec![] },
            ParsedJsonPath { op: s("range"), key: s(""), args: vec![] },
            ParsedJsonPath { op: s("key"), key: s("date"), args: vec![] },
            ParsedJsonPath { op: s("idx"), key: s(""), args: vec![s("1")] },
        ];
        assert_eq!(parsed, expected)
    }

    #[test]
    fn it_parse_filter() {
        let result = parse_filter(s("@.price < $.expensive"));
        assert_eq!(
            result.unwrap(),
            ParsedFilter { left: s("@.price"), op: s("<"), right: s("$.expensive") }
        );
    }
}