extern crate serde_json;
extern crate regex;
mod tokenizer;
mod requests;
use serde_json::Value;
use tokenizer::*;
use requests::*;
pub fn lookup(json: String, json_path: String) -> Result<String, String> {
serde_json::from_str(json.as_str())
.map_err(|_| s("Unable to parse json"))
.and_then(|v| look(&v, &v, json_path))
}
/// Evaluates the JsonPath expression `json_path` relative to `json`, with
/// `root` available for `$`-anchored sub-expressions.
///
/// The path is tokenized, each token is parsed into an operation, and the
/// operations are applied left to right, each one transforming the current
/// value. The final value is serialized to a JSON string.
///
/// Returns an error when tokenizing, parsing a token, applying an operation,
/// or serializing the result fails.
pub fn look(root: &Value, json: &Value, json_path: String) -> Result<String, String> {
    let token_list = tokenize(json_path)?;
    // Thread the current value through every operation; the first failure
    // short-circuits the whole evaluation.
    let result = token_list.iter().try_fold(json.clone(), |current, token| {
        let op = parse_token(token.clone())?;
        request_json(root, &current, &op)
    })?;
    serde_json::to_string(&result).map_err(|_| s("Unable to convert result"))
}
/// Applies each filter expression in `args` to the elements of `json` (which
/// must be a JSON array) and returns the surviving elements as a JSON array.
///
/// Every argument is parsed into a `left op right` comparison. For each array
/// element, both sides are evaluated (`$` paths against `root`, `@` paths
/// against the element, anything else as a literal) and the element is kept
/// when `eval_filter` accepts the comparison. Elements whose sides fail to
/// evaluate are silently dropped rather than treated as errors. The matches
/// from all filters are concatenated in order.
///
/// Returns an error when a filter fails to parse or `json` is not an array.
pub fn find_filter(root: &Value, json: &Value, args: &Vec<String>) -> Result<Value, String> {
    let matches_per_filter = args
        .iter()
        .map(|arg| {
            parse_filter(arg.to_string()).and_then(|parsed_filter| {
                json.as_array()
                    .map(|array| {
                        array
                            .iter()
                            .filter(|value| {
                                let left =
                                    eval_expression(root, value, parsed_filter.left.clone());
                                let right =
                                    eval_expression(root, value, parsed_filter.right.clone());
                                // An unevaluable side simply excludes the element.
                                if let (Ok(l), Ok(r)) = (left, right) {
                                    eval_filter(l.as_str(), parsed_filter.op.as_str(), r.as_str())
                                } else {
                                    false
                                }
                            })
                            .cloned()
                            .collect::<Vec<Value>>()
                    })
                    // Lazily build the error so the success path allocates nothing.
                    .ok_or_else(|| s("Unable to filter json"))
            })
        })
        .collect::<Result<Vec<Vec<Value>>, String>>();
    matches_per_filter
        .map(|groups| groups.into_iter().flatten().collect::<Vec<Value>>())
        .map(Value::Array)
}
/// Resolves one side of a filter comparison to a string.
///
/// An expression containing `$` or `@` is treated as a JsonPath and evaluated
/// via [`look`] (with `json` as the `@` current node); anything else is
/// returned verbatim as a literal.
pub fn eval_expression(root: &Value, json: &Value, expression: String) -> Result<String, String> {
    let is_path = expression.contains('$') || expression.contains('@');
    if is_path {
        look(root, json, expression)
    } else {
        Ok(expression)
    }
}
/// Dispatches a single parsed JsonPath operation against the current value.
///
/// `root` is the document root (for `$`), `json` the current node, and
/// `operation` the parsed token naming the operation and its arguments.
///
/// Returns the resulting value, or an error for an unrecognized operation or
/// a failing index/range/filter lookup.
pub fn request_json(root: &Value, json: &Value, operation: &ParsedJsonPath) -> Result<Value, String> {
    match operation.op.as_str() {
        "root" => Ok(root.clone()),
        "child" => Ok(json.clone()),
        // Missing keys are handled inside find_key, so this cannot fail here.
        "key" => Ok(find_key(json, operation.key.as_str())),
        // Recursive descent: collect every value under `key` at any depth.
        "scan" => Ok(Value::Array(search_key(json, operation.key.as_str(), &vec![]))),
        "idx" => find_indexes(json, &operation.args),
        "range" => find_range(json, &operation.args),
        "filter" => find_filter(root, json, &operation.args),
        _ => Err(s("Unable to parse JsonPath expression")),
    }
}
#[cfg(test)]
mod tests {
use tokenizer::s;
use super::*;
// End-to-end check of recursive descent: `$..type` scans every level for the
// key "type", `[*]` expands the collected values, and `.name` keeps only the
// one entry that is an object with a "name" key — the iPhone phone type.
#[test]
fn it_lookup() {
let data = r#"
{
"firstName": "John",
"lastName" : "doe",
"age" : 26,
"address" : {
"streetAddress": "naist street",
"city" : "Nara",
"type" : "630-0192"
},
"phoneNumbers": [
{
"type" : {"name": "iPhone", "type":"test"},
"number": "0123-4567-8888"
},
{
"type" : "home",
"number": "0123-4567-8910"
}
]
}
"#;
let jsonpath = "$..type[*].name";
let res = lookup(s(data), s(jsonpath)).unwrap();
// Result is the serialized JSON of the match set, hence an array literal.
let expected = r#"["iPhone"]"#;
assert_eq!(res, expected);
}
// End-to-end check of a filter predicate: `?(@.number == "...")` keeps only
// the phone entry with the matching number, then `.type` projects its type.
#[test]
fn it_lookup_with_filter() {
let data = r#"
{
"firstName": "John",
"lastName" : "doe",
"age" : 26,
"address" : {
"streetAddress": "naist street",
"city" : "Nara",
"type" : "630-0192"
},
"phoneNumbers": [
{
"type" : {"name": "iPhone", "type":"test"},
"number": "0123-4567-8888"
},
{
"type" : "home",
"number": "0123-4567-8910"
}
]
}
"#;
let jsonpath = r#"$.phoneNumbers[?(@.number == "0123-4567-8910")].type"#;
let res = lookup(s(data), s(jsonpath)).unwrap();
let expected = r#"["home"]"#;
assert_eq!(res, expected);
}
}