#![feature(extend_one)]
#![feature(associated_type_bounds)]
extern crate proc_macro;
pub mod tree;
pub use tree::*;
pub mod codegen;
pub use codegen::*;
pub mod scope;
pub use scope::*;
use pyo3::prelude::*;
use std::include_str;
/// Parses a string of Python source code and returns the parsed module tree.
pub fn parse(input: &str, filename: &str) -> PyResult<tree::Module> {
    let pymodule_code = include_str!("parser.py");
    Python::with_gil(|py| -> PyResult<Module> {
        // Load the bundled parser.py and call its `parse` function.
        let pymodule = PyModule::from_code(py, pymodule_code, "parser.py", "parser")?;
        let t = pymodule.getattr("parse")?;
        assert!(t.is_callable());
        let args = (input, filename);
        let tree: tree::Module = t.call1(args)?.extract()?;
        Ok(tree)
    })
}
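// A minimal usage sketch for `parse` (assuming the embedded interpreter is
// initialized, e.g. via pyo3's `auto-initialize` feature, and that
// `tree::Module` implements `Debug`):
//
//     let module = parse("def foo():\n    pass\n", "example.py")?;
//     println!("{:#?}", module);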
/// Accepts any Python object and dumps it using the Python `ast` module.
pub fn ast_dump(o: &PyAny, indent: Option<u8>) -> PyResult<String> {
    let pymodule_code = include_str!("ast_dump.py");
    Python::with_gil(|py| -> PyResult<String> {
        // Load the bundled ast_dump.py and call its `ast_dump` function.
        let pymodule = PyModule::from_code(py, pymodule_code, "ast_dump.py", "parser")?;
        let t = pymodule.getattr("ast_dump")?;
        assert!(t.is_callable());
        let args = (o, indent);
        Ok(t.call1(args)?.extract()?)
    })
}
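// A minimal usage sketch for `ast_dump`; `node` is a hypothetical `&PyAny`
// handle to a Python AST object obtained elsewhere, and `Some(4)` requests
// pretty-printed output with a four-space indent:
//
//     let dumped = ast_dump(node, Some(4))?;
//     println!("{}", dumped);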
/// Returns the embedded Python interpreter's `sys.path` entries.
pub fn sys_path() -> PyResult<Vec<String>> {
    let pymodule_code = include_str!("path.py");
    Python::with_gil(|py| -> PyResult<Vec<String>> {
        let pymodule = PyModule::from_code(py, pymodule_code, "path.py", "path")?;
        let t = pymodule
            .getattr("path")
            .expect("reading `path` function from interpreter");
        assert!(t.is_callable());
        let args = ();
        let paths: Vec<String> = t.call1(args)?.extract()?;
        Ok(paths)
    })
}
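// A minimal usage sketch for `sys_path`, e.g. to inspect where the embedded
// interpreter will look for importable modules:
//
//     for entry in sys_path()? {
//         println!("{}", entry);
//     }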
#[cfg(test)]
mod tests {
    //use super::*;
    /*
    #[test]
    fn check_token_stream() {
        let result = parse("#test comment
def foo():
    pass
", "test_case").unwrap();/*
        println!("tokens: {:?}", result);
        assert_eq!(result[0].token_text, "COMMENT");
        assert_eq!(result[1].token_text, "NL");
        assert_eq!(result[2].token_text, "NAME");
        assert_eq!(result[3].token_text, "NAME");
        assert_eq!(result[4].token_text, "OP");
        assert_eq!(result[5].token_text, "OP");
        assert_eq!(result[6].token_text, "OP");
        assert_eq!(result[7].token_text, "NEWLINE");
        assert_eq!(result[8].token_text, "INDENT");
        assert_eq!(result[9].token_text, "NAME");
        assert_eq!(result[10].token_text, "NEWLINE");
        assert_eq!(result[11].token_text, "DEDENT");
        assert_eq!(result[12].token_text, "ENDMARKER");*/
        info!("{:?}", result);
    }*/
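    // A minimal smoke-test sketch for `sys_path`, assuming an embedded Python
    // interpreter is available when tests run; marked `#[ignore]` so it only
    // runs when requested explicitly (e.g. `cargo test -- --ignored`).
    #[test]
    #[ignore]
    fn check_sys_path() {
        let paths = super::sys_path().unwrap();
        // sys.path is normally non-empty on a working interpreter.
        assert!(!paths.is_empty());
    }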
}