use crate::{parsers, tokenize, Token, TokenStream};
use serde::{Deserialize, Serialize};
use serde_wasm_bindgen::to_value;
use wasm_bindgen::prelude::*;
/// Installs the `console_error_panic_hook` so Rust panics are reported to the
/// browser console instead of surfacing as an opaque wasm trap.
///
/// Safe to call more than once: `set_once` guards against double installation.
/// Callers (the JS playground) should invoke this once at startup.
#[wasm_bindgen]
pub fn init_panic_hook() {
console_error_panic_hook::set_once();
}
// Coarse token-category names serialized into `TokenInfo::kind` and consumed
// by the JS playground (e.g. for syntax highlighting).
const IDENTIFIER: &str = "identifier";
const WHITESPACE: &str = "whitespace";
const KEYWORD: &str = "keyword";
const LITERAL: &str = "literal";
const OPERATOR: &str = "operator";
const COMMENT: &str = "comment";
/// A single lexed token with its category and source position, serialized to
/// JS via `serde_wasm_bindgen`.
#[derive(Serialize, Deserialize)]
pub struct TokenInfo {
/// Coarse category (one of the `IDENTIFIER`/`KEYWORD`/... constants) or,
/// as a fallback, the `Debug` name of the underlying `Token` variant.
pub kind: String,
/// The token's source text, verbatim.
pub content: String,
/// 1-based line number (see `produce_tokens` for how positions are tracked).
pub line: u32,
/// Column where the token starts (see `produce_tokens`).
pub column: u32,
/// Token length in characters (not bytes).
pub length: u32,
}
/// A diagnostic reported back to the playground UI.
///
/// NOTE(review): currently never populated — `parse_vb6_code` discards
/// tokenizer failures and always returns an empty `errors` vec.
#[derive(Serialize, Deserialize)]
pub struct ErrorInfo {
/// Line the error occurred on.
pub line: usize,
/// Column the error occurred at.
pub column: usize,
/// Human-readable error description.
pub message: String,
}
/// Summary statistics about a parse, shown in the playground UI.
#[derive(Serialize, Deserialize)]
pub struct ParseStats {
/// Number of tokens produced by the tokenizer.
pub token_count: u32,
/// Total number of nodes in the CST (see `count_nodes`).
pub node_count: u32,
/// Maximum depth of the CST; a lone leaf has depth 1 (see `tree_depth`).
pub tree_depth: u32,
}
/// Full result bundle returned to the JS playground by `parse_vb6_code`.
#[derive(Serialize, Deserialize)]
pub struct PlaygroundOutput {
/// Lexed tokens; `None` when tokenization was skipped or failed.
pub tokens: Option<Vec<TokenInfo>>,
/// Root of the concrete syntax tree; `None` when parsing was skipped or failed.
pub cst: Option<CstNode>,
/// Diagnostics collected during tokenization/parsing.
pub errors: Vec<ErrorInfo>,
/// Wall-clock parse time in milliseconds.
/// NOTE(review): currently always 0.0 — timing is not yet measured.
pub parse_time_ms: f64,
/// Token/node/depth counters for the UI.
pub stats: ParseStats,
}
/// JS-facing mirror of `parsers::cst::CstNode`, produced by `convert_cst_node`.
#[derive(Serialize, Deserialize)]
pub struct CstNode {
/// `Debug` rendering of the node's syntax kind.
pub kind: String,
/// Half-open byte range `[start, end]` of the node's text within the source.
pub range: [u32; 2],
/// Child nodes in source order.
pub children: Vec<CstNode>,
}
/// Converts a parser CST node (and its subtree) into the JS-facing `CstNode`
/// representation, computing ranges starting from offset 0.
fn convert_cst_node(node: &parsers::cst::CstNode) -> CstNode {
    let (root, _end_offset) = convert_cst_node_with_offset(node, 0);
    root
}
/// Recursively converts `node` into a JS-facing `CstNode`, assigning ranges.
///
/// `start_offset` is the offset at which this node's text begins; the second
/// element of the returned tuple is the offset just past the node's text, so a
/// caller can thread it into the next sibling's conversion.
fn convert_cst_node_with_offset(node: &parsers::cst::CstNode, start_offset: u32) -> (CstNode, u32) {
    // Falls back to 0 if the text length somehow exceeds u32::MAX.
    let text_len = u32::try_from(node.text().len()).unwrap_or(0);
    let end_offset = start_offset + text_len;
    // Convert children left to right, threading the running offset through
    // explicitly rather than mutating a captured variable inside a closure.
    let mut children = Vec::new();
    let mut child_offset = start_offset;
    for child in node.children().iter() {
        let (converted, next_offset) = convert_cst_node_with_offset(child, child_offset);
        child_offset = next_offset;
        children.push(converted);
    }
    let converted_node = CstNode {
        kind: format!("{:?}", node.kind()),
        range: [start_offset, end_offset],
        children,
    };
    (converted_node, end_offset)
}
/// Returns the total number of nodes in the subtree rooted at `node`,
/// including `node` itself.
fn count_nodes(node: &CstNode) -> u32 {
    node.children
        .iter()
        .fold(1, |total, child| total + count_nodes(child))
}
/// Returns the depth of the subtree rooted at `node`; a leaf has depth 1.
///
/// The original special-cased leaves, but `max()` over an empty child list is
/// `None`, so `unwrap_or(0)` already yields `1 + 0 = 1` — a single expression
/// covers both cases.
fn tree_depth(node: &CstNode) -> u32 {
    1 + node.children.iter().map(tree_depth).max().unwrap_or(0)
}
#[wasm_bindgen]
pub fn parse_vb6_code(
code: &str,
_file_type: &str, ) -> Result<JsValue, JsError> {
let mut source_stream = crate::SourceStream::new("test.bas", code);
let (token_stream_opt, _failures) = tokenize(&mut source_stream).unpack();
let token_stream = token_stream_opt.unwrap();
let tokens = produce_tokens(token_stream.clone());
let cst = parsers::cst::parse(token_stream.clone());
let cst_node = convert_cst_node(&cst.to_root_node());
let token_count = u32::try_from(tokens.len())?;
let parse_stats = ParseStats {
token_count,
node_count: count_nodes(&cst_node),
tree_depth: tree_depth(&cst_node),
};
let playground_output = PlaygroundOutput {
tokens: Some(tokens),
cst: Some(cst_node),
errors: vec![],
parse_time_ms: 0.0f64,
stats: parse_stats,
};
Ok(to_value(&playground_output).unwrap())
}
/// Converts a [`TokenStream`] into JS-facing [`TokenInfo`] records, assigning
/// each token a coarse category plus a 1-based line/column position.
///
/// Fix: the original advanced `line`/`column` BEFORE recording a newline
/// token, so every newline was attributed to the *following* line at column 0,
/// and lines after the first effectively started at column 0 + newline length.
/// Positions are now recorded first and advanced afterwards, so a newline sits
/// at the end of the line it terminates and every line starts at column 1.
#[must_use]
pub fn produce_tokens(token_stream: TokenStream) -> Vec<TokenInfo> {
    let mut tokens = vec![];
    // 1-based position of the NEXT token to be recorded.
    let mut column = 1;
    let mut line = 1;
    for (text, token) in token_stream.into_tokens() {
        let kind = match token {
            Token::Whitespace => WHITESPACE.to_string(),
            Token::Identifier => IDENTIFIER.to_string(),
            Token::DateTimeLiteral
            | Token::DecimalLiteral
            | Token::SingleLiteral
            | Token::DoubleLiteral
            | Token::StringLiteral
            | Token::IntegerLiteral
            | Token::LongLiteral => LITERAL.to_string(),
            Token::EndOfLineComment | Token::RemComment => COMMENT.to_string(),
            _ => {
                if token.is_keyword() {
                    KEYWORD.to_string()
                } else if token.is_operator() {
                    OPERATOR.to_string()
                } else {
                    // Fallback: expose the raw variant name for uncategorized tokens.
                    format!("{token:?}")
                }
            }
        };
        let content = text.to_string();
        // Length in characters (not bytes) so multi-byte text advances columns correctly.
        let length = u32::try_from(content.chars().count()).unwrap_or(0);
        tokens.push(TokenInfo {
            kind,
            content,
            line,
            column,
            length,
        });
        // Advance AFTER recording so the newline token belongs to the line it ends.
        if token == Token::Newline {
            line += 1;
            column = 1;
        } else {
            column += length;
        }
    }
    tokens
}
#[wasm_bindgen]
pub fn tokenize_vb6_code(code: &str) -> Result<JsValue, JsError> {
let mut source_stream = crate::SourceStream::new("test.bas", code);
let (token_stream_opt, _failures) = tokenize(&mut source_stream).unpack();
let token_stream = token_stream_opt.unwrap();
let tokens = produce_tokens(token_stream);
Ok(to_value(&tokens).unwrap())
}