use crate::core::ExtractFromLiteral;
use crate::grammar::{pt, Ctx, Tok};
use crate::sdk::Error;
/// A token type declared in the grammar.
///
/// `name` is the declared token-type identifier; `is_extract` records whether
/// the definition used the `extract` keyword (see `parse_token_def`, which
/// decorates such tokens as unused).
#[derive(Debug, Clone)]
pub struct TokenDef {
    /// Declared token-type name.
    pub name: String,
    /// True when the definition carried the `extract` keyword.
    pub is_extract: bool,
}
/// A tokenizer rule derived from a grammar statement.
///
/// The `Ignore*` variants carry only the pattern (matched text is discarded);
/// the named variants carry the token-type name followed by the pattern.
#[derive(Debug, Clone)]
pub enum TokenRule {
    /// Skip input matching this exact literal.
    IgnoreLiteral(String),
    /// Skip input matching this regular expression.
    IgnoreRegExp(String),
    /// Emit a token of the named type for this exact literal.
    Literal(String, String),
    /// Emit a token of the named type for this regular expression.
    RegExp(String, String),
}
pub fn parse_token_def(pt: &pt::DefineTokenTypeStatement, ctx: &mut Ctx) -> Option<()> {
if pt.m_kw_extract {
ctx.tbs.set(
&pt.ast.m_token_type,
Tok::Decor {
tag: "unused".to_owned(),
base: Box::new(Tok::SToken),
},
)
}
if !ctx.val.add_token(TokenDef {
name: pt.m_token_type.clone(),
is_extract: pt.m_kw_extract,
}) {
let name = &pt.m_token_type;
let msg = format!("Duplicate token definition: {name}");
let help = "Remove or rename the duplicate definition".to_owned();
ctx
.err
.push(Error::from_token(&pt.ast.m_token_type, msg, help));
}
None
}
/// Registers an ignore rule (skip matching input) from the parse tree.
///
/// Literal patterns have their surrounding quotes stripped; regex patterns are
/// unescaped via `strip_and_escape_regex`. The resulting rule is appended to
/// the validation context. Always returns `None`.
pub fn parse_token_ignore_rule(
    pt: &pt::DefineIgnoreTokenRuleStatement,
    ctx: &mut Ctx,
) -> Option<()> {
    let rule = match pt.m_value.as_ref() {
        pt::LiteralOrRegExp::TokenLiteral(lit) => {
            TokenRule::IgnoreLiteral(lit.m_t.strip_quotes())
        }
        pt::LiteralOrRegExp::TokenRegExp(re) => {
            TokenRule::IgnoreRegExp(re.m_t.strip_and_escape_regex())
        }
    };
    ctx.val.add_token_rule(rule);
    None
}
/// Registers a named token rule (emit a token of `m_token_type`) from the
/// parse tree.
///
/// Mirrors `parse_token_ignore_rule`, but the produced rule also carries the
/// token-type name. Always returns `None`.
pub fn parse_token_rule(pt: &pt::DefineTokenRuleStatement, ctx: &mut Ctx) -> Option<()> {
    let type_name = pt.m_token_type.clone();
    let rule = match pt.m_value.as_ref() {
        pt::LiteralOrRegExp::TokenLiteral(lit) => {
            TokenRule::Literal(type_name, lit.m_t.strip_quotes())
        }
        pt::LiteralOrRegExp::TokenRegExp(re) => {
            TokenRule::RegExp(type_name, re.m_t.strip_and_escape_regex())
        }
    };
    ctx.val.add_token_rule(rule);
    None
}