use crate::core::ExtractFromLiteral;
use crate::grammar::{pt, Ctx, Tok};
use crate::sdk::Error;

#[derive(Debug, Clone)]
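/// A token type definition collected from a `DefineTokenTypeStatement` in the grammar.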
pub struct TokenDef {
    /// Name of the token type
    pub name: String,
    /// Whether the definition carries the `extract` keyword
    pub is_extract: bool,
}

#[derive(Debug, Clone)]
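/// A tokenizer rule parsed from the grammar: input matching the rule is either
/// ignored or emitted as a token of the named type.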
pub enum TokenRule {
    /// Ignore input matching this literal
    IgnoreLiteral(String),
    /// Ignore input matching this regular expression
    IgnoreRegExp(String),
    /// Emit the named token type for input matching a literal (name, literal)
    Literal(String, String),
    /// Emit the named token type for input matching a regular expression (name, regex)
    RegExp(String, String),
}

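/// Parses a token type definition. Token types declared with `extract` are decorated
/// as "unused" in `ctx.tbs`, and duplicate definitions are reported as errors.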
pub fn parse_token_def(pt: &pt::DefineTokenTypeStatement, ctx: &mut Ctx) -> Option<()> {
    if pt.m_kw_extract {
        ctx.tbs.set(
            &pt.ast.m_token_type,
            Tok::Decor {
                tag: "unused".to_owned(),
                base: Box::new(Tok::SToken),
            },
        )
    }

    if !ctx.val.add_token(TokenDef {
        name: pt.m_token_type.clone(),
        is_extract: pt.m_kw_extract,
    }) {
        let name = &pt.m_token_type;
        let msg = format!("Duplicate token definition: {name}");
        let help = "Remove or rename the duplicate definition".to_owned();
        ctx.err
            .push(Error::from_token(&pt.ast.m_token_type, msg, help));
    }
    None
}

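/// Parses an ignore rule (literal or regular expression) and registers it via `ctx.val`.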
pub fn parse_token_ignore_rule(
    pt: &pt::DefineIgnoreTokenRuleStatement,
    ctx: &mut Ctx,
) -> Option<()> {
    let token_rule = match pt.m_value.as_ref() {
        pt::LiteralOrRegExp::TokenLiteral(literal) => {
            TokenRule::IgnoreLiteral(literal.m_t.strip_quotes())
        }
        pt::LiteralOrRegExp::TokenRegExp(regexp) => {
            TokenRule::IgnoreRegExp(regexp.m_t.strip_and_escape_regex())
        }
    };

    ctx.val.add_token_rule(token_rule);
    None
}

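/// Parses a rule that maps a literal or regular expression to a named token type
/// and registers it via `ctx.val`.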
pub fn parse_token_rule(pt: &pt::DefineTokenRuleStatement, ctx: &mut Ctx) -> Option<()> {
    let token_rule = match pt.m_value.as_ref() {
        pt::LiteralOrRegExp::TokenLiteral(literal) => {
            TokenRule::Literal(pt.m_token_type.clone(), literal.m_t.strip_quotes())
        }
        pt::LiteralOrRegExp::TokenRegExp(regexp) => {
            TokenRule::RegExp(pt.m_token_type.clone(), regexp.m_t.strip_and_escape_regex())
        }
    };

    ctx.val.add_token_rule(token_rule);
    None
}