Struct ItemSets

Source
pub struct ItemSets(/* private fields */);
Expand description

All item sets of a grammar.

Implementations§

Source§

impl ItemSets

Source

pub fn new(sets: Vec<ItemSet>) -> ItemSets

Create a new list of item sets.

Source

pub fn compute(grammar: &Grammar) -> ItemSets

Compute the item sets for a grammar.

Examples found in repository?
examples/tribble2.rs (line 34)
12fn main() {
13    // Build the grammar in David Tribble's example 1.
14    let mut g = Grammar::new();
15    let ntExpr = g.add_nonterminal("Expr");
16    let ntFactor = g.add_nonterminal("Factor");
17    let tnum = g.add_terminal("num");
18    let tlpar = g.add_terminal("'('");
19    let trpar = g.add_terminal("')'");
20    let tplus = g.add_terminal("'+'");
21    g.add_rule(Rule::new(ntExpr, vec![ntFactor.into()]));
22    g.add_rule(Rule::new(
23        ntExpr,
24        vec![tlpar.into(), ntExpr.into(), trpar.into()],
25    ));
26    g.add_rule(Rule::new(ntFactor, vec![tnum.into()]));
27    g.add_rule(Rule::new(ntFactor, vec![tplus.into(), ntFactor.into()]));
28    g.add_rule(Rule::new(
29        ntFactor,
30        vec![ntFactor.into(), tplus.into(), tnum.into()],
31    ));
32
33    // Compute the item sets for the grammar.
34    let is = ItemSets::compute(&g);
35    println!("{}", is.pretty(&g));
36
37    // Generate the parser code.
38    // let sm = StateMachine::try_from(&is).unwrap();
39    // let backend = Backend::new();
40    // generate_parser(
41    //     &mut File::create("tests/generated/tribble2_parser.rs").unwrap(),
42    //     &backend,
43    //     &sm,
44    //     &g,
45    // ).unwrap();
46}
More examples
Hide additional examples
examples/tribble1.rs (line 35)
12fn main() {
13    // Build the grammar in David Tribble's example 11.
14    let mut g = Grammar::new();
15    let (ntS, ntA, ntB) = (
16        g.add_nonterminal("S"),
17        g.add_nonterminal("A"),
18        g.add_nonterminal("B"),
19    );
20    let (ta, tb, tc, td, te) = (
21        g.add_terminal("a"),
22        g.add_terminal("b"),
23        g.add_terminal("c"),
24        g.add_terminal("d"),
25        g.add_terminal("e"),
26    );
27    g.add_rule(Rule::new(ntS, vec![ta.into(), ntA.into(), td.into()]));
28    g.add_rule(Rule::new(ntS, vec![ta.into(), ntB.into(), te.into()]));
29    g.add_rule(Rule::new(ntS, vec![tb.into(), ntA.into(), te.into()]));
30    g.add_rule(Rule::new(ntS, vec![tb.into(), ntB.into(), td.into()]));
31    g.add_rule(Rule::new(ntA, vec![tc.into()]));
32    g.add_rule(Rule::new(ntB, vec![tc.into()]));
33
34    // Compute the item sets for the grammar.
35    let is = ItemSets::compute(&g);
36    println!("{}", is.pretty(&g));
37
38    // Generate the parser code.
39    let sm = StateMachine::try_from(&is).unwrap();
40    let mut backend = Backend::new();
41    backend.add_nonterminal(ntS, "NodeS");
42    backend.add_nonterminal(ntA, "NodeA");
43    backend.add_nonterminal(ntB, "NodeB");
44    backend.add_terminal(grammar::END, "Token::Eof");
45    backend.add_terminal(ta, "Token::A");
46    backend.add_terminal(tb, "Token::B");
47    backend.add_terminal(tc, "Token::C");
48    backend.add_terminal(td, "Token::D");
49    backend.add_terminal(te, "Token::E");
50    generate_parser(
51        &mut File::create("tests/generated/tribble1_parser.rs").unwrap(),
52        &backend,
53        &sm,
54        &g,
55    ).unwrap();
56}
examples/glr-analysis.rs (line 44)
9fn main() {
10    // Build the following grammar which has a shift/reduce conflict in the
11    // first item set, which can be resolved with an additional lookahead token.
12    //
13    //   S : A B z ;
14    //
15    //   B : B y C | C ;
16    //   C : x ;
17    //
18    //   A : D | E ;
19    //   D : x ;
20    //   E : epsilon ;
21    //
22    let mut g = Grammar::new();
23
24    let nt_s = g.add_nonterminal("S");
25    let nt_a = g.add_nonterminal("A");
26    let nt_b = g.add_nonterminal("B");
27    let nt_c = g.add_nonterminal("C");
28    let nt_d = g.add_nonterminal("D");
29    let nt_e = g.add_nonterminal("E");
30    let t_x = g.add_terminal("x");
31    let t_y = g.add_terminal("y");
32    let t_z = g.add_terminal("z");
33
34    g.add_rule(Rule::new(nt_s, vec![nt_a.into(), nt_b.into(), t_z.into()]));
35    g.add_rule(Rule::new(nt_b, vec![nt_b.into(), t_y.into(), nt_c.into()]));
36    g.add_rule(Rule::new(nt_b, vec![nt_c.into()]));
37    g.add_rule(Rule::new(nt_c, vec![t_x.into()]));
38    g.add_rule(Rule::new(nt_a, vec![nt_d.into()]));
39    g.add_rule(Rule::new(nt_a, vec![nt_e.into()]));
40    g.add_rule(Rule::new(nt_d, vec![t_x.into()]));
41    g.add_rule(Rule::new(nt_e, vec![]));
42
43    // Compute the item sets for the grammar.
44    let is = ItemSets::compute(&g);
45    println!("{}", is.pretty(&g));
46
47    // Perform the GLR analysis.
48    let ga = GlrAnalysis::compute(&g, &is);
49    println!("{:#?}", ga);
50
51    // Generate the parser code.
52    // let sm = StateMachine::try_from(&is).unwrap();
53    // let backend = Backend::new();
54    // generate_parser(
55    //     &mut File::create("tests/generated/tribble2_parser.rs").unwrap(),
56    //     &backend,
57    //     &sm,
58    //     &g,
59    // ).unwrap();
60}
examples/update-grammar.rs (line 115)
12fn main() {
13    // Build the grammar for grammars (how meta!).
14    let mut g = Grammar::new();
15
16    let nt_desc = g.add_nonterminal("desc");
17    let nt_item = g.add_nonterminal("item");
18    let nt_token_decl = g.add_nonterminal("token_decl");
19    let nt_token_name = g.add_nonterminal("token_name");
20    let nt_rule_decl = g.add_nonterminal("rule_decl");
21    let nt_rule_list = g.add_nonterminal("rule_list");
22    let nt_variant = g.add_nonterminal("variant");
23    let nt_sequence_or_epsilon = g.add_nonterminal("sequence_or_epsilon");
24    let nt_sequence = g.add_nonterminal("sequence");
25
26    let t_ident = g.add_terminal("IDENT");
27    let t_code = g.add_terminal("CODE");
28    let t_kw_token = g.add_terminal("'token'");
29    let t_kw_epsilon = g.add_terminal("'epsilon'");
30    let t_kw_end = g.add_terminal("'end'");
31    let t_lparen = g.add_terminal("'('");
32    let t_rparen = g.add_terminal("')'");
33    let t_lbrace = g.add_terminal("'{'");
34    let t_rbrace = g.add_terminal("'}'");
35    let t_period = g.add_terminal("'.'");
36    let t_colon = g.add_terminal("':'");
37    let t_comma = g.add_terminal("','");
38    let t_semicolon = g.add_terminal("';'");
39    let t_pipe = g.add_terminal("'|'");
40
41    // desc : desc item | item | desc ';' | ';' ;
42    let r_desc_a = g.add_rule(Rule::new(nt_desc, vec![nt_desc.into(), nt_item.into()]));
43    let r_desc_b = g.add_rule(Rule::new(nt_desc, vec![nt_item.into()]));
44    let r_desc_c = g.add_rule(Rule::new(nt_desc, vec![nt_desc.into(), t_semicolon.into()]));
45    let r_desc_d = g.add_rule(Rule::new(nt_desc, vec![t_semicolon.into()]));
46
47    // item : token_decl | rule_decl ;
48    let r_item_a = g.add_rule(Rule::new(nt_item, vec![nt_token_decl.into()]));
49    let r_item_b = g.add_rule(Rule::new(nt_item, vec![nt_rule_decl.into()]));
50
51    // token_decl : 'token' token_name '(' CODE ')' ';' ;
52    let r_token_decl = g.add_rule(Rule::new(
53        nt_token_decl,
54        vec![
55            t_kw_token.into(),
56            nt_token_name.into(),
57            t_lparen.into(),
58            t_code.into(),
59            t_rparen.into(),
60            t_semicolon.into(),
61        ],
62    ));
63
64    // token_name : IDENT | 'end' ;
65    let r_token_name_a = g.add_rule(Rule::new(nt_token_name, vec![t_ident.into()]));
66    let r_token_name_b = g.add_rule(Rule::new(nt_token_name, vec![t_kw_end.into()]));
67
68    // rule_decl : IDENT '(' CODE ')' '{' rule_list '}' ;
69    let r_rule_decl = g.add_rule(Rule::new(
70        nt_rule_decl,
71        vec![
72            t_ident.into(),
73            t_lparen.into(),
74            t_code.into(),
75            t_rparen.into(),
76            t_lbrace.into(),
77            nt_rule_list.into(),
78            t_rbrace.into(),
79        ],
80    ));
81
82    // rule_list : rule_list variant | variant;
83    let r_rule_list_a = g.add_rule(Rule::new(
84        nt_rule_list,
85        vec![nt_rule_list.into(), nt_variant.into()],
86    ));
87    let r_rule_list_b = g.add_rule(Rule::new(nt_rule_list, vec![nt_variant.into()]));
88
89    // variant : sequence_or_epsilon '(' CODE ')' ';'
90    let r_variant = g.add_rule(Rule::new(
91        nt_variant,
92        vec![
93            nt_sequence_or_epsilon.into(),
94            t_lparen.into(),
95            t_code.into(),
96            t_rparen.into(),
97            t_semicolon.into(),
98        ],
99    ));
100
101    // sequence_or_epsilon : sequence | 'epsilon' ;
102    let r_sequence_or_epsilon_a =
103        g.add_rule(Rule::new(nt_sequence_or_epsilon, vec![nt_sequence.into()]));
104    let r_sequence_or_epsilon_b =
105        g.add_rule(Rule::new(nt_sequence_or_epsilon, vec![t_kw_epsilon.into()]));
106
107    // sequence : sequence IDENT | IDENT ;
108    let r_sequence_a = g.add_rule(Rule::new(
109        nt_sequence,
110        vec![nt_sequence.into(), t_ident.into()],
111    ));
112    let r_sequence_b = g.add_rule(Rule::new(nt_sequence, vec![t_ident.into()]));
113
114    // Compute the item sets for the grammar.
115    let is = ItemSets::compute(&g);
116    eprintln!("Perplex Grammar Item Sets:");
117    eprintln!("{}", is.pretty(&g));
118
119    // Configure the code generation backend.
120    let mut backend = Backend::new();
121
122    backend.add_nonterminal(nt_desc, "ast::Desc");
123    backend.add_nonterminal(nt_item, "ast::Item");
124    backend.add_nonterminal(nt_token_decl, "ast::TokenDecl");
125    backend.add_nonterminal(nt_token_name, "ast::TokenName");
126    backend.add_nonterminal(nt_rule_decl, "ast::RuleDecl");
127    backend.add_nonterminal(nt_rule_list, "Vec<ast::Variant>");
128    backend.add_nonterminal(nt_variant, "ast::Variant");
129    backend.add_nonterminal(nt_sequence_or_epsilon, "Vec<String>");
130    backend.add_nonterminal(nt_sequence, "Vec<String>");
131
132    backend.add_terminal(grammar::END, "None");
133    backend.add_terminal(t_ident, "Some(Token::Ident(_))");
134    backend.add_terminal(t_code, "Some(Token::Code(_))");
135    backend.add_terminal(t_kw_token, "Some(Token::Keyword(Keyword::Token))");
136    backend.add_terminal(t_kw_epsilon, "Some(Token::Keyword(Keyword::Epsilon))");
137    backend.add_terminal(t_kw_end, "Some(Token::Keyword(Keyword::End))");
138    backend.add_terminal(t_lparen, "Some(Token::LParen)");
139    backend.add_terminal(t_rparen, "Some(Token::RParen)");
140    backend.add_terminal(t_lbrace, "Some(Token::LBrace)");
141    backend.add_terminal(t_rbrace, "Some(Token::RBrace)");
142    backend.add_terminal(t_period, "Some(Token::Period)");
143    backend.add_terminal(t_colon, "Some(Token::Colon)");
144    backend.add_terminal(t_comma, "Some(Token::Comma)");
145    backend.add_terminal(t_semicolon, "Some(Token::Semicolon)");
146    backend.add_terminal(t_pipe, "Some(Token::Pipe)");
147
148    backend.add_reduction_function(r_desc_a, "reduce_desc_a");
149    backend.add_reduction_function(r_desc_b, "reduce_desc_b");
150    backend.add_reduction_function(r_desc_c, "reduce_desc_c");
151    backend.add_reduction_function(r_desc_d, "reduce_desc_d");
152    backend.add_reduction_function(r_item_a, "reduce_item_a");
153    backend.add_reduction_function(r_item_b, "reduce_item_b");
154    backend.add_reduction_function(r_token_decl, "reduce_token_decl");
155    backend.add_reduction_function(r_token_name_a, "reduce_token_name_a");
156    backend.add_reduction_function(r_token_name_b, "reduce_token_name_b");
157    backend.add_reduction_function(r_rule_decl, "reduce_rule_decl");
158    backend.add_reduction_function(r_rule_list_a, "reduce_rule_list_a");
159    backend.add_reduction_function(r_rule_list_b, "reduce_rule_list_b");
160    backend.add_reduction_function(r_variant, "reduce_variant");
161    backend.add_reduction_function(r_sequence_or_epsilon_a, "reduce_sequence_or_epsilon_a");
162    backend.add_reduction_function(r_sequence_or_epsilon_b, "reduce_sequence_or_epsilon_b");
163    backend.add_reduction_function(r_sequence_a, "reduce_sequence_a");
164    backend.add_reduction_function(r_sequence_b, "reduce_sequence_b");
165
166    // // Generate the parser code.
167    // let sm = StateMachine::try_from(&is).expect("failed to generate state machine");
168    // let stdout = std::io::stdout();
169    // generate_parser(&mut stdout.lock(), &backend, &sm, &g).expect("failed to generate parser code");
170
171    // Generate the parser code.
172    let mut path = PathBuf::from(file!());
173    path.pop();
174    path.pop();
175    path.push("src");
176    path.push("parser_states.rs");
177    eprintln!("Generating parser code in {:?}", path);
178    let sm = StateMachine::try_from(&is).expect("failed to generate state machine");
179    generate_parser(&mut File::create(path).unwrap(), &backend, &sm, &g)
180        .expect("failed to generate parser code");
181}
Source

pub fn all(&self) -> &[ItemSet]

Get the item sets in the grammar.

Source

pub fn pretty<'a>( &'a self, grammar: &'a Grammar, ) -> Pretty<&'a Grammar, &'a Self>

Get a pretty printer for these item sets.

Examples found in repository?
examples/tribble2.rs (line 35)
12fn main() {
13    // Build the grammar in David Tribble's example 1.
14    let mut g = Grammar::new();
15    let ntExpr = g.add_nonterminal("Expr");
16    let ntFactor = g.add_nonterminal("Factor");
17    let tnum = g.add_terminal("num");
18    let tlpar = g.add_terminal("'('");
19    let trpar = g.add_terminal("')'");
20    let tplus = g.add_terminal("'+'");
21    g.add_rule(Rule::new(ntExpr, vec![ntFactor.into()]));
22    g.add_rule(Rule::new(
23        ntExpr,
24        vec![tlpar.into(), ntExpr.into(), trpar.into()],
25    ));
26    g.add_rule(Rule::new(ntFactor, vec![tnum.into()]));
27    g.add_rule(Rule::new(ntFactor, vec![tplus.into(), ntFactor.into()]));
28    g.add_rule(Rule::new(
29        ntFactor,
30        vec![ntFactor.into(), tplus.into(), tnum.into()],
31    ));
32
33    // Compute the item sets for the grammar.
34    let is = ItemSets::compute(&g);
35    println!("{}", is.pretty(&g));
36
37    // Generate the parser code.
38    // let sm = StateMachine::try_from(&is).unwrap();
39    // let backend = Backend::new();
40    // generate_parser(
41    //     &mut File::create("tests/generated/tribble2_parser.rs").unwrap(),
42    //     &backend,
43    //     &sm,
44    //     &g,
45    // ).unwrap();
46}
More examples
Hide additional examples
examples/tribble1.rs (line 36)
12fn main() {
13    // Build the grammar in David Tribble's example 11.
14    let mut g = Grammar::new();
15    let (ntS, ntA, ntB) = (
16        g.add_nonterminal("S"),
17        g.add_nonterminal("A"),
18        g.add_nonterminal("B"),
19    );
20    let (ta, tb, tc, td, te) = (
21        g.add_terminal("a"),
22        g.add_terminal("b"),
23        g.add_terminal("c"),
24        g.add_terminal("d"),
25        g.add_terminal("e"),
26    );
27    g.add_rule(Rule::new(ntS, vec![ta.into(), ntA.into(), td.into()]));
28    g.add_rule(Rule::new(ntS, vec![ta.into(), ntB.into(), te.into()]));
29    g.add_rule(Rule::new(ntS, vec![tb.into(), ntA.into(), te.into()]));
30    g.add_rule(Rule::new(ntS, vec![tb.into(), ntB.into(), td.into()]));
31    g.add_rule(Rule::new(ntA, vec![tc.into()]));
32    g.add_rule(Rule::new(ntB, vec![tc.into()]));
33
34    // Compute the item sets for the grammar.
35    let is = ItemSets::compute(&g);
36    println!("{}", is.pretty(&g));
37
38    // Generate the parser code.
39    let sm = StateMachine::try_from(&is).unwrap();
40    let mut backend = Backend::new();
41    backend.add_nonterminal(ntS, "NodeS");
42    backend.add_nonterminal(ntA, "NodeA");
43    backend.add_nonterminal(ntB, "NodeB");
44    backend.add_terminal(grammar::END, "Token::Eof");
45    backend.add_terminal(ta, "Token::A");
46    backend.add_terminal(tb, "Token::B");
47    backend.add_terminal(tc, "Token::C");
48    backend.add_terminal(td, "Token::D");
49    backend.add_terminal(te, "Token::E");
50    generate_parser(
51        &mut File::create("tests/generated/tribble1_parser.rs").unwrap(),
52        &backend,
53        &sm,
54        &g,
55    ).unwrap();
56}
examples/glr-analysis.rs (line 45)
9fn main() {
10    // Build the following grammar which has a shift/reduce conflict in the
11    // first item set, which can be resolved with an additional lookahead token.
12    //
13    //   S : A B z ;
14    //
15    //   B : B y C | C ;
16    //   C : x ;
17    //
18    //   A : D | E ;
19    //   D : x ;
20    //   E : epsilon ;
21    //
22    let mut g = Grammar::new();
23
24    let nt_s = g.add_nonterminal("S");
25    let nt_a = g.add_nonterminal("A");
26    let nt_b = g.add_nonterminal("B");
27    let nt_c = g.add_nonterminal("C");
28    let nt_d = g.add_nonterminal("D");
29    let nt_e = g.add_nonterminal("E");
30    let t_x = g.add_terminal("x");
31    let t_y = g.add_terminal("y");
32    let t_z = g.add_terminal("z");
33
34    g.add_rule(Rule::new(nt_s, vec![nt_a.into(), nt_b.into(), t_z.into()]));
35    g.add_rule(Rule::new(nt_b, vec![nt_b.into(), t_y.into(), nt_c.into()]));
36    g.add_rule(Rule::new(nt_b, vec![nt_c.into()]));
37    g.add_rule(Rule::new(nt_c, vec![t_x.into()]));
38    g.add_rule(Rule::new(nt_a, vec![nt_d.into()]));
39    g.add_rule(Rule::new(nt_a, vec![nt_e.into()]));
40    g.add_rule(Rule::new(nt_d, vec![t_x.into()]));
41    g.add_rule(Rule::new(nt_e, vec![]));
42
43    // Compute the item sets for the grammar.
44    let is = ItemSets::compute(&g);
45    println!("{}", is.pretty(&g));
46
47    // Perform the GLR analysis.
48    let ga = GlrAnalysis::compute(&g, &is);
49    println!("{:#?}", ga);
50
51    // Generate the parser code.
52    // let sm = StateMachine::try_from(&is).unwrap();
53    // let backend = Backend::new();
54    // generate_parser(
55    //     &mut File::create("tests/generated/tribble2_parser.rs").unwrap(),
56    //     &backend,
57    //     &sm,
58    //     &g,
59    // ).unwrap();
60}
examples/update-grammar.rs (line 117)
12fn main() {
13    // Build the grammar for grammars (how meta!).
14    let mut g = Grammar::new();
15
16    let nt_desc = g.add_nonterminal("desc");
17    let nt_item = g.add_nonterminal("item");
18    let nt_token_decl = g.add_nonterminal("token_decl");
19    let nt_token_name = g.add_nonterminal("token_name");
20    let nt_rule_decl = g.add_nonterminal("rule_decl");
21    let nt_rule_list = g.add_nonterminal("rule_list");
22    let nt_variant = g.add_nonterminal("variant");
23    let nt_sequence_or_epsilon = g.add_nonterminal("sequence_or_epsilon");
24    let nt_sequence = g.add_nonterminal("sequence");
25
26    let t_ident = g.add_terminal("IDENT");
27    let t_code = g.add_terminal("CODE");
28    let t_kw_token = g.add_terminal("'token'");
29    let t_kw_epsilon = g.add_terminal("'epsilon'");
30    let t_kw_end = g.add_terminal("'end'");
31    let t_lparen = g.add_terminal("'('");
32    let t_rparen = g.add_terminal("')'");
33    let t_lbrace = g.add_terminal("'{'");
34    let t_rbrace = g.add_terminal("'}'");
35    let t_period = g.add_terminal("'.'");
36    let t_colon = g.add_terminal("':'");
37    let t_comma = g.add_terminal("','");
38    let t_semicolon = g.add_terminal("';'");
39    let t_pipe = g.add_terminal("'|'");
40
41    // desc : desc item | item | desc ';' | ';' ;
42    let r_desc_a = g.add_rule(Rule::new(nt_desc, vec![nt_desc.into(), nt_item.into()]));
43    let r_desc_b = g.add_rule(Rule::new(nt_desc, vec![nt_item.into()]));
44    let r_desc_c = g.add_rule(Rule::new(nt_desc, vec![nt_desc.into(), t_semicolon.into()]));
45    let r_desc_d = g.add_rule(Rule::new(nt_desc, vec![t_semicolon.into()]));
46
47    // item : token_decl | rule_decl ;
48    let r_item_a = g.add_rule(Rule::new(nt_item, vec![nt_token_decl.into()]));
49    let r_item_b = g.add_rule(Rule::new(nt_item, vec![nt_rule_decl.into()]));
50
51    // token_decl : 'token' token_name '(' CODE ')' ';' ;
52    let r_token_decl = g.add_rule(Rule::new(
53        nt_token_decl,
54        vec![
55            t_kw_token.into(),
56            nt_token_name.into(),
57            t_lparen.into(),
58            t_code.into(),
59            t_rparen.into(),
60            t_semicolon.into(),
61        ],
62    ));
63
64    // token_name : IDENT | 'end' ;
65    let r_token_name_a = g.add_rule(Rule::new(nt_token_name, vec![t_ident.into()]));
66    let r_token_name_b = g.add_rule(Rule::new(nt_token_name, vec![t_kw_end.into()]));
67
68    // rule_decl : IDENT '(' CODE ')' '{' rule_list '}' ;
69    let r_rule_decl = g.add_rule(Rule::new(
70        nt_rule_decl,
71        vec![
72            t_ident.into(),
73            t_lparen.into(),
74            t_code.into(),
75            t_rparen.into(),
76            t_lbrace.into(),
77            nt_rule_list.into(),
78            t_rbrace.into(),
79        ],
80    ));
81
82    // rule_list : rule_list variant | variant;
83    let r_rule_list_a = g.add_rule(Rule::new(
84        nt_rule_list,
85        vec![nt_rule_list.into(), nt_variant.into()],
86    ));
87    let r_rule_list_b = g.add_rule(Rule::new(nt_rule_list, vec![nt_variant.into()]));
88
89    // variant : sequence_or_epsilon '(' CODE ')' ';'
90    let r_variant = g.add_rule(Rule::new(
91        nt_variant,
92        vec![
93            nt_sequence_or_epsilon.into(),
94            t_lparen.into(),
95            t_code.into(),
96            t_rparen.into(),
97            t_semicolon.into(),
98        ],
99    ));
100
101    // sequence_or_epsilon : sequence | 'epsilon' ;
102    let r_sequence_or_epsilon_a =
103        g.add_rule(Rule::new(nt_sequence_or_epsilon, vec![nt_sequence.into()]));
104    let r_sequence_or_epsilon_b =
105        g.add_rule(Rule::new(nt_sequence_or_epsilon, vec![t_kw_epsilon.into()]));
106
107    // sequence : sequence IDENT | IDENT ;
108    let r_sequence_a = g.add_rule(Rule::new(
109        nt_sequence,
110        vec![nt_sequence.into(), t_ident.into()],
111    ));
112    let r_sequence_b = g.add_rule(Rule::new(nt_sequence, vec![t_ident.into()]));
113
114    // Compute the item sets for the grammar.
115    let is = ItemSets::compute(&g);
116    eprintln!("Perplex Grammar Item Sets:");
117    eprintln!("{}", is.pretty(&g));
118
119    // Configure the code generation backend.
120    let mut backend = Backend::new();
121
122    backend.add_nonterminal(nt_desc, "ast::Desc");
123    backend.add_nonterminal(nt_item, "ast::Item");
124    backend.add_nonterminal(nt_token_decl, "ast::TokenDecl");
125    backend.add_nonterminal(nt_token_name, "ast::TokenName");
126    backend.add_nonterminal(nt_rule_decl, "ast::RuleDecl");
127    backend.add_nonterminal(nt_rule_list, "Vec<ast::Variant>");
128    backend.add_nonterminal(nt_variant, "ast::Variant");
129    backend.add_nonterminal(nt_sequence_or_epsilon, "Vec<String>");
130    backend.add_nonterminal(nt_sequence, "Vec<String>");
131
132    backend.add_terminal(grammar::END, "None");
133    backend.add_terminal(t_ident, "Some(Token::Ident(_))");
134    backend.add_terminal(t_code, "Some(Token::Code(_))");
135    backend.add_terminal(t_kw_token, "Some(Token::Keyword(Keyword::Token))");
136    backend.add_terminal(t_kw_epsilon, "Some(Token::Keyword(Keyword::Epsilon))");
137    backend.add_terminal(t_kw_end, "Some(Token::Keyword(Keyword::End))");
138    backend.add_terminal(t_lparen, "Some(Token::LParen)");
139    backend.add_terminal(t_rparen, "Some(Token::RParen)");
140    backend.add_terminal(t_lbrace, "Some(Token::LBrace)");
141    backend.add_terminal(t_rbrace, "Some(Token::RBrace)");
142    backend.add_terminal(t_period, "Some(Token::Period)");
143    backend.add_terminal(t_colon, "Some(Token::Colon)");
144    backend.add_terminal(t_comma, "Some(Token::Comma)");
145    backend.add_terminal(t_semicolon, "Some(Token::Semicolon)");
146    backend.add_terminal(t_pipe, "Some(Token::Pipe)");
147
148    backend.add_reduction_function(r_desc_a, "reduce_desc_a");
149    backend.add_reduction_function(r_desc_b, "reduce_desc_b");
150    backend.add_reduction_function(r_desc_c, "reduce_desc_c");
151    backend.add_reduction_function(r_desc_d, "reduce_desc_d");
152    backend.add_reduction_function(r_item_a, "reduce_item_a");
153    backend.add_reduction_function(r_item_b, "reduce_item_b");
154    backend.add_reduction_function(r_token_decl, "reduce_token_decl");
155    backend.add_reduction_function(r_token_name_a, "reduce_token_name_a");
156    backend.add_reduction_function(r_token_name_b, "reduce_token_name_b");
157    backend.add_reduction_function(r_rule_decl, "reduce_rule_decl");
158    backend.add_reduction_function(r_rule_list_a, "reduce_rule_list_a");
159    backend.add_reduction_function(r_rule_list_b, "reduce_rule_list_b");
160    backend.add_reduction_function(r_variant, "reduce_variant");
161    backend.add_reduction_function(r_sequence_or_epsilon_a, "reduce_sequence_or_epsilon_a");
162    backend.add_reduction_function(r_sequence_or_epsilon_b, "reduce_sequence_or_epsilon_b");
163    backend.add_reduction_function(r_sequence_a, "reduce_sequence_a");
164    backend.add_reduction_function(r_sequence_b, "reduce_sequence_b");
165
166    // // Generate the parser code.
167    // let sm = StateMachine::try_from(&is).expect("failed to generate state machine");
168    // let stdout = std::io::stdout();
169    // generate_parser(&mut stdout.lock(), &backend, &sm, &g).expect("failed to generate parser code");
170
171    // Generate the parser code.
172    let mut path = PathBuf::from(file!());
173    path.pop();
174    path.pop();
175    path.push("src");
176    path.push("parser_states.rs");
177    eprintln!("Generating parser code in {:?}", path);
178    let sm = StateMachine::try_from(&is).expect("failed to generate state machine");
179    generate_parser(&mut File::create(path).unwrap(), &backend, &sm, &g)
180        .expect("failed to generate parser code");
181}
Source

pub fn compress(&mut self)

Compress all item sets.

Trait Implementations§

Source§

impl Clone for ItemSets

Source§

fn clone(&self) -> ItemSets

Returns a duplicate of the value. Read more
1.0.0 · Source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
Source§

impl Debug for ItemSets

Source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
Source§

impl Hash for ItemSets

Source§

fn hash<__H: Hasher>(&self, state: &mut __H)

Feeds this value into the given Hasher. Read more
1.3.0 · Source§

fn hash_slice<H>(data: &[Self], state: &mut H)
where H: Hasher, Self: Sized,

Feeds a slice of this type into the given Hasher. Read more
Source§

impl Index<ItemSetId> for ItemSets

Source§

type Output = ItemSet

The returned type after indexing.
Source§

fn index(&self, index: ItemSetId) -> &ItemSet

Performs the indexing (container[index]) operation. Read more
Source§

impl IndexMut<ItemSetId> for ItemSets

Source§

fn index_mut(&mut self, index: ItemSetId) -> &mut ItemSet

Performs the mutable indexing (container[index]) operation. Read more
Source§

impl PartialEq for ItemSets

Source§

fn eq(&self, other: &ItemSets) -> bool

Tests for self and other values to be equal, and is used by ==.
1.0.0 · Source§

fn ne(&self, other: &Rhs) -> bool

Tests for !=. The default implementation is almost always sufficient, and should not be overridden without very good reason.
Source§

impl Eq for ItemSets

Source§

impl StructuralPartialEq for ItemSets

Auto Trait Implementations§

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> CloneToUninit for T
where T: Clone,

Source§

unsafe fn clone_to_uninit(&self, dest: *mut u8)

🔬This is a nightly-only experimental API. (clone_to_uninit)
Performs copy-assignment from self to dest. Read more
Source§

impl<Q, K> Equivalent<K> for Q
where Q: Eq + ?Sized, K: Borrow<Q> + ?Sized,

Source§

fn equivalent(&self, key: &K) -> bool

Compare self to key and return true if they are equal.
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

Source§

impl<T> ToOwned for T
where T: Clone,

Source§

type Owned = T

The resulting type after obtaining ownership.
Source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
Source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.