//! Ungrammar -- a DSL for specifying concrete syntax tree grammars.

#![deny(missing_debug_implementations)]
#![deny(missing_docs)]
#![deny(rust_2018_idioms)]

mod error;
mod lexer;
mod parser;

use std::{ops, str::FromStr};

pub use error::{Error, Result};

/// Parses the Rust grammar bundled with the crate (`rust.ungram`).
pub fn rust_grammar() -> Grammar {
    let src = include_str!("../rust.ungram");
    src.parse().unwrap()
}

/// An identifier for a node in a [`Grammar`], like `A` in `A = 'b' | 'c'`.
///
/// Indexing a [`Grammar`] with a [`Node`] yields its [`NodeData`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Node(usize);

/// An identifier for a token in a [`Grammar`], written in single quotes,
/// like `'+'`.
///
/// Indexing a [`Grammar`] with a [`Token`] yields its [`TokenData`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Token(usize);

/// A parsed grammar: the set of nodes and tokens it defines.
#[derive(Default, Debug)]
pub struct Grammar {
    nodes: Vec<NodeData>,
    tokens: Vec<TokenData>,
}

impl FromStr for Grammar {
    type Err = Error;
    fn from_str(s: &str) -> Result<Self> {
        let tokens = lexer::tokenize(s)?;
        parser::parse(tokens)
    }
}
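
// A minimal sketch of parsing a grammar through the `FromStr` impl above.
// The grammar text is a made-up example; it assumes the Ungrammar surface
// syntax (`Name = Rule`, quoted tokens, `|` for alternatives) accepted by
// the `lexer` and `parser` modules.
#[test]
fn from_str_sketch() {
    let grammar: Grammar = "A = 'b' | 'c'".parse().unwrap();
    // One node (`A`) and two tokens (`b`, `c`).
    assert_eq!(grammar.iter().count(), 1);
    assert_eq!(grammar.tokens().count(), 2);
}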

impl Grammar {
    /// Iterates over all nodes in the grammar.
    pub fn iter(&self) -> impl Iterator<Item = Node> + '_ {
        (0..self.nodes.len()).map(Node)
    }

    /// Iterates over all tokens in the grammar.
    pub fn tokens(&self) -> impl Iterator<Item = Token> + '_ {
        (0..self.tokens.len()).map(Token)
    }
}
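
// A minimal sketch exercising the iterators above: `iter` yields an id for
// each node, `tokens` an id for each token. The grammar text is again a
// made-up example; it references the node `Expr` recursively.
#[test]
fn iterators_sketch() {
    let grammar: Grammar = "Expr = Expr '+' Expr".parse().unwrap();
    assert_eq!(grammar.iter().count(), 1); // just `Expr`
    assert_eq!(grammar.tokens().count(), 1); // just `'+'`
}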

impl ops::Index<Node> for Grammar {
    type Output = NodeData;
    fn index(&self, Node(index): Node) -> &NodeData {
        &self.nodes[index]
    }
}

impl ops::Index<Token> for Grammar {
    type Output = TokenData;
    fn index(&self, Token(index): Token) -> &TokenData {
        &self.tokens[index]
    }
}
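
// A minimal sketch of the `Index` impls above: a copyable `Node` or `Token`
// id borrows its data back out of the `Grammar`. This assumes a token's
// `name` is the text inside the quotes (`b` for `'b'`).
#[test]
fn index_sketch() {
    let grammar: Grammar = "A = 'b'".parse().unwrap();
    let node = grammar.iter().next().unwrap();
    assert_eq!(grammar[node].name, "A");
    let token = grammar.tokens().next().unwrap();
    assert_eq!(grammar[token].name, "b");
}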

/// Data about a node.
#[derive(Debug)]
pub struct NodeData {
    /// The node's name, like `A` in `A = 'b' | 'c'`.
    pub name: String,
    /// The node's production rule.
    pub rule: Rule,
}

/// Data about a token.
#[derive(Debug)]
pub struct TokenData {
    /// The token's name, like `b` for the token `'b'`.
    pub name: String,
}

/// A production rule.
#[derive(Debug, Eq, PartialEq)]
pub enum Rule {
    /// A labeled rule, like `a:B`.
    Labeled {
        /// The label, `a` in `a:B`.
        label: String,
        /// The labeled rule, `B` in `a:B`.
        rule: Box<Rule>,
    },
    /// A reference to another node, like `A`.
    Node(Node),
    /// A reference to a token, like `'a'`.
    Token(Token),
    /// A sequence of rules, like `A B C`.
    Seq(Vec<Rule>),
    /// An alternative between rules, like `A | B | C`.
    Alt(Vec<Rule>),
    /// An optional rule, like `A?`.
    Opt(Box<Rule>),
    /// A repeated rule, like `A*`.
    Rep(Box<Rule>),
}
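
// A minimal sketch of the `Rule` tree a small grammar produces: assuming
// `|` parses to `Rule::Alt` and quoted tokens to `Rule::Token`, the made-up
// grammar `A = 'b' | 'c'` should give node `A` an `Alt` of two `Token`
// rules.
#[test]
fn rule_shape_sketch() {
    let grammar: Grammar = "A = 'b' | 'c'".parse().unwrap();
    let node = grammar.iter().next().unwrap();
    match &grammar[node].rule {
        Rule::Alt(alternatives) => {
            assert_eq!(alternatives.len(), 2);
            assert!(matches!(alternatives[0], Rule::Token(_)));
        }
        rule => panic!("expected `Rule::Alt`, got {:?}", rule),
    }
}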

#[test]
fn smoke() {
    let grammar = include_str!("../ungrammar.ungram");
    let grammar = grammar.parse::<Grammar>().unwrap();
    drop(grammar)
}

#[test]
fn test_rust_grammar() {
    let _ = rust_grammar();
}