1pub use crate::ast_impl::{Decl, Located, SurfaceExpr};
6pub use crate::error_impl::{ParseError, ParseErrorKind};
7pub use crate::lexer::Lexer;
8pub use crate::parser_impl::Parser;
9pub use crate::tokens::{Span, Token, TokenKind};
10
11use super::functions::*;
12
/// A minimal parse-error record: a position, a message, and a flag
/// marking whether the parser recovered and kept going afterwards.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct ParseErrorSimple {
    /// Position at which the error was detected.
    pub pos: usize,
    /// Human-readable description of the problem.
    pub message: String,
    /// True if the parser recovered from this error and continued.
    pub recovered: bool,
}

#[allow(dead_code)]
impl ParseErrorSimple {
    /// Builds an unrecovered error at `pos` with the given message.
    pub fn new(pos: usize, msg: impl Into<String>) -> Self {
        let message = msg.into();
        ParseErrorSimple {
            pos,
            message,
            recovered: false,
        }
    }

    /// Consumes the error and returns it with the recovered flag set.
    pub fn recovered(mut self) -> Self {
        self.recovered = true;
        self
    }
}
39#[allow(dead_code)]
41#[allow(missing_docs)]
42pub struct CheckpointStack {
43 pub(super) stack: Vec<ParseCheckpoint>,
44}
45impl CheckpointStack {
46 #[allow(dead_code)]
47 #[allow(missing_docs)]
48 pub fn new() -> Self {
49 Self { stack: Vec::new() }
50 }
51 #[allow(dead_code)]
52 #[allow(missing_docs)]
53 pub fn push(&mut self, cp: ParseCheckpoint) {
54 self.stack.push(cp);
55 }
56 #[allow(dead_code)]
57 #[allow(missing_docs)]
58 pub fn pop(&mut self) -> Option<ParseCheckpoint> {
59 self.stack.pop()
60 }
61 #[allow(dead_code)]
62 #[allow(missing_docs)]
63 pub fn peek(&self) -> Option<&ParseCheckpoint> {
64 self.stack.last()
65 }
66 #[allow(dead_code)]
67 #[allow(missing_docs)]
68 pub fn depth(&self) -> usize {
69 self.stack.len()
70 }
71 #[allow(dead_code)]
72 #[allow(missing_docs)]
73 pub fn is_empty(&self) -> bool {
74 self.stack.is_empty()
75 }
76}
/// Records a point where the grammar admitted more than one parse,
/// together with the alternative readings and (optionally) which one won.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct ParseAmbiguity {
    /// Position at which the ambiguity arose.
    pub position: usize,
    /// Textual descriptions of the candidate parses.
    pub alternatives: Vec<String>,
    /// The chosen alternative, once disambiguation has happened.
    pub resolved_to: Option<String>,
}

#[allow(dead_code)]
impl ParseAmbiguity {
    /// Creates an unresolved ambiguity at `pos` over the given alternatives.
    pub fn new(pos: usize, alternatives: Vec<String>) -> Self {
        ParseAmbiguity {
            position: pos,
            alternatives,
            resolved_to: None,
        }
    }

    /// Marks the ambiguity as resolved in favour of `choice`.
    pub fn resolve(&mut self, choice: impl Into<String>) {
        self.resolved_to = Some(choice.into());
    }

    /// True once a winning alternative has been recorded.
    pub fn is_resolved(&self) -> bool {
        self.resolved_to.is_some()
    }
}
107#[allow(dead_code)]
109#[allow(missing_docs)]
110#[derive(Clone, Debug)]
111pub struct ParseCheckpoint {
112 pub position: usize,
113 pub depth: usize,
114 pub error_count: usize,
115}
116impl ParseCheckpoint {
117 #[allow(dead_code)]
118 #[allow(missing_docs)]
119 pub fn save(cursor: &TokenCursor, errors: usize) -> Self {
120 Self {
121 position: cursor.position,
122 depth: cursor.depth,
123 error_count: errors,
124 }
125 }
126 #[allow(dead_code)]
127 #[allow(missing_docs)]
128 pub fn restore(&self, cursor: &mut TokenCursor) {
129 cursor.position = self.position;
130 cursor.depth = self.depth;
131 }
132}
133#[derive(Clone, Debug, PartialEq, Eq, Hash)]
135#[allow(missing_docs)]
136pub struct ParseCacheKey {
137 pub hash: u64,
139 pub len: usize,
141}
142impl ParseCacheKey {
143 #[allow(missing_docs)]
145 pub fn from_src(src: &str) -> Self {
146 let hash = fnv1a(src.as_bytes());
147 Self {
148 hash,
149 len: src.len(),
150 }
151 }
152}
153#[allow(dead_code)]
155#[allow(missing_docs)]
156pub struct AmbiguityRegistry {
157 ambiguities: Vec<ParseAmbiguity>,
158}
159impl AmbiguityRegistry {
160 #[allow(dead_code)]
161 #[allow(missing_docs)]
162 pub fn new() -> Self {
163 Self {
164 ambiguities: Vec::new(),
165 }
166 }
167 #[allow(dead_code)]
168 #[allow(missing_docs)]
169 pub fn report(&mut self, amb: ParseAmbiguity) {
170 self.ambiguities.push(amb);
171 }
172 #[allow(dead_code)]
173 #[allow(missing_docs)]
174 pub fn count(&self) -> usize {
175 self.ambiguities.len()
176 }
177 #[allow(dead_code)]
178 #[allow(missing_docs)]
179 pub fn unresolved(&self) -> usize {
180 self.ambiguities.iter().filter(|a| !a.is_resolved()).count()
181 }
182}
/// Tunable limits and switches for a parser run.
#[allow(dead_code)]
#[derive(Clone, Debug)]
pub struct ParseConfig {
    /// Maximum allowed nesting depth.
    pub max_depth: usize,
    /// Maximum number of errors to collect before giving up.
    pub max_errors: usize,
    /// Whether to attempt error recovery and keep parsing.
    pub recover_from_errors: bool,
    /// Whether to reject constructs that are merely tolerated otherwise.
    pub strict_mode: bool,
    /// Whether whitespace tokens are tracked instead of discarded.
    pub track_whitespace: bool,
    /// Whether holes (placeholders) are accepted in the input.
    pub allow_holes: bool,
}

// Idiomatic `Default` so `ParseConfig` works with `..Default::default()`
// and generic `T: Default` contexts; `default_config` is kept as the
// existing public entry point and now delegates here.
impl Default for ParseConfig {
    fn default() -> Self {
        Self {
            max_depth: 1000,
            max_errors: 50,
            recover_from_errors: true,
            strict_mode: false,
            track_whitespace: false,
            allow_holes: true,
        }
    }
}

#[allow(dead_code)]
impl ParseConfig {
    /// The baseline configuration: recovery on, strict mode off.
    pub fn default_config() -> Self {
        Self::default()
    }

    /// Strict variant: no error recovery, strict mode on.
    pub fn strict() -> Self {
        Self {
            strict_mode: true,
            recover_from_errors: false,
            ..Self::default_config()
        }
    }

    /// Lenient variant: recovery on and a higher error budget.
    pub fn lenient() -> Self {
        Self {
            strict_mode: false,
            recover_from_errors: true,
            max_errors: 200,
            ..Self::default_config()
        }
    }
}
/// Extended counters gathered while parsing, plus derived metrics.
#[allow(dead_code)]
#[derive(Default, Debug, Clone)]
pub struct ParseStatsExt {
    /// Tokens consumed in total.
    pub tokens_consumed: u64,
    /// AST nodes created.
    pub nodes_created: u64,
    /// Number of backtracking events.
    pub backtrack_count: u64,
    /// Errors raised.
    pub error_count: u64,
    /// Deepest nesting level reached.
    pub max_depth_reached: usize,
    /// Wall-clock parse time in microseconds.
    pub parse_time_us: u64,
}

#[allow(dead_code)]
impl ParseStatsExt {
    /// All counters start at zero.
    pub fn new() -> Self {
        Self::default()
    }

    /// Fraction of consumed tokens that were not thrown away by
    /// backtracking; 0.0 when nothing was consumed.
    pub fn efficiency(&self) -> f64 {
        match self.tokens_consumed {
            0 => 0.0,
            total => {
                let useful = total.saturating_sub(self.backtrack_count);
                useful as f64 / total as f64
            }
        }
    }

    /// Errors per created node; 0.0 when no nodes were created.
    pub fn error_rate(&self) -> f64 {
        match self.nodes_created {
            0 => 0.0,
            nodes => self.error_count as f64 / nodes as f64,
        }
    }

    /// One-line human-readable digest of all counters.
    pub fn summary(&self) -> String {
        format!(
            "tokens={} nodes={} backtracks={} errors={} depth={} time={}us efficiency={:.1}%",
            self.tokens_consumed,
            self.nodes_created,
            self.backtrack_count,
            self.error_count,
            self.max_depth_reached,
            self.parse_time_us,
            self.efficiency() * 100.0,
        )
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Clone, Debug)]
/// One memoized packrat outcome: where the rule application ended,
/// whether it matched, and a string rendering of its result.
pub struct PackratEntry {
    /// Position just past the memoized rule application.
    pub end_pos: usize,
    /// Whether the rule matched at this position.
    pub success: bool,
    /// Textual rendering of the parse result (the concrete result type
    /// is not stored here).
    pub result_repr: String,
}
288#[allow(dead_code)]
290#[allow(missing_docs)]
291pub struct ParseFuel {
292 pub(super) remaining: usize,
293}
294impl ParseFuel {
295 #[allow(dead_code)]
296 #[allow(missing_docs)]
297 pub fn new(fuel: usize) -> Self {
298 Self { remaining: fuel }
299 }
300 #[allow(dead_code)]
301 #[allow(missing_docs)]
302 pub fn consume(&mut self, amount: usize) -> bool {
303 if self.remaining >= amount {
304 self.remaining -= amount;
305 true
306 } else {
307 false
308 }
309 }
310 #[allow(dead_code)]
311 #[allow(missing_docs)]
312 pub fn has_fuel(&self) -> bool {
313 self.remaining > 0
314 }
315 #[allow(dead_code)]
316 #[allow(missing_docs)]
317 pub fn remaining(&self) -> usize {
318 self.remaining
319 }
320 #[allow(dead_code)]
321 #[allow(missing_docs)]
322 pub fn refuel(&mut self, amount: usize) {
323 self.remaining = self.remaining.saturating_add(amount);
324 }
325}
326#[derive(Debug, Default)]
328#[allow(missing_docs)]
329pub struct ParseBatch {
330 pub entries: Vec<(String, String)>,
332}
333impl ParseBatch {
334 #[allow(missing_docs)]
336 pub fn new() -> Self {
337 Self::default()
338 }
339 #[allow(missing_docs)]
341 pub fn add(&mut self, name: &str, src: &str) {
342 self.entries.push((name.to_string(), src.to_string()));
343 }
344 #[allow(missing_docs)]
346 pub fn len(&self) -> usize {
347 self.entries.len()
348 }
349 #[allow(missing_docs)]
351 pub fn is_empty(&self) -> bool {
352 self.entries.is_empty()
353 }
354 #[allow(missing_docs)]
356 pub fn execute(self) -> ParseSession {
357 let mut session = ParseSession::new();
358 for (name, src) in self.entries {
359 session.parse_file(&name, &src);
360 }
361 session
362 }
363}
/// A bounded cursor over a token buffer, tracking both the current
/// position and the current scope-nesting depth.
#[allow(dead_code)]
#[derive(Clone, Debug)]
pub struct TokenCursor {
    /// Current index; never exceeds `end`.
    pub position: usize,
    /// One past the last valid index.
    pub end: usize,
    /// Current scope-nesting level.
    pub depth: usize,
}

#[allow(dead_code)]
impl TokenCursor {
    /// A cursor at position 0 and depth 0 over `end` tokens.
    pub fn new(end: usize) -> Self {
        TokenCursor {
            position: 0,
            end,
            depth: 0,
        }
    }

    /// Steps forward by one token; saturates at the end.
    pub fn advance(&mut self) {
        if !self.is_at_end() {
            self.position += 1;
        }
    }

    /// Steps back by one token; saturates at the start.
    pub fn retreat(&mut self) {
        self.position = self.position.saturating_sub(1);
    }

    /// True once every token has been consumed.
    pub fn is_at_end(&self) -> bool {
        self.position >= self.end
    }

    /// Tokens left between the cursor and the end.
    pub fn remaining(&self) -> usize {
        self.end.saturating_sub(self.position)
    }

    /// Enters one nesting level.
    pub fn enter_scope(&mut self) {
        self.depth += 1;
    }

    /// Leaves one nesting level; saturates at the root.
    pub fn exit_scope(&mut self) {
        self.depth = self.depth.saturating_sub(1);
    }

    /// True when not inside any nested scope.
    pub fn is_at_root(&self) -> bool {
        self.depth == 0
    }
}
/// Outcome of a parser combinator: either a value or an error message,
/// each paired with the input position it applies to.
#[allow(dead_code)]
pub enum CombResult<T> {
    /// Successful parse: the value and the position reached.
    Ok(T, usize),
    /// Failed parse: the message and the position of the failure.
    Err(String, usize),
}

#[allow(dead_code)]
impl<T> CombResult<T> {
    /// True for the success variant.
    pub fn is_ok(&self) -> bool {
        matches!(self, Self::Ok(..))
    }

    /// True for the failure variant.
    pub fn is_err(&self) -> bool {
        !self.is_ok()
    }

    /// The position carried by either variant.
    pub fn position(&self) -> usize {
        match self {
            Self::Ok(_, p) => *p,
            Self::Err(_, p) => *p,
        }
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Debug, Clone, PartialEq, Eq)]
/// Outcome of a lookahead probe over upcoming tokens.
pub enum LookaheadResult {
    /// The probe matched; the payload is presumably the match length or
    /// end position — NOTE(review): confirm against call sites.
    Matches(usize),
    /// The probe did not match.
    NoMatch,
    /// More than one interpretation matched; caller must disambiguate.
    Ambiguous,
}
460#[allow(dead_code)]
462#[allow(missing_docs)]
463pub struct FixityRegistry {
464 entries: std::collections::HashMap<String, Fixity>,
465}
466impl FixityRegistry {
467 #[allow(dead_code)]
468 #[allow(missing_docs)]
469 pub fn new() -> Self {
470 let mut reg = Self {
471 entries: std::collections::HashMap::new(),
472 };
473 reg.add("+", Fixity::InfixLeft(65));
474 reg.add("-", Fixity::InfixLeft(65));
475 reg.add("*", Fixity::InfixLeft(70));
476 reg.add("/", Fixity::InfixLeft(70));
477 reg.add("^", Fixity::InfixRight(75));
478 reg.add("=", Fixity::InfixNone(50));
479 reg.add("<", Fixity::InfixNone(50));
480 reg.add(">", Fixity::InfixNone(50));
481 reg.add("&&", Fixity::InfixRight(35));
482 reg.add("||", Fixity::InfixRight(30));
483 reg
484 }
485 #[allow(dead_code)]
486 #[allow(missing_docs)]
487 pub fn add(&mut self, op: impl Into<String>, fixity: Fixity) {
488 self.entries.insert(op.into(), fixity);
489 }
490 #[allow(dead_code)]
491 #[allow(missing_docs)]
492 pub fn lookup(&self, op: &str) -> Option<&Fixity> {
493 self.entries.get(op)
494 }
495 #[allow(dead_code)]
496 #[allow(missing_docs)]
497 pub fn count(&self) -> usize {
498 self.entries.len()
499 }
500}
501#[allow(dead_code)]
503#[allow(missing_docs)]
504pub struct RecoveryDecision {
505 pub strategy: RecoveryStrategy,
506 pub tokens_to_skip: usize,
507 pub message: String,
508}
509impl RecoveryDecision {
510 #[allow(dead_code)]
511 #[allow(missing_docs)]
512 pub fn skip(n: usize, msg: impl Into<String>) -> Self {
513 Self {
514 strategy: RecoveryStrategy::Skip,
515 tokens_to_skip: n,
516 message: msg.into(),
517 }
518 }
519 #[allow(dead_code)]
520 #[allow(missing_docs)]
521 pub fn sync(msg: impl Into<String>) -> Self {
522 Self {
523 strategy: RecoveryStrategy::SyncToKeyword,
524 tokens_to_skip: 0,
525 message: msg.into(),
526 }
527 }
528 #[allow(dead_code)]
529 #[allow(missing_docs)]
530 pub fn abandon(msg: impl Into<String>) -> Self {
531 Self {
532 strategy: RecoveryStrategy::Abandon,
533 tokens_to_skip: 0,
534 message: msg.into(),
535 }
536 }
537}
538#[allow(dead_code)]
540#[allow(missing_docs)]
541pub struct ParseResultWithErrors<T> {
542 pub value: Option<T>,
543 pub errors: Vec<ParseErrorSimple>,
544}
545impl<T> ParseResultWithErrors<T> {
546 #[allow(dead_code)]
547 #[allow(missing_docs)]
548 pub fn ok(value: T) -> Self {
549 Self {
550 value: Some(value),
551 errors: Vec::new(),
552 }
553 }
554 #[allow(dead_code)]
555 #[allow(missing_docs)]
556 pub fn err(e: ParseErrorSimple) -> Self {
557 Self {
558 value: None,
559 errors: vec![e],
560 }
561 }
562 #[allow(dead_code)]
563 #[allow(missing_docs)]
564 pub fn ok_with_errors(value: T, errors: Vec<ParseErrorSimple>) -> Self {
565 Self {
566 value: Some(value),
567 errors,
568 }
569 }
570 #[allow(dead_code)]
571 #[allow(missing_docs)]
572 pub fn is_ok(&self) -> bool {
573 self.value.is_some()
574 }
575 #[allow(dead_code)]
576 #[allow(missing_docs)]
577 pub fn has_errors(&self) -> bool {
578 !self.errors.is_empty()
579 }
580 #[allow(dead_code)]
581 #[allow(missing_docs)]
582 pub fn error_count(&self) -> usize {
583 self.errors.len()
584 }
585}
586#[derive(Debug)]
588#[allow(missing_docs)]
589pub struct ParseFileResult {
590 pub filename: String,
592 pub decls: Vec<Located<Decl>>,
594 #[allow(missing_docs)]
596 pub errors: Vec<ParseError>,
597}
598impl ParseFileResult {
599 #[allow(missing_docs)]
601 pub fn is_ok(&self) -> bool {
602 self.errors.is_empty()
603 }
604 #[allow(missing_docs)]
606 pub fn decl_count(&self) -> usize {
607 self.decls.len()
608 }
609}
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Debug, Clone)]
/// One entry in a parse trace: a rule application with its input span
/// and outcome.
pub struct TraceEvent {
    /// Name of the grammar rule that was applied.
    pub rule: String,
    /// Position at which the rule started.
    pub start_pos: usize,
    /// Position at which the rule stopped (equals `start_pos` until the
    /// matching exit is recorded; see `ParseTrace::enter`/`exit`).
    pub end_pos: usize,
    /// Whether the rule ultimately succeeded.
    pub success: bool,
}
619#[allow(dead_code)]
621#[allow(missing_docs)]
622pub struct PackratTable {
623 entries: std::collections::HashMap<(usize, String), PackratEntry>,
624}
625impl PackratTable {
626 #[allow(dead_code)]
627 #[allow(missing_docs)]
628 pub fn new() -> Self {
629 Self {
630 entries: std::collections::HashMap::new(),
631 }
632 }
633 #[allow(dead_code)]
634 #[allow(missing_docs)]
635 pub fn lookup(&self, pos: usize, rule: &str) -> Option<&PackratEntry> {
636 self.entries.get(&(pos, rule.to_string()))
637 }
638 #[allow(dead_code)]
639 #[allow(missing_docs)]
640 pub fn store(&mut self, pos: usize, rule: impl Into<String>, entry: PackratEntry) {
641 self.entries.insert((pos, rule.into()), entry);
642 }
643 #[allow(dead_code)]
644 #[allow(missing_docs)]
645 pub fn size(&self) -> usize {
646 self.entries.len()
647 }
648 #[allow(dead_code)]
649 #[allow(missing_docs)]
650 pub fn hit_rate_estimate(&self) -> f64 {
651 if self.entries.is_empty() {
652 return 0.0;
653 }
654 let hits = self.entries.values().filter(|e| e.success).count();
655 hits as f64 / self.entries.len() as f64
656 }
657}
658#[allow(dead_code)]
660#[allow(missing_docs)]
661pub struct RecoveryLog {
662 entries: Vec<(usize, RecoveryDecision)>,
663}
664impl RecoveryLog {
665 #[allow(dead_code)]
666 #[allow(missing_docs)]
667 pub fn new() -> Self {
668 Self {
669 entries: Vec::new(),
670 }
671 }
672 #[allow(dead_code)]
673 #[allow(missing_docs)]
674 pub fn record(&mut self, pos: usize, decision: RecoveryDecision) {
675 self.entries.push((pos, decision));
676 }
677 #[allow(dead_code)]
678 #[allow(missing_docs)]
679 pub fn count(&self) -> usize {
680 self.entries.len()
681 }
682 #[allow(dead_code)]
683 #[allow(missing_docs)]
684 pub fn strategies_used(&self) -> Vec<RecoveryStrategy> {
685 self.entries.iter().map(|(_, d)| d.strategy).collect()
686 }
687 #[allow(dead_code)]
688 #[allow(missing_docs)]
689 pub fn abandon_count(&self) -> usize {
690 self.entries
691 .iter()
692 .filter(|(_, d)| d.strategy == RecoveryStrategy::Abandon)
693 .count()
694 }
695}
/// One frame of the parser's rule stack: which rule is running, where it
/// started, how deep it is, and whether it is inside a type or pattern.
#[allow(dead_code)]
pub struct ParseFrame {
    /// Name of the rule being parsed.
    pub rule: String,
    /// Position at which the rule began.
    pub start_pos: usize,
    /// Nesting depth of this frame.
    pub depth: usize,
    /// True while parsing inside a type.
    pub in_type: bool,
    /// True while parsing inside a pattern.
    pub in_pattern: bool,
}

#[allow(dead_code)]
impl ParseFrame {
    /// A plain frame (neither type nor pattern context).
    pub fn new(rule: impl Into<String>, pos: usize, depth: usize) -> Self {
        ParseFrame {
            rule: rule.into(),
            start_pos: pos,
            depth,
            in_type: false,
            in_pattern: false,
        }
    }

    /// Builder-style: marks this frame as type context.
    pub fn for_type(self) -> Self {
        ParseFrame {
            in_type: true,
            ..self
        }
    }

    /// Builder-style: marks this frame as pattern context.
    pub fn for_pattern(self) -> Self {
        ParseFrame {
            in_pattern: true,
            ..self
        }
    }
}
/// A fully resolved source location: file, zero-based line/column, and
/// absolute byte offset.
#[allow(dead_code)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SourcePos {
    /// File the position refers to.
    pub file: String,
    /// Zero-based line number.
    pub line: usize,
    /// Zero-based column number.
    pub column: usize,
    /// Absolute byte offset into the file.
    pub byte_offset: usize,
}

#[allow(dead_code)]
impl SourcePos {
    /// Builds a position from its four components.
    pub fn new(file: impl Into<String>, line: usize, col: usize, offset: usize) -> Self {
        SourcePos {
            file: file.into(),
            line,
            column: col,
            byte_offset: offset,
        }
    }

    /// A sentinel for positions that could not be determined.
    pub fn unknown() -> Self {
        Self::new("<unknown>", 0, 0, 0)
    }

    /// `file:line:col` rendering with one-based line and column.
    pub fn display(&self) -> String {
        format!("{}:{}:{}", self.file, self.line + 1, self.column + 1)
    }
}
764#[derive(Clone, Debug)]
766#[allow(missing_docs)]
767pub struct SourceMap {
768 pub(super) line_starts: Vec<usize>,
770 source_len: usize,
772}
773impl SourceMap {
774 #[allow(missing_docs)]
776 pub fn new(src: &str) -> Self {
777 let mut line_starts = vec![0];
778 for (i, b) in src.bytes().enumerate() {
779 if b == b'\n' {
780 line_starts.push(i + 1);
781 }
782 }
783 Self {
784 line_starts,
785 source_len: src.len(),
786 }
787 }
788 #[allow(missing_docs)]
790 pub fn offset_to_line_col(&self, offset: usize) -> (u32, u32) {
791 let line = match self.line_starts.binary_search(&offset) {
792 Ok(i) => i,
793 Err(i) => i.saturating_sub(1),
794 };
795 let col = offset - self.line_starts[line];
796 ((line + 1) as u32, (col + 1) as u32)
797 }
798 #[allow(missing_docs)]
800 pub fn num_lines(&self) -> usize {
801 self.line_starts.len()
802 }
803 #[allow(missing_docs)]
805 pub fn source_len(&self) -> usize {
806 self.source_len
807 }
808}
809#[derive(Clone, Debug)]
811#[allow(missing_docs)]
812pub struct ParseBuffer {
813 tokens: std::collections::VecDeque<Token>,
814 max_lookahead: usize,
816}
817impl ParseBuffer {
818 #[allow(missing_docs)]
820 pub fn new(max_lookahead: usize) -> Self {
821 Self {
822 tokens: std::collections::VecDeque::new(),
823 max_lookahead,
824 }
825 }
826 #[allow(missing_docs)]
828 pub fn push(&mut self, tok: Token) {
829 if self.tokens.len() >= self.max_lookahead {
830 self.tokens.pop_front();
831 }
832 self.tokens.push_back(tok);
833 }
834 #[allow(missing_docs)]
836 pub fn front(&self) -> Option<&Token> {
837 self.tokens.front()
838 }
839 #[allow(missing_docs)]
841 pub fn pop(&mut self) -> Option<Token> {
842 self.tokens.pop_front()
843 }
844 #[allow(missing_docs)]
846 pub fn len(&self) -> usize {
847 self.tokens.len()
848 }
849 #[allow(missing_docs)]
851 pub fn is_empty(&self) -> bool {
852 self.tokens.is_empty()
853 }
854 #[allow(missing_docs)]
856 pub fn clear(&mut self) {
857 self.tokens.clear();
858 }
859}
/// Accumulates the set of inputs the parser would have accepted at the
/// current position, for rendering a single "expected ..." diagnostic.
// Idiomatic `Default` derived so `new()` is not a lone constructor
// (clippy::new_without_default); `new` delegates to it.
#[allow(dead_code)]
#[derive(Default)]
pub struct ExpectedSet {
    /// Descriptions of acceptable inputs, in insertion order.
    expected: Vec<String>,
}

#[allow(dead_code)]
impl ExpectedSet {
    /// Creates an empty set.
    pub fn new() -> Self {
        Self::default()
    }

    /// Records one more acceptable input.
    pub fn add(&mut self, what: impl Into<String>) {
        self.expected.push(what.into());
    }

    /// Forgets everything recorded so far.
    pub fn clear(&mut self) {
        self.expected.clear();
    }

    /// True when nothing has been recorded.
    pub fn is_empty(&self) -> bool {
        self.expected.is_empty()
    }

    /// Number of recorded expectations.
    pub fn count(&self) -> usize {
        self.expected.len()
    }

    /// Renders the set as a human-readable "expected a, b or c" message.
    pub fn to_message(&self) -> String {
        match self.expected.as_slice() {
            [] => "nothing expected".to_string(),
            [only] => format!("expected {}", only),
            [rest @ .., last] => format!("expected {} or {}", rest.join(", "), last),
        }
    }
}
/// Operator fixity: placement (infix/prefix/postfix), associativity,
/// and binding precedence.
#[allow(dead_code)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Fixity {
    /// Left-associative infix operator with the given precedence.
    InfixLeft(u8),
    /// Right-associative infix operator with the given precedence.
    InfixRight(u8),
    /// Non-associative infix operator with the given precedence.
    InfixNone(u8),
    /// Prefix operator with the given precedence.
    Prefix(u8),
    /// Postfix operator with the given precedence.
    Postfix(u8),
}

#[allow(dead_code)]
impl Fixity {
    /// The binding precedence carried by any variant.
    pub fn precedence(&self) -> u8 {
        match *self {
            Fixity::InfixLeft(p) => p,
            Fixity::InfixRight(p) => p,
            Fixity::InfixNone(p) => p,
            Fixity::Prefix(p) => p,
            Fixity::Postfix(p) => p,
        }
    }

    /// True for any of the three infix placements.
    pub fn is_infix(&self) -> bool {
        !matches!(self, Fixity::Prefix(_) | Fixity::Postfix(_))
    }

    /// True only for right-associative infix operators.
    pub fn is_right_assoc(&self) -> bool {
        matches!(self, Fixity::InfixRight(_))
    }
}
945#[derive(Debug, Default)]
947#[allow(missing_docs)]
948pub struct ParseSession {
949 pub file_names: Vec<String>,
951 pub results: Vec<ParseFileResult>,
953 #[allow(missing_docs)]
955 pub stats: ParseStats,
956}
957impl ParseSession {
958 #[allow(missing_docs)]
960 pub fn new() -> Self {
961 Self::default()
962 }
963 #[allow(missing_docs)]
965 pub fn parse_file(&mut self, filename: &str, src: &str) {
966 let mut errors = Vec::new();
967 let tokens = Lexer::new(src).tokenize();
968 let mut parser = Parser::new(tokens);
969 let mut decls = Vec::new();
970 loop {
971 match parser.parse_decl() {
972 Ok(d) => decls.push(d),
973 Err(e) if e.is_eof() => break,
974 Err(e) => {
975 errors.push(e);
976 break;
977 }
978 }
979 }
980 self.stats.files_parsed += 1;
981 self.stats.decls_parsed += decls.len() as u64;
982 self.stats.errors_total += errors.len() as u64;
983 self.file_names.push(filename.to_string());
984 self.results.push(ParseFileResult {
985 filename: filename.to_string(),
986 decls,
987 errors,
988 });
989 }
990 #[allow(missing_docs)]
992 pub fn all_ok(&self) -> bool {
993 self.results.iter().all(|r| r.is_ok())
994 }
995 #[allow(missing_docs)]
997 pub fn all_errors(&self) -> Vec<&ParseError> {
998 self.results.iter().flat_map(|r| r.errors.iter()).collect()
999 }
1000 #[allow(missing_docs)]
1002 pub fn total_decls(&self) -> usize {
1003 self.results.iter().map(|r| r.decl_count()).sum()
1004 }
1005 #[allow(missing_docs)]
1007 pub fn file_count(&self) -> usize {
1008 self.file_names.len()
1009 }
1010}
1011#[derive(Clone, Debug)]
1014#[allow(missing_docs)]
1015pub struct TokenStream {
1016 pub(super) tokens: Vec<Token>,
1017 pub(super) pos: usize,
1018}
1019impl TokenStream {
1020 #[allow(missing_docs)]
1022 pub fn new(tokens: Vec<Token>) -> Self {
1023 Self { tokens, pos: 0 }
1024 }
1025 #[allow(missing_docs)]
1027 pub fn from_src(src: &str) -> Self {
1028 Self::new(Lexer::new(src).tokenize())
1029 }
1030 #[allow(missing_docs)]
1032 pub fn peek(&self) -> Option<&Token> {
1033 self.tokens.get(self.pos)
1034 }
1035 #[allow(clippy::should_implement_trait)]
1037 #[allow(missing_docs)]
1038 pub fn next(&mut self) -> Option<&Token> {
1039 let tok = self.tokens.get(self.pos)?;
1040 self.pos += 1;
1041 Some(tok)
1042 }
1043 #[allow(missing_docs)]
1045 pub fn is_empty(&self) -> bool {
1046 self.pos >= self.tokens.len()
1047 }
1048 #[allow(missing_docs)]
1050 pub fn remaining(&self) -> usize {
1051 self.tokens.len().saturating_sub(self.pos)
1052 }
1053 #[allow(missing_docs)]
1055 pub fn total_len(&self) -> usize {
1056 self.tokens.len()
1057 }
1058 #[allow(missing_docs)]
1060 pub fn reset(&mut self) {
1061 self.pos = 0;
1062 }
1063 #[allow(missing_docs)]
1065 pub fn collect_remaining(&self) -> Vec<&Token> {
1066 self.tokens[self.pos..].iter().collect()
1067 }
1068}
/// Aggregate counters for a whole parse session.
#[derive(Clone, Debug, Default)]
pub struct ParseStats {
    /// Files parsed.
    pub files_parsed: u64,
    /// Top-level declarations parsed.
    pub decls_parsed: u64,
    /// Errors across all files.
    pub errors_total: u64,
    /// Tokens produced by the lexer.
    pub tokens_lexed: u64,
    /// Source bytes processed.
    pub bytes_processed: u64,
}

impl ParseStats {
    /// All counters start at zero.
    pub fn new() -> Self {
        Self::default()
    }

    /// Mean declarations per file; 0.0 before any file is parsed.
    pub fn avg_decls_per_file(&self) -> f64 {
        match self.files_parsed {
            0 => 0.0,
            n => self.decls_parsed as f64 / n as f64,
        }
    }

    /// Errors per declaration; 0.0 before any declaration is parsed.
    pub fn error_rate(&self) -> f64 {
        match self.decls_parsed {
            0 => 0.0,
            n => self.errors_total as f64 / n as f64,
        }
    }

    /// True while no error has been counted.
    pub fn is_clean(&self) -> bool {
        self.errors_total == 0
    }
}
#[derive(Clone, Debug, PartialEq, Eq)]
#[allow(missing_docs)]
/// Category of a `ParseAnnotation` attached to a span of source.
pub enum AnnotationKind {
    /// Neutral informational note.
    Info,
    /// Marks use of a deprecated construct.
    Deprecated,
    /// Suggests an improvement or alternative spelling.
    Suggestion,
}
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
/// How the parser reacts at an error site (see `RecoveryDecision`).
pub enum RecoveryStrategy {
    /// Drop a fixed number of tokens and resume.
    Skip,
    /// Presumably synthesizes the expected token and resumes —
    /// NOTE(review): not constructed in this module; confirm at use sites.
    InsertToken,
    /// Scan forward to a synchronizing keyword before resuming.
    SyncToKeyword,
    /// Give up on the current construct.
    Abandon,
}
/// Overall quality of a parse, ordered from worst (`Failed`) to best
/// (`Clean`) so outcomes can be compared with `<`/`>=`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum ParseQuality {
    /// Parsing produced errors.
    Failed,
    /// Some output was produced but the parse is incomplete.
    Partial,
    /// Parsed fully, but with warnings.
    WithWarnings,
    /// Parsed fully with no diagnostics at all.
    Clean,
}

impl ParseQuality {
    /// Grades a finished parse from its error and warning counts.
    /// (`Partial` is never produced here; this function only maps
    /// errors to `Failed`, warnings to `WithWarnings`, else `Clean`.)
    pub fn rate(errors: usize, warnings: usize) -> Self {
        match (errors, warnings) {
            (e, _) if e > 0 => ParseQuality::Failed,
            (_, w) if w > 0 => ParseQuality::WithWarnings,
            _ => ParseQuality::Clean,
        }
    }

    /// True for any outcome at least as good as `Partial`.
    pub fn is_usable(&self) -> bool {
        *self >= ParseQuality::Partial
    }
}
1167#[derive(Debug, Default)]
1170#[allow(missing_docs)]
1171pub struct ParsePipeline {
1172 pub stages: Vec<String>,
1174}
1175impl ParsePipeline {
1176 #[allow(missing_docs)]
1178 pub fn new() -> Self {
1179 Self::default()
1180 }
1181 #[allow(missing_docs)]
1183 pub fn add_stage(&mut self, name: &str) {
1184 self.stages.push(name.to_string());
1185 }
1186 #[allow(missing_docs)]
1188 pub fn stage_count(&self) -> usize {
1189 self.stages.len()
1190 }
1191 #[allow(missing_docs)]
1193 pub fn execute(&self, src: &str) -> TokenStream {
1194 TokenStream::from_src(src)
1195 }
1196}
/// Guards against runaway recursion by counting nested entries against
/// a fixed ceiling.
#[allow(dead_code)]
pub struct DepthLimiter {
    current: usize,
    max: usize,
}

#[allow(dead_code)]
impl DepthLimiter {
    /// A limiter at depth 0 with ceiling `max`.
    pub fn new(max: usize) -> Self {
        DepthLimiter { current: 0, max }
    }

    /// Attempts to descend one level; returns `false` (without
    /// descending) when the ceiling has been reached.
    pub fn enter(&mut self) -> bool {
        if self.is_at_limit() {
            false
        } else {
            self.current += 1;
            true
        }
    }

    /// Ascends one level; saturates at depth 0.
    pub fn exit(&mut self) {
        self.current = self.current.saturating_sub(1);
    }

    /// Current nesting depth.
    pub fn depth(&self) -> usize {
        self.current
    }

    /// True once the ceiling has been reached.
    pub fn is_at_limit(&self) -> bool {
        self.current >= self.max
    }
}
1237#[derive(Clone, Debug)]
1239#[allow(missing_docs)]
1240pub struct ParseAnnotation {
1241 pub kind: AnnotationKind,
1243 pub span: Span,
1245 #[allow(missing_docs)]
1247 pub message: String,
1248}
1249impl ParseAnnotation {
1250 #[allow(missing_docs)]
1252 pub fn new(kind: AnnotationKind, span: Span, message: &str) -> Self {
1253 Self {
1254 kind,
1255 span,
1256 message: message.to_string(),
1257 }
1258 }
1259 #[allow(missing_docs)]
1261 pub fn info(span: Span, message: &str) -> Self {
1262 Self::new(AnnotationKind::Info, span, message)
1263 }
1264 #[allow(missing_docs)]
1266 pub fn deprecated(span: Span, message: &str) -> Self {
1267 Self::new(AnnotationKind::Deprecated, span, message)
1268 }
1269}
/// State threaded through a Pratt expression parser: the minimum
/// binding precedence and a recursion-depth guard.
#[allow(dead_code)]
pub struct PrattContext {
    /// Operators binding less tightly than this are not consumed.
    pub min_prec: u8,
    /// Current recursion depth.
    pub depth: usize,
    /// Depth at which parsing should refuse to recurse further.
    pub max_depth: usize,
}

#[allow(dead_code)]
impl PrattContext {
    /// A root context at depth 0 with a 200-level recursion budget.
    pub fn new(min_prec: u8) -> Self {
        PrattContext {
            min_prec,
            depth: 0,
            max_depth: 200,
        }
    }

    /// Derives a child context one level deeper with a new minimum
    /// precedence; the depth ceiling is inherited.
    pub fn with_min_prec(&self, p: u8) -> Self {
        PrattContext {
            min_prec: p,
            depth: self.depth + 1,
            max_depth: self.max_depth,
        }
    }

    /// True once the recursion budget is exhausted.
    pub fn is_too_deep(&self) -> bool {
        self.depth >= self.max_depth
    }
}
/// Coarse switches describing how forgiving a parse should be.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub struct ParseMode {
    /// Whether tactic blocks are accepted.
    pub allow_tactics: bool,
    /// Whether the parser keeps going after an error.
    pub recover_on_error: bool,
    /// Whether lenient parsing rules apply.
    pub lenient: bool,
}

impl ParseMode {
    /// Everything off: no tactics, no recovery, not lenient.
    /// (Identical to the derived `Default`, all flags false.)
    pub fn strict() -> Self {
        Self::default()
    }

    /// Recovery and lenient rules on; tactics still off.
    pub fn lenient() -> Self {
        ParseMode {
            allow_tactics: false,
            recover_on_error: true,
            lenient: true,
        }
    }
}
/// A set over the first 64 token kinds, stored as a bitmask; indices
/// 64 and above are silently ignored.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub struct TokenKindSet {
    bits: u64,
}

impl TokenKindSet {
    /// The set containing no kinds.
    pub fn empty() -> Self {
        TokenKindSet { bits: 0 }
    }

    /// Adds kind `idx` (no-op for `idx >= 64`).
    pub fn insert(&mut self, idx: u32) {
        if idx < 64 {
            self.bits |= 1u64 << idx;
        }
    }

    /// True when kind `idx` is in the set (`idx >= 64` never is).
    pub fn contains(&self, idx: u32) -> bool {
        idx < 64 && (self.bits & (1u64 << idx)) != 0
    }

    /// True when no kind is present.
    pub fn is_empty(&self) -> bool {
        self.bits == 0
    }

    /// Set union.
    pub fn union(self, other: Self) -> Self {
        TokenKindSet {
            bits: self.bits | other.bits,
        }
    }

    /// Set intersection.
    pub fn intersect(self, other: Self) -> Self {
        TokenKindSet {
            bits: self.bits & other.bits,
        }
    }
}
1379#[allow(dead_code)]
1381#[allow(missing_docs)]
1382pub struct ParseTrace {
1383 events: Vec<TraceEvent>,
1384 max_events: usize,
1385}
1386impl ParseTrace {
1387 #[allow(dead_code)]
1388 #[allow(missing_docs)]
1389 pub fn new(max_events: usize) -> Self {
1390 Self {
1391 events: Vec::new(),
1392 max_events,
1393 }
1394 }
1395 #[allow(dead_code)]
1396 #[allow(missing_docs)]
1397 pub fn enter(&mut self, rule: impl Into<String>, pos: usize) -> usize {
1398 let idx = self.events.len();
1399 if self.events.len() < self.max_events {
1400 self.events.push(TraceEvent {
1401 rule: rule.into(),
1402 start_pos: pos,
1403 end_pos: pos,
1404 success: false,
1405 });
1406 }
1407 idx
1408 }
1409 #[allow(dead_code)]
1410 #[allow(missing_docs)]
1411 pub fn exit(&mut self, idx: usize, end_pos: usize, success: bool) {
1412 if let Some(e) = self.events.get_mut(idx) {
1413 e.end_pos = end_pos;
1414 e.success = success;
1415 }
1416 }
1417 #[allow(dead_code)]
1418 #[allow(missing_docs)]
1419 pub fn success_count(&self) -> usize {
1420 self.events.iter().filter(|e| e.success).count()
1421 }
1422 #[allow(dead_code)]
1423 #[allow(missing_docs)]
1424 pub fn fail_count(&self) -> usize {
1425 self.events.iter().filter(|e| !e.success).count()
1426 }
1427 #[allow(dead_code)]
1428 #[allow(missing_docs)]
1429 pub fn total(&self) -> usize {
1430 self.events.len()
1431 }
1432 #[allow(dead_code)]
1433 #[allow(missing_docs)]
1434 pub fn most_failing_rule(&self) -> Option<&str> {
1435 let mut counts: std::collections::HashMap<&str, usize> = std::collections::HashMap::new();
1436 for e in &self.events {
1437 if !e.success {
1438 *counts.entry(e.rule.as_str()).or_insert(0) += 1;
1439 }
1440 }
1441 counts.into_iter().max_by_key(|(_, c)| *c).map(|(r, _)| r)
1442 }
1443}
1444#[allow(dead_code)]
1446#[allow(missing_docs)]
1447pub struct ParseStack {
1448 frames: Vec<ParseFrame>,
1449}
1450impl ParseStack {
1451 #[allow(dead_code)]
1452 #[allow(missing_docs)]
1453 pub fn new() -> Self {
1454 Self { frames: Vec::new() }
1455 }
1456 #[allow(dead_code)]
1457 #[allow(missing_docs)]
1458 pub fn push(&mut self, frame: ParseFrame) {
1459 self.frames.push(frame);
1460 }
1461 #[allow(dead_code)]
1462 #[allow(missing_docs)]
1463 pub fn pop(&mut self) -> Option<ParseFrame> {
1464 self.frames.pop()
1465 }
1466 #[allow(dead_code)]
1467 #[allow(missing_docs)]
1468 pub fn depth(&self) -> usize {
1469 self.frames.len()
1470 }
1471 #[allow(dead_code)]
1472 #[allow(missing_docs)]
1473 pub fn current_rule(&self) -> Option<&str> {
1474 self.frames.last().map(|f| f.rule.as_str())
1475 }
1476 #[allow(dead_code)]
1477 #[allow(missing_docs)]
1478 pub fn in_type(&self) -> bool {
1479 self.frames.iter().any(|f| f.in_type)
1480 }
1481 #[allow(dead_code)]
1482 #[allow(missing_docs)]
1483 pub fn in_pattern(&self) -> bool {
1484 self.frames.iter().any(|f| f.in_pattern)
1485 }
1486 #[allow(dead_code)]
1487 #[allow(missing_docs)]
1488 pub fn rules_string(&self) -> String {
1489 self.frames
1490 .iter()
1491 .map(|f| f.rule.as_str())
1492 .collect::<Vec<_>>()
1493 .join(" > ")
1494 }
1495}
1496#[derive(Clone, Debug, Default)]
1498#[allow(missing_docs)]
1499pub struct ParseErrorSummary {
1500 pub total: usize,
1502 pub by_file: Vec<(String, usize)>,
1504}
1505impl ParseErrorSummary {
1506 #[allow(missing_docs)]
1508 pub fn from_session(session: &ParseSession) -> Self {
1509 let mut by_file = Vec::new();
1510 let mut total = 0;
1511 for r in &session.results {
1512 let n = r.errors.len();
1513 if n > 0 {
1514 by_file.push((r.filename.clone(), n));
1515 total += n;
1516 }
1517 }
1518 Self { total, by_file }
1519 }
1520 #[allow(missing_docs)]
1522 pub fn is_clean(&self) -> bool {
1523 self.total == 0
1524 }
1525 #[allow(missing_docs)]
1527 pub fn worst_file(&self) -> Option<&str> {
1528 self.by_file
1529 .iter()
1530 .max_by_key(|(_, n)| *n)
1531 .map(|(f, _)| f.as_str())
1532 }
1533}