1use codespan::ByteIndex;
2use codespan_reporting::{diagnostic::Label, files::Files as _};
3use lalrpop_util::ErrorRecovery;
4
5use crate::{
6 files::{FileId, Files},
7 identifier::{Ident, LocIdent},
8 lexer::Token,
9 position::RawSpan,
10 utils::mk_span,
11};
12use std::ops::Range;
13
/// An error produced by the lexer, carrying raw byte offsets into the source.
/// Offsets are converted to proper `RawSpan`s in `ParseError::from_lexical`.
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum LexicalError {
    /// A closing brace without a matching opening brace. The payload is the
    /// byte offset of the brace (turned into a one-byte span downstream).
    UnmatchedCloseBrace(usize),
    /// An invalid escape sequence in a string literal; byte offset of the
    /// offending character.
    InvalidEscapeSequence(usize),
    /// An invalid ASCII escape code in a string literal; byte offset of the
    /// code, which is two bytes wide (see the `+ 2` span in `from_lexical`).
    InvalidAsciiEscapeCode(usize),
    /// An invalid Unicode escape code, together with the byte range it spans.
    InvalidUnicodeEscapeCode(Range<usize>),
    /// The closing delimiter of a string doesn't match its opening delimiter.
    StringDelimiterMismatch {
        /// Byte range of the opening delimiter.
        opening_delimiter: Range<usize>,
        /// Byte range of the mismatched closing delimiter.
        closing_delimiter: Range<usize>,
    },
    /// A generic lexing failure over the given byte range (reported as an
    /// unexpected token with no expectation list).
    Generic(Range<usize>),
}
33
/// The cause of an `InvalidRecordType` parse error: the specific construct
/// that makes a record literal unusable as a record type.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum InvalidRecordTypeError {
    /// A field that isn't valid in a record type literal (per `notes()`,
    /// value assignments and metadata annotations are forbidden there).
    InvalidField(RawSpan),
    /// The record uses ellipsis (is open), which a record type cannot be.
    IsOpen(RawSpan),
    /// The record contains an `include` statement.
    HasInclude(RawSpan),
    /// A field name built with string interpolation.
    InterpolatedField(RawSpan),
    /// The same field appears twice: spans of the first occurrence and of
    /// the duplicate.
    RepeatedField { orig: RawSpan, dup: RawSpan },
}
52
53impl InvalidRecordTypeError {
54 pub fn labels(&self) -> Vec<Label<FileId>> {
55 let label = |span: &RawSpan| {
56 Label::secondary(span.src_id, span.start.to_usize()..span.end.to_usize())
57 };
58 match self {
59 InvalidRecordTypeError::InvalidField(pos) => {
60 vec![label(pos).with_message("invalid field for a record type literal")]
61 }
62 InvalidRecordTypeError::IsOpen(pos) => {
63 vec![label(pos).with_message("cannot have ellipsis in a record type literal")]
64 }
65 InvalidRecordTypeError::HasInclude(pos) => {
66 vec![label(pos).with_message("cannot have `include` statements in a record type")]
67 }
68 InvalidRecordTypeError::InterpolatedField(pos) => {
69 vec![label(pos).with_message("this field uses interpolation")]
70 }
71 InvalidRecordTypeError::RepeatedField { orig, dup } => {
72 vec![
73 label(orig).with_message("first occurrence"),
74 label(dup).with_message("second occurrence"),
75 ]
76 }
77 }
78 }
79
80 pub fn notes(&self) -> Option<String> {
81 match self {
82 InvalidRecordTypeError::InvalidField(_) => Some(
83 "Value assignments such as `<field> = <expr>`, and metadata \
84 annotation (annotation, documentation, etc.) are forbidden."
85 .into(),
86 ),
87 InvalidRecordTypeError::InterpolatedField(_) => {
88 Some("String interpolation in field names is forbidden in record types".into())
89 }
90 InvalidRecordTypeError::RepeatedField { .. } => {
91 Some("Repeated field names are forbidden".into())
92 }
93 _ => None,
94 }
95 }
96}
97
/// An error produced while parsing Nickel source, or converted from an
/// external format parser (JSON, YAML, TOML, Nix — see the `from_*`
/// constructors on this type).
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum ParseError {
    /// Unexpected end of file. The second member is the list of token names
    /// the parser expected at that point.
    UnexpectedEOF(FileId, Vec<String>),
    /// Unexpected token. The second member is the list of expected token
    /// names (empty when the token was simply invalid — see `from_lalrpop`).
    UnexpectedToken(
        RawSpan,
        Vec<String>,
    ),
    /// A token appeared after the point where the input should have ended.
    ExtraToken(RawSpan),
    /// A closing brace without a matching opening brace.
    UnmatchedCloseBrace(RawSpan),
    /// An invalid escape sequence in a string literal.
    InvalidEscapeSequence(RawSpan),
    /// An invalid ASCII escape code in a string literal.
    InvalidAsciiEscapeCode(RawSpan),
    /// An invalid Unicode escape code in a string literal.
    InvalidUnicodeEscapeCode(RawSpan),
    /// The closing delimiter of a string doesn't match its opening delimiter.
    StringDelimiterMismatch {
        opening_delimiter: RawSpan,
        closing_delimiter: RawSpan,
    },
    /// An error reported while parsing an external format. Members are the
    /// format name (e.g. "json"), the underlying error message, and the
    /// error position when one could be recovered.
    ExternalFormatError(
        String, String, Option<RawSpan>,
    ),
    /// Type variables that are used without being bound.
    /// NOTE(review): assumed to mean "not introduced by an enclosing
    /// `forall`" — confirm against the call sites.
    UnboundTypeVariables(Vec<LocIdent>),
    /// A record literal that cannot be interpreted as a record type.
    InvalidRecordType {
        /// The span of the whole record literal.
        record_span: RawSpan,
        /// The span of the record's tail, if any — presumably the `; Tail`
        /// part of the literal; TODO confirm.
        tail_span: Option<RawSpan>,
        /// What precisely makes the record invalid as a type.
        cause: InvalidRecordTypeError,
    },
    /// A destructuring pattern used on the left-hand side of a recursive let.
    RecursiveLetPattern(RawSpan),
    /// A destructuring pattern used inside a let block.
    PatternInLetBlock(RawSpan),
    /// A type variable used with an inconsistent kind (assumed from the
    /// member names — TODO confirm, e.g. used both as a type and as a row).
    TypeVariableKindMismatch { ty_var: LocIdent, span: RawSpan },
    /// A field carries a type annotation but no definition.
    TypedFieldWithoutDefinition {
        /// The span of the field declaration.
        field_span: RawSpan,
        /// The span of the type annotation.
        annot_span: RawSpan,
    },
    /// String interpolation used inside a static field path.
    InterpolationInStaticPath {
        /// The original path input — presumably the raw text being parsed;
        /// TODO confirm.
        input: String,
        /// The span of the interpolated path element.
        path_elem_span: RawSpan,
    },
    /// The same identifier bound more than once in a record pattern.
    DuplicateIdentInRecordPattern {
        ident: LocIdent,
        /// The previous binding of the same identifier.
        prev_ident: LocIdent,
    },
    /// The same identifier bound more than once in a let block.
    DuplicateIdentInLetBlock {
        ident: LocIdent,
        /// The previous binding of the same identifier.
        prev_ident: LocIdent,
    },
    /// Use of a feature that is disabled in this build/configuration.
    DisabledFeature { feature: String, span: RawSpan },
    /// An expression that isn't valid as a contract (assumed from the name —
    /// TODO confirm against the emitting code).
    InvalidContract(RawSpan),
    /// An invalid format specification on an import.
    InvalidImportFormat { span: RawSpan },
    /// A sigil expression missing its colon separator (assumed from the
    /// name — TODO confirm against the sigil grammar).
    SigilExprMissingColon(RawSpan),
    /// An unknown selector in a sigil expression.
    UnknownSigilSelector { selector: String, span: RawSpan },
    /// An unknown attribute for the given sigil selector.
    UnknownSigilAttribute {
        selector: String,
        attribute: String,
        span: RawSpan,
    },
    /// A field declared multiple times, one of the declarations coming from
    /// an `include`.
    MultipleFieldDecls {
        /// The identifier of the field.
        ident: Ident,
        /// The span of the `include` declaration.
        include_span: RawSpan,
        /// The span of the other declaration.
        other_span: RawSpan,
    },
}
230
impl ParseError {
    /// Converts a lalrpop error into a `ParseError`, attaching `file_id` to
    /// the raw byte positions lalrpop reports.
    pub(crate) fn from_lalrpop<T>(
        error: lalrpop_util::ParseError<usize, T, ParseOrLexError>,
        file_id: FileId,
    ) -> ParseError {
        match error {
            // lalrpop only gives a single position for an invalid token:
            // synthesize a one-byte span, with no expectation list.
            lalrpop_util::ParseError::InvalidToken { location } => {
                ParseError::UnexpectedToken(mk_span(file_id, location, location + 1), Vec::new())
            }
            lalrpop_util::ParseError::UnrecognizedToken {
                token: (start, _, end),
                expected,
            } => ParseError::UnexpectedToken(mk_span(file_id, start, end), expected),
            lalrpop_util::ParseError::UnrecognizedEof { expected, .. } => {
                ParseError::UnexpectedEOF(file_id, expected)
            }
            lalrpop_util::ParseError::ExtraToken {
                token: (start, _, end),
            } => ParseError::ExtraToken(mk_span(file_id, start, end)),
            // User errors are either lexer errors (converted below in
            // `from_lexical`) or already-built `ParseError`s (passed through).
            lalrpop_util::ParseError::User {
                error: ParseOrLexError::Lexical(e),
            } => Self::from_lexical(e, file_id),
            lalrpop_util::ParseError::User {
                error: ParseOrLexError::Parse(e),
            } => e,
        }
    }

    /// Converts a lexer error, which only carries raw byte offsets, into a
    /// `ParseError` with proper `RawSpan`s inside `file_id`.
    fn from_lexical(error: LexicalError, file_id: FileId) -> ParseError {
        match error {
            LexicalError::Generic(range) => {
                ParseError::UnexpectedToken(mk_span(file_id, range.start, range.end), Vec::new())
            }
            // Single-character errors become one-byte spans.
            LexicalError::UnmatchedCloseBrace(location) => {
                ParseError::UnmatchedCloseBrace(mk_span(file_id, location, location + 1))
            }
            LexicalError::InvalidEscapeSequence(location) => {
                ParseError::InvalidEscapeSequence(mk_span(file_id, location, location + 1))
            }
            // ASCII escape codes are two bytes wide, hence the `+ 2`.
            LexicalError::InvalidAsciiEscapeCode(location) => {
                ParseError::InvalidAsciiEscapeCode(mk_span(file_id, location, location + 2))
            }
            LexicalError::InvalidUnicodeEscapeCode(location) => {
                ParseError::InvalidUnicodeEscapeCode(mk_span(file_id, location.start, location.end))
            }
            LexicalError::StringDelimiterMismatch {
                opening_delimiter,
                closing_delimiter,
            } => ParseError::StringDelimiterMismatch {
                opening_delimiter: mk_span(file_id, opening_delimiter.start, opening_delimiter.end),
                closing_delimiter: mk_span(file_id, closing_delimiter.start, closing_delimiter.end),
            },
        }
    }

    /// Builds an `ExternalFormatError` from a serde_json error, best-effort
    /// recovering a one-byte source span when `location` (the file id and the
    /// file database) is provided.
    pub fn from_serde_json(error: serde_json::Error, location: Option<(FileId, &Files)>) -> Self {
        use codespan::ByteOffset;

        // serde_json lines are 1-based; a line of 0 means the error carries
        // no usable position, so no span is produced in that case.
        let line_span = if error.line() == 0 {
            None
        } else {
            // NOTE(review): `line_index` is fed a 0-based *line number* and
            // its result is used below as the byte offset of the line start —
            // confirm this matches `Files::line_index`'s contract.
            location.and_then(|(file_id, files)| files.line_index(file_id, error.line() - 1).ok())
        };

        // serde_json columns are 1-based, hence the (saturating) `- 1`.
        let start =
            line_span.map(|ls| ByteIndex::from(((ls + error.column()) as u32).saturating_sub(1)));
        ParseError::ExternalFormatError(
            String::from("json"),
            error.to_string(),
            start.map(|start| RawSpan {
                // `start` is only `Some` when `location` was `Some` (it is
                // derived from `line_span`), so this unwrap cannot panic.
                src_id: location.unwrap().0,
                start,
                // serde_json gives a point, not a range: mark a single byte.
                end: start + ByteOffset::from(1),
            }),
        )
    }

    /// Builds an `ExternalFormatError` from a YAML scan error. When `file_id`
    /// is provided, the span is the single byte at the scanner's error marker.
    pub fn from_yaml(error: saphyr_parser::ScanError, file_id: Option<FileId>) -> Self {
        use codespan::{ByteIndex, ByteOffset};

        let start = ByteIndex::from(error.marker().index() as u32);
        ParseError::ExternalFormatError(
            String::from("yaml"),
            error.to_string(),
            file_id.map(|src_id| RawSpan {
                src_id,
                start,
                end: start + ByteOffset::from(1),
            }),
        )
    }

    /// Builds an `ExternalFormatError` from a TOML error, reusing the byte
    /// span reported by `toml_edit` when there is one.
    pub fn from_toml(error: toml_edit::TomlError, file_id: FileId) -> Self {
        use codespan::{ByteIndex, ByteOffset};

        let span = error.span();
        ParseError::ExternalFormatError(
            String::from("toml"),
            error.to_string(),
            span.map(|span| RawSpan {
                src_id: file_id,
                start: ByteIndex::from(span.start as u32),
                // NOTE(review): this places `end` one byte past toml_edit's
                // (already exclusive) `span.end` — confirm the off-by-one is
                // intentional.
                end: ByteIndex(span.end as u32) + ByteOffset::from(1),
            }),
        )
    }

    /// Builds an `ExternalFormatError` from a Nix error message. No span is
    /// recovered from the plain-text message, hence the unused `_file_id`.
    #[cfg(feature = "nix-experimental")]
    pub fn from_nix(error: &str, _file_id: FileId) -> Self {
        ParseError::ExternalFormatError(String::from("nix"), error.to_string(), None)
    }
}
349
/// Either a lexer error or a parser error. Used as the user-error type
/// threaded through lalrpop (`lalrpop_util::ParseError::User`); the two cases
/// are split apart again in `ParseError::from_lalrpop`.
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum ParseOrLexError {
    /// An error produced by the lexer.
    Lexical(LexicalError),
    /// An error produced by the parser.
    Parse(ParseError),
}
357
358impl From<ParseError> for ParseOrLexError {
359 fn from(e: ParseError) -> Self {
360 Self::Parse(e)
361 }
362}
363
364impl From<LexicalError> for ParseOrLexError {
365 fn from(e: LexicalError) -> Self {
366 Self::Lexical(e)
367 }
368}
369
370impl<T> From<ParseError> for lalrpop_util::ParseError<usize, T, ParseOrLexError> {
371 fn from(e: ParseError) -> Self {
372 lalrpop_util::ParseError::User {
373 error: ParseOrLexError::Parse(e),
374 }
375 }
376}
377
/// A collection of parse errors, as gathered by error-tolerant parsing
/// (see `ParseErrors::from_recoverable`). May be empty.
#[derive(Debug, PartialEq, Eq, Clone, Default)]
pub struct ParseErrors {
    /// The accumulated errors, in the order they were collected.
    pub errors: Vec<ParseError>,
}
382
383impl ParseErrors {
384 pub fn new(errors: Vec<ParseError>) -> ParseErrors {
385 ParseErrors { errors }
386 }
387
388 pub fn errors(self) -> Option<Vec<ParseError>> {
389 if self.errors.is_empty() {
390 None
391 } else {
392 Some(self.errors)
393 }
394 }
395
396 pub fn no_errors(&self) -> bool {
397 self.errors.is_empty()
398 }
399
400 pub const fn none() -> ParseErrors {
401 ParseErrors { errors: Vec::new() }
402 }
403
404 pub(crate) fn from_recoverable(
405 errs: Vec<ErrorRecovery<usize, Token<'_>, ParseOrLexError>>,
406 file_id: FileId,
407 ) -> Self {
408 ParseErrors {
409 errors: errs
410 .into_iter()
411 .map(|e| ParseError::from_lalrpop(e.error, file_id))
412 .collect(),
413 }
414 }
415}
416
417impl From<ParseError> for ParseErrors {
418 fn from(e: ParseError) -> ParseErrors {
419 ParseErrors { errors: vec![e] }
420 }
421}
422
423impl From<Vec<ParseError>> for ParseErrors {
424 fn from(errors: Vec<ParseError>) -> ParseErrors {
425 ParseErrors { errors }
426 }
427}