nickel_lang_core/error/
mod.rs

1//! Error types and error reporting.
2//!
3//! Define error types for different phases of the execution, together with functions to generate a
4//! [codespan](https://crates.io/crates/codespan-reporting) diagnostic from them.
5use codespan::ByteIndex;
6pub use codespan_reporting::diagnostic::{Diagnostic, Label, LabelStyle};
7
8use codespan_reporting::files::Files as _;
9use codespan_reporting::term::termcolor::{ColorChoice, StandardStream, WriteColor};
10use lalrpop_util::ErrorRecovery;
11use malachite::base::num::conversion::traits::ToSci;
12
13use ouroboros::self_referencing;
14
15use crate::{
16    bytecode::ast::{
17        alloc::{AstAlloc, CloneTo},
18        compat::ToMainline as _,
19        typ::{EnumRow, RecordRow, Type},
20        Ast,
21    },
22    cache::InputFormat,
23    eval::callstack::CallStack,
24    files::{FileId, Files},
25    identifier::{Ident, LocIdent},
26    label::{
27        self,
28        ty_path::{self, PathSpan},
29        MergeKind, MergeLabel,
30    },
31    parser::{
32        self,
33        error::{InvalidRecordTypeError, LexicalError, ParseError as InternalParseError},
34        lexer::Token,
35        utils::mk_span,
36    },
37    position::{RawSpan, TermPos},
38    repl,
39    serialize::{ExportFormat, NickelPointer},
40    term::{pattern::Pattern, record::FieldMetadata, Number, RichTerm, Term},
41    typ::{TypeF, VarKindDiscriminant},
42};
43
44pub mod report;
45pub mod suggest;
46pub mod warning;
47
48pub use warning::Warning;
49
/// A callback interface for delivering errors and/or warnings as they occur.
///
/// Since the error type `E` is a generic parameter, a single object may act as a
/// `Reporter` for several different kinds of reportable things at once.
pub trait Reporter<E> {
    /// Hands something (`e`) over to the reporter.
    fn report(&mut self, e: E);

    /// Convenience helper that reports only the error side of a `Result`.
    ///
    /// `Ok(_)` values are silently discarded; an `Err(e)` is converted into `E` and
    /// forwarded to [`Self::report`].
    fn report_result<T, E2>(&mut self, result: Result<T, E2>)
    where
        Self: Sized,
        E2: Into<E>,
    {
        match result {
            Ok(_) => {}
            Err(e) => self.report(e.into()),
        }
    }
}
72
73impl<E, R: Reporter<E>> Reporter<E> for &mut R {
74    fn report(&mut self, e: E) {
75        R::report(*self, e)
76    }
77}
78
79/// A [`Reporter`] that just collects errors.
80pub struct Sink<E> {
81    pub errors: Vec<E>,
82}
83
84impl<E> Default for Sink<E> {
85    fn default() -> Self {
86        Sink { errors: Vec::new() }
87    }
88}
89
90impl<E> Reporter<E> for Sink<E> {
91    fn report(&mut self, e: E) {
92        self.errors.push(e);
93    }
94}
95
/// A [`Reporter`] that throws away all its errors.
///
/// Useful when a `Reporter` is required by an API but the caller doesn't care about the
/// reported values.
pub struct NullReporter {}

impl<E> Reporter<E> for NullReporter {
    // Intentionally a no-op: the value is dropped.
    fn report(&mut self, _e: E) {}
}
102
/// A general error occurring during either parsing or evaluation.
#[derive(Debug, Clone, PartialEq)]
pub enum Error {
    /// An error raised during evaluation.
    EvalError(EvalError),
    /// An error raised by the static typechecker.
    TypecheckError(TypecheckError),
    /// One or more errors raised during parsing.
    ParseErrors(ParseErrors),
    /// An error raised while resolving an import.
    ImportError(ImportError),
    /// An error raised while serializing (exporting) a value.
    ExportError(ExportError),
    /// A general input/output error.
    IOError(IOError),
    /// An error specific to the REPL.
    ReplError(ReplError),
}
114
/// An error occurring during evaluation.
#[derive(Debug, Clone, PartialEq)]
pub enum EvalError {
    /// A blame occurred: a contract has been broken somewhere.
    BlameError {
        /// The argument failing the contract. If the argument has been forced by the contract,
        /// `evaluated_arg` provides the final value.
        evaluated_arg: Option<RichTerm>,
        /// The label of the corresponding contract.
        label: label::Label,
        /// The callstack when the blame error was raised.
        call_stack: CallStack,
    },
    /// A field required by a record contract is missing a definition.
    MissingFieldDef {
        /// The identifier of the field lacking a definition.
        id: LocIdent,
        /// The metadata attached to the field.
        metadata: FieldMetadata,
        /// The position of the record lacking the definition.
        pos_record: TermPos,
        /// The position of the access that required the field.
        pos_access: TermPos,
    },
    /// Mismatch between the expected type and the actual type of an expression.
    TypeError {
        /// The expected type.
        expected: String,
        /// A freeform message.
        message: String,
        /// Position of the original unevaluated expression.
        orig_pos: TermPos,
        /// The evaluated expression.
        term: RichTerm,
    },
    /// `TypeError` when evaluating a unary primop
    UnaryPrimopTypeError {
        primop: String,
        expected: String,
        arg_pos: TermPos,
        arg_evaluated: RichTerm,
    },
    /// `TypeError` when evaluating an n-ary primop
    NAryPrimopTypeError {
        primop: String,
        expected: String,
        arg_number: usize,
        arg_pos: TermPos,
        arg_evaluated: RichTerm,
        op_pos: TermPos,
    },
    /// Tried to evaluate a term which wasn't parsed correctly.
    ParseError(ParseError),
    /// A term which is not a function has been applied to an argument.
    NotAFunc(
        /* term */ RichTerm,
        /* arg */ RichTerm,
        /* app position */ TermPos,
    ),
    /// A field access, or another record operation requiring the existence of a specific field,
    /// has been performed on a record missing that field.
    FieldMissing {
        /// The name of the missing field.
        id: LocIdent,
        /// The actual fields of the record used to suggest similar fields.
        field_names: Vec<LocIdent>,
        /// The primitive operation that required the field to exist.
        operator: String,
        /// The position of the record value which is missing the field.
        pos_record: TermPos,
        /// The position of the primitive operation application.
        pos_op: TermPos,
    },
    /// Too few arguments were provided to a builtin function.
    NotEnoughArgs(
        /* required arg count */ usize,
        /* primitive */ String,
        TermPos,
    ),
    /// Attempted to merge incompatible values: for example, tried to merge two distinct default
    /// values into one record field.
    MergeIncompatibleArgs {
        /// The left operand of the merge.
        left_arg: RichTerm,
        /// The right operand of the merge.
        right_arg: RichTerm,
        /// Additional error-reporting data.
        merge_label: MergeLabel,
    },
    /// An unbound identifier was referenced.
    UnboundIdentifier(LocIdent, TermPos),
    /// An element in the evaluation Cache was entered during its own update.
    InfiniteRecursion(CallStack, TermPos),
    /// A serialization error occurred during a call to the builtin `serialize`.
    SerializationError(ExportError),
    /// A parse error occurred during a call to the builtin `deserialize`.
    DeserializationError(
        String,  /* format */
        String,  /* error message */
        TermPos, /* position of the call to deserialize */
    ),
    /// A parse error occurred during a call to the builtin `deserialize`.
    ///
    /// This differs from `DeserializationError` in that the inner error
    /// isn't just a string: it can refer to positions.
    DeserializationErrorWithInner {
        format: InputFormat,
        inner: ParseError,
        /// Position of the call to deserialize.
        pos: TermPos,
    },
    /// A polymorphic record contract was broken somewhere.
    IllegalPolymorphicTailAccess {
        action: IllegalPolymorphicTailAction,
        evaluated_arg: Option<RichTerm>,
        label: label::Label,
        call_stack: CallStack,
    },
    /// Two non-equatable terms of the same type (e.g. functions) were compared for equality.
    IncomparableValues {
        eq_pos: TermPos,
        left: RichTerm,
        right: RichTerm,
    },
    /// A value didn't match any branch of a `match` expression at runtime. This is a specialized
    /// version of [Self::NonExhaustiveMatch] when all branches are enum patterns. In this case,
    /// the error message is more informative than the generic one.
    NonExhaustiveEnumMatch {
        /// The list of expected patterns. Currently, those are just enum tags.
        expected: Vec<LocIdent>,
        /// The original term matched
        found: RichTerm,
        /// The position of the `match` expression
        pos: TermPos,
    },
    /// A value didn't match any branch of a `match` expression at runtime (generic version; see
    /// also [Self::NonExhaustiveEnumMatch]).
    NonExhaustiveMatch {
        /// The original term matched.
        value: RichTerm,
        /// The position of the `match` expression
        pos: TermPos,
    },
    /// A value failed to match the pattern of a destructuring binding.
    FailedDestructuring {
        /// The original term matched.
        value: RichTerm,
        /// The pattern that failed to match.
        pattern: Pattern,
    },
    /// Tried to query a field of something that wasn't a record.
    QueryNonRecord {
        /// Position of the original unevaluated expression.
        pos: TermPos,
        /// The identifier that we tried to query.
        id: LocIdent,
        /// Evaluated expression
        value: RichTerm,
    },
    /// An unexpected internal error.
    InternalError(String, TermPos),
    /// Errors occurring rarely enough to not deserve a dedicated variant.
    Other(String, TermPos),
}
272
/// The record operation that was attempted on the sealed tail of a record protected by a
/// polymorphic contract, used to tailor the blame error message.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum IllegalPolymorphicTailAction {
    FieldAccess { field: String },
    Map,
    Merge,
    FieldRemove { field: String },
    Freeze,
}

impl IllegalPolymorphicTailAction {
    /// Renders the human-readable description of the illegal action.
    fn message(&self) -> String {
        match self {
            Self::FieldAccess { field } => {
                format!("cannot access field `{field}` sealed by a polymorphic contract")
            }
            Self::FieldRemove { field } => {
                format!("cannot remove field `{field}` sealed by a polymorphic contract")
            }
            Self::Map => String::from("cannot map over a record sealed by a polymorphic contract"),
            Self::Merge => String::from("cannot merge a record sealed by a polymorphic contract"),
            Self::Freeze => String::from("cannot freeze a record sealed by a polymorphic contract"),
        }
    }
}
299
/// The name displayed for sources that don't correspond to a file on disk, i.e. terms
/// generated during evaluation.
pub const UNKNOWN_SOURCE_NAME: &str = "<unknown> (generated by evaluation)";
301
/// An error occurring during the static typechecking phase.
///
/// This is a self-referencing struct (via `ouroboros`): the error data borrows types and
/// AST nodes from the allocator stored alongside it, so that both can travel together as
/// one owned value.
#[self_referencing(pub_extras)]
#[derive(Debug)]
pub struct TypecheckError {
    /// The allocator hosting the types and AST nodes.
    alloc: AstAlloc,
    /// The actual error data.
    #[borrows(alloc)]
    #[covariant]
    pub error: TypecheckErrorData<'this>,
}
313
impl Clone for TypecheckError {
    fn clone(&self) -> Self {
        // Build a fresh self-referencing pair: a brand new allocator, plus a copy of the
        // error data moved onto it via `CloneTo`.
        TypecheckError::new(AstAlloc::new(), |alloc| {
            // We must clone the "shallow" layer of the error data to satisfy the `CloneTo`
            // interface
            TypecheckErrorData::clone_to(self.borrow_error().clone(), alloc)
        })
    }
}
323
324impl PartialEq for TypecheckError {
325    fn eq(&self, other: &Self) -> bool {
326        self.borrow_error() == other.borrow_error()
327    }
328}
329
/// The various kinds of typechecking errors.
#[derive(Debug, PartialEq, Clone)]
pub enum TypecheckErrorData<'ast> {
    /// An unbound identifier was referenced.
    UnboundIdentifier(LocIdent),
    /// A specific row was expected to be in the type of an expression, but was not.
    MissingRow {
        id: LocIdent,
        expected: Type<'ast>,
        inferred: Type<'ast>,
        pos: TermPos,
    },
    /// A dynamic tail was expected to be in the type of an expression, but was not.
    MissingDynTail {
        expected: Type<'ast>,
        inferred: Type<'ast>,
        pos: TermPos,
    },
    /// A specific row was not expected to be in the type of an expression.
    ExtraRow {
        id: LocIdent,
        expected: Type<'ast>,
        inferred: Type<'ast>,
        pos: TermPos,
    },
    /// An additional dynamic tail was not expected to be in the type of an expression.
    ExtraDynTail {
        expected: Type<'ast>,
        inferred: Type<'ast>,
        pos: TermPos,
    },
    /// A parametricity violation involving a row-kinded type variable.
    ///
    /// For example, in a function like this:
    ///
    /// ```nickel
    /// let f : forall a. { x: String, y: String } -> { x: String; a } =
    ///   fun r => r
    /// in ...
    /// ```
    ///
    /// this error would be raised with `{ ; a }` as the `tail` type and
    /// `{ y : String }` as the `violating_type`.
    ForallParametricityViolation {
        kind: VarKindDiscriminant,
        tail: Type<'ast>,
        violating_type: Type<'ast>,
        pos: TermPos,
    },
    /// An unbound type variable was referenced.
    UnboundTypeVariable(LocIdent),
    /// The actual (inferred or annotated) type of an expression is incompatible with its expected
    /// type.
    TypeMismatch {
        expected: Type<'ast>,
        inferred: Type<'ast>,
        pos: TermPos,
    },
    /// The actual (inferred or annotated) record row type of an expression is incompatible with
    /// its expected record row type. Specialized version of [Self::TypeMismatch] with additional
    /// row-specific information.
    RecordRowMismatch {
        id: LocIdent,
        expected: Type<'ast>,
        inferred: Type<'ast>,
        cause: Box<TypecheckErrorData<'ast>>,
        pos: TermPos,
    },
    /// Same as [Self::RecordRowMismatch] but for enum types.
    EnumRowMismatch {
        id: LocIdent,
        expected: Type<'ast>,
        inferred: Type<'ast>,
        cause: Option<Box<TypecheckErrorData<'ast>>>,
        pos: TermPos,
    },
    /// Two incompatible types have been deduced for the same identifier of a row type.
    ///
    /// This is similar to [Self::RecordRowMismatch] but occurs in a slightly different situation.
    /// Consider a unification variable `t`, which is a placeholder to be filled by a concrete type
    /// later in the typechecking phase.  If `t` appears as the tail of a row type, i.e. the type
    /// of some expression is inferred to be `{ field: Type; t}`, then `t` must not be unified
    /// later with a type including a different declaration for field, such as `field: Type2`.
    ///
    /// A [constraint][crate::typecheck::unif::RowConstrs] is added accordingly, and if this
    /// constraint is violated (that is if `t` does end up being unified with a type of the form `{
    /// .., field: Type2, .. }`), [Self::RecordRowConflict] is raised.  We do not necessarily have
    /// access to the original `field: Type` declaration, as opposed to [Self::RecordRowMismatch],
    /// which corresponds to the direct failure to unify `{ .. , x: T1, .. }` and `{ .., x: T2, ..
    /// }`.
    RecordRowConflict {
        /// The row that couldn't be added to the record type, because it already existed with a
        /// different type assignment.
        row: RecordRow<'ast>,
        expected: Type<'ast>,
        inferred: Type<'ast>,
        pos: TermPos,
    },
    /// Same as [Self::RecordRowConflict] but for enum types.
    EnumRowConflict {
        /// The row that couldn't be added to the enum type, because it already existed with a
        /// different type assignment.
        row: EnumRow<'ast>,
        expected: Type<'ast>,
        inferred: Type<'ast>,
        pos: TermPos,
    },
    /// Type mismatch on a subtype of an arrow type.
    ///
    /// The unification of two arrow types requires the unification of the domain and the codomain
    /// (and recursively so, if they are themselves arrow types). When the unification of a subtype
    /// fails, we want to report which part of the arrow types is problematic, and why, rather than
    /// a generic `TypeMismatch`. Indeed, failing to unify two arrow types is a common type error
    /// which deserves a good reporting, that can be caused e.g. by applying a function to an
    /// argument of a wrong type in some cases:
    ///
    /// ```text
    /// let id_mono = fun x => x in let _ign = id_mono true in id_mono 0 : Number
    /// ```
    ///
    /// This specific error stores additionally the [type path][crate::label::ty_path] that
    /// identifies the subtype where unification failed and the corresponding error.
    ArrowTypeMismatch {
        expected: Type<'ast>,
        inferred: Type<'ast>,
        /// The path to the incompatible type components
        type_path: ty_path::Path,
        cause: Box<TypecheckErrorData<'ast>>,
        pos: TermPos,
    },
    /// Within statically typed code, the typechecker must reject terms containing nonsensical
    /// contracts such as `let C = { foo : (4 + 1) } in ({ foo = 5 } | C)`, which will fail at
    /// runtime.
    ///
    /// The typechecker is currently quite conservative and simply forbids to store any custom
    /// contract in a type that appears in term position. Note that this restriction
    /// doesn't apply to annotations, which aren't considered part of the statically typed block.
    /// For example, `{foo = 5} | {foo : (4 + 1)}` is accepted by the typechecker.
    CtrTypeInTermPos {
        /// The term that was in a flat type (the `(4 + 1)` in the example above).
        contract: Ast<'ast>,
        /// The position of the entire type (the `{foo : 5}` in the example above).
        pos: TermPos,
    },
    /// Unsound generalization.
    ///
    /// When typechecking polymorphic expressions, polymorphic variables introduced by a `forall`
    /// are substituted with rigid type variables, which can only unify with a free unification
    /// variable. However, the condition that the unification variable is free isn't enough.
    ///
    /// Consider the following example:
    ///
    /// ```nickel
    /// (fun x => let y : forall a. a = x in (y : Number)) : _
    /// ```
    ///
    /// This example must be rejected, as it is an identity function that casts any value to
    /// something of type `Number`. It will typically fail with a contract error if applied to a
    /// string, for example.
    ///
    /// But when `let y : forall a. a = x` is typechecked, `x` is affected to a free unification
    /// variable `_a`, which isn't determined yet. The unsoundness comes from the fact that `_a`
    /// was introduced **before** the block with the `forall a. a` annotation, and thus shouldn't
    /// be allowed to be generalized (unified with a rigid type variable) at this point.
    ///
    /// Nickel uses an algorithm coming from the OCaml implementation, recognizing that the
    /// discipline needed to reject those cases is similar to region-based memory management. See
    /// [crate::typecheck] for more details. This error indicates that a case similar to the above
    /// example happened.
    VarLevelMismatch {
        /// The user-defined type variable (the rigid type variable during unification) that
        /// couldn't be unified.
        type_var: LocIdent,
        /// The position of the expression that was being typechecked as `type_var`.
        pos: TermPos,
    },
    /// Record-dict subtyping failed because the record was inhomogeneous.
    InhomogeneousRecord {
        /// One row of the record had this type.
        row_a: Type<'ast>,
        /// Another row of the record had this type.
        row_b: Type<'ast>,
        /// The position of the expression of record type.
        pos: TermPos,
    },
    /// Invalid or-pattern.
    ///
    /// This error is raised when the patterns composing an or-pattern don't have the exact
    /// same set of free variables. For example, `'Foo x or 'Bar y`.
    OrPatternVarsMismatch {
        /// A variable which isn't present in all the other patterns (there might be more of them,
        /// this is just a sample).
        var: LocIdent,
        /// The position of the whole or-pattern.
        pos: TermPos,
    },
    /// An error occurred during the resolution of an import.
    ///
    /// Since RFC007, imports aren't pre-processed anymore, and import resolution can happen
    /// interleaved with typechecking. In particular, in order to typecheck expressions of the form
    /// `import "file.ncl"`, the typechecker might ask to resolve the import, which can lead to any
    /// import error.
    ImportError(ImportError),
}
534
/// A collection of parse errors, raised together by a single parsing run.
#[derive(Debug, PartialEq, Eq, Clone, Default)]
pub struct ParseErrors {
    /// The individual parse errors.
    pub errors: Vec<ParseError>,
}
539
540impl ParseErrors {
541    pub fn new(errors: Vec<ParseError>) -> ParseErrors {
542        ParseErrors { errors }
543    }
544
545    pub fn errors(self) -> Option<Vec<ParseError>> {
546        if self.errors.is_empty() {
547            None
548        } else {
549            Some(self.errors)
550        }
551    }
552
553    pub fn no_errors(&self) -> bool {
554        self.errors.is_empty()
555    }
556
557    pub const fn none() -> ParseErrors {
558        ParseErrors { errors: Vec::new() }
559    }
560
561    pub fn from_recoverable(
562        errs: Vec<ErrorRecovery<usize, Token<'_>, parser::error::ParseError>>,
563        file_id: FileId,
564    ) -> Self {
565        ParseErrors {
566            errors: errs
567                .into_iter()
568                .map(|e| ParseError::from_lalrpop(e.error, file_id))
569                .collect(),
570        }
571    }
572}
573
574impl From<ParseError> for ParseErrors {
575    fn from(e: ParseError) -> ParseErrors {
576        ParseErrors { errors: vec![e] }
577    }
578}
579
580impl From<Vec<ParseError>> for ParseErrors {
581    fn from(errors: Vec<ParseError>) -> ParseErrors {
582        ParseErrors { errors }
583    }
584}
585
586impl IntoDiagnostics for ParseErrors {
587    fn into_diagnostics(self, files: &mut Files) -> Vec<Diagnostic<FileId>> {
588        self.errors
589            .into_iter()
590            .flat_map(|e| e.into_diagnostics(files))
591            .collect()
592    }
593}
594
/// An error occurring during parsing.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum ParseError {
    /// Unexpected end of file.
    UnexpectedEOF(FileId, /* tokens expected by the parser */ Vec<String>),
    /// Unexpected token.
    UnexpectedToken(
        RawSpan,
        /* tokens expected by the parser */ Vec<String>,
    ),
    /// Superfluous, unexpected token.
    ExtraToken(RawSpan),
    /// A closing brace '}' does not match an opening brace '{'. This rather precise error is
    /// detected because of how interpolated strings are lexed.
    UnmatchedCloseBrace(RawSpan),
    /// Invalid escape sequence in a string literal.
    InvalidEscapeSequence(RawSpan),
    /// Invalid ASCII escape code in a string literal.
    InvalidAsciiEscapeCode(RawSpan),
    /// A multiline string was closed with a delimiter which has a `%` count higher than the
    /// opening delimiter.
    StringDelimiterMismatch {
        opening_delimiter: RawSpan,
        closing_delimiter: RawSpan,
    },
    /// Error when parsing an external format such as JSON, YAML, etc.
    ExternalFormatError(
        String, /* format */
        String, /* error message */
        Option<RawSpan>,
    ),
    /// Unbound type variable
    UnboundTypeVariables(Vec<LocIdent>),
    /// Illegal record type literal.
    ///
    /// This occurs when failing to convert from the uniterm syntax to a record type literal.
    /// See [RFC002](../../rfcs/002-merge-types-terms-syntax.md) for more details.
    InvalidRecordType {
        /// The position of the invalid record.
        record_span: RawSpan,
        /// Position of the tail, if there was one.
        tail_span: Option<RawSpan>,
        /// The reason that interpretation as a record type failed.
        cause: InvalidRecordTypeError,
    },
    /// A recursive let pattern was encountered. They are not currently supported because we
    /// decided it was too involved to implement them.
    RecursiveLetPattern(RawSpan),
    /// Let blocks can currently only contain plain bindings, not pattern bindings.
    PatternInLetBlock(RawSpan),
    /// A type variable is used in ways that imply it has multiple different kinds.
    ///
    /// This can happen in several situations, for example:
    /// - a variable is used as both a type variable and a row type variable,
    ///   e.g. in the signature `forall r. { ; r } -> r`,
    /// - a variable is used as both a record and enum row variable, e.g. in the
    ///   signature `forall r. [| ; r |] -> { ; r }`.
    TypeVariableKindMismatch { ty_var: LocIdent, span: RawSpan },
    /// A record literal, which isn't a record type, has a field with a type annotation but without
    /// a definition. While we could technically handle this situation, this is most probably an
    /// error from the user, because this type annotation is useless and, maybe non-intuitively,
    /// won't have any effect as part of a larger contract:
    ///
    /// ```nickel
    /// let MixedContract = {foo : String, bar | Number} in
    /// { foo = 1, bar = 2} | MixedContract
    /// ```
    ///
    /// This example works, because the `foo : String` annotation doesn't propagate, and contract
    /// application is mostly merging, which is probably not the intent. It might become a warning
    /// in a future version, but we don't have warnings for now, so we rather forbid such
    /// constructions.
    TypedFieldWithoutDefinition {
        /// The position of the field definition.
        field_span: RawSpan,
        /// The position of the type annotation.
        annot_span: RawSpan,
    },
    /// The user provided a field path on the CLI, which is expected to be only composed of
    /// literals, but the parsed field path contains string interpolation.
    InterpolationInStaticPath {
        input: String,
        path_elem_span: RawSpan,
    },
    /// A duplicate binding was encountered in a record destructuring pattern.
    DuplicateIdentInRecordPattern {
        /// The duplicate identifier.
        ident: LocIdent,
        /// The previous instance of the duplicated identifier.
        prev_ident: LocIdent,
    },
    /// A duplicate binding was encountered in a let block.
    DuplicateIdentInLetBlock {
        /// The duplicate identifier.
        ident: LocIdent,
        /// The previous instance of the duplicated identifier.
        prev_ident: LocIdent,
    },
    /// There was an attempt to use a feature that hasn't been enabled.
    DisabledFeature { feature: String, span: RawSpan },
    /// A term was used as a contract in type position, but this term has no chance to make any
    /// sense as a contract. What terms make sense might evolve with time, but at any given point
    /// in time, there is a set of expressions that can be excluded syntactically. Currently, it's
    /// mostly constants.
    InvalidContract(RawSpan),
    /// Unrecognized explicit import format tag
    InvalidImportFormat { span: RawSpan },
    /// A CLI sigil expression such as `@env:FOO` is invalid because no `:` separator was found.
    SigilExprMissingColon(RawSpan),
    /// A CLI sigil expression is unknown or unsupported, such as `@unknown:value`.
    UnknownSigilSelector { selector: String, span: RawSpan },
    /// A CLI sigil attribute is unknown or unsupported, such as `@file/unsupported:value`.
    UnknownSigilAttribute {
        selector: String,
        attribute: String,
        span: RawSpan,
    },
    /// An included field has several definitions. While we could just merge both at runtime like a
    /// piecewise field definition, we entirely forbid this situation for now.
    MultipleFieldDecls {
        /// The identifier.
        ident: Ident,
        /// The identifier and the position of the include expression. The ident part is the same
        /// as the ident part of `ident`.
        include_span: RawSpan,
        /// The span of the other declaration, which can be either a field
        /// definition or an include expression as well.
        other_span: RawSpan,
    },
}
725
/// An error occurring during the resolution of an import.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum ImportError {
    /// An IO error occurred during an import.
    IOError(
        /* imported file */ String,
        /* error message */ String,
        /* import position */ TermPos,
    ),
    /// A parse error occurred during an import.
    ParseErrors(
        /* error */ ParseErrors,
        /* import position */ TermPos,
    ),
    /// A package dependency was not found.
    MissingDependency {
        /// The package that tried to import the missing dependency, if there was one.
        /// This will be `None` if the missing dependency was from the top-level.
        parent: Option<std::path::PathBuf>,
        /// The name of the package that could not be resolved.
        missing: Ident,
        /// The position of the import.
        pos: TermPos,
    },
    /// The user tried to import a file from a package, but no package manifest was supplied.
    NoPackageMap { pos: TermPos },
}
752
/// An error occurring during serialization, together with the path of the offending field.
#[derive(Debug, PartialEq, Clone)]
pub struct ExportError {
    /// The path to the field that contains a non-serializable value. This might be empty if the
    /// error occurred before entering any record.
    pub path: NickelPointer,
    /// The cause of the error.
    pub data: ExportErrorData,
}
761
/// The type of error occurring during serialization.
#[derive(Debug, PartialEq, Clone)]
pub enum ExportErrorData {
    /// Encountered a null value for a format that doesn't support them.
    UnsupportedNull(ExportFormat, RichTerm),
    /// Tried exporting something else than a `String` to raw format.
    NotAString(RichTerm),
    /// A term contains constructs that cannot be serialized.
    NonSerializable(RichTerm),
    /// No exportable documentation was found when requested.
    NoDocumentation(RichTerm),
    /// A number was too large (in absolute value) to be serialized as `f64`
    NumberOutOfRange {
        /// The term holding the out-of-range number.
        term: RichTerm,
        /// The number itself.
        value: Number,
    },
    /// Any other serialization error, described by a free-form message.
    Other(String),
}
780
781impl From<ExportErrorData> for ExportError {
782    fn from(data: ExportErrorData) -> ExportError {
783        ExportError {
784            path: NickelPointer::new(),
785            data,
786        }
787    }
788}
789
/// A general I/O error, occurring when reading a source file or writing an export.
///
/// The wrapped string is the underlying error message.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct IOError(pub String);
793
/// An error occurring during a REPL session.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum ReplError {
    /// The command isn't recognized.
    UnknownCommand(String),
    /// A required argument to a REPL command is missing.
    MissingArg {
        /// The command lacking an argument.
        cmd: repl::command::CommandType,
        /// An optional additional message.
        msg_opt: Option<String>,
    },
    /// The field path given to a query couldn't be parsed.
    InvalidQueryPath(ParseError),
}
804
805impl From<EvalError> for Error {
806    fn from(error: EvalError) -> Error {
807        Error::EvalError(error)
808    }
809}
810
811impl From<ParseError> for Error {
812    fn from(error: ParseError) -> Error {
813        Error::ParseErrors(ParseErrors {
814            errors: vec![error],
815        })
816    }
817}
818
819impl From<ParseErrors> for Error {
820    fn from(errors: ParseErrors) -> Error {
821        Error::ParseErrors(errors)
822    }
823}
824
825impl From<TypecheckError> for Error {
826    fn from(error: TypecheckError) -> Error {
827        Error::TypecheckError(error)
828    }
829}
830
831impl From<ImportError> for Error {
832    fn from(error: ImportError) -> Error {
833        Error::ImportError(error)
834    }
835}
836
837impl From<ExportError> for Error {
838    fn from(error: ExportError) -> Error {
839        Error::ExportError(error)
840    }
841}
842
843impl From<IOError> for Error {
844    fn from(error: IOError) -> Error {
845        Error::IOError(error)
846    }
847}
848
849impl From<std::io::Error> for IOError {
850    fn from(error: std::io::Error) -> IOError {
851        IOError(error.to_string())
852    }
853}
854
855impl From<ExportError> for EvalError {
856    fn from(error: ExportError) -> EvalError {
857        EvalError::SerializationError(error)
858    }
859}
860
861impl From<ImportError> for TypecheckError {
862    fn from(error: ImportError) -> Self {
863        TypecheckError::new(AstAlloc::new(), |_alloc| {
864            TypecheckErrorData::ImportError(error)
865        })
866    }
867}
868
869/// Return an escaped version of a string. Used to sanitize strings before inclusion in error
870/// messages, which can contain ASCII code sequences, and in particular ANSI escape codes, that
871/// could alter Nickel's error messages.
872pub fn escape(s: &str) -> String {
873    String::from_utf8(strip_ansi_escapes::strip(s))
874        .expect("escape(): converting from a string should give back a valid UTF8 string")
875}
876
877impl From<ReplError> for Error {
878    fn from(error: ReplError) -> Error {
879        Error::ReplError(error)
880    }
881}
882
impl ParseError {
    /// Convert an error produced by the lalrpop-generated parser into a [ParseError], turning
    /// raw byte offsets into spans within `file_id`.
    pub fn from_lalrpop<T>(
        error: lalrpop_util::ParseError<usize, T, InternalParseError>,
        file_id: FileId,
    ) -> ParseError {
        match error {
            lalrpop_util::ParseError::InvalidToken { location } => {
                // An invalid token only has a start offset, so we fabricate a one-byte span.
                ParseError::UnexpectedToken(mk_span(file_id, location, location + 1), Vec::new())
            }
            lalrpop_util::ParseError::UnrecognizedToken {
                token: (start, _, end),
                expected,
            } => ParseError::UnexpectedToken(mk_span(file_id, start, end), expected),
            lalrpop_util::ParseError::UnrecognizedEof { expected, .. } => {
                ParseError::UnexpectedEOF(file_id, expected)
            }
            lalrpop_util::ParseError::ExtraToken {
                token: (start, _, end),
            } => ParseError::ExtraToken(mk_span(file_id, start, end)),
            // Lexer errors and Nickel-specific parse errors come wrapped in lalrpop's `User`
            // variant; unwrap each into its dedicated `ParseError` counterpart.
            lalrpop_util::ParseError::User { error } => match error {
                InternalParseError::Lexical(LexicalError::Generic(range)) => {
                    ParseError::UnexpectedToken(
                        mk_span(file_id, range.start, range.end),
                        Vec::new(),
                    )
                }
                InternalParseError::Lexical(LexicalError::UnmatchedCloseBrace(location)) => {
                    ParseError::UnmatchedCloseBrace(mk_span(file_id, location, location + 1))
                }
                InternalParseError::Lexical(LexicalError::InvalidEscapeSequence(location)) => {
                    ParseError::InvalidEscapeSequence(mk_span(file_id, location, location + 1))
                }
                InternalParseError::Lexical(LexicalError::InvalidAsciiEscapeCode(location)) => {
                    // ASCII escape codes are two characters long, hence the two-byte span.
                    ParseError::InvalidAsciiEscapeCode(mk_span(file_id, location, location + 2))
                }
                InternalParseError::Lexical(LexicalError::StringDelimiterMismatch {
                    opening_delimiter,
                    closing_delimiter,
                }) => ParseError::StringDelimiterMismatch {
                    opening_delimiter: mk_span(
                        file_id,
                        opening_delimiter.start,
                        opening_delimiter.end,
                    ),
                    closing_delimiter: mk_span(
                        file_id,
                        closing_delimiter.start,
                        closing_delimiter.end,
                    ),
                },
                InternalParseError::UnboundTypeVariables(idents) => {
                    ParseError::UnboundTypeVariables(idents)
                }
                InternalParseError::InvalidRecordType {
                    record_span,
                    tail_span,
                    cause,
                } => ParseError::InvalidRecordType {
                    record_span,
                    tail_span,
                    cause,
                },
                InternalParseError::RecursiveLetPattern(pos) => {
                    ParseError::RecursiveLetPattern(pos)
                }
                InternalParseError::PatternInLetBlock(pos) => ParseError::PatternInLetBlock(pos),
                InternalParseError::TypeVariableKindMismatch { ty_var, span } => {
                    ParseError::TypeVariableKindMismatch { ty_var, span }
                }
                InternalParseError::TypedFieldWithoutDefinition {
                    field_span,
                    annot_span,
                } => ParseError::TypedFieldWithoutDefinition {
                    field_span,
                    annot_span,
                },
                InternalParseError::DuplicateIdentInRecordPattern { ident, prev_ident } => {
                    ParseError::DuplicateIdentInRecordPattern { ident, prev_ident }
                }
                InternalParseError::DuplicateIdentInLetBlock { ident, prev_ident } => {
                    ParseError::DuplicateIdentInLetBlock { ident, prev_ident }
                }
                InternalParseError::DisabledFeature { feature, span } => {
                    ParseError::DisabledFeature { feature, span }
                }
                InternalParseError::InterpolationInStaticPath { path_elem_span } => {
                    ParseError::InterpolationInStaticPath {
                        // NOTE(review): the offending input text isn't available here, so it's
                        // left empty — presumably filled in upstream by the caller. TODO confirm.
                        input: String::new(),
                        path_elem_span,
                    }
                }
                InternalParseError::InvalidContract(span) => ParseError::InvalidContract(span),
                InternalParseError::InvalidImportFormat { span } => {
                    ParseError::InvalidImportFormat { span }
                }
                InternalParseError::MultipleFieldDecls {
                    ident,
                    include_span,
                    other_span,
                } => ParseError::MultipleFieldDecls {
                    ident,
                    include_span,
                    other_span,
                },
            },
        }
    }

    /// Convert a `serde_json` deserialization error into a [ParseError], mapping the reported
    /// line/column back to a one-byte span inside `file_id` (when the location is usable).
    pub fn from_serde_json(error: serde_json::Error, file_id: FileId, files: &Files) -> Self {
        use codespan::ByteOffset;

        // error.line() should start at `1` according to the documentation, but in practice, it may
        // be 0 for the error `json parse error: data did not match any variant of untagged enum
        // Term`. Although this error should not happen, if it does, it's better to get a message
        // than a panic message `subtract with overflow`.
        let line_span = if error.line() == 0 {
            None
        } else {
            files.line_index(file_id, error.line() - 1).ok()
        };

        // Offset within the line; `saturating_sub` guards against a zero column underflowing.
        let start =
            line_span.map(|ls| ByteIndex::from(((ls + error.column()) as u32).saturating_sub(1)));
        ParseError::ExternalFormatError(
            String::from("json"),
            error.to_string(),
            start.map(|start| RawSpan {
                src_id: file_id,
                start,
                end: start + ByteOffset::from(1),
            }),
        )
    }

    /// Convert a YAML scan error into a [ParseError], pointing at the byte offset reported by
    /// the YAML parser when a file id is available (a one-byte span).
    pub fn from_yaml(error: saphyr_parser::ScanError, file_id: Option<FileId>) -> Self {
        use codespan::{ByteIndex, ByteOffset};

        let start = ByteIndex::from(error.marker().index() as u32);
        ParseError::ExternalFormatError(
            String::from("yaml"),
            error.to_string(),
            file_id.map(|src_id| RawSpan {
                src_id,
                start,
                end: start + ByteOffset::from(1),
            }),
        )
    }

    /// Convert a TOML parse error into a [ParseError], reusing the byte span reported by
    /// `toml_edit` when one is available.
    pub fn from_toml(error: toml_edit::TomlError, file_id: FileId) -> Self {
        use codespan::{ByteIndex, ByteOffset};

        let span = error.span();
        ParseError::ExternalFormatError(
            String::from("toml"),
            error.to_string(),
            span.map(|span| RawSpan {
                src_id: file_id,
                start: ByteIndex::from(span.start as u32),
                end: ByteIndex(span.end as u32) + ByteOffset::from(1),
            }),
        )
    }

    /// Convert a Nix error message into a [ParseError]. No span is attached, since the position
    /// information is already embedded in the Nix error message itself.
    #[cfg(feature = "nix-experimental")]
    pub fn from_nix(error: &str, _file_id: FileId) -> Self {
        // Span is shown in the nix error message
        ParseError::ExternalFormatError(String::from("nix"), error.to_string(), None)
    }
}
1053
/// Boilerplate note appended to diagnostics for errors that indicate a bug in the Nickel
/// interpreter itself rather than in the user's program.
pub const INTERNAL_ERROR_MSG: &str =
    "This error should not happen. This is likely a bug in the Nickel interpreter. Please consider \
 reporting it at https://github.com/tweag/nickel/issues with the above error message.";
1057
/// A trait for converting an error to a diagnostic.
pub trait IntoDiagnostics {
    /// Convert an error to a list of printable formatted diagnostics.
    ///
    /// # Arguments
    ///
    /// - `files`: this is a mutable reference to allow insertion of temporary snippets. Note that
    ///   `Files` is cheaply clonable and copy-on-write, so you can easily get a mutable `Files` from
    ///   a non-mutable one, but bear in mind that the returned diagnostics may contain file ids that
    ///   refer to your mutated files.
    ///
    /// # Return
    ///
    /// Return a list of diagnostics. Most errors generate only one, but showing the callstack
    /// ordered requires sidestepping a limitation of codespan. The current solution is to generate
    /// one diagnostic per callstack element. See issue
    /// [#285](https://github.com/brendanzab/codespan/issues/285).
    fn into_diagnostics(self, files: &mut Files) -> Vec<Diagnostic<FileId>>;
}
1077
1078// Allow the use of a single `Diagnostic` directly as an error that can be reported by Nickel.
1079impl IntoDiagnostics for Diagnostic<FileId> {
1080    fn into_diagnostics(self, _files: &mut Files) -> Vec<Diagnostic<FileId>> {
1081        vec![self]
1082    }
1083}
1084
1085// Helpers for the creation of codespan `Label`s
1086
1087/// Create a primary label from a span.
1088fn primary(span: &RawSpan) -> Label<FileId> {
1089    Label::primary(span.src_id, span.start.to_usize()..span.end.to_usize())
1090}
1091
1092/// Create a secondary label from a span.
1093fn secondary(span: &RawSpan) -> Label<FileId> {
1094    Label::secondary(span.src_id, span.start.to_usize()..span.end.to_usize())
1095}
1096
1097/// Create a label from an optional span, or fallback to annotating the alternative snippet
1098/// `alt_term` if the span is `None`.
1099///
1100/// When `span_opt` is `None`, the code snippet `alt_term` is added to `files` under a special name
1101/// and is referred to instead.
1102///
1103/// This is useful because during evaluation, some terms are the results of computations. They
1104/// correspond to nothing in the original source, and thus have a position set to `None`(e.g. the
1105/// result of `let x = 1 + 1 in x`).  In such cases it may still be valuable to print the term (or a
1106/// terse representation) in the error diagnostic rather than nothing, because if you have let `x =
1107/// 1 + 1 in` and then 100 lines later, `x arg` - causing a `NotAFunc` error - it may be helpful to
1108/// know that `x` holds the value `2`.
1109///
1110/// For example, if one wants to report an error on a record, `alt_term` may be defined as
1111/// `{ ... }`. Then, if this record has no position (`span_opt` is `None`), the error will be
1112/// reported as:
1113///
1114/// ```text
1115/// error: some error
1116///   -- <unknown> (generated by evaluation):1:2
1117///   |
1118/// 1 | { ... }
1119///     ^^^^^^^ some annotation
1120/// ```
1121///
/// The reason for the mutable reference to `files` is that codespan does not let you annotate
1123/// something that is not in `files`: you can't provide a raw snippet, you need to provide a
1124/// `FileId` referring to a file. This leaves the following possibilities:
1125///
/// 1. Do nothing: just elide annotations which refer to the term
1127/// 2. Print the term and the annotation as a note together with the diagnostic. Notes are
1128///    additional text placed at the end of diagnostic. What you lose:
1129///     - pretty formatting of annotations for such snippets
1130///     - style consistency: the style of the error now depends on the term being from the source or
1131///       a byproduct of evaluation
1132/// 3. Add the term to files, take 1: pass a reference to files so that the code building the
1133///    diagnostic can itself add arbitrary snippets if necessary, and get back their `FileId`. This
1134///    is what is done here.
1135/// 4. Add the term to files, take 2: make a wrapper around the `Files` and `FileId` structures of
1136///    codespan which handle source mapping. `FileId` could be something like
1137///    `Either<codespan::FileId, CustomId = u32>` so that `to_diagnostic` could construct and use
1138///    these separate ids, and return the corresponding snippets to be added together with the
1139///    diagnostic without modifying external state. Or even have `FileId = Either<codespan::FileId`,
1140///    `LoneCode = String or (Id, String)>` so we don't have to return the additional list of
1141///    snippets. This adds some boilerplate, that we wanted to avoid, but this stays on the
1142///    reasonable side of being an alternative.
1143fn label_alt(
1144    span_opt: Option<RawSpan>,
1145    alt_term: String,
1146    style: LabelStyle,
1147    files: &mut Files,
1148) -> Label<FileId> {
1149    match span_opt {
1150        Some(span) => Label::new(
1151            style,
1152            span.src_id,
1153            span.start.to_usize()..span.end.to_usize(),
1154        ),
1155        None => {
1156            let range = 0..alt_term.len();
1157            Label::new(style, files.add(UNKNOWN_SOURCE_NAME, alt_term), range)
1158        }
1159    }
1160}
1161
/// Create a primary label from an optional span, or fallback to annotating the alternative
/// snippet `alt_term` if the span is `None`.
///
/// See [`label_alt`].
fn primary_alt(span_opt: Option<RawSpan>, alt_term: String, files: &mut Files) -> Label<FileId> {
    label_alt(span_opt, alt_term, LabelStyle::Primary, files)
}
1169
1170/// Create a primary label from a term, or fallback to annotating the shallow representation of this
1171/// term if its span is `None`.
1172///
1173/// See [`label_alt`].
1174fn primary_term(term: &RichTerm, files: &mut Files) -> Label<FileId> {
1175    primary_alt(term.pos.into_opt(), term.to_string(), files)
1176}
1177
1178/// Create a secondary label from an optional span, or fallback to annotating the alternative
1179/// snippet `alt_term` if the span is `None`.
1180///
1181/// See [`label_alt`].
1182fn secondary_alt(span_opt: TermPos, alt_term: String, files: &mut Files) -> Label<FileId> {
1183    label_alt(span_opt.into_opt(), alt_term, LabelStyle::Secondary, files)
1184}
1185
1186/// Create a secondary label from a term, or fallback to annotating the shallow representation of
1187/// this term if its span is `None`.
1188///
1189/// See [`label_alt`].
1190fn secondary_term(term: &RichTerm, files: &mut Files) -> Label<FileId> {
1191    secondary_alt(term.pos, term.to_string(), files)
1192}
1193
/// Format `number` as an English ordinal string (`1st`, `2nd`, `3rd`, `4th`, ...).
///
/// Note: despite the function's name, these are ordinals, not cardinals.
fn cardinal(number: usize) -> String {
    // The tens digit must be checked first: 11, 12 and 13 (and 111, 212, etc.) take "th"
    // even though they end in 1, 2 and 3. The previous implementation only looked at the
    // last digit and produced "11st", "12nd" and "13rd".
    let suffix = match (number % 100, number % 10) {
        (11..=13, _) => "th",
        (_, 1) => "st",
        (_, 2) => "nd",
        (_, 3) => "rd",
        _ => "th",
    };
    format!("{number}{suffix}")
}
1206
1207impl IntoDiagnostics for Error {
1208    fn into_diagnostics(self, files: &mut Files) -> Vec<Diagnostic<FileId>> {
1209        match self {
1210            Error::ParseErrors(errs) => errs
1211                .errors
1212                .into_iter()
1213                .flat_map(|e| e.into_diagnostics(files))
1214                .collect(),
1215            Error::TypecheckError(err) => err.into_diagnostics(files),
1216            Error::EvalError(err) => err.into_diagnostics(files),
1217            Error::ImportError(err) => err.into_diagnostics(files),
1218            Error::ExportError(err) => err.into_diagnostics(files),
1219            Error::IOError(err) => err.into_diagnostics(files),
1220            Error::ReplError(err) => err.into_diagnostics(files),
1221        }
1222    }
1223}
1224
1225impl IntoDiagnostics for EvalError {
1226    fn into_diagnostics(self, files: &mut Files) -> Vec<Diagnostic<FileId>> {
1227        match self {
1228            EvalError::BlameError {
1229                evaluated_arg,
1230                label,
1231                call_stack,
1232            } => blame_error::blame_diagnostics(files, label, evaluated_arg, &call_stack, ""),
1233            EvalError::MissingFieldDef {
1234                id,
1235                metadata,
1236                pos_record,
1237                pos_access,
1238            } => {
1239                let mut labels = vec![];
1240
1241                // If there's a contract attached to the missing field, point the error message
1242                // at the contract instead of the access. This seems like a more useful error,
1243                // because if someone hands you a `x | { fld | String }` and you call `x.fld`,
1244                // then the `x.fld` shouldn't be blamed if `fld` is missing: we should point
1245                // at the original `x` and at the `fld` in the record contract.
1246                if let Some(label) = metadata
1247                    .annotation
1248                    .first()
1249                    .map(|labeled_ty| labeled_ty.label.clone())
1250                {
1251                    if let Some(span) = label.field_name.and_then(|id| id.pos.into_opt()) {
1252                        labels.push(primary(&span).with_message("required here"));
1253                    }
1254
1255                    if let Some(span) = pos_record.into_opt() {
1256                        labels.push(secondary(&span).with_message("in this record"));
1257                    }
1258
1259                    // In this branch, we don't point at the access location because it
1260                    // isn't to blame.
1261                } else {
1262                    if let Some(span) = id.pos.into_opt() {
1263                        labels.push(primary(&span).with_message("required here"));
1264                    }
1265
1266                    if let Some(span) = pos_record.into_opt() {
1267                        labels.push(secondary(&span).with_message("in this record"));
1268                    }
1269
1270                    if let Some(span) = pos_access.into_opt() {
1271                        labels.push(secondary(&span).with_message("accessed here"));
1272                    }
1273                }
1274
1275                let diags = vec![Diagnostic::error()
1276                    .with_message(format!("missing definition for `{id}`",))
1277                    .with_labels(labels)];
1278
1279                diags
1280            }
1281            EvalError::TypeError {
1282                expected,
1283                message,
1284                orig_pos,
1285                term: t,
1286            } => {
1287                let label = format!(
1288                    "this expression has type {}, but {} was expected",
1289                    t.term
1290                        .type_of()
1291                        .unwrap_or_else(|| String::from("<unevaluated>")),
1292                    expected,
1293                );
1294
1295                let labels = match (orig_pos.into_opt(), t.pos.into_opt()) {
1296                    (Some(span_orig), Some(span_t)) if span_orig == span_t => {
1297                        vec![primary(&span_orig).with_message(label)]
1298                    }
1299                    (Some(span_orig), Some(t_pos)) if !files.is_stdlib(t_pos.src_id) => {
1300                        vec![
1301                            primary(&span_orig).with_message(label),
1302                            secondary_term(&t, files).with_message("evaluated to this"),
1303                        ]
1304                    }
1305                    (Some(span), _) => {
1306                        vec![primary(&span).with_message(label)]
1307                    }
1308                    (None, Some(span)) => {
1309                        vec![primary(&span).with_message(label)]
1310                    }
1311                    (None, None) => {
1312                        vec![primary_term(&t, files).with_message(label)]
1313                    }
1314                };
1315
1316                vec![Diagnostic::error()
1317                    .with_message("dynamic type error")
1318                    .with_labels(labels)
1319                    .with_notes(vec![message])]
1320            }
1321            EvalError::ParseError(parse_error) => parse_error.into_diagnostics(files),
1322            EvalError::NotAFunc(t, arg, pos_opt) => vec![Diagnostic::error()
1323                .with_message("not a function")
1324                .with_labels(vec![
1325                    primary_term(&t, files)
1326                        .with_message("this term is applied, but it is not a function"),
1327                    secondary_alt(pos_opt, format!("({t}) ({arg})"), files)
1328                        .with_message("applied here"),
1329                ])],
1330            EvalError::FieldMissing {
1331                id: name,
1332                field_names,
1333                operator,
1334                pos_record,
1335                pos_op,
1336            } => {
1337                let mut labels = Vec::new();
1338                let mut notes = Vec::new();
1339                let field = escape(name.as_ref());
1340
1341                if let Some(span) = pos_op.into_opt() {
1342                    labels.push(
1343                        Label::primary(span.src_id, span.start.to_usize()..span.end.to_usize())
1344                            .with_message(format!("this requires the field `{field}` to exist")),
1345                    );
1346                } else {
1347                    notes.push(format!(
1348                        "The field `{field}` was required by the operator {operator}"
1349                    ));
1350                }
1351
1352                if let Some(span) = pos_record.as_opt_ref() {
1353                    labels.push(
1354                        secondary(span)
1355                            .with_message(format!("this record lacks the field `{field}`")),
1356                    );
1357                }
1358
1359                suggest::add_suggestion(&mut notes, &field_names, &name);
1360
1361                vec![Diagnostic::error()
1362                    .with_message(format!("missing field `{field}`"))
1363                    .with_labels(labels)
1364                    .with_notes(notes)]
1365            }
1366            EvalError::NotEnoughArgs(count, op, span_opt) => {
1367                let mut labels = Vec::new();
1368                let mut notes = Vec::new();
1369                let msg = format!("{op} expects {count} arguments, but not enough were provided");
1370
1371                if let Some(span) = span_opt.into_opt() {
1372                    labels.push(
1373                        Label::primary(span.src_id, span.start.to_usize()..span.end.to_usize())
1374                            .with_message(msg),
1375                    );
1376                } else {
1377                    notes.push(msg);
1378                }
1379
1380                vec![Diagnostic::error()
1381                    .with_message("not enough arguments")
1382                    .with_labels(labels)
1383                    .with_notes(notes)]
1384            }
1385            EvalError::MergeIncompatibleArgs {
1386                left_arg,
1387                right_arg,
1388                merge_label,
1389            } => {
1390                let mut labels = vec![
1391                    primary_term(&left_arg, files).with_message("cannot merge this expression"),
1392                    primary_term(&right_arg, files).with_message("with this expression"),
1393                ];
1394
1395                let span_label = match merge_label.kind {
1396                    // For a standard merge, the span of the label indicates the position of the
1397                    // original merge expression
1398                    MergeKind::Standard => "originally merged here",
1399                    // For a piecewise definition, there isn't such merge expression (the merge has
1400                    // been generated by the parser). The spans thus point to the corresponding
1401                    // field identifier
1402                    MergeKind::PiecewiseDef => "when combining the definitions of this field",
1403                };
1404
1405                if let Some(merge_label_span) = &merge_label.span {
1406                    labels.push(secondary(merge_label_span).with_message(span_label));
1407                }
1408
1409                fn push_merge_note(notes: &mut Vec<String>, typ: &str) {
1410                    notes.push(format!(
1411                        "Both values are of type {typ} but they aren't equal."
1412                    ));
1413                    notes.push(format!("{typ} values can only be merged if they are equal"));
1414                }
1415
1416                let mut notes = vec![
1417                    "Merge operands have the same merge priority but they can't \
1418                    be combined."
1419                        .to_owned(),
1420                ];
1421
1422                if let (Some(left_ty), Some(right_ty)) =
1423                    (right_arg.as_ref().type_of(), left_arg.as_ref().type_of())
1424                {
1425                    match left_ty.as_str() {
1426                        _ if left_ty != right_ty => {
1427                            notes.push(format!(
1428                                "One value is of type {left_ty} \
1429                                while the other is of type {right_ty}"
1430                            ));
1431                            notes.push("Values of different types can't be merged".to_owned());
1432                        }
1433                        "String" | "Number" | "Bool" | "Array" | "EnumTag" => {
1434                            push_merge_note(&mut notes, &left_ty);
1435                        }
1436                        "Function" | "MatchExpression" => {
1437                            notes.push(
1438                                "Both values are functions (or match expressions)".to_owned(),
1439                            );
1440                            notes.push(
1441                                "Functions can never be merged with anything else, \
1442                                even another function."
1443                                    .to_owned(),
1444                            );
1445                        }
1446                        "EnumVariant" => {
1447                            if let (
1448                                Term::EnumVariant { tag: tag1, .. },
1449                                Term::EnumVariant { tag: tag2, .. },
1450                            ) = (right_arg.as_ref(), left_arg.as_ref())
1451                            {
1452                                // The only possible cause of failure of merging two enum variants is a
1453                                // different tag (the arguments could fail to merge as well, but then
1454                                // the error would have them as the operands, not the enclosing enums).
1455                                notes.push(format!(
1456                                    "Both values are enum variants, \
1457                                    but their tags differ (`'{tag1}` vs `'{tag2}`)"
1458                                ));
1459                                notes.push(
1460                                    "Enum variants can only be \
1461                                    merged if they have the same tag"
1462                                        .to_owned(),
1463                                );
1464                            } else {
1465                                // This should not happen, but it's recoverable, so let's not fail
1466                                // in release mode.
1467                                debug_assert!(false);
1468
1469                                notes.push(
1470                                    "Primitive values (Number, String, EnumTag and Bool) \
1471                                    and arrays can only be merged if they are equal"
1472                                        .to_owned(),
1473                                );
1474                                notes.push("Enum variants must have the same tag.".to_owned());
1475                                notes.push("Functions can never be merged.".to_owned());
1476                            }
1477                        }
1478                        _ => {
1479                            // In other cases, we print a generic message
1480                            notes.push(
1481                                "Primitive values (Number, String, EnumTag and Bool) \
1482                                    and arrays can only be merged if they are equal"
1483                                    .to_owned(),
1484                            );
1485                            notes.push("Enum variants must have the same tag.".to_owned());
1486                            notes.push("Functions can never be merged.".to_owned());
1487                        }
1488                    }
1489                }
1490
1491                vec![Diagnostic::error()
1492                    .with_message("non mergeable terms")
1493                    .with_labels(labels)
1494                    .with_notes(notes)]
1495            }
1496            EvalError::UnboundIdentifier(ident, span_opt) => vec![Diagnostic::error()
1497                .with_message(format!("unbound identifier `{ident}`"))
1498                .with_labels(vec![primary_alt(
1499                    span_opt.into_opt(),
1500                    ident.to_string(),
1501                    files,
1502                )
1503                .with_message("this identifier is unbound")])],
1504            EvalError::InfiniteRecursion(_call_stack, span_opt) => {
1505                let labels = span_opt
1506                    .as_opt_ref()
1507                    .map(|span| vec![primary(span).with_message("recursive reference")])
1508                    .unwrap_or_default();
1509
1510                vec![Diagnostic::error()
1511                    .with_message("infinite recursion")
1512                    .with_labels(labels)]
1513            }
1514            EvalError::Other(msg, span_opt) => {
1515                let labels = span_opt
1516                    .as_opt_ref()
1517                    .map(|span| vec![primary(span).with_message("here")])
1518                    .unwrap_or_default();
1519
1520                vec![Diagnostic::error().with_message(msg).with_labels(labels)]
1521            }
1522            EvalError::InternalError(msg, span_opt) => {
1523                let labels = span_opt
1524                    .as_opt_ref()
1525                    .map(|span| vec![primary(span).with_message("here")])
1526                    .unwrap_or_default();
1527
1528                vec![Diagnostic::error()
1529                    .with_message(format!("internal error: {msg}"))
1530                    .with_labels(labels)
1531                    .with_notes(vec![String::from(INTERNAL_ERROR_MSG)])]
1532            }
1533            EvalError::SerializationError(err) => err.into_diagnostics(files),
1534            EvalError::DeserializationError(format, msg, span_opt) => {
1535                let labels = span_opt
1536                    .as_opt_ref()
1537                    .map(|span| vec![primary(span).with_message("here")])
1538                    .unwrap_or_default();
1539
1540                vec![Diagnostic::error()
1541                    .with_message(format!("{format} parse error: {msg}"))
1542                    .with_labels(labels)]
1543            }
1544            EvalError::DeserializationErrorWithInner { format, inner, pos } => {
1545                let mut diags = inner.into_diagnostics(files);
1546                if let Some(diag) = diags.first_mut() {
1547                    if let Some(span) = pos.as_opt_ref() {
1548                        diag.labels
1549                            .push(secondary(span).with_message("deserialized here"));
1550                    }
1551                    diag.notes.push(format!("while parsing {format}"));
1552                }
1553                diags
1554            }
1555            EvalError::IncomparableValues {
1556                eq_pos,
1557                left,
1558                right,
1559            } => {
1560                let mut labels = Vec::new();
1561
1562                if let Some(span) = eq_pos.as_opt_ref() {
1563                    labels.push(primary(span).with_message("in this equality comparison"));
1564                }
1565
1566                // Push the label for the right or left argument and return the type of said
1567                // argument.
1568                let mut push_label = |prefix: &str, term: &RichTerm| -> String {
1569                    let type_of = term
1570                        .term
1571                        .type_of()
1572                        .unwrap_or_else(|| String::from("<unevaluated>"));
1573
1574                    labels.push(
1575                        secondary_term(term, files)
1576                            .with_message(format!("{prefix} argument has type {type_of}")),
1577                    );
1578
1579                    type_of
1580                };
1581
1582                let left_type = push_label("left", &left);
1583                let right_type = push_label("right", &right);
1584
1585                vec![Diagnostic::error()
1586                    .with_message("cannot compare values for equality")
1587                    .with_labels(labels)
1588                    .with_notes(vec![format!(
1589                        "A {left_type} can't be meaningfully compared with a {right_type}"
1590                    )])]
1591            }
1592            EvalError::NonExhaustiveEnumMatch {
1593                expected,
1594                found,
1595                pos,
1596            } => {
1597                let tag_list = expected
1598                    .into_iter()
1599                    .map(|tag| {
1600                        // We let the pretty printer handle proper formatting
1601                        RichTerm::from(Term::Enum(tag)).to_string()
1602                    })
1603                    .collect::<Vec<_>>()
1604                    .join(", ");
1605
1606                let mut labels = Vec::new();
1607
1608                if let Some(span) = pos.into_opt() {
1609                    labels.push(primary(&span).with_message("in this match expression"));
1610                }
1611
1612                labels.push(
1613                    secondary_term(&found, files)
1614                        .with_message("this value doesn't match any branch"),
1615                );
1616
1617                vec![Diagnostic::error()
1618                    .with_message("unmatched pattern")
1619                    .with_labels(labels)
1620                    .with_notes(vec![
1621                        format!("This match expression isn't exhaustive, matching only the following pattern(s): `{tag_list}`"),
1622                        "But it has been applied to an argument which doesn't match any of those patterns".to_owned(),
1623                    ])]
1624            }
1625            EvalError::NonExhaustiveMatch { value, pos } => {
1626                let mut labels = Vec::new();
1627
1628                if let Some(span) = pos.into_opt() {
1629                    labels.push(primary(&span).with_message("in this match expression"));
1630                }
1631
1632                labels.push(
1633                    secondary_term(&value, files)
1634                        .with_message("this value doesn't match any branch"),
1635                );
1636
1637                vec![Diagnostic::error()
1638                    .with_message("unmatched pattern")
1639                    .with_labels(labels)]
1640            }
1641            EvalError::FailedDestructuring { value, pattern } => {
1642                let mut labels = Vec::new();
1643
1644                if let Some(span) = pattern.pos.into_opt() {
1645                    labels.push(primary(&span).with_message("this pattern"));
1646                }
1647
1648                labels
1649                    .push(secondary_term(&value, files).with_message("this value failed to match"));
1650
1651                vec![Diagnostic::error()
1652                    .with_message("destructuring failed")
1653                    .with_labels(labels)]
1654            }
1655            EvalError::IllegalPolymorphicTailAccess {
1656                action,
1657                label: contract_label,
1658                evaluated_arg,
1659                call_stack,
1660            } => blame_error::blame_diagnostics(
1661                files,
1662                contract_label,
1663                evaluated_arg,
1664                &call_stack,
1665                &format!(": {}", &action.message()),
1666            ),
1667            EvalError::UnaryPrimopTypeError {
1668                primop,
1669                expected,
1670                arg_pos,
1671                arg_evaluated,
1672            } => EvalError::TypeError {
1673                message: format!("{primop} expects its argument to be a {expected}"),
1674                expected,
1675                orig_pos: arg_pos,
1676                term: arg_evaluated,
1677            }
1678            .into_diagnostics(files),
1679            EvalError::NAryPrimopTypeError {
1680                primop,
1681                expected,
1682                arg_number,
1683                arg_pos,
1684                arg_evaluated,
1685                op_pos,
1686            } => {
1687                // The parsing of binary subtraction vs unary negation has
1688                // proven confusing in practice; for example, `add 1 -1` is
1689                // parsed as `(add 1) - 1`, so the `-` is a subtraction and
1690                // triggers a type error because `(add 1)` is not a number.
1691                //
1692                // We attempt to provide a useful hint for this case.
1693                //
1694                // We don't currently attempt to give a good hint for
1695                // `add -1 1` (parsed as `add - (1 1)`) because the evaluation
1696                // error hits in a context (the `(1 1)`) where we don't see
1697                // the `-`.
1698                let minus_pos = if primop == "(-)"
1699                    && arg_number == 1
1700                    && arg_evaluated.term.type_of().as_deref() == Some("Function")
1701                {
1702                    op_pos.into_opt()
1703                } else {
1704                    None
1705                };
1706
1707                let diags = EvalError::TypeError {
1708                    message: format!(
1709                        "{primop} expects its {} argument to be a {expected}",
1710                        cardinal(arg_number)
1711                    ),
1712                    expected,
1713                    orig_pos: arg_pos,
1714                    term: arg_evaluated,
1715                }
1716                .into_diagnostics(files);
1717
1718                if let Some(minus_pos) = minus_pos {
1719                    let label = secondary(&minus_pos)
1720                        .with_message("this expression was parsed as a binary subtraction");
1721                    diags
1722                        .into_iter()
1723                        .map(|d| {
1724                            d.with_label(label.clone())
1725                                .with_note(
1726                                    "for unary negation, add parentheses: write `(-42)` instead of `-42`",
1727                                )
1728                        })
1729                        .collect()
1730                } else {
1731                    diags
1732                }
1733            }
1734            EvalError::QueryNonRecord { pos, id, value } => {
1735                let label = format!(
1736                    "tried to query field `{}`, but the expression has type {}",
1737                    id,
1738                    value
1739                        .term
1740                        .type_of()
1741                        .unwrap_or_else(|| String::from("<unevaluated>")),
1742                );
1743
1744                let label = if let Some(span) = pos.into_opt() {
1745                    primary(&span).with_message(label)
1746                } else {
1747                    primary_term(&value, files).with_message(label)
1748                };
1749
1750                vec![Diagnostic::error()
1751                    .with_message("tried to query field of a non-record")
1752                    .with_labels(vec![label])]
1753            }
1754        }
1755    }
1756}
1757
1758/// Common functionality for formatting blame errors.
1759mod blame_error {
1760    use codespan_reporting::diagnostic::{Diagnostic, Label};
1761
1762    use crate::{
1763        eval::callstack::CallStack,
1764        files::{FileId, Files},
1765        label::{
1766            self,
1767            ty_path::{self, PathSpan},
1768            Polarity,
1769        },
1770        position::TermPos,
1771        term::RichTerm,
1772        typ::Type,
1773    };
1774
1775    use super::{primary, secondary, secondary_term};
1776
1777    /// Returns a title to be used by blame errors based on the `path` and `polarity`
1778    /// of the label.
1779    pub fn title(l: &label::Label) -> String {
1780        if ty_path::has_no_arrow(&l.path) {
1781            // An empty path or a path that contains only fields necessarily corresponds to
1782            // a positive blame
1783            assert_eq!(l.polarity, Polarity::Positive);
1784            match l.field_name {
1785                Some(ident) => format!("contract broken by the value of `{ident}`"),
1786                None => "contract broken by a value".to_owned(),
1787            }
1788        } else if l.polarity == Polarity::Positive {
1789            match l.field_name {
1790                Some(ident) => format!("contract broken by the function `{ident}`"),
1791                None => "contract broken by a function".to_owned(),
1792            }
1793        } else {
1794            match l.field_name {
1795                Some(ident) => format!("contract broken by the caller of `{ident}`"),
1796                None => "contract broken by the caller".to_owned(),
1797            }
1798        }
1799    }
1800
1801    /// Constructs the diagnostic labels used when raising a blame error.
1802    pub fn build_diagnostic_labels(
1803        evaluated_arg: Option<RichTerm>,
1804        blame_label: &label::Label,
1805        path_label: Label<FileId>,
1806        files: &mut Files,
1807    ) -> Vec<Label<FileId>> {
1808        let mut labels = vec![path_label];
1809
1810        if let Some(ref arg_pos) = blame_label.arg_pos.into_opt() {
1811            // In some cases, if the blame error is located in an argument or return value
1812            // of an higher order functions for example, the original argument position can
1813            // point to the builtin implementation contract like `func` or `record`, so
1814            // there's no good reason to show it. Note than even in that case, the
1815            // information contained at the argument index can still be useful.
1816            if !files.is_stdlib(arg_pos.src_id) {
1817                labels.push(primary(arg_pos).with_message("applied to this expression"));
1818            }
1819        }
1820
1821        // If we have a reference to the element in the cache that was being tested,
1822        // we can try to show more information about the final, evaluated value that is
1823        // responsible for the blame.
1824        if let Some(mut evaluated_arg) = evaluated_arg {
1825            match (evaluated_arg.pos, blame_label.arg_pos.as_opt_ref()) {
1826                // Avoid showing a position inside builtin contracts, it's rarely
1827                // informative.
1828                (TermPos::Original(val_pos), _) if files.is_stdlib(val_pos.src_id) => {
1829                    evaluated_arg.pos = TermPos::None;
1830                    labels.push(
1831                        secondary_term(&evaluated_arg, files)
1832                            .with_message("evaluated to this value"),
1833                    );
1834                }
1835                // Do not show the same thing twice: if arg_pos and val_pos are the same,
1836                // the first label "applied to this value" is sufficient.
1837                (TermPos::Original(ref val_pos), Some(arg_pos)) if val_pos == arg_pos => {}
1838                (TermPos::Original(ref val_pos), _) => {
1839                    labels.push(secondary(val_pos).with_message("evaluated to this expression"))
1840                }
1841                // If the final element is a direct reduct of the original value, rather
1842                // print the actual value than referring to the same position as
1843                // before.
1844                (TermPos::Inherited(ref val_pos), Some(arg_pos)) if val_pos == arg_pos => {
1845                    evaluated_arg.pos = TermPos::None;
1846                    labels.push(
1847                        secondary_term(&evaluated_arg, files)
1848                            .with_message("evaluated to this value"),
1849                    );
1850                }
1851                // Finally, if the parameter reduced to a value which originates from a
1852                // different expression, show both the expression and the value.
1853                (TermPos::Inherited(ref val_pos), _) => {
1854                    if !files.is_stdlib(val_pos.src_id) {
1855                        labels
1856                            .push(secondary(val_pos).with_message("evaluated to this expression"));
1857                    }
1858
1859                    evaluated_arg.pos = TermPos::None;
1860                    labels.push(
1861                        secondary_term(&evaluated_arg, files)
1862                            .with_message("evaluated to this value"),
1863                    );
1864                }
1865                (TermPos::None, _) => labels.push(
1866                    secondary_term(&evaluated_arg, files).with_message("evaluated to this value"),
1867                ),
1868            }
1869        }
1870
1871        labels
1872    }
1873
1874    pub trait ExtendWithCallStack {
1875        fn extend_with_call_stack(&mut self, files: &Files, call_stack: &CallStack);
1876    }
1877
1878    impl ExtendWithCallStack for Vec<Diagnostic<FileId>> {
1879        fn extend_with_call_stack(&mut self, files: &Files, call_stack: &CallStack) {
1880            let (calls, curr_call) = call_stack.group_by_calls(files);
1881            let diag_curr_call = curr_call.map(|cdescr| {
1882                let name = cdescr
1883                    .head
1884                    .map(|ident| ident.to_string())
1885                    .unwrap_or_else(|| String::from("<func>"));
1886                Diagnostic::note().with_labels(vec![
1887                    primary(&cdescr.span).with_message(format!("While calling to {name}"))
1888                ])
1889            });
1890            let diags =
1891                calls.into_iter().enumerate().map(|(i, cdescr)| {
1892                    let name = cdescr
1893                        .head
1894                        .map(|ident| ident.to_string())
1895                        .unwrap_or_else(|| String::from("<func>"));
1896                    Diagnostic::note().with_labels(vec![secondary(&cdescr.span)
1897                        .with_message(format!("({}) calling {}", i + 1, name))])
1898                });
1899
1900            self.extend(diag_curr_call);
1901            self.extend(diags);
1902        }
1903    }
1904
1905    /// Calls [`crate::label::ty_path::span`], but if the call returns `None` (the position of the
1906    /// subtype isn't defined), [path_span] pretty-prints the type inside a new source, parses it,
1907    /// and calls `ty_path::span`. This new type is guaranteed to have all of its positions set,
1908    /// providing a definite `PathSpan`. This is similar to the behavior of [`super::primary_alt`].
1909    pub fn path_span(files: &mut Files, path: &[ty_path::Elem], ty: &Type) -> PathSpan {
1910        use crate::parser::{grammar::FixedTypeParser, lexer::Lexer, ErrorTolerantParserCompat};
1911
1912        ty_path::span(path.iter().peekable(), ty)
1913            .or_else(|| {
1914                let type_pprinted = format!("{ty}");
1915                let file_id = files.add(super::UNKNOWN_SOURCE_NAME, type_pprinted.clone());
1916
1917                let (ty_with_pos, _) = FixedTypeParser::new()
1918                    .parse_tolerant_compat(file_id, Lexer::new(&type_pprinted))
1919                    .unwrap();
1920
1921                ty_path::span(path.iter().peekable(), &ty_with_pos)
1922            })
1923            .expect(
1924                "path_span: we pretty-printed and parsed again the type of a label, \
1925                so it must have all of its position defined, but `ty_path::span` returned `None`",
1926            )
1927    }
1928
1929    /// Generate a codespan label that describes the [type path][crate::label::ty_path::Path] of a
1930    /// (Nickel) label.
1931    pub fn report_ty_path(files: &mut Files, l: &label::Label) -> Label<FileId> {
1932        let PathSpan {
1933            span,
1934            last,
1935            last_arrow_elem,
1936        } = path_span(files, &l.path, &l.typ);
1937
1938        let msg = match (last, last_arrow_elem) {
1939            // The type path doesn't contain any arrow, and the failing subcontract is the
1940            // contract for the elements of an array
1941            (Some(ty_path::Elem::Array), None) => "expected array element type",
1942            // The type path doesn't contain any arrow, and the failing subcontract is the contract
1943            // for the fields of a dictionary
1944            (Some(ty_path::Elem::Dict), None) => "expected dictionary field type",
1945            // The type path doesn't contain any arrow, and the failing subcontract is the contract
1946            // for the field of a record
1947            (Some(ty_path::Elem::Field(_)), None) => "expected field type",
1948            // The original contract contains an arrow, and the path is only composed of codomains.
1949            // Then polarity is necessarily true and the cause of the blame is the return value of
1950            // the function
1951            (Some(_), Some(ty_path::Elem::Codomain)) if ty_path::has_no_dom(&l.path) => {
1952                "expected return type"
1953            }
1954            // The original contract contains an arrow, the subcontract is the domain of an
1955            // arrow, and the polarity is positive. The function is to be blamed for calling an
1956            // argument on a value of the wrong type.
1957            (Some(_), Some(ty_path::Elem::Domain)) if l.polarity == Polarity::Positive => {
1958                "expected type of an argument of an inner call"
1959            }
1960            // The original contract contains an arrow, the subcontract is the codomain of an
1961            // arrow, and the polarity is positive. The function is to be blamed for calling a
1962            // higher-order function argument on a function which returns a value of the wrong
1963            // type.
1964            (Some(_), Some(ty_path::Elem::Codomain)) if l.polarity == Polarity::Positive => {
1965                "expected return type of a sub-function passed as an argument of an inner call"
1966            }
1967            // The original contract contains an arrow, the subcontract is the domain of an arrow,
1968            // and the polarity is negative. The caller is to be blamed for providing an argument
1969            // of the wrong type.
1970            (Some(_), Some(ty_path::Elem::Domain)) => {
1971                "expected type of the argument provided by the caller"
1972            }
1973            // The original contract contains an arrow, the subcontract is the codomain of an
1974            // arrow, and the polarity is negative. The caller is to be blamed for providing a
1975            // higher-order function argument which returns a value of the wrong type.
1976            (Some(_), Some(ty_path::Elem::Codomain)) => {
1977                "expected return type of a function provided by the caller"
1978            }
1979            // If there is a last arrow element, then there must be last element
1980            (None, Some(_)) => panic!(
1981                "blame error reporting: inconsistent path analysis, last_elem\
1982is None but last_arrow_elem is Some"
1983            ),
1984            _ => "expected type",
1985        };
1986
1987        secondary(&span).with_message(msg.to_owned())
1988    }
1989
1990    /// Generate codespan diagnostics from blame data. Mostly used by `into_diagnostics`
1991    /// implementations.
1992    ///
1993    /// # Parameters
1994    ///
1995    /// The `msg_addendum` is used to customize the main error message. It's inserted between the
1996    /// leading "contract broken by .." and the custom contract diagnostic message in tail
1997    /// position.
1998    pub fn blame_diagnostics(
1999        files: &mut Files,
2000        mut label: label::Label,
2001        evaluated_arg: Option<RichTerm>,
2002        call_stack: &CallStack,
2003        msg_addendum: &str,
2004    ) -> Vec<Diagnostic<FileId>> {
2005        use std::fmt::Write;
2006
2007        let mut diagnostics = Vec::new();
2008
2009        // Contract diagnostics are stacked up in order, which means the last one is
2010        // usually the latest/most precise/most relevant. We ignore empty diagnostics and
2011        // iterate in reverse order, to show the most relevant diagnostics first.
2012        let mut contract_diagnostics = std::mem::take(&mut label.diagnostics)
2013            .into_iter()
2014            .rev()
2015            .filter(|diag| !label::ContractDiagnostic::is_empty(diag));
2016        let head_contract_diagnostic = contract_diagnostics.next();
2017
2018        // The addendum and the custom contract diagnostic are important, so we want to display
2019        // them as part of the main error message. However, they can make the message quite long.
2020        // To avoid clutter, we display each component on a new line, indented with respect to the
2021        // initial "error: "
2022        let new_msg_block = "\n       ";
2023        let mut msg = title(&label);
2024
2025        if !msg_addendum.is_empty() {
2026            // unwrap(): write shouldn't fail on a String
2027            write!(&mut msg, "{new_msg_block}{msg_addendum}").unwrap();
2028        }
2029
2030        if let Some(contract_msg) = head_contract_diagnostic
2031            .as_ref()
2032            .and_then(|diag| diag.message.as_ref())
2033        {
2034            // unwrap(): write shouldn't fail on a String
2035            write!(&mut msg, "{new_msg_block}{}", &super::escape(contract_msg)).unwrap();
2036        }
2037
2038        let contract_notes = head_contract_diagnostic
2039            .map(|diag| diag.notes)
2040            .unwrap_or_default();
2041        let path_label = report_ty_path(files, &label);
2042
2043        let labels = build_diagnostic_labels(evaluated_arg, &label, path_label, files);
2044
2045        // If there are notes in the head contract diagnostic, we build the first
2046        // diagnostic using them and will put potential generated notes on higher-order
2047        // contracts in a following diagnostic.
2048        if !contract_notes.is_empty() {
2049            diagnostics.push(
2050                Diagnostic::error()
2051                    .with_message(msg)
2052                    .with_labels(labels)
2053                    .with_notes(contract_notes),
2054            );
2055        } else {
2056            diagnostics.push(Diagnostic::error().with_message(msg).with_labels(labels));
2057        }
2058
2059        for ctr_diag in contract_diagnostics {
2060            let mut msg = String::from("from a parent contract violation");
2061
2062            if let Some(msg_contract) = ctr_diag.message {
2063                msg.push_str(": ");
2064                msg.push_str(&super::escape(&msg_contract));
2065            }
2066
2067            diagnostics.push(
2068                Diagnostic::note()
2069                    .with_message(msg)
2070                    .with_notes(ctr_diag.notes),
2071            );
2072        }
2073
2074        if !ty_path::has_no_dom(&label.path) {
2075            diagnostics.extend_with_call_stack(files, call_stack);
2076        }
2077
2078        diagnostics
2079    }
2080}
2081
2082impl IntoDiagnostics for ParseError {
2083    fn into_diagnostics(self, files: &mut Files) -> Vec<Diagnostic<FileId>> {
2084        let diagnostic = match self {
2085            ParseError::UnexpectedEOF(file_id, _expected) => {
2086                let end = files.source_span(file_id).end;
2087                Diagnostic::error()
2088                    .with_message(format!(
2089                        "unexpected end of file when parsing {}",
2090                        files.name(file_id).to_string_lossy()
2091                    ))
2092                    .with_labels(vec![primary(&RawSpan {
2093                        start: end,
2094                        end,
2095                        src_id: file_id,
2096                    })])
2097            }
2098            ParseError::UnexpectedToken(span, _expected) => Diagnostic::error()
2099                .with_message("unexpected token")
2100                .with_labels(vec![primary(&span)]),
2101            ParseError::ExtraToken(span) => Diagnostic::error()
2102                .with_message("superfluous unexpected token")
2103                .with_labels(vec![primary(&span)]),
2104            ParseError::UnmatchedCloseBrace(span) => Diagnostic::error()
2105                .with_message("unmatched closing brace \'}\'")
2106                .with_labels(vec![primary(&span)]),
2107            ParseError::InvalidEscapeSequence(span) => Diagnostic::error()
2108                .with_message("invalid escape sequence")
2109                .with_labels(vec![primary(&span)]),
2110            ParseError::InvalidAsciiEscapeCode(span) => Diagnostic::error()
2111                .with_message("invalid ascii escape code")
2112                .with_labels(vec![primary(&span)]),
2113            ParseError::StringDelimiterMismatch {
2114                opening_delimiter,
2115                closing_delimiter,
2116            } => Diagnostic::error()
2117                .with_message("string closing delimiter has too many `%`")
2118                .with_labels(vec![
2119                    primary(&closing_delimiter).with_message("the closing delimiter"),
2120                    secondary(&opening_delimiter).with_message("the opening delimiter"),
2121                ])
2122                .with_notes(vec![
2123                    "A special string must be opened and closed with the same number of `%` \
2124                    in the corresponding delimiters."
2125                        .into(),
2126                    "Try removing the superflous `%` in the closing delimiter".into(),
2127                ]),
2128            ParseError::ExternalFormatError(format, msg, span_opt) => {
2129                let labels = span_opt
2130                    .as_ref()
2131                    .map(|span| vec![primary(span)])
2132                    .unwrap_or_default();
2133
2134                Diagnostic::error()
2135                    .with_message(format!("{format} parse error: {msg}"))
2136                    .with_labels(labels)
2137            }
2138            ParseError::UnboundTypeVariables(idents) => Diagnostic::error()
2139                .with_message(format!(
2140                    "unbound type variable(s): {}",
2141                    idents
2142                        .iter()
2143                        .map(|x| format!("`{x}`"))
2144                        .collect::<Vec<_>>()
2145                        .join(",")
2146                ))
2147                .with_labels(
2148                    idents
2149                        .into_iter()
2150                        .filter_map(|id| id.pos.into_opt())
2151                        .map(|span| primary(&span).with_message("this identifier is unbound"))
2152                        .collect(),
2153                ),
2154            ParseError::InvalidRecordType {
2155                record_span,
2156                tail_span,
2157                cause,
2158            } => {
2159                let mut labels: Vec<_> = std::iter::once(primary(&record_span))
2160                    .chain(cause.labels())
2161                    .collect();
2162                let mut notes: Vec<_> = std::iter::once(
2163                    "A record type is a literal composed only of type annotations, of the \
2164                        form `<field>: <type>`."
2165                        .into(),
2166                )
2167                .chain(cause.notes())
2168                .collect();
2169
2170                if let Some(tail_span) = tail_span {
2171                    labels.push(secondary(&tail_span).with_message("tail"));
2172                    notes.push(
2173                        "This literal was interpreted as a record type because it has a \
2174                        polymorphic tail; record values cannot have tails."
2175                            .into(),
2176                    );
2177                } else {
2178                    notes.push(
2179                        "This literal was interpreted as a record type because it has \
2180                        fields with type annotations but no value definitions; to make \
2181                        this a record value, assign values to its fields."
2182                            .into(),
2183                    );
2184                };
2185                Diagnostic::error()
2186                    .with_message("invalid record literal")
2187                    .with_labels(labels)
2188                    .with_notes(notes)
2189            }
2190            ParseError::RecursiveLetPattern(span) => Diagnostic::error()
2191                .with_message("recursive destructuring is not supported")
2192                .with_labels(vec![primary(&span)])
2193                .with_notes(vec![
2194                    "A destructuring let-binding can't be recursive. Try removing the `rec` \
2195                        from `let rec`."
2196                        .into(),
2197                    "You can reference other fields of a record recursively \
2198                        from within a field, so you might not need the recursive let."
2199                        .into(),
2200                ]),
2201            ParseError::PatternInLetBlock(span) => Diagnostic::error()
2202                .with_message("destructuring patterns are not currently permitted in let blocks")
2203                .with_labels(vec![primary(&span)])
2204                .with_notes(vec!["Try re-writing your let block as nested `let ... in` expressions.".into()]),
2205            ParseError::TypeVariableKindMismatch { ty_var, span } => Diagnostic::error()
2206                .with_message(format!(
2207                    "the type variable `{ty_var}` is used in conflicting ways"
2208                ))
2209                .with_labels(vec![primary(&span)])
2210                .with_notes(vec![
2211                    "Type variables may be used either as types, polymorphic record tails, \
2212                    or polymorphic enum tails."
2213                        .into(),
2214                    "Using the same type variable as more than one category at the same time \
2215                    is forbidden."
2216                        .into(),
2217                ]),
2218            ParseError::TypedFieldWithoutDefinition {
2219                field_span,
2220                annot_span,
2221            } => Diagnostic::error()
2222                .with_message("statically typed field without a definition")
2223                .with_labels(vec![
2224                    primary(&field_span).with_message("this field doesn't have a definition"),
2225                    secondary(&annot_span).with_message("but it has a type annotation"),
2226                ])
2227                .with_notes(vec![
2228                    "A static type annotation must be attached to an expression but \
2229                    this field doesn't have a definition."
2230                        .into(),
2231                    "Did you mean to use `|` instead of `:`, for example when defining a \
2232                    record contract?"
2233                        .into(),
2234                    "Typed fields without definitions are only allowed inside \
2235                    record types, but the enclosing record literal doesn't qualify as a \
2236                    record type. Please refer to the manual for the defining conditions of a \
2237                    record type."
2238                        .into(),
2239                ]),
2240            ParseError::InterpolationInStaticPath {
2241                input: _,
2242                path_elem_span,
2243            } => Diagnostic::error()
2244                .with_message("string interpolation is forbidden within a query")
2245                .with_labels(vec![primary(&path_elem_span)])
2246                .with_notes(vec![
2247                    "Field paths don't support string interpolation when querying \
2248                        metadata."
2249                        .into(),
2250                    "Only identifiers and simple string literals are allowed.".into(),
2251                ]),
2252            ParseError::DuplicateIdentInRecordPattern { ident, prev_ident } => Diagnostic::error()
2253                .with_message(format!(
2254                    "duplicated binding `{}` in record pattern",
2255                    ident.label()
2256                ))
2257                .with_labels(vec![
2258                    secondary(&prev_ident.pos.unwrap()).with_message("previous binding here"),
2259                    primary(&ident.pos.unwrap()).with_message("duplicated binding here"),
2260                ]),
2261            ParseError::DuplicateIdentInLetBlock { ident, prev_ident } => Diagnostic::error()
2262                .with_message(format!(
2263                    "duplicated binding `{}` in let block",
2264                    ident.label()
2265                ))
2266                .with_labels(vec![
2267                    secondary(&prev_ident.pos.unwrap()).with_message("previous binding here"),
2268                    primary(&ident.pos.unwrap()).with_message("duplicated binding here"),
2269                ]),
2270            ParseError::DisabledFeature { feature, span } => Diagnostic::error()
2271                .with_message("interpreter compiled without required features")
2272                .with_labels(vec![primary(&span).with_message(format!(
2273                    "this syntax is only supported with the `{feature}` feature enabled"
2274                ))])
2275                .with_notes(vec![format!(
2276                    "Recompile nickel with `--features {}`",
2277                    feature
2278                )]),
2279            ParseError::InvalidContract(span) => Diagnostic::error()
2280                .with_message("invalid contract expression")
2281                .with_labels(vec![primary(&span).with_message("this can't be used as a contract")])
2282                .with_notes(vec![
2283                    "This expression is used as a contract as part of an annotation or a type expression."
2284                        .to_owned(),
2285                    "Only functions and records might be valid contracts".to_owned(),
2286                ]),
2287            ParseError::InvalidImportFormat{span} => Diagnostic::error()
2288                .with_message("unknown import format tag")
2289                .with_labels(vec![primary(&span)])
2290                .with_notes(vec![
2291                    "Examples of valid format tags: 'Nickel, 'Json, 'Yaml, 'Toml, 'Text"
2292                        .to_owned()
2293                ]),
2294            ParseError::UnknownSigilSelector { selector, span } => {
2295                Diagnostic::error()
2296                .with_message(format!("unknown sigil selector `{selector}`"))
2297                .with_labels(vec![primary(&span)])
2298                .with_note("Available selectors are currently: `env`")
2299            }
2300            ParseError::UnknownSigilAttribute { selector, attribute, span } => {
2301                Diagnostic::error()
2302                .with_message(format!("unknown sigil attribute `{attribute}`"))
2303                .with_labels(vec![primary(&span).with_message(format!("unknown attribute for sigil selector `{selector}`"))])
2304                .with_note(available_sigil_attrs_note(&selector))
2305            }
2306            ParseError::SigilExprMissingColon(span) => {
2307                Diagnostic::error()
2308                .with_message("missing sigil expression separator `:`")
2309                .with_labels(vec![primary(&span)])
2310                .with_notes(vec![
2311                    "The CLI sigil expression syntax is `@<selector>:<argument>` or `@<selector>/<attribute>:<argument>`".to_owned(),
2312                    "The provided sigil expression is missing the `:` separator.".to_owned(),
2313                ])
2314            }
2315            ParseError::MultipleFieldDecls { ident, include_span, other_span } => Diagnostic::error()
2316                .with_message(format!(
2317                    "multiple declarations for included field `{ident}`",
2318                ))
2319                .with_labels(vec![
2320                    primary(&include_span).with_message("included here"),
2321                    secondary(&other_span).with_message("but also declared here"),
2322                ])
2323                .with_notes(vec![
2324                    "Piecewise definitions involving an included field are currently not supported".to_owned()
2325                ]),
2326        };
2327
2328        vec![diagnostic]
2329    }
2330}
2331
/// Returns a help note describing the available attributes for a supported sigil
/// selector.
// It's currently trivial, but might be expanded in the future
fn available_sigil_attrs_note(selector: &str) -> String {
    let mut note = format!("No attributes are available for sigil selector `{selector}`.");
    note.push_str(&format!(
        " Use the selector directly as in `@{selector}:<argument>`"
    ));
    note
}
2337
2338impl IntoDiagnostics for TypecheckError {
2339    fn into_diagnostics(self, files: &mut Files) -> Vec<Diagnostic<FileId>> {
2340        self.borrow_error().into_diagnostics(files)
2341    }
2342}
2343
2344impl<'ast> IntoDiagnostics for &'_ TypecheckErrorData<'ast> {
2345    fn into_diagnostics(self, files: &mut Files) -> Vec<Diagnostic<FileId>> {
2346        fn mk_expr_label(span_opt: &TermPos) -> Vec<Label<FileId>> {
2347            span_opt
2348                .as_opt_ref()
2349                .map(|span| vec![primary(span).with_message("this expression")])
2350                .unwrap_or_default()
2351        }
2352
2353        fn mk_expected_msg<T: std::fmt::Display>(expected: &T) -> String {
2354            format!("Expected an expression of type `{expected}`")
2355        }
2356
2357        fn mk_inferred_msg<T: std::fmt::Display>(inferred: &T) -> String {
2358            format!("Found an expression of type `{inferred}`")
2359        }
2360
2361        match self {
2362            TypecheckErrorData::UnboundIdentifier(id) =>
2363            // Use the same diagnostic as `EvalError::UnboundIdentifier` for consistency.
2364            {
2365                EvalError::UnboundIdentifier(*id, id.pos).into_diagnostics(files)
2366            }
2367            TypecheckErrorData::MissingRow {
2368                id,
2369                expected,
2370                inferred,
2371                pos,
2372            } => vec![Diagnostic::error()
2373                .with_message(format!("type error: missing row `{id}`"))
2374                .with_labels(mk_expr_label(pos))
2375                .with_notes(vec![
2376                    format!(
2377                        "{}, which contains the field `{id}`",
2378                        mk_expected_msg(expected)
2379                    ),
2380                    format!(
2381                        "{}, which does not contain the field `{id}`",
2382                        mk_inferred_msg(inferred)
2383                    ),
2384                ])],
2385            TypecheckErrorData::MissingDynTail {
2386                expected,
2387                inferred,
2388                pos,
2389            } => vec![Diagnostic::error()
2390                .with_message(String::from("type error: missing dynamic tail `; Dyn`"))
2391                .with_labels(mk_expr_label(pos))
2392                .with_notes(vec![
2393                    format!(
2394                        "{}, which contains the tail `; Dyn`",
2395                        mk_expected_msg(expected)
2396                    ),
2397                    format!(
2398                        "{}, which does not contain the tail `; Dyn`",
2399                        mk_inferred_msg(inferred)
2400                    ),
2401                ])],
2402            TypecheckErrorData::ExtraRow {
2403                id,
2404                expected,
2405                inferred,
2406                pos,
2407            } => vec![Diagnostic::error()
2408                .with_message(format!("type error: extra row `{id}`"))
2409                .with_labels(mk_expr_label(pos))
2410                .with_notes(vec![
2411                    format!(
2412                        "{}, which does not contain the field `{id}`",
2413                        mk_expected_msg(expected)
2414                    ),
2415                    format!(
2416                        "{}, which contains the extra field `{id}`",
2417                        mk_inferred_msg(inferred)
2418                    ),
2419                ])],
2420            TypecheckErrorData::ExtraDynTail {
2421                expected,
2422                inferred,
2423                pos,
2424            } => vec![Diagnostic::error()
2425                .with_message(String::from("type error: extra dynamic tail `; Dyn`"))
2426                .with_labels(mk_expr_label(pos))
2427                .with_notes(vec![
2428                    format!(
2429                        "{}, which does not contain the tail `; Dyn`",
2430                        mk_expected_msg(expected)
2431                    ),
2432                    format!(
2433                        "{}, which contains the extra tail `; Dyn`",
2434                        mk_inferred_msg(inferred)
2435                    ),
2436                ])],
2437            TypecheckErrorData::UnboundTypeVariable(ident) => vec![Diagnostic::error()
2438                .with_message(format!("unbound type variable `{ident}`"))
2439                .with_labels(vec![primary_alt(
2440                    ident.pos.into_opt(),
2441                    ident.to_string(),
2442                    files,
2443                )
2444                .with_message("this type variable is unbound")])
2445                .with_notes(vec![format!(
2446                    "Did you forget to put a `forall {ident}.` somewhere in the enclosing type?"
2447                )])],
2448            TypecheckErrorData::TypeMismatch {
2449                expected,
2450                inferred,
2451                pos,
2452            } => {
2453                fn addendum<'ast>(ty: &Type<'ast>) -> &'static str {
2454                    if ty.typ.is_contract() {
2455                        " (a contract)"
2456                    } else {
2457                        ""
2458                    }
2459                }
2460                let last_note = if expected.typ.is_contract() ^ inferred.typ.is_contract() {
2461                    "Static types and contracts are not compatible"
2462                } else {
2463                    "These types are not compatible"
2464                };
2465
2466                vec![Diagnostic::error()
2467                    .with_message("incompatible types")
2468                    .with_labels(mk_expr_label(pos))
2469                    .with_notes(vec![
2470                        format!("{}{}", mk_expected_msg(expected), addendum(expected),),
2471                        format!("{}{}", mk_inferred_msg(inferred), addendum(inferred),),
2472                        String::from(last_note),
2473                    ])]
2474            }
2475            TypecheckErrorData::RecordRowMismatch {
2476                id,
2477                expected,
2478                inferred,
2479                cause: ref err,
2480                pos,
2481            } => {
2482                let mut err = err;
2483                // If the unification error is on a nested field, we will have a succession of
2484                // `RowMismatch` errors wrapping the underlying error. In this case, instead of
2485                // showing a cascade of similar error messages, we determine the full path of the
2486                // nested field (e.g. `pkg.subpkg1.meta.url`) and only show once the row mismatch
2487                // error followed by the underlying error.
2488                let mut path = vec![id.ident()];
2489
2490                while let TypecheckErrorData::RecordRowMismatch {
2491                    id: id_next,
2492                    cause: next,
2493                    ..
2494                } = &**err
2495                {
2496                    path.push(id_next.ident());
2497                    err = next;
2498                }
2499
2500                let path_str: Vec<String> = path
2501                    .clone()
2502                    .into_iter()
2503                    .map(|ident| format!("{ident}"))
2504                    .collect();
2505                let field = path_str.join(".");
2506
2507                let mk_expected_row_msg = |field, ty| {
2508                    format!("Expected an expression of a record type with the row `{field}: {ty}`")
2509                };
2510                let mk_inferred_row_msg = |field, ty| {
2511                    format!("Found an expression of a record type with the row `{field}: {ty}`")
2512                };
2513
2514                //TODO: we should rather have RowMismatch hold a rows, instead of a general type,
2515                //than doing this match.
2516                let note1 = if let TypeF::Record(rrows) = &expected.typ {
2517                    match rrows.find_path(path.as_slice()) {
2518                        Some(row) => mk_expected_row_msg(&field, row.typ),
2519                        None => mk_expected_msg(&expected),
2520                    }
2521                } else {
2522                    mk_expected_msg(&expected)
2523                };
2524
2525                let note2 = if let TypeF::Record(rrows) = &inferred.typ {
2526                    match rrows.find_path(path.as_slice()) {
2527                        Some(row) => mk_inferred_row_msg(&field, row.typ),
2528                        None => mk_inferred_msg(&inferred),
2529                    }
2530                } else {
2531                    mk_inferred_msg(inferred)
2532                };
2533
2534                let mut diags = vec![Diagnostic::error()
2535                    .with_message("incompatible record rows declaration")
2536                    .with_labels(mk_expr_label(pos))
2537                    .with_notes(vec![
2538                        note1,
2539                        note2,
2540                        format!("Could not match the two declarations of `{field}`"),
2541                    ])];
2542
2543                // We generate a diagnostic for the underlying error, but append a prefix to the
2544                // error message to make it clear that this is not a separate error but a more
2545                // precise description of why the unification of a row failed.
2546                diags.extend(err.into_diagnostics(files).into_iter().map(|mut diag| {
2547                    diag.message = format!("while typing field `{}`: {}", field, diag.message);
2548                    diag
2549                }));
2550                diags
2551            }
2552            TypecheckErrorData::EnumRowMismatch {
2553                id,
2554                expected,
2555                inferred,
2556                cause,
2557                pos,
2558            } => {
2559                let mk_expected_row_msg = |row| {
2560                    format!("Expected an expression of an enum type with the enum row `{row}`")
2561                };
2562                let mk_inferred_row_msg =
2563                    |row| format!("Found an expression of an enum type with the enum row `{row}`");
2564
2565                //TODO: we should rather have RowMismatch hold enum rows, instead of a general
2566                //type, to avoid doing this match.
2567                let note1 = if let TypeF::Enum(erows) = &expected.typ {
2568                    if let Some(row) = erows.find_row(id.ident()) {
2569                        mk_expected_row_msg(row)
2570                    } else {
2571                        mk_expected_msg(expected)
2572                    }
2573                } else {
2574                    mk_expected_msg(expected)
2575                };
2576
2577                let note2 = if let TypeF::Enum(erows) = &inferred.typ {
2578                    if let Some(row) = erows.find_row(id.ident()) {
2579                        mk_inferred_row_msg(row)
2580                    } else {
2581                        mk_inferred_msg(expected)
2582                    }
2583                } else {
2584                    mk_inferred_msg(inferred)
2585                };
2586
2587                let mut diags = vec![Diagnostic::error()
2588                    .with_message("incompatible enum rows declaration")
2589                    .with_labels(mk_expr_label(pos))
2590                    .with_notes(vec![
2591                        note1,
2592                        note2,
2593                        format!("Could not match the two declarations of `{id}`"),
2594                    ])];
2595
2596                // We generate a diagnostic for the underlying error if any, but append a prefix to
2597                // the error message to make it clear that this is not a separate error but a more
2598                // precise description of why the unification of a row failed.
2599                if let Some(err) = cause {
2600                    diags.extend((*err).into_diagnostics(files).into_iter().map(|mut diag| {
2601                        diag.message = format!("while typing enum row `{id}`: {}", diag.message);
2602                        diag
2603                    }));
2604                }
2605
2606                diags
2607            }
2608            TypecheckErrorData::RecordRowConflict {
2609                row,
2610                expected,
2611                inferred,
2612                pos,
2613            } => {
2614                let mut diags = Vec::new();
2615
2616                diags.push(
2617                    Diagnostic::error()
2618                        .with_message("multiple record row declarations")
2619                        .with_labels(mk_expr_label(pos))
2620                        .with_notes(vec![
2621                            format!("Found an expression with the row `{row}`"),
2622                            format!(
2623                                "But this row appears inside another record type, \
2624                                which already has a diffent declaration for the field `{}`",
2625                                row.id
2626                            ),
2627                            String::from(
2628                                "A type cannot have two conflicting declarations for the same row",
2629                            ),
2630                        ]),
2631                );
2632
2633                diags.push(
2634                    Diagnostic::note()
2635                        .with_message("while matching types")
2636                        .with_notes(vec![
2637                            format!("Expected type {expected}"),
2638                            format!("With inferred type {inferred}"),
2639                        ]),
2640                );
2641
2642                diags
2643            }
2644            TypecheckErrorData::EnumRowConflict {
2645                row,
2646                expected,
2647                inferred,
2648                pos,
2649            } => {
2650                let mut diags = Vec::new();
2651
2652                diags.push(
2653                    Diagnostic::error()
2654                        .with_message("multiple enum row declarations")
2655                        .with_labels(mk_expr_label(pos))
2656                        .with_notes(vec![
2657                            format!("Found an expression with the row `{row}`"),
2658                            format!(
2659                                "But this row appears inside another enum type, \
2660                                which already has a diffent declaration for the tag `{}`",
2661                                row.id
2662                            ),
2663                            String::from(
2664                                "A type cannot have two conflicting declarations for the same row",
2665                            ),
2666                        ]),
2667                );
2668
2669                diags.push(
2670                    Diagnostic::note()
2671                        .with_message("while matching types")
2672                        .with_notes(vec![
2673                            format!("Expected type {expected}"),
2674                            format!("With inferred type {inferred}"),
2675                        ]),
2676                );
2677
2678                diags
2679            }
2680            TypecheckErrorData::ArrowTypeMismatch {
2681                expected,
2682                inferred,
2683                type_path,
2684                cause,
2685                pos,
2686            } => {
2687                let PathSpan {
2688                    span: expd_span, ..
2689                } = blame_error::path_span(files, type_path, &expected.to_mainline());
2690                let PathSpan {
2691                    span: actual_span, ..
2692                } = blame_error::path_span(files, type_path, &inferred.to_mainline());
2693
2694                let mut labels = vec![
2695                    secondary(&expd_span).with_message("this part of the expected type"),
2696                    secondary(&actual_span)
2697                        .with_message("does not match this part of the inferred type"),
2698                ];
2699                labels.extend(mk_expr_label(pos));
2700
2701                let mut diags = vec![Diagnostic::error()
2702                    .with_message("function types mismatch")
2703                    .with_labels(labels)
2704                    .with_notes(vec![
2705                        mk_expected_msg(expected),
2706                        mk_inferred_msg(inferred),
2707                        String::from("Could not match the two function types"),
2708                    ])];
2709
2710                // We generate a diagnostic for the underlying error, but append a prefix to the
2711                // error message to make it clear that this is not a separated error but a more
2712                // precise description of why the unification of the row failed.
2713                match &**cause {
2714                    // If the underlying error is a type mismatch, printing won't add any useful
2715                    // information, so we just ignore it.
2716                    TypecheckErrorData::TypeMismatch { .. } => (),
2717                    error => {
2718                        diags.extend(error.into_diagnostics(files).into_iter().map(|mut diag| {
2719                            diag.message =
2720                                format!("while matching function types: {}", diag.message);
2721                            diag
2722                        }));
2723                    }
2724                }
2725
2726                diags
2727            }
2728            TypecheckErrorData::ForallParametricityViolation {
2729                kind,
2730                tail,
2731                violating_type,
2732                pos,
2733            } => {
2734                let tail_kind = match kind {
2735                    VarKindDiscriminant::Type => "type",
2736                    VarKindDiscriminant::EnumRows => "enum tail",
2737                    VarKindDiscriminant::RecordRows => "record tail",
2738                };
2739                vec![Diagnostic::error()
2740                    .with_message(format!(
2741                        "values of type `{violating_type}` are not guaranteed to be compatible \
2742                        with polymorphic {tail_kind} `{tail}`"
2743                    ))
2744                    .with_labels(mk_expr_label(pos))
2745                    .with_notes(vec![
2746                        "Type variables introduced in a `forall` range over all possible types."
2747                            .to_owned(),
2748                    ])]
2749            }
2750            TypecheckErrorData::CtrTypeInTermPos { contract, pos } => {
2751                vec![Diagnostic::error()
2752                    .with_message(
2753                        "types containing user-defined contracts cannot be converted into contracts"
2754                    )
2755                    .with_labels(
2756                        pos.as_opt_ref()
2757                            .map(|span| {
2758                                primary(span).with_message("This type (in contract position)")
2759                            })
2760                            .into_iter()
2761                            .chain(contract.pos.as_opt_ref().map(|span| {
2762                                secondary(span).with_message("contains this user-defined contract")
2763                            }))
2764                            .collect(),
2765                    )]
2766            }
2767            TypecheckErrorData::VarLevelMismatch {
2768                type_var: constant,
2769                pos,
2770            } => {
2771                let mut labels = mk_expr_label(pos);
2772
2773                if let Some(span) = constant.pos.as_opt_ref() {
2774                    labels.push(secondary(span).with_message("this polymorphic type variable"));
2775                }
2776
2777                vec![Diagnostic::error()
2778                    .with_message("invalid polymorphic generalization".to_string())
2779                    .with_labels(labels)
2780                    .with_notes(vec![
2781                        "While the type of this expression is still undetermined, it appears \
2782                            indirectly in the type of another expression introduced before \
2783                            the `forall` block."
2784                            .into(),
2785                        format!(
2786                            "The type of this expression escapes the scope of the \
2787                                corresponding `forall` and can't be generalized to the \
2788                                polymorphic type variable `{constant}`"
2789                        ),
2790                    ])]
2791            }
2792            TypecheckErrorData::InhomogeneousRecord {
2793                pos,
2794                row_a: expected,
2795                row_b: inferred,
2796            } => {
2797                vec![Diagnostic::error()
2798                    .with_message("incompatible types")
2799                    .with_labels(mk_expr_label(pos))
2800                    .with_notes(vec![
2801                        "Expected a dictionary type".into(),
2802                        format!("Found a record with a field of type {expected} and a field of type {inferred}"),
2803                        "Records are compatible with dicts only if all their fields have the same type".into(),
2804                    ])]
2805            }
2806            TypecheckErrorData::OrPatternVarsMismatch { var, pos } => {
2807                let mut labels = vec![primary_alt(var.pos.into_opt(), var.into_label(), files)
2808                    .with_message("this variable must occur in all branches")];
2809
2810                if let Some(span) = pos.as_opt_ref() {
2811                    labels.push(secondary(span).with_message("in this or-pattern"));
2812                }
2813
2814                vec![Diagnostic::error()
2815                    .with_message("or-pattern variable mismatch".to_string())
2816                    .with_labels(labels)
2817                    .with_notes(vec![
2818                        "All branches of an or-pattern must bind exactly the same set of variables"
2819                            .into(),
2820                    ])]
2821            }
2822            // clone() here is unfortunate, but I haven't found a better way to interface typecheck
2823            // errors - which can generate a diagnostic by reference - from other errors (import
2824            // errors can themselves hide parsing errors), where `into_diagnostics` consume `self`
2825            // in the current implementation. Maybe we should migrate from `into_diagnostics` to
2826            // `to_diagnostic`, taking the error by reference, but this might cause more copying.
2827            TypecheckErrorData::ImportError(err) => err.clone().into_diagnostics(files),
2828        }
2829    }
2830}
2831
2832impl IntoDiagnostics for ImportError {
2833    fn into_diagnostics(self, files: &mut Files) -> Vec<Diagnostic<FileId>> {
2834        match self {
2835            ImportError::IOError(path, error, span_opt) => {
2836                let labels = span_opt
2837                    .as_opt_ref()
2838                    .map(|span| vec![secondary(span).with_message("imported here")])
2839                    .unwrap_or_default();
2840
2841                vec![Diagnostic::error()
2842                    .with_message(format!("import of {path} failed: {error}"))
2843                    .with_labels(labels)]
2844            }
2845            ImportError::ParseErrors(error, span_opt) => {
2846                let mut diagnostic: Vec<Diagnostic<FileId>> = error
2847                    .errors
2848                    .into_iter()
2849                    .flat_map(|e| e.into_diagnostics(files))
2850                    .collect();
2851
2852                if let Some(span) = span_opt.as_opt_ref() {
2853                    diagnostic[0]
2854                        .labels
2855                        .push(secondary(span).with_message("imported here"));
2856                }
2857
2858                diagnostic
2859            }
2860            ImportError::MissingDependency {
2861                parent,
2862                missing,
2863                pos,
2864            } => {
2865                let labels = pos
2866                    .as_opt_ref()
2867                    .map(|span| vec![primary(span).with_message("imported here")])
2868                    .unwrap_or_default();
2869                let msg = if let Some(parent_path) = parent.as_deref() {
2870                    format!(
2871                        "unknown package {missing}, imported from package {}",
2872                        parent_path.display()
2873                    )
2874                } else {
2875                    format!("unknown package {missing}")
2876                };
2877
2878                vec![Diagnostic::error().with_message(msg).with_labels(labels)]
2879            }
2880            ImportError::NoPackageMap { pos } => {
2881                let labels = pos
2882                    .as_opt_ref()
2883                    .map(|span| vec![primary(span).with_message("imported here")])
2884                    .unwrap_or_default();
2885                vec![Diagnostic::error()
2886                    .with_message("tried to import from a package, but no package manifest found")
2887                    .with_labels(labels)
2888                    .with_notes(vec!["did you forget a --manifest-path argument?".to_owned()])]
2889            }
2890        }
2891    }
2892}
2893
2894impl IntoDiagnostics for ExportError {
2895    fn into_diagnostics(self, files: &mut Files) -> Vec<Diagnostic<FileId>> {
2896        let mut notes = if !self.path.0.is_empty() {
2897            vec![format!("When exporting field `{}`", self.path)]
2898        } else {
2899            vec![]
2900        };
2901
2902        match self.data {
2903            ExportErrorData::NotAString(rt) => vec![Diagnostic::error()
2904                .with_message(format!(
2905                    "raw export expects a String value, but got {}",
2906                    rt.as_ref()
2907                        .type_of()
2908                        .unwrap_or_else(|| String::from("<unevaluated>"))
2909                ))
2910                .with_labels(vec![primary_term(&rt, files)])
2911                .with_notes(notes)],
2912            ExportErrorData::UnsupportedNull(format, rt) => vec![Diagnostic::error()
2913                .with_message(format!("{format} format doesn't support null values"))
2914                .with_labels(vec![primary_term(&rt, files)])
2915                .with_notes(notes)],
2916            ExportErrorData::NonSerializable(rt) => {
2917                notes.extend([
2918                    "Nickel only supports serializing to and from strings, booleans, numbers, \
2919                    enum tags, `null` (depending on the format), as well as records and arrays \
2920                    of serializable values."
2921                        .into(),
2922                    "Functions and special values (such as contract labels) aren't \
2923                    serializable."
2924                        .into(),
2925                    "If you want serialization to ignore a specific value, please use the \
2926                    `not_exported` metadata."
2927                        .into(),
2928                ]);
2929
2930                vec![Diagnostic::error()
2931                    .with_message("non serializable term")
2932                    .with_labels(vec![primary_term(&rt, files)])
2933                    .with_notes(notes)]
2934            }
2935            ExportErrorData::NoDocumentation(rt) => {
2936                notes.push("documentation can only be collected from a record.".to_owned());
2937
2938                vec![Diagnostic::error()
2939                    .with_message("no documentation found")
2940                    .with_labels(vec![primary_term(&rt, files)])
2941                    .with_notes(notes)]
2942            }
2943            ExportErrorData::NumberOutOfRange { term, value } => {
2944                notes.push(format!(
2945                    "Only numbers in the range {:e} to {:e} can be portably serialized",
2946                    f64::MIN,
2947                    f64::MAX
2948                ));
2949
2950                vec![Diagnostic::error()
2951                    .with_message(format!(
2952                        "The number {} is too large (in absolute value) to be serialized.",
2953                        value.to_sci()
2954                    ))
2955                    .with_labels(vec![primary_term(&term, files)])
2956                    .with_notes(notes)]
2957            }
2958            ExportErrorData::Other(msg) => {
2959                notes.push(msg);
2960
2961                vec![Diagnostic::error()
2962                    .with_message("serialization failed")
2963                    .with_notes(notes)]
2964            }
2965        }
2966    }
2967}
2968
2969impl IntoDiagnostics for IOError {
2970    fn into_diagnostics(self, _fil: &mut Files) -> Vec<Diagnostic<FileId>> {
2971        match self {
2972            IOError(msg) => vec![Diagnostic::error().with_message(msg)],
2973        }
2974    }
2975}
2976
2977impl IntoDiagnostics for ReplError {
2978    fn into_diagnostics(self, files: &mut Files) -> Vec<Diagnostic<FileId>> {
2979        match self {
2980            ReplError::UnknownCommand(s) => vec![Diagnostic::error()
2981                .with_message(format!("unknown command `{s}`"))
2982                .with_notes(vec![String::from(
2983                    "type `:?` or `:help` for a list of available commands.",
2984                )])],
2985            ReplError::InvalidQueryPath(err) => err.into_diagnostics(files),
2986            ReplError::MissingArg { cmd, msg_opt } => {
2987                let mut notes = msg_opt
2988                    .as_ref()
2989                    .map(|msg| vec![msg.clone()])
2990                    .unwrap_or_default();
2991                notes.push(format!(
2992                    "type `:? {cmd}` or `:help {cmd}` for more information."
2993                ));
2994
2995                vec![Diagnostic::error()
2996                    .with_message(format!("{cmd}: missing argument"))
2997                    .with_notes(notes)]
2998            }
2999        }
3000    }
3001}
3002
// Re-allocates a `TypecheckErrorData` from one AST arena into another (`dest`).
//
// This is a mechanical, exhaustive mapping: variants holding only plain data
// (identifiers, positions, discriminants) are moved through unchanged, while any part
// borrowed from the source arena (`Type`, `RecordRow`, `EnumRow`, `Ast`) is cloned into
// `dest` via the corresponding `clone_to`. Nested `cause` errors are cloned
// recursively. When adding a variant, make sure every arena-borrowed field gets a
// `clone_to` call here — forgetting one won't compile, thanks to the lifetime change.
impl CloneTo for TypecheckErrorData<'_> {
    type Data<'ast> = TypecheckErrorData<'ast>;

    fn clone_to<'to>(data: Self::Data<'_>, dest: &'to AstAlloc) -> Self::Data<'to> {
        match data {
            // Plain data only: nothing borrowed from the source arena.
            TypecheckErrorData::UnboundIdentifier(loc_ident) => {
                TypecheckErrorData::UnboundIdentifier(loc_ident)
            }
            TypecheckErrorData::MissingRow {
                id,
                expected,
                inferred,
                pos,
            } => TypecheckErrorData::MissingRow {
                id,
                expected: Type::clone_to(expected, dest),
                inferred: Type::clone_to(inferred, dest),
                pos,
            },
            TypecheckErrorData::MissingDynTail {
                expected,
                inferred,
                pos,
            } => TypecheckErrorData::MissingDynTail {
                expected: Type::clone_to(expected, dest),
                inferred: Type::clone_to(inferred, dest),
                pos,
            },
            TypecheckErrorData::ExtraRow {
                id,
                expected,
                inferred,
                pos,
            } => TypecheckErrorData::ExtraRow {
                id,
                expected: Type::clone_to(expected, dest),
                inferred: Type::clone_to(inferred, dest),
                pos,
            },
            TypecheckErrorData::ExtraDynTail {
                expected,
                inferred,
                pos,
            } => TypecheckErrorData::ExtraDynTail {
                expected: Type::clone_to(expected, dest),
                inferred: Type::clone_to(inferred, dest),
                pos,
            },
            TypecheckErrorData::ForallParametricityViolation {
                kind,
                tail,
                violating_type,
                pos,
            } => TypecheckErrorData::ForallParametricityViolation {
                kind,
                tail: Type::clone_to(tail, dest),
                violating_type: Type::clone_to(violating_type, dest),
                pos,
            },
            // Plain data only.
            TypecheckErrorData::UnboundTypeVariable(loc_ident) => {
                TypecheckErrorData::UnboundTypeVariable(loc_ident)
            }
            TypecheckErrorData::TypeMismatch {
                expected,
                inferred,
                pos,
            } => TypecheckErrorData::TypeMismatch {
                expected: Type::clone_to(expected, dest),
                inferred: Type::clone_to(inferred, dest),
                pos,
            },
            // `cause` is a mandatory nested error here: clone it recursively.
            TypecheckErrorData::RecordRowMismatch {
                id,
                expected,
                inferred,
                cause,
                pos,
            } => TypecheckErrorData::RecordRowMismatch {
                id,
                expected: Type::clone_to(expected, dest),
                inferred: Type::clone_to(inferred, dest),
                cause: Box::new(TypecheckErrorData::clone_to(*cause, dest)),
                pos,
            },
            // Unlike `RecordRowMismatch`, the nested `cause` is optional here.
            TypecheckErrorData::EnumRowMismatch {
                id,
                expected,
                inferred,
                cause,
                pos,
            } => TypecheckErrorData::EnumRowMismatch {
                id,
                expected: Type::clone_to(expected, dest),
                inferred: Type::clone_to(inferred, dest),
                cause: cause.map(|cause| Box::new(TypecheckErrorData::clone_to(*cause, dest))),
                pos,
            },
            TypecheckErrorData::RecordRowConflict {
                row,
                expected,
                inferred,
                pos,
            } => TypecheckErrorData::RecordRowConflict {
                row: RecordRow::clone_to(row, dest),
                expected: Type::clone_to(expected, dest),
                inferred: Type::clone_to(inferred, dest),
                pos,
            },
            TypecheckErrorData::EnumRowConflict {
                row,
                expected,
                inferred,
                pos,
            } => TypecheckErrorData::EnumRowConflict {
                row: EnumRow::clone_to(row, dest),
                expected: Type::clone_to(expected, dest),
                inferred: Type::clone_to(inferred, dest),
                pos,
            },
            TypecheckErrorData::ArrowTypeMismatch {
                expected,
                inferred,
                type_path,
                cause,
                pos,
            } => TypecheckErrorData::ArrowTypeMismatch {
                expected: Type::clone_to(expected, dest),
                inferred: Type::clone_to(inferred, dest),
                type_path,
                cause: Box::new(TypecheckErrorData::clone_to(*cause, dest)),
                pos,
            },
            // The contract is a full AST node, re-allocated wholesale.
            TypecheckErrorData::CtrTypeInTermPos { contract, pos } => {
                TypecheckErrorData::CtrTypeInTermPos {
                    contract: Ast::clone_to(contract, dest),
                    pos,
                }
            }
            // Plain data only.
            TypecheckErrorData::VarLevelMismatch { type_var, pos } => {
                TypecheckErrorData::VarLevelMismatch { type_var, pos }
            }
            TypecheckErrorData::InhomogeneousRecord { row_a, row_b, pos } => {
                TypecheckErrorData::InhomogeneousRecord {
                    row_a: Type::clone_to(row_a, dest),
                    row_b: Type::clone_to(row_b, dest),
                    pos,
                }
            }
            // Plain data only.
            TypecheckErrorData::OrPatternVarsMismatch { var, pos } => {
                TypecheckErrorData::OrPatternVarsMismatch { var, pos }
            }
            // Import errors don't borrow from the AST arena and move through as-is.
            TypecheckErrorData::ImportError(import_error) => {
                TypecheckErrorData::ImportError(import_error)
            }
        }
    }
}