//! Error types and error diagnostics for the Nickel interpreter.

use codespan::ByteIndex;

pub use codespan_reporting::diagnostic::{Diagnostic, Label, LabelStyle};

use codespan_reporting::files::Files as _;
use codespan_reporting::term::termcolor::{ColorChoice, StandardStream, WriteColor};
use lalrpop_util::ErrorRecovery;
use malachite::base::num::conversion::traits::ToSci;

use ouroboros::self_referencing;

use crate::{
    bytecode::ast::{
        alloc::{AstAlloc, CloneTo},
        compat::ToMainline as _,
        typ::{EnumRow, RecordRow, Type},
        Ast,
    },
    cache::InputFormat,
    eval::callstack::CallStack,
    files::{FileId, Files},
    identifier::{Ident, LocIdent},
    label::{
        self,
        ty_path::{self, PathSpan},
        MergeKind, MergeLabel,
    },
    parser::{
        self,
        error::{InvalidRecordTypeError, LexicalError, ParseError as InternalParseError},
        lexer::Token,
        utils::mk_span,
    },
    position::{RawSpan, TermPos},
    repl,
    serialize::{ExportFormat, NickelPointer},
    term::{pattern::Pattern, record::FieldMetadata, Number, RichTerm, Term},
    typ::{TypeF, VarKindDiscriminant},
};

pub mod report;
pub mod suggest;
pub mod warning;

pub use warning::Warning;
49
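/// A sink for reported errors or warnings.
///
/// `Reporter` decouples the code that detects a problem from the policy used to surface it:
/// implementations may collect reports (see [`Sink`]), print them, or silently drop them (see
/// [`NullReporter`]).
///
/// # Example
///
/// A minimal sketch (illustrative only; `fallible_step` is a placeholder for any function
/// returning a `Result`):
///
/// ```ignore
/// let mut sink = Sink::default();
/// sink.report_result(fallible_step());
///
/// for error in sink.errors {
///     // handle each collected error
/// }
/// ```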
pub trait Reporter<E> {
    /// Reports an error.
    fn report(&mut self, e: E);

    /// Reports the error of a `Result`, if any, discarding the success value.
    fn report_result<T, E2>(&mut self, result: Result<T, E2>)
    where
        Self: Sized,
        E2: Into<E>,
    {
        if let Err(e) = result {
            self.report(e.into());
        }
    }
}

impl<E, R: Reporter<E>> Reporter<E> for &mut R {
    fn report(&mut self, e: E) {
        R::report(*self, e)
    }
}
78
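/// A [`Reporter`] that accumulates every report it receives for later inspection.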
pub struct Sink<E> {
    pub errors: Vec<E>,
}

impl<E> Default for Sink<E> {
    fn default() -> Self {
        Sink { errors: Vec::new() }
    }
}

impl<E> Reporter<E> for Sink<E> {
    fn report(&mut self, e: E) {
        self.errors.push(e);
    }
}
95
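/// A [`Reporter`] that silently discards every report it receives.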
pub struct NullReporter {}

impl<E> Reporter<E> for NullReporter {
    fn report(&mut self, _e: E) {}
}
102
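/// The top-level error type, aggregating all the categories of errors that the interpreter can
/// produce.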
#[derive(Debug, Clone, PartialEq)]
pub enum Error {
    EvalError(EvalError),
    TypecheckError(TypecheckError),
    ParseErrors(ParseErrors),
    ImportError(ImportError),
    ExportError(ExportError),
    IOError(IOError),
    ReplError(ReplError),
}
114
#[derive(Debug, Clone, PartialEq)]
pub enum EvalError {
    /// A contract failed: blame was raised on the given label.
    BlameError {
        evaluated_arg: Option<RichTerm>,
        label: label::Label,
        call_stack: CallStack,
    },
    /// A field was accessed but has no definition.
    MissingFieldDef {
        id: LocIdent,
        metadata: FieldMetadata,
        pos_record: TermPos,
        pos_access: TermPos,
    },
    /// A dynamic type error: a value doesn't have the type expected by an operation.
    TypeError {
        expected: String,
        message: String,
        orig_pos: TermPos,
        term: RichTerm,
    },
    /// A unary primitive operator was applied to a value of the wrong type.
    UnaryPrimopTypeError {
        primop: String,
        expected: String,
        arg_pos: TermPos,
        arg_evaluated: RichTerm,
    },
    /// An n-ary primitive operator was applied to an argument of the wrong type.
    NAryPrimopTypeError {
        primop: String,
        expected: String,
        arg_number: usize,
        arg_pos: TermPos,
        arg_evaluated: RichTerm,
        op_pos: TermPos,
    },
    /// A parse error raised during evaluation.
    ParseError(ParseError),
    /// A term which isn't a function was applied to an argument.
    NotAFunc(RichTerm, RichTerm, TermPos),
    /// A field required by a record operation is missing.
    FieldMissing {
        id: LocIdent,
        field_names: Vec<LocIdent>,
        operator: String,
        pos_record: TermPos,
        pos_op: TermPos,
    },
    /// A primitive operator was applied to too few arguments.
    NotEnoughArgs(usize, String, TermPos),
    /// Two values with the same merge priority couldn't be merged.
    MergeIncompatibleArgs {
        left_arg: RichTerm,
        right_arg: RichTerm,
        merge_label: MergeLabel,
    },
    /// An identifier is unbound at evaluation time.
    UnboundIdentifier(LocIdent, TermPos),
    InfiniteRecursion(CallStack, TermPos),
    /// A serialization error raised during evaluation.
    SerializationError(ExportError),
    /// A deserialization error: format, message and position.
    DeserializationError(String, String, TermPos),
    /// A deserialization error wrapping an underlying parse error.
    DeserializationErrorWithInner {
        format: InputFormat,
        inner: ParseError,
        pos: TermPos,
    },
    /// An operation tried to reach the tail of a record sealed by a polymorphic contract.
    IllegalPolymorphicTailAccess {
        action: IllegalPolymorphicTailAction,
        evaluated_arg: Option<RichTerm>,
        label: label::Label,
        call_stack: CallStack,
    },
    /// Two values of incompatible types were compared for equality.
    IncomparableValues {
        eq_pos: TermPos,
        left: RichTerm,
        right: RichTerm,
    },
    /// An enum match expression was applied to a value that none of its branches match.
    NonExhaustiveEnumMatch {
        expected: Vec<LocIdent>,
        found: RichTerm,
        pos: TermPos,
    },
    /// A match expression was applied to a value that none of its branches match.
    NonExhaustiveMatch {
        value: RichTerm,
        pos: TermPos,
    },
    /// A destructuring pattern failed to match the provided value.
    FailedDestructuring {
        value: RichTerm,
        pattern: Pattern,
    },
    /// A field query was performed on a value which isn't a record.
    QueryNonRecord {
        pos: TermPos,
        id: LocIdent,
        value: RichTerm,
    },
    /// An unexpected internal error, indicating a bug in the interpreter.
    InternalError(String, TermPos),
    /// A generic error with a message and a position.
    Other(String, TermPos),
}
272
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum IllegalPolymorphicTailAction {
    FieldAccess { field: String },
    Map,
    Merge,
    FieldRemove { field: String },
    Freeze,
}

impl IllegalPolymorphicTailAction {
    fn message(&self) -> String {
        use IllegalPolymorphicTailAction::*;

        match self {
            FieldAccess { field } => {
                format!("cannot access field `{field}` sealed by a polymorphic contract")
            }
            Map => "cannot map over a record sealed by a polymorphic contract".to_owned(),
            Merge => "cannot merge a record sealed by a polymorphic contract".to_owned(),
            FieldRemove { field } => {
                format!("cannot remove field `{field}` sealed by a polymorphic contract")
            }
            Freeze => "cannot freeze a record sealed by a polymorphic contract".to_owned(),
        }
    }
}
299
300pub const UNKNOWN_SOURCE_NAME: &str = "<unknown> (generated by evaluation)";
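/// A typechecking error.
///
/// The error data borrows types and ASTs from an arena ([`AstAlloc`]). To let errors outlive the
/// typechecking phase, `TypecheckError` is a self-referential struct (via `ouroboros`) owning its
/// own allocator together with the [`TypecheckErrorData`] borrowing from it.
///
/// # Example
///
/// A construction sketch (illustrative only), mirroring the `From<ImportError>` conversion
/// further down in this module:
///
/// ```ignore
/// let typecheck_error = TypecheckError::new(AstAlloc::new(), |_alloc| {
///     TypecheckErrorData::ImportError(import_error)
/// });
/// ```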
301
#[self_referencing(pub_extras)]
#[derive(Debug)]
pub struct TypecheckError {
    alloc: AstAlloc,
    #[borrows(alloc)]
    #[covariant]
    pub error: TypecheckErrorData<'this>,
}

impl Clone for TypecheckError {
    fn clone(&self) -> Self {
        TypecheckError::new(AstAlloc::new(), |alloc| {
            TypecheckErrorData::clone_to(self.borrow_error().clone(), alloc)
        })
    }
}

impl PartialEq for TypecheckError {
    fn eq(&self, other: &Self) -> bool {
        self.borrow_error() == other.borrow_error()
    }
}
329
330#[derive(Debug, PartialEq, Clone)]
332pub enum TypecheckErrorData<'ast> {
333 UnboundIdentifier(LocIdent),
335 MissingRow {
337 id: LocIdent,
338 expected: Type<'ast>,
339 inferred: Type<'ast>,
340 pos: TermPos,
341 },
342 MissingDynTail {
344 expected: Type<'ast>,
345 inferred: Type<'ast>,
346 pos: TermPos,
347 },
348 ExtraRow {
350 id: LocIdent,
351 expected: Type<'ast>,
352 inferred: Type<'ast>,
353 pos: TermPos,
354 },
355 ExtraDynTail {
357 expected: Type<'ast>,
358 inferred: Type<'ast>,
359 pos: TermPos,
360 },
361 ForallParametricityViolation {
374 kind: VarKindDiscriminant,
375 tail: Type<'ast>,
376 violating_type: Type<'ast>,
377 pos: TermPos,
378 },
379 UnboundTypeVariable(LocIdent),
381 TypeMismatch {
384 expected: Type<'ast>,
385 inferred: Type<'ast>,
386 pos: TermPos,
387 },
388 RecordRowMismatch {
392 id: LocIdent,
393 expected: Type<'ast>,
394 inferred: Type<'ast>,
395 cause: Box<TypecheckErrorData<'ast>>,
396 pos: TermPos,
397 },
398 EnumRowMismatch {
400 id: LocIdent,
401 expected: Type<'ast>,
402 inferred: Type<'ast>,
403 cause: Option<Box<TypecheckErrorData<'ast>>>,
404 pos: TermPos,
405 },
406 RecordRowConflict {
421 row: RecordRow<'ast>,
424 expected: Type<'ast>,
425 inferred: Type<'ast>,
426 pos: TermPos,
427 },
428 EnumRowConflict {
430 row: EnumRow<'ast>,
433 expected: Type<'ast>,
434 inferred: Type<'ast>,
435 pos: TermPos,
436 },
437 ArrowTypeMismatch {
453 expected: Type<'ast>,
454 inferred: Type<'ast>,
455 type_path: ty_path::Path,
457 cause: Box<TypecheckErrorData<'ast>>,
458 pos: TermPos,
459 },
460 CtrTypeInTermPos {
469 contract: Ast<'ast>,
471 pos: TermPos,
473 },
474 VarLevelMismatch {
500 type_var: LocIdent,
503 pos: TermPos,
505 },
506 InhomogeneousRecord {
508 row_a: Type<'ast>,
510 row_b: Type<'ast>,
512 pos: TermPos,
514 },
515 OrPatternVarsMismatch {
520 var: LocIdent,
523 pos: TermPos,
525 },
526 ImportError(ImportError),
533}
534
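/// A collection of parse errors, as produced by the error-tolerant parser.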
535#[derive(Debug, PartialEq, Eq, Clone, Default)]
536pub struct ParseErrors {
537 pub errors: Vec<ParseError>,
538}
539
540impl ParseErrors {
541 pub fn new(errors: Vec<ParseError>) -> ParseErrors {
542 ParseErrors { errors }
543 }
544
545 pub fn errors(self) -> Option<Vec<ParseError>> {
546 if self.errors.is_empty() {
547 None
548 } else {
549 Some(self.errors)
550 }
551 }
552
553 pub fn no_errors(&self) -> bool {
554 self.errors.is_empty()
555 }
556
557 pub const fn none() -> ParseErrors {
558 ParseErrors { errors: Vec::new() }
559 }
560
561 pub fn from_recoverable(
562 errs: Vec<ErrorRecovery<usize, Token<'_>, parser::error::ParseError>>,
563 file_id: FileId,
564 ) -> Self {
565 ParseErrors {
566 errors: errs
567 .into_iter()
568 .map(|e| ParseError::from_lalrpop(e.error, file_id))
569 .collect(),
570 }
571 }
572}
573
574impl From<ParseError> for ParseErrors {
575 fn from(e: ParseError) -> ParseErrors {
576 ParseErrors { errors: vec![e] }
577 }
578}
579
580impl From<Vec<ParseError>> for ParseErrors {
581 fn from(errors: Vec<ParseError>) -> ParseErrors {
582 ParseErrors { errors }
583 }
584}
585
586impl IntoDiagnostics for ParseErrors {
587 fn into_diagnostics(self, files: &mut Files) -> Vec<Diagnostic<FileId>> {
588 self.errors
589 .into_iter()
590 .flat_map(|e| e.into_diagnostics(files))
591 .collect()
592 }
593}
594
595#[derive(Debug, PartialEq, Eq, Clone)]
597pub enum ParseError {
598 UnexpectedEOF(FileId, Vec<String>),
    UnexpectedToken(RawSpan, Vec<String>),
605 ExtraToken(RawSpan),
607 UnmatchedCloseBrace(RawSpan),
610 InvalidEscapeSequence(RawSpan),
612 InvalidAsciiEscapeCode(RawSpan),
614 StringDelimiterMismatch {
617 opening_delimiter: RawSpan,
618 closing_delimiter: RawSpan,
619 },
    ExternalFormatError(String, String, Option<RawSpan>),
626 UnboundTypeVariables(Vec<LocIdent>),
628 InvalidRecordType {
633 record_span: RawSpan,
635 tail_span: Option<RawSpan>,
637 cause: InvalidRecordTypeError,
639 },
640 RecursiveLetPattern(RawSpan),
643 PatternInLetBlock(RawSpan),
645 TypeVariableKindMismatch { ty_var: LocIdent, span: RawSpan },
653 TypedFieldWithoutDefinition {
668 field_span: RawSpan,
670 annot_span: RawSpan,
672 },
673 InterpolationInStaticPath {
676 input: String,
677 path_elem_span: RawSpan,
678 },
679 DuplicateIdentInRecordPattern {
681 ident: LocIdent,
683 prev_ident: LocIdent,
685 },
686 DuplicateIdentInLetBlock {
688 ident: LocIdent,
690 prev_ident: LocIdent,
692 },
693 DisabledFeature { feature: String, span: RawSpan },
695 InvalidContract(RawSpan),
700 InvalidImportFormat { span: RawSpan },
702 SigilExprMissingColon(RawSpan),
704 UnknownSigilSelector { selector: String, span: RawSpan },
706 UnknownSigilAttribute {
708 selector: String,
709 attribute: String,
710 span: RawSpan,
711 },
712 MultipleFieldDecls {
715 ident: Ident,
717 include_span: RawSpan,
720 other_span: RawSpan,
723 },
724}
725
726#[derive(Debug, PartialEq, Eq, Clone)]
728pub enum ImportError {
    IOError(String, String, TermPos),
    ParseErrors(ParseErrors, TermPos),
740 MissingDependency {
742 parent: Option<std::path::PathBuf>,
745 missing: Ident,
747 pos: TermPos,
748 },
749 NoPackageMap { pos: TermPos },
751}
752
753#[derive(Debug, PartialEq, Clone)]
754pub struct ExportError {
755 pub path: NickelPointer,
758 pub data: ExportErrorData,
760}
761
762#[derive(Debug, PartialEq, Clone)]
764pub enum ExportErrorData {
765 UnsupportedNull(ExportFormat, RichTerm),
767 NotAString(RichTerm),
769 NonSerializable(RichTerm),
771 NoDocumentation(RichTerm),
773 NumberOutOfRange {
775 term: RichTerm,
776 value: Number,
777 },
778 Other(String),
779}
780
781impl From<ExportErrorData> for ExportError {
782 fn from(data: ExportErrorData) -> ExportError {
783 ExportError {
784 path: NickelPointer::new(),
785 data,
786 }
787 }
788}
789
790#[derive(Debug, PartialEq, Eq, Clone)]
792pub struct IOError(pub String);
793
794#[derive(Debug, PartialEq, Eq, Clone)]
796pub enum ReplError {
797 UnknownCommand(String),
798 MissingArg {
799 cmd: repl::command::CommandType,
800 msg_opt: Option<String>,
801 },
802 InvalidQueryPath(ParseError),
803}
804
805impl From<EvalError> for Error {
806 fn from(error: EvalError) -> Error {
807 Error::EvalError(error)
808 }
809}
810
811impl From<ParseError> for Error {
812 fn from(error: ParseError) -> Error {
813 Error::ParseErrors(ParseErrors {
814 errors: vec![error],
815 })
816 }
817}
818
819impl From<ParseErrors> for Error {
820 fn from(errors: ParseErrors) -> Error {
821 Error::ParseErrors(errors)
822 }
823}
824
825impl From<TypecheckError> for Error {
826 fn from(error: TypecheckError) -> Error {
827 Error::TypecheckError(error)
828 }
829}
830
831impl From<ImportError> for Error {
832 fn from(error: ImportError) -> Error {
833 Error::ImportError(error)
834 }
835}
836
837impl From<ExportError> for Error {
838 fn from(error: ExportError) -> Error {
839 Error::ExportError(error)
840 }
841}
842
843impl From<IOError> for Error {
844 fn from(error: IOError) -> Error {
845 Error::IOError(error)
846 }
847}
848
849impl From<std::io::Error> for IOError {
850 fn from(error: std::io::Error) -> IOError {
851 IOError(error.to_string())
852 }
853}
854
855impl From<ExportError> for EvalError {
856 fn from(error: ExportError) -> EvalError {
857 EvalError::SerializationError(error)
858 }
859}
860
861impl From<ImportError> for TypecheckError {
862 fn from(error: ImportError) -> Self {
863 TypecheckError::new(AstAlloc::new(), |_alloc| {
864 TypecheckErrorData::ImportError(error)
865 })
866 }
867}
868
pub fn escape(s: &str) -> String {
    String::from_utf8(strip_ansi_escapes::strip(s))
        .expect("escape(): converting from a string should give back a valid UTF8 string")
}
876
877impl From<ReplError> for Error {
878 fn from(error: ReplError) -> Error {
879 Error::ReplError(error)
880 }
881}
882
883impl ParseError {
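    /// Builds a [`ParseError`] from a raw lalrpop error, resolving byte offsets against
    /// `file_id` to produce proper spans.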
884 pub fn from_lalrpop<T>(
885 error: lalrpop_util::ParseError<usize, T, InternalParseError>,
886 file_id: FileId,
887 ) -> ParseError {
888 match error {
889 lalrpop_util::ParseError::InvalidToken { location } => {
890 ParseError::UnexpectedToken(mk_span(file_id, location, location + 1), Vec::new())
891 }
892 lalrpop_util::ParseError::UnrecognizedToken {
893 token: (start, _, end),
894 expected,
895 } => ParseError::UnexpectedToken(mk_span(file_id, start, end), expected),
896 lalrpop_util::ParseError::UnrecognizedEof { expected, .. } => {
897 ParseError::UnexpectedEOF(file_id, expected)
898 }
899 lalrpop_util::ParseError::ExtraToken {
900 token: (start, _, end),
901 } => ParseError::ExtraToken(mk_span(file_id, start, end)),
902 lalrpop_util::ParseError::User { error } => match error {
903 InternalParseError::Lexical(LexicalError::Generic(range)) => {
904 ParseError::UnexpectedToken(
905 mk_span(file_id, range.start, range.end),
906 Vec::new(),
907 )
908 }
909 InternalParseError::Lexical(LexicalError::UnmatchedCloseBrace(location)) => {
910 ParseError::UnmatchedCloseBrace(mk_span(file_id, location, location + 1))
911 }
912 InternalParseError::Lexical(LexicalError::InvalidEscapeSequence(location)) => {
913 ParseError::InvalidEscapeSequence(mk_span(file_id, location, location + 1))
914 }
915 InternalParseError::Lexical(LexicalError::InvalidAsciiEscapeCode(location)) => {
916 ParseError::InvalidAsciiEscapeCode(mk_span(file_id, location, location + 2))
917 }
918 InternalParseError::Lexical(LexicalError::StringDelimiterMismatch {
919 opening_delimiter,
920 closing_delimiter,
921 }) => ParseError::StringDelimiterMismatch {
922 opening_delimiter: mk_span(
923 file_id,
924 opening_delimiter.start,
925 opening_delimiter.end,
926 ),
927 closing_delimiter: mk_span(
928 file_id,
929 closing_delimiter.start,
930 closing_delimiter.end,
931 ),
932 },
933 InternalParseError::UnboundTypeVariables(idents) => {
934 ParseError::UnboundTypeVariables(idents)
935 }
936 InternalParseError::InvalidRecordType {
937 record_span,
938 tail_span,
939 cause,
940 } => ParseError::InvalidRecordType {
941 record_span,
942 tail_span,
943 cause,
944 },
945 InternalParseError::RecursiveLetPattern(pos) => {
946 ParseError::RecursiveLetPattern(pos)
947 }
948 InternalParseError::PatternInLetBlock(pos) => ParseError::PatternInLetBlock(pos),
949 InternalParseError::TypeVariableKindMismatch { ty_var, span } => {
950 ParseError::TypeVariableKindMismatch { ty_var, span }
951 }
952 InternalParseError::TypedFieldWithoutDefinition {
953 field_span,
954 annot_span,
955 } => ParseError::TypedFieldWithoutDefinition {
956 field_span,
957 annot_span,
958 },
959 InternalParseError::DuplicateIdentInRecordPattern { ident, prev_ident } => {
960 ParseError::DuplicateIdentInRecordPattern { ident, prev_ident }
961 }
962 InternalParseError::DuplicateIdentInLetBlock { ident, prev_ident } => {
963 ParseError::DuplicateIdentInLetBlock { ident, prev_ident }
964 }
965 InternalParseError::DisabledFeature { feature, span } => {
966 ParseError::DisabledFeature { feature, span }
967 }
968 InternalParseError::InterpolationInStaticPath { path_elem_span } => {
969 ParseError::InterpolationInStaticPath {
970 input: String::new(),
971 path_elem_span,
972 }
973 }
974 InternalParseError::InvalidContract(span) => ParseError::InvalidContract(span),
975 InternalParseError::InvalidImportFormat { span } => {
976 ParseError::InvalidImportFormat { span }
977 }
978 InternalParseError::MultipleFieldDecls {
979 ident,
980 include_span,
981 other_span,
982 } => ParseError::MultipleFieldDecls {
983 ident,
984 include_span,
985 other_span,
986 },
987 },
988 }
989 }
990
991 pub fn from_serde_json(error: serde_json::Error, file_id: FileId, files: &Files) -> Self {
992 use codespan::ByteOffset;
993
994 let line_span = if error.line() == 0 {
999 None
1000 } else {
1001 files.line_index(file_id, error.line() - 1).ok()
1002 };
1003
1004 let start =
1005 line_span.map(|ls| ByteIndex::from(((ls + error.column()) as u32).saturating_sub(1)));
1006 ParseError::ExternalFormatError(
1007 String::from("json"),
1008 error.to_string(),
1009 start.map(|start| RawSpan {
1010 src_id: file_id,
1011 start,
1012 end: start + ByteOffset::from(1),
1013 }),
1014 )
1015 }
1016
1017 pub fn from_yaml(error: saphyr_parser::ScanError, file_id: Option<FileId>) -> Self {
1018 use codespan::{ByteIndex, ByteOffset};
1019
1020 let start = ByteIndex::from(error.marker().index() as u32);
1021 ParseError::ExternalFormatError(
1022 String::from("yaml"),
1023 error.to_string(),
1024 file_id.map(|src_id| RawSpan {
1025 src_id,
1026 start,
1027 end: start + ByteOffset::from(1),
1028 }),
1029 )
1030 }
1031
1032 pub fn from_toml(error: toml_edit::TomlError, file_id: FileId) -> Self {
1033 use codespan::{ByteIndex, ByteOffset};
1034
1035 let span = error.span();
1036 ParseError::ExternalFormatError(
1037 String::from("toml"),
1038 error.to_string(),
1039 span.map(|span| RawSpan {
1040 src_id: file_id,
1041 start: ByteIndex::from(span.start as u32),
1042 end: ByteIndex(span.end as u32) + ByteOffset::from(1),
1043 }),
1044 )
1045 }
1046
1047 #[cfg(feature = "nix-experimental")]
1048 pub fn from_nix(error: &str, _file_id: FileId) -> Self {
1049 ParseError::ExternalFormatError(String::from("nix"), error.to_string(), None)
1051 }
1052}
1053
pub const INTERNAL_ERROR_MSG: &str =
    "This error should not happen. This is likely a bug in the Nickel interpreter. Please consider \
     reporting it at https://github.com/tweag/nickel/issues with the above error message.";
1057
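/// Conversion of errors into printable diagnostics.
///
/// Converting may register new "virtual" source snippets (for values that don't come from any
/// real source file) into `files`, which is why it takes `files` mutably.
///
/// # Example
///
/// A minimal sketch (illustrative only); actual rendering is handled by the `report` submodule:
///
/// ```ignore
/// let diagnostics = error.into_diagnostics(&mut files);
/// for diagnostic in diagnostics {
///     // hand `diagnostic` to a codespan-reporting emitter
/// }
/// ```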
pub trait IntoDiagnostics {
    fn into_diagnostics(self, files: &mut Files) -> Vec<Diagnostic<FileId>>;
}
1077
1078impl IntoDiagnostics for Diagnostic<FileId> {
1080 fn into_diagnostics(self, _files: &mut Files) -> Vec<Diagnostic<FileId>> {
1081 vec![self]
1082 }
1083}
1084
1085fn primary(span: &RawSpan) -> Label<FileId> {
1089 Label::primary(span.src_id, span.start.to_usize()..span.end.to_usize())
1090}
1091
1092fn secondary(span: &RawSpan) -> Label<FileId> {
1094 Label::secondary(span.src_id, span.start.to_usize()..span.end.to_usize())
1095}
1096
1097fn label_alt(
1144 span_opt: Option<RawSpan>,
1145 alt_term: String,
1146 style: LabelStyle,
1147 files: &mut Files,
1148) -> Label<FileId> {
1149 match span_opt {
1150 Some(span) => Label::new(
1151 style,
1152 span.src_id,
1153 span.start.to_usize()..span.end.to_usize(),
1154 ),
1155 None => {
1156 let range = 0..alt_term.len();
1157 Label::new(style, files.add(UNKNOWN_SOURCE_NAME, alt_term), range)
1158 }
1159 }
1160}
1161
1162fn primary_alt(span_opt: Option<RawSpan>, alt_term: String, files: &mut Files) -> Label<FileId> {
1167 label_alt(span_opt, alt_term, LabelStyle::Primary, files)
1168}
1169
1170fn primary_term(term: &RichTerm, files: &mut Files) -> Label<FileId> {
1175 primary_alt(term.pos.into_opt(), term.to_string(), files)
1176}
1177
1178fn secondary_alt(span_opt: TermPos, alt_term: String, files: &mut Files) -> Label<FileId> {
1183 label_alt(span_opt.into_opt(), alt_term, LabelStyle::Secondary, files)
1184}
1185
1186fn secondary_term(term: &RichTerm, files: &mut Files) -> Label<FileId> {
1191 secondary_alt(term.pos, term.to_string(), files)
1192}
1193
fn cardinal(number: usize) -> String {
    // Despite its name, this function renders an ordinal ("1st", "2nd", "3rd", "4th", ...).
    let suffix = if matches!(number % 100, 11..=13) {
        // "11th", "12th" and "13th" are exceptions to the last-digit rule.
        "th"
    } else if number % 10 == 1 {
        "st"
    } else if number % 10 == 2 {
        "nd"
    } else if number % 10 == 3 {
        "rd"
    } else {
        "th"
    };
    format!("{number}{suffix}")
}
1206
1207impl IntoDiagnostics for Error {
1208 fn into_diagnostics(self, files: &mut Files) -> Vec<Diagnostic<FileId>> {
1209 match self {
1210 Error::ParseErrors(errs) => errs
1211 .errors
1212 .into_iter()
1213 .flat_map(|e| e.into_diagnostics(files))
1214 .collect(),
1215 Error::TypecheckError(err) => err.into_diagnostics(files),
1216 Error::EvalError(err) => err.into_diagnostics(files),
1217 Error::ImportError(err) => err.into_diagnostics(files),
1218 Error::ExportError(err) => err.into_diagnostics(files),
1219 Error::IOError(err) => err.into_diagnostics(files),
1220 Error::ReplError(err) => err.into_diagnostics(files),
1221 }
1222 }
1223}
1224
1225impl IntoDiagnostics for EvalError {
1226 fn into_diagnostics(self, files: &mut Files) -> Vec<Diagnostic<FileId>> {
1227 match self {
1228 EvalError::BlameError {
1229 evaluated_arg,
1230 label,
1231 call_stack,
1232 } => blame_error::blame_diagnostics(files, label, evaluated_arg, &call_stack, ""),
1233 EvalError::MissingFieldDef {
1234 id,
1235 metadata,
1236 pos_record,
1237 pos_access,
1238 } => {
1239 let mut labels = vec![];
1240
1241 if let Some(label) = metadata
1247 .annotation
1248 .first()
1249 .map(|labeled_ty| labeled_ty.label.clone())
1250 {
1251 if let Some(span) = label.field_name.and_then(|id| id.pos.into_opt()) {
1252 labels.push(primary(&span).with_message("required here"));
1253 }
1254
1255 if let Some(span) = pos_record.into_opt() {
1256 labels.push(secondary(&span).with_message("in this record"));
1257 }
1258
1259 } else {
1262 if let Some(span) = id.pos.into_opt() {
1263 labels.push(primary(&span).with_message("required here"));
1264 }
1265
1266 if let Some(span) = pos_record.into_opt() {
1267 labels.push(secondary(&span).with_message("in this record"));
1268 }
1269
1270 if let Some(span) = pos_access.into_opt() {
1271 labels.push(secondary(&span).with_message("accessed here"));
1272 }
1273 }
1274
                vec![Diagnostic::error()
                    .with_message(format!("missing definition for `{id}`"))
                    .with_labels(labels)]
1280 }
1281 EvalError::TypeError {
1282 expected,
1283 message,
1284 orig_pos,
1285 term: t,
1286 } => {
1287 let label = format!(
1288 "this expression has type {}, but {} was expected",
1289 t.term
1290 .type_of()
1291 .unwrap_or_else(|| String::from("<unevaluated>")),
1292 expected,
1293 );
1294
1295 let labels = match (orig_pos.into_opt(), t.pos.into_opt()) {
1296 (Some(span_orig), Some(span_t)) if span_orig == span_t => {
1297 vec![primary(&span_orig).with_message(label)]
1298 }
1299 (Some(span_orig), Some(t_pos)) if !files.is_stdlib(t_pos.src_id) => {
1300 vec![
1301 primary(&span_orig).with_message(label),
1302 secondary_term(&t, files).with_message("evaluated to this"),
1303 ]
1304 }
1305 (Some(span), _) => {
1306 vec![primary(&span).with_message(label)]
1307 }
1308 (None, Some(span)) => {
1309 vec![primary(&span).with_message(label)]
1310 }
1311 (None, None) => {
1312 vec![primary_term(&t, files).with_message(label)]
1313 }
1314 };
1315
1316 vec![Diagnostic::error()
1317 .with_message("dynamic type error")
1318 .with_labels(labels)
1319 .with_notes(vec![message])]
1320 }
1321 EvalError::ParseError(parse_error) => parse_error.into_diagnostics(files),
1322 EvalError::NotAFunc(t, arg, pos_opt) => vec![Diagnostic::error()
1323 .with_message("not a function")
1324 .with_labels(vec![
1325 primary_term(&t, files)
1326 .with_message("this term is applied, but it is not a function"),
1327 secondary_alt(pos_opt, format!("({t}) ({arg})"), files)
1328 .with_message("applied here"),
1329 ])],
1330 EvalError::FieldMissing {
1331 id: name,
1332 field_names,
1333 operator,
1334 pos_record,
1335 pos_op,
1336 } => {
1337 let mut labels = Vec::new();
1338 let mut notes = Vec::new();
1339 let field = escape(name.as_ref());
1340
1341 if let Some(span) = pos_op.into_opt() {
1342 labels.push(
1343 Label::primary(span.src_id, span.start.to_usize()..span.end.to_usize())
1344 .with_message(format!("this requires the field `{field}` to exist")),
1345 );
1346 } else {
1347 notes.push(format!(
1348 "The field `{field}` was required by the operator {operator}"
1349 ));
1350 }
1351
1352 if let Some(span) = pos_record.as_opt_ref() {
1353 labels.push(
1354 secondary(span)
1355 .with_message(format!("this record lacks the field `{field}`")),
1356 );
1357 }
1358
1359 suggest::add_suggestion(&mut notes, &field_names, &name);
1360
1361 vec![Diagnostic::error()
1362 .with_message(format!("missing field `{field}`"))
1363 .with_labels(labels)
1364 .with_notes(notes)]
1365 }
1366 EvalError::NotEnoughArgs(count, op, span_opt) => {
1367 let mut labels = Vec::new();
1368 let mut notes = Vec::new();
1369 let msg = format!("{op} expects {count} arguments, but not enough were provided");
1370
1371 if let Some(span) = span_opt.into_opt() {
1372 labels.push(
1373 Label::primary(span.src_id, span.start.to_usize()..span.end.to_usize())
1374 .with_message(msg),
1375 );
1376 } else {
1377 notes.push(msg);
1378 }
1379
1380 vec![Diagnostic::error()
1381 .with_message("not enough arguments")
1382 .with_labels(labels)
1383 .with_notes(notes)]
1384 }
1385 EvalError::MergeIncompatibleArgs {
1386 left_arg,
1387 right_arg,
1388 merge_label,
1389 } => {
1390 let mut labels = vec![
1391 primary_term(&left_arg, files).with_message("cannot merge this expression"),
1392 primary_term(&right_arg, files).with_message("with this expression"),
1393 ];
1394
1395 let span_label = match merge_label.kind {
1396 MergeKind::Standard => "originally merged here",
1399 MergeKind::PiecewiseDef => "when combining the definitions of this field",
1403 };
1404
1405 if let Some(merge_label_span) = &merge_label.span {
1406 labels.push(secondary(merge_label_span).with_message(span_label));
1407 }
1408
1409 fn push_merge_note(notes: &mut Vec<String>, typ: &str) {
1410 notes.push(format!(
1411 "Both values are of type {typ} but they aren't equal."
1412 ));
1413 notes.push(format!("{typ} values can only be merged if they are equal"));
1414 }
1415
1416 let mut notes = vec![
1417 "Merge operands have the same merge priority but they can't \
1418 be combined."
1419 .to_owned(),
1420 ];
1421
                if let (Some(left_ty), Some(right_ty)) =
                    (left_arg.as_ref().type_of(), right_arg.as_ref().type_of())
                {
1425 match left_ty.as_str() {
1426 _ if left_ty != right_ty => {
1427 notes.push(format!(
1428 "One value is of type {left_ty} \
1429 while the other is of type {right_ty}"
1430 ));
1431 notes.push("Values of different types can't be merged".to_owned());
1432 }
1433 "String" | "Number" | "Bool" | "Array" | "EnumTag" => {
1434 push_merge_note(&mut notes, &left_ty);
1435 }
1436 "Function" | "MatchExpression" => {
1437 notes.push(
1438 "Both values are functions (or match expressions)".to_owned(),
1439 );
1440 notes.push(
1441 "Functions can never be merged with anything else, \
1442 even another function."
1443 .to_owned(),
1444 );
1445 }
1446 "EnumVariant" => {
1447 if let (
1448 Term::EnumVariant { tag: tag1, .. },
1449 Term::EnumVariant { tag: tag2, .. },
                        ) = (left_arg.as_ref(), right_arg.as_ref())
1451 {
1452 notes.push(format!(
1456 "Both values are enum variants, \
1457 but their tags differ (`'{tag1}` vs `'{tag2}`)"
1458 ));
1459 notes.push(
1460 "Enum variants can only be \
1461 merged if they have the same tag"
1462 .to_owned(),
1463 );
1464 } else {
1465 debug_assert!(false);
1468
1469 notes.push(
1470 "Primitive values (Number, String, EnumTag and Bool) \
1471 and arrays can only be merged if they are equal"
1472 .to_owned(),
1473 );
1474 notes.push("Enum variants must have the same tag.".to_owned());
1475 notes.push("Functions can never be merged.".to_owned());
1476 }
1477 }
1478 _ => {
1479 notes.push(
1481 "Primitive values (Number, String, EnumTag and Bool) \
1482 and arrays can only be merged if they are equal"
1483 .to_owned(),
1484 );
1485 notes.push("Enum variants must have the same tag.".to_owned());
1486 notes.push("Functions can never be merged.".to_owned());
1487 }
1488 }
1489 }
1490
1491 vec![Diagnostic::error()
1492 .with_message("non mergeable terms")
1493 .with_labels(labels)
1494 .with_notes(notes)]
1495 }
1496 EvalError::UnboundIdentifier(ident, span_opt) => vec![Diagnostic::error()
1497 .with_message(format!("unbound identifier `{ident}`"))
1498 .with_labels(vec![primary_alt(
1499 span_opt.into_opt(),
1500 ident.to_string(),
1501 files,
1502 )
1503 .with_message("this identifier is unbound")])],
1504 EvalError::InfiniteRecursion(_call_stack, span_opt) => {
1505 let labels = span_opt
1506 .as_opt_ref()
1507 .map(|span| vec![primary(span).with_message("recursive reference")])
1508 .unwrap_or_default();
1509
1510 vec![Diagnostic::error()
1511 .with_message("infinite recursion")
1512 .with_labels(labels)]
1513 }
1514 EvalError::Other(msg, span_opt) => {
1515 let labels = span_opt
1516 .as_opt_ref()
1517 .map(|span| vec![primary(span).with_message("here")])
1518 .unwrap_or_default();
1519
1520 vec![Diagnostic::error().with_message(msg).with_labels(labels)]
1521 }
1522 EvalError::InternalError(msg, span_opt) => {
1523 let labels = span_opt
1524 .as_opt_ref()
1525 .map(|span| vec![primary(span).with_message("here")])
1526 .unwrap_or_default();
1527
1528 vec![Diagnostic::error()
1529 .with_message(format!("internal error: {msg}"))
1530 .with_labels(labels)
1531 .with_notes(vec![String::from(INTERNAL_ERROR_MSG)])]
1532 }
1533 EvalError::SerializationError(err) => err.into_diagnostics(files),
1534 EvalError::DeserializationError(format, msg, span_opt) => {
1535 let labels = span_opt
1536 .as_opt_ref()
1537 .map(|span| vec![primary(span).with_message("here")])
1538 .unwrap_or_default();
1539
1540 vec![Diagnostic::error()
1541 .with_message(format!("{format} parse error: {msg}"))
1542 .with_labels(labels)]
1543 }
1544 EvalError::DeserializationErrorWithInner { format, inner, pos } => {
1545 let mut diags = inner.into_diagnostics(files);
1546 if let Some(diag) = diags.first_mut() {
1547 if let Some(span) = pos.as_opt_ref() {
1548 diag.labels
1549 .push(secondary(span).with_message("deserialized here"));
1550 }
1551 diag.notes.push(format!("while parsing {format}"));
1552 }
1553 diags
1554 }
1555 EvalError::IncomparableValues {
1556 eq_pos,
1557 left,
1558 right,
1559 } => {
1560 let mut labels = Vec::new();
1561
1562 if let Some(span) = eq_pos.as_opt_ref() {
1563 labels.push(primary(span).with_message("in this equality comparison"));
1564 }
1565
1566 let mut push_label = |prefix: &str, term: &RichTerm| -> String {
1569 let type_of = term
1570 .term
1571 .type_of()
1572 .unwrap_or_else(|| String::from("<unevaluated>"));
1573
1574 labels.push(
1575 secondary_term(term, files)
1576 .with_message(format!("{prefix} argument has type {type_of}")),
1577 );
1578
1579 type_of
1580 };
1581
1582 let left_type = push_label("left", &left);
1583 let right_type = push_label("right", &right);
1584
1585 vec![Diagnostic::error()
1586 .with_message("cannot compare values for equality")
1587 .with_labels(labels)
1588 .with_notes(vec![format!(
1589 "A {left_type} can't be meaningfully compared with a {right_type}"
1590 )])]
1591 }
1592 EvalError::NonExhaustiveEnumMatch {
1593 expected,
1594 found,
1595 pos,
1596 } => {
1597 let tag_list = expected
1598 .into_iter()
1599 .map(|tag| {
1600 RichTerm::from(Term::Enum(tag)).to_string()
1602 })
1603 .collect::<Vec<_>>()
1604 .join(", ");
1605
1606 let mut labels = Vec::new();
1607
1608 if let Some(span) = pos.into_opt() {
1609 labels.push(primary(&span).with_message("in this match expression"));
1610 }
1611
1612 labels.push(
1613 secondary_term(&found, files)
1614 .with_message("this value doesn't match any branch"),
1615 );
1616
1617 vec![Diagnostic::error()
1618 .with_message("unmatched pattern")
1619 .with_labels(labels)
1620 .with_notes(vec![
1621 format!("This match expression isn't exhaustive, matching only the following pattern(s): `{tag_list}`"),
1622 "But it has been applied to an argument which doesn't match any of those patterns".to_owned(),
1623 ])]
1624 }
1625 EvalError::NonExhaustiveMatch { value, pos } => {
1626 let mut labels = Vec::new();
1627
1628 if let Some(span) = pos.into_opt() {
1629 labels.push(primary(&span).with_message("in this match expression"));
1630 }
1631
1632 labels.push(
1633 secondary_term(&value, files)
1634 .with_message("this value doesn't match any branch"),
1635 );
1636
1637 vec![Diagnostic::error()
1638 .with_message("unmatched pattern")
1639 .with_labels(labels)]
1640 }
1641 EvalError::FailedDestructuring { value, pattern } => {
1642 let mut labels = Vec::new();
1643
1644 if let Some(span) = pattern.pos.into_opt() {
1645 labels.push(primary(&span).with_message("this pattern"));
1646 }
1647
1648 labels
1649 .push(secondary_term(&value, files).with_message("this value failed to match"));
1650
1651 vec![Diagnostic::error()
1652 .with_message("destructuring failed")
1653 .with_labels(labels)]
1654 }
1655 EvalError::IllegalPolymorphicTailAccess {
1656 action,
1657 label: contract_label,
1658 evaluated_arg,
1659 call_stack,
1660 } => blame_error::blame_diagnostics(
1661 files,
1662 contract_label,
1663 evaluated_arg,
1664 &call_stack,
1665 &format!(": {}", &action.message()),
1666 ),
1667 EvalError::UnaryPrimopTypeError {
1668 primop,
1669 expected,
1670 arg_pos,
1671 arg_evaluated,
1672 } => EvalError::TypeError {
1673 message: format!("{primop} expects its argument to be a {expected}"),
1674 expected,
1675 orig_pos: arg_pos,
1676 term: arg_evaluated,
1677 }
1678 .into_diagnostics(files),
1679 EvalError::NAryPrimopTypeError {
1680 primop,
1681 expected,
1682 arg_number,
1683 arg_pos,
1684 arg_evaluated,
1685 op_pos,
1686 } => {
1687 let minus_pos = if primop == "(-)"
1699 && arg_number == 1
1700 && arg_evaluated.term.type_of().as_deref() == Some("Function")
1701 {
1702 op_pos.into_opt()
1703 } else {
1704 None
1705 };
1706
1707 let diags = EvalError::TypeError {
1708 message: format!(
1709 "{primop} expects its {} argument to be a {expected}",
1710 cardinal(arg_number)
1711 ),
1712 expected,
1713 orig_pos: arg_pos,
1714 term: arg_evaluated,
1715 }
1716 .into_diagnostics(files);
1717
1718 if let Some(minus_pos) = minus_pos {
1719 let label = secondary(&minus_pos)
1720 .with_message("this expression was parsed as a binary subtraction");
1721 diags
1722 .into_iter()
1723 .map(|d| {
1724 d.with_label(label.clone())
1725 .with_note(
1726 "for unary negation, add parentheses: write `(-42)` instead of `-42`",
1727 )
1728 })
1729 .collect()
1730 } else {
1731 diags
1732 }
1733 }
1734 EvalError::QueryNonRecord { pos, id, value } => {
1735 let label = format!(
1736 "tried to query field `{}`, but the expression has type {}",
1737 id,
1738 value
1739 .term
1740 .type_of()
1741 .unwrap_or_else(|| String::from("<unevaluated>")),
1742 );
1743
1744 let label = if let Some(span) = pos.into_opt() {
1745 primary(&span).with_message(label)
1746 } else {
1747 primary_term(&value, files).with_message(label)
1748 };
1749
1750 vec![Diagnostic::error()
1751 .with_message("tried to query field of a non-record")
1752 .with_labels(vec![label])]
1753 }
1754 }
1755 }
1756}
1757
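/// Helpers dedicated to blame errors: building the title, labels, type path report and call
/// stack notes attached to a broken contract.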
1758mod blame_error {
1760 use codespan_reporting::diagnostic::{Diagnostic, Label};
1761
1762 use crate::{
1763 eval::callstack::CallStack,
1764 files::{FileId, Files},
1765 label::{
1766 self,
1767 ty_path::{self, PathSpan},
1768 Polarity,
1769 },
1770 position::TermPos,
1771 term::RichTerm,
1772 typ::Type,
1773 };
1774
1775 use super::{primary, secondary, secondary_term};
1776
1777 pub fn title(l: &label::Label) -> String {
1780 if ty_path::has_no_arrow(&l.path) {
1781 assert_eq!(l.polarity, Polarity::Positive);
1784 match l.field_name {
1785 Some(ident) => format!("contract broken by the value of `{ident}`"),
1786 None => "contract broken by a value".to_owned(),
1787 }
1788 } else if l.polarity == Polarity::Positive {
1789 match l.field_name {
1790 Some(ident) => format!("contract broken by the function `{ident}`"),
1791 None => "contract broken by a function".to_owned(),
1792 }
1793 } else {
1794 match l.field_name {
1795 Some(ident) => format!("contract broken by the caller of `{ident}`"),
1796 None => "contract broken by the caller".to_owned(),
1797 }
1798 }
1799 }
1800
1801 pub fn build_diagnostic_labels(
1803 evaluated_arg: Option<RichTerm>,
1804 blame_label: &label::Label,
1805 path_label: Label<FileId>,
1806 files: &mut Files,
1807 ) -> Vec<Label<FileId>> {
1808 let mut labels = vec![path_label];
1809
1810 if let Some(ref arg_pos) = blame_label.arg_pos.into_opt() {
1811 if !files.is_stdlib(arg_pos.src_id) {
1817 labels.push(primary(arg_pos).with_message("applied to this expression"));
1818 }
1819 }
1820
1821 if let Some(mut evaluated_arg) = evaluated_arg {
1825 match (evaluated_arg.pos, blame_label.arg_pos.as_opt_ref()) {
1826 (TermPos::Original(val_pos), _) if files.is_stdlib(val_pos.src_id) => {
1829 evaluated_arg.pos = TermPos::None;
1830 labels.push(
1831 secondary_term(&evaluated_arg, files)
1832 .with_message("evaluated to this value"),
1833 );
1834 }
1835 (TermPos::Original(ref val_pos), Some(arg_pos)) if val_pos == arg_pos => {}
1838 (TermPos::Original(ref val_pos), _) => {
1839 labels.push(secondary(val_pos).with_message("evaluated to this expression"))
1840 }
1841 (TermPos::Inherited(ref val_pos), Some(arg_pos)) if val_pos == arg_pos => {
1845 evaluated_arg.pos = TermPos::None;
1846 labels.push(
1847 secondary_term(&evaluated_arg, files)
1848 .with_message("evaluated to this value"),
1849 );
1850 }
1851 (TermPos::Inherited(ref val_pos), _) => {
1854 if !files.is_stdlib(val_pos.src_id) {
1855 labels
1856 .push(secondary(val_pos).with_message("evaluated to this expression"));
1857 }
1858
1859 evaluated_arg.pos = TermPos::None;
1860 labels.push(
1861 secondary_term(&evaluated_arg, files)
1862 .with_message("evaluated to this value"),
1863 );
1864 }
1865 (TermPos::None, _) => labels.push(
1866 secondary_term(&evaluated_arg, files).with_message("evaluated to this value"),
1867 ),
1868 }
1869 }
1870
1871 labels
1872 }
1873
1874 pub trait ExtendWithCallStack {
1875 fn extend_with_call_stack(&mut self, files: &Files, call_stack: &CallStack);
1876 }
1877
1878 impl ExtendWithCallStack for Vec<Diagnostic<FileId>> {
1879 fn extend_with_call_stack(&mut self, files: &Files, call_stack: &CallStack) {
1880 let (calls, curr_call) = call_stack.group_by_calls(files);
1881 let diag_curr_call = curr_call.map(|cdescr| {
1882 let name = cdescr
1883 .head
1884 .map(|ident| ident.to_string())
1885 .unwrap_or_else(|| String::from("<func>"));
1886 Diagnostic::note().with_labels(vec![
                    primary(&cdescr.span).with_message(format!("while calling {name}"))
1888 ])
1889 });
1890 let diags =
1891 calls.into_iter().enumerate().map(|(i, cdescr)| {
1892 let name = cdescr
1893 .head
1894 .map(|ident| ident.to_string())
1895 .unwrap_or_else(|| String::from("<func>"));
1896 Diagnostic::note().with_labels(vec![secondary(&cdescr.span)
1897 .with_message(format!("({}) calling {}", i + 1, name))])
1898 });
1899
1900 self.extend(diag_curr_call);
1901 self.extend(diags);
1902 }
1903 }
1904
1905 pub fn path_span(files: &mut Files, path: &[ty_path::Elem], ty: &Type) -> PathSpan {
1910 use crate::parser::{grammar::FixedTypeParser, lexer::Lexer, ErrorTolerantParserCompat};
1911
1912 ty_path::span(path.iter().peekable(), ty)
1913 .or_else(|| {
1914 let type_pprinted = format!("{ty}");
1915 let file_id = files.add(super::UNKNOWN_SOURCE_NAME, type_pprinted.clone());
1916
1917 let (ty_with_pos, _) = FixedTypeParser::new()
1918 .parse_tolerant_compat(file_id, Lexer::new(&type_pprinted))
1919 .unwrap();
1920
1921 ty_path::span(path.iter().peekable(), &ty_with_pos)
1922 })
1923 .expect(
1924 "path_span: we pretty-printed and parsed again the type of a label, \
1925 so it must have all of its position defined, but `ty_path::span` returned `None`",
1926 )
1927 }
1928
1929 pub fn report_ty_path(files: &mut Files, l: &label::Label) -> Label<FileId> {
1932 let PathSpan {
1933 span,
1934 last,
1935 last_arrow_elem,
1936 } = path_span(files, &l.path, &l.typ);
1937
1938 let msg = match (last, last_arrow_elem) {
1939 (Some(ty_path::Elem::Array), None) => "expected array element type",
1942 (Some(ty_path::Elem::Dict), None) => "expected dictionary field type",
1945 (Some(ty_path::Elem::Field(_)), None) => "expected field type",
1948 (Some(_), Some(ty_path::Elem::Codomain)) if ty_path::has_no_dom(&l.path) => {
1952 "expected return type"
1953 }
1954 (Some(_), Some(ty_path::Elem::Domain)) if l.polarity == Polarity::Positive => {
1958 "expected type of an argument of an inner call"
1959 }
1960 (Some(_), Some(ty_path::Elem::Codomain)) if l.polarity == Polarity::Positive => {
1965 "expected return type of a sub-function passed as an argument of an inner call"
1966 }
1967 (Some(_), Some(ty_path::Elem::Domain)) => {
1971 "expected type of the argument provided by the caller"
1972 }
1973 (Some(_), Some(ty_path::Elem::Codomain)) => {
1977 "expected return type of a function provided by the caller"
1978 }
            (None, Some(_)) => panic!(
                "blame error reporting: inconsistent path analysis, last_elem \
                 is None but last_arrow_elem is Some"
            ),
1984 _ => "expected type",
1985 };
1986
1987 secondary(&span).with_message(msg.to_owned())
1988 }
1989
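    /// Assembles the final diagnostics for a blame error from the label, the (optional)
    /// evaluated argument and the call stack. `msg_addendum` is appended to the main error
    /// message when non-empty.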
1990 pub fn blame_diagnostics(
1999 files: &mut Files,
2000 mut label: label::Label,
2001 evaluated_arg: Option<RichTerm>,
2002 call_stack: &CallStack,
2003 msg_addendum: &str,
2004 ) -> Vec<Diagnostic<FileId>> {
2005 use std::fmt::Write;
2006
2007 let mut diagnostics = Vec::new();
2008
2009 let mut contract_diagnostics = std::mem::take(&mut label.diagnostics)
2013 .into_iter()
2014 .rev()
2015 .filter(|diag| !label::ContractDiagnostic::is_empty(diag));
2016 let head_contract_diagnostic = contract_diagnostics.next();
2017
2018 let new_msg_block = "\n ";
2023 let mut msg = title(&label);
2024
2025 if !msg_addendum.is_empty() {
2026 write!(&mut msg, "{new_msg_block}{msg_addendum}").unwrap();
2028 }
2029
2030 if let Some(contract_msg) = head_contract_diagnostic
2031 .as_ref()
2032 .and_then(|diag| diag.message.as_ref())
2033 {
2034 write!(&mut msg, "{new_msg_block}{}", &super::escape(contract_msg)).unwrap();
2036 }
2037
2038 let contract_notes = head_contract_diagnostic
2039 .map(|diag| diag.notes)
2040 .unwrap_or_default();
2041 let path_label = report_ty_path(files, &label);
2042
2043 let labels = build_diagnostic_labels(evaluated_arg, &label, path_label, files);
2044
2045 if !contract_notes.is_empty() {
2049 diagnostics.push(
2050 Diagnostic::error()
2051 .with_message(msg)
2052 .with_labels(labels)
2053 .with_notes(contract_notes),
2054 );
2055 } else {
2056 diagnostics.push(Diagnostic::error().with_message(msg).with_labels(labels));
2057 }
2058
2059 for ctr_diag in contract_diagnostics {
2060 let mut msg = String::from("from a parent contract violation");
2061
2062 if let Some(msg_contract) = ctr_diag.message {
2063 msg.push_str(": ");
2064 msg.push_str(&super::escape(&msg_contract));
2065 }
2066
2067 diagnostics.push(
2068 Diagnostic::note()
2069 .with_message(msg)
2070 .with_notes(ctr_diag.notes),
2071 );
2072 }
2073
2074 if !ty_path::has_no_dom(&label.path) {
2075 diagnostics.extend_with_call_stack(files, call_stack);
2076 }
2077
2078 diagnostics
2079 }
2080}
2081
2082impl IntoDiagnostics for ParseError {
2083 fn into_diagnostics(self, files: &mut Files) -> Vec<Diagnostic<FileId>> {
2084 let diagnostic = match self {
2085 ParseError::UnexpectedEOF(file_id, _expected) => {
2086 let end = files.source_span(file_id).end;
2087 Diagnostic::error()
2088 .with_message(format!(
2089 "unexpected end of file when parsing {}",
2090 files.name(file_id).to_string_lossy()
2091 ))
2092 .with_labels(vec![primary(&RawSpan {
2093 start: end,
2094 end,
2095 src_id: file_id,
2096 })])
2097 }
2098 ParseError::UnexpectedToken(span, _expected) => Diagnostic::error()
2099 .with_message("unexpected token")
2100 .with_labels(vec![primary(&span)]),
2101 ParseError::ExtraToken(span) => Diagnostic::error()
2102 .with_message("superfluous unexpected token")
2103 .with_labels(vec![primary(&span)]),
2104 ParseError::UnmatchedCloseBrace(span) => Diagnostic::error()
2105 .with_message("unmatched closing brace \'}\'")
2106 .with_labels(vec![primary(&span)]),
2107 ParseError::InvalidEscapeSequence(span) => Diagnostic::error()
2108 .with_message("invalid escape sequence")
2109 .with_labels(vec![primary(&span)]),
2110 ParseError::InvalidAsciiEscapeCode(span) => Diagnostic::error()
2111 .with_message("invalid ascii escape code")
2112 .with_labels(vec![primary(&span)]),
2113 ParseError::StringDelimiterMismatch {
2114 opening_delimiter,
2115 closing_delimiter,
2116 } => Diagnostic::error()
2117 .with_message("string closing delimiter has too many `%`")
2118 .with_labels(vec![
2119 primary(&closing_delimiter).with_message("the closing delimiter"),
2120 secondary(&opening_delimiter).with_message("the opening delimiter"),
2121 ])
2122 .with_notes(vec![
2123 "A special string must be opened and closed with the same number of `%` \
2124 in the corresponding delimiters."
2125 .into(),
2126 "Try removing the superflous `%` in the closing delimiter".into(),
2127 ]),
2128 ParseError::ExternalFormatError(format, msg, span_opt) => {
2129 let labels = span_opt
2130 .as_ref()
2131 .map(|span| vec![primary(span)])
2132 .unwrap_or_default();
2133
2134 Diagnostic::error()
2135 .with_message(format!("{format} parse error: {msg}"))
2136 .with_labels(labels)
2137 }
2138 ParseError::UnboundTypeVariables(idents) => Diagnostic::error()
2139 .with_message(format!(
2140 "unbound type variable(s): {}",
2141 idents
2142 .iter()
2143 .map(|x| format!("`{x}`"))
2144 .collect::<Vec<_>>()
2145 .join(",")
2146 ))
2147 .with_labels(
2148 idents
2149 .into_iter()
2150 .filter_map(|id| id.pos.into_opt())
2151 .map(|span| primary(&span).with_message("this identifier is unbound"))
2152 .collect(),
2153 ),
2154 ParseError::InvalidRecordType {
2155 record_span,
2156 tail_span,
2157 cause,
2158 } => {
2159 let mut labels: Vec<_> = std::iter::once(primary(&record_span))
2160 .chain(cause.labels())
2161 .collect();
2162 let mut notes: Vec<_> = std::iter::once(
2163 "A record type is a literal composed only of type annotations, of the \
2164 form `<field>: <type>`."
2165 .into(),
2166 )
2167 .chain(cause.notes())
2168 .collect();
2169
2170 if let Some(tail_span) = tail_span {
2171 labels.push(secondary(&tail_span).with_message("tail"));
2172 notes.push(
2173 "This literal was interpreted as a record type because it has a \
2174 polymorphic tail; record values cannot have tails."
2175 .into(),
2176 );
2177 } else {
2178 notes.push(
2179 "This literal was interpreted as a record type because it has \
2180 fields with type annotations but no value definitions; to make \
2181 this a record value, assign values to its fields."
2182 .into(),
2183 );
2184 };
2185 Diagnostic::error()
2186 .with_message("invalid record literal")
2187 .with_labels(labels)
2188 .with_notes(notes)
2189 }
2190 ParseError::RecursiveLetPattern(span) => Diagnostic::error()
2191 .with_message("recursive destructuring is not supported")
2192 .with_labels(vec![primary(&span)])
2193 .with_notes(vec![
2194 "A destructuring let-binding can't be recursive. Try removing the `rec` \
2195 from `let rec`."
2196 .into(),
2197 "You can reference other fields of a record recursively \
2198 from within a field, so you might not need the recursive let."
2199 .into(),
2200 ]),
2201 ParseError::PatternInLetBlock(span) => Diagnostic::error()
2202 .with_message("destructuring patterns are not currently permitted in let blocks")
2203 .with_labels(vec![primary(&span)])
2204 .with_notes(vec!["Try re-writing your let block as nested `let ... in` expressions.".into()]),
2205 ParseError::TypeVariableKindMismatch { ty_var, span } => Diagnostic::error()
2206 .with_message(format!(
2207 "the type variable `{ty_var}` is used in conflicting ways"
2208 ))
2209 .with_labels(vec![primary(&span)])
2210 .with_notes(vec![
2211 "Type variables may be used either as types, polymorphic record tails, \
2212 or polymorphic enum tails."
2213 .into(),
2214 "Using the same type variable as more than one category at the same time \
2215 is forbidden."
2216 .into(),
2217 ]),
2218 ParseError::TypedFieldWithoutDefinition {
2219 field_span,
2220 annot_span,
2221 } => Diagnostic::error()
2222 .with_message("statically typed field without a definition")
2223 .with_labels(vec![
2224 primary(&field_span).with_message("this field doesn't have a definition"),
2225 secondary(&annot_span).with_message("but it has a type annotation"),
2226 ])
2227 .with_notes(vec![
2228 "A static type annotation must be attached to an expression but \
2229 this field doesn't have a definition."
2230 .into(),
2231 "Did you mean to use `|` instead of `:`, for example when defining a \
2232 record contract?"
2233 .into(),
2234 "Typed fields without definitions are only allowed inside \
2235 record types, but the enclosing record literal doesn't qualify as a \
2236 record type. Please refer to the manual for the defining conditions of a \
2237 record type."
2238 .into(),
2239 ]),
2240 ParseError::InterpolationInStaticPath {
2241 input: _,
2242 path_elem_span,
2243 } => Diagnostic::error()
2244 .with_message("string interpolation is forbidden within a query")
2245 .with_labels(vec![primary(&path_elem_span)])
2246 .with_notes(vec![
2247 "Field paths don't support string interpolation when querying \
2248 metadata."
2249 .into(),
2250 "Only identifiers and simple string literals are allowed.".into(),
2251 ]),
2252 ParseError::DuplicateIdentInRecordPattern { ident, prev_ident } => Diagnostic::error()
2253 .with_message(format!(
2254 "duplicated binding `{}` in record pattern",
2255 ident.label()
2256 ))
2257 .with_labels(vec![
2258 secondary(&prev_ident.pos.unwrap()).with_message("previous binding here"),
2259 primary(&ident.pos.unwrap()).with_message("duplicated binding here"),
2260 ]),
2261 ParseError::DuplicateIdentInLetBlock { ident, prev_ident } => Diagnostic::error()
2262 .with_message(format!(
2263 "duplicated binding `{}` in let block",
2264 ident.label()
2265 ))
2266 .with_labels(vec![
2267 secondary(&prev_ident.pos.unwrap()).with_message("previous binding here"),
2268 primary(&ident.pos.unwrap()).with_message("duplicated binding here"),
2269 ]),
2270 ParseError::DisabledFeature { feature, span } => Diagnostic::error()
2271 .with_message("interpreter compiled without required features")
2272 .with_labels(vec![primary(&span).with_message(format!(
2273 "this syntax is only supported with the `{feature}` feature enabled"
2274 ))])
2275 .with_notes(vec![format!(
2276 "Recompile nickel with `--features {}`",
2277 feature
2278 )]),
2279 ParseError::InvalidContract(span) => Diagnostic::error()
2280 .with_message("invalid contract expression")
2281 .with_labels(vec![primary(&span).with_message("this can't be used as a contract")])
2282 .with_notes(vec![
2283 "This expression is used as a contract as part of an annotation or a type expression."
2284 .to_owned(),
2285 "Only functions and records might be valid contracts".to_owned(),
2286 ]),
2287 ParseError::InvalidImportFormat{span} => Diagnostic::error()
2288 .with_message("unknown import format tag")
2289 .with_labels(vec![primary(&span)])
2290 .with_notes(vec![
2291 "Examples of valid format tags: 'Nickel, 'Json, 'Yaml, 'Toml, 'Text"
2292 .to_owned()
2293 ]),
2294 ParseError::UnknownSigilSelector { selector, span } => {
2295 Diagnostic::error()
2296 .with_message(format!("unknown sigil selector `{selector}`"))
2297 .with_labels(vec![primary(&span)])
2298 .with_note("Available selectors are currently: `env`")
2299 }
2300 ParseError::UnknownSigilAttribute { selector, attribute, span } => {
2301 Diagnostic::error()
2302 .with_message(format!("unknown sigil attribute `{attribute}`"))
2303 .with_labels(vec![primary(&span).with_message(format!("unknown attribute for sigil selector `{selector}`"))])
2304 .with_note(available_sigil_attrs_note(&selector))
2305 }
2306 ParseError::SigilExprMissingColon(span) => {
2307 Diagnostic::error()
2308 .with_message("missing sigil expression separator `:`")
2309 .with_labels(vec![primary(&span)])
2310 .with_notes(vec![
2311 "The CLI sigil expression syntax is `@<selector>:<argument>` or `@<selector>/<attribute>:<argument>`".to_owned(),
2312 "The provided sigil expression is missing the `:` separator.".to_owned(),
2313 ])
2314 }
2315 ParseError::MultipleFieldDecls { ident, include_span, other_span } => Diagnostic::error()
2316 .with_message(format!(
2317 "multiple declarations for included field `{ident}`",
2318 ))
2319 .with_labels(vec![
2320 primary(&include_span).with_message("included here"),
2321 secondary(&other_span).with_message("but also declared here"),
2322 ])
2323 .with_notes(vec![
2324 "Piecewise definitions involving an included field are currently not supported".to_owned()
2325 ]),
2326 };
2327
2328 vec![diagnostic]
2329 }
2330}
2331
2332fn available_sigil_attrs_note(selector: &str) -> String {
2335 format!("No attributes are available for sigil selector `{selector}`. Use the selector directly as in `@{selector}:<argument>`")
2336}
2337
2338impl IntoDiagnostics for TypecheckError {
2339 fn into_diagnostics(self, files: &mut Files) -> Vec<Diagnostic<FileId>> {
2340 self.borrow_error().into_diagnostics(files)
2341 }
2342}
2343
2344impl<'ast> IntoDiagnostics for &'_ TypecheckErrorData<'ast> {
2345 fn into_diagnostics(self, files: &mut Files) -> Vec<Diagnostic<FileId>> {
2346 fn mk_expr_label(span_opt: &TermPos) -> Vec<Label<FileId>> {
2347 span_opt
2348 .as_opt_ref()
2349 .map(|span| vec![primary(span).with_message("this expression")])
2350 .unwrap_or_default()
2351 }
2352
2353 fn mk_expected_msg<T: std::fmt::Display>(expected: &T) -> String {
2354 format!("Expected an expression of type `{expected}`")
2355 }
2356
2357 fn mk_inferred_msg<T: std::fmt::Display>(inferred: &T) -> String {
2358 format!("Found an expression of type `{inferred}`")
2359 }
2360
2361 match self {
2362 TypecheckErrorData::UnboundIdentifier(id) =>
2363 {
2365 EvalError::UnboundIdentifier(*id, id.pos).into_diagnostics(files)
2366 }
2367 TypecheckErrorData::MissingRow {
2368 id,
2369 expected,
2370 inferred,
2371 pos,
2372 } => vec![Diagnostic::error()
2373 .with_message(format!("type error: missing row `{id}`"))
2374 .with_labels(mk_expr_label(pos))
2375 .with_notes(vec![
2376 format!(
2377 "{}, which contains the field `{id}`",
2378 mk_expected_msg(expected)
2379 ),
2380 format!(
2381 "{}, which does not contain the field `{id}`",
2382 mk_inferred_msg(inferred)
2383 ),
2384 ])],
2385 TypecheckErrorData::MissingDynTail {
2386 expected,
2387 inferred,
2388 pos,
2389 } => vec![Diagnostic::error()
2390 .with_message(String::from("type error: missing dynamic tail `; Dyn`"))
2391 .with_labels(mk_expr_label(pos))
2392 .with_notes(vec![
2393 format!(
2394 "{}, which contains the tail `; Dyn`",
2395 mk_expected_msg(expected)
2396 ),
2397 format!(
2398 "{}, which does not contain the tail `; Dyn`",
2399 mk_inferred_msg(inferred)
2400 ),
2401 ])],
2402 TypecheckErrorData::ExtraRow {
2403 id,
2404 expected,
2405 inferred,
2406 pos,
2407 } => vec![Diagnostic::error()
2408 .with_message(format!("type error: extra row `{id}`"))
2409 .with_labels(mk_expr_label(pos))
2410 .with_notes(vec![
2411 format!(
2412 "{}, which does not contain the field `{id}`",
2413 mk_expected_msg(expected)
2414 ),
2415 format!(
2416 "{}, which contains the extra field `{id}`",
2417 mk_inferred_msg(inferred)
2418 ),
2419 ])],
2420 TypecheckErrorData::ExtraDynTail {
2421 expected,
2422 inferred,
2423 pos,
2424 } => vec![Diagnostic::error()
2425 .with_message(String::from("type error: extra dynamic tail `; Dyn`"))
2426 .with_labels(mk_expr_label(pos))
2427 .with_notes(vec![
2428 format!(
2429 "{}, which does not contain the tail `; Dyn`",
2430 mk_expected_msg(expected)
2431 ),
2432 format!(
2433 "{}, which contains the extra tail `; Dyn`",
2434 mk_inferred_msg(inferred)
2435 ),
2436 ])],
2437 TypecheckErrorData::UnboundTypeVariable(ident) => vec![Diagnostic::error()
2438 .with_message(format!("unbound type variable `{ident}`"))
2439 .with_labels(vec![primary_alt(
2440 ident.pos.into_opt(),
2441 ident.to_string(),
2442 files,
2443 )
2444 .with_message("this type variable is unbound")])
2445 .with_notes(vec![format!(
2446 "Did you forget to put a `forall {ident}.` somewhere in the enclosing type?"
2447 )])],
2448 TypecheckErrorData::TypeMismatch {
2449 expected,
2450 inferred,
2451 pos,
2452 } => {
2453 fn addendum<'ast>(ty: &Type<'ast>) -> &'static str {
2454 if ty.typ.is_contract() {
2455 " (a contract)"
2456 } else {
2457 ""
2458 }
2459 }
2460 let last_note = if expected.typ.is_contract() ^ inferred.typ.is_contract() {
2461 "Static types and contracts are not compatible"
2462 } else {
2463 "These types are not compatible"
2464 };
2465
2466 vec![Diagnostic::error()
2467 .with_message("incompatible types")
2468 .with_labels(mk_expr_label(pos))
2469 .with_notes(vec![
2470 format!("{}{}", mk_expected_msg(expected), addendum(expected),),
2471 format!("{}{}", mk_inferred_msg(inferred), addendum(inferred),),
2472 String::from(last_note),
2473 ])]
2474 }
2475 TypecheckErrorData::RecordRowMismatch {
2476 id,
2477 expected,
2478 inferred,
2479 cause: ref err,
2480 pos,
2481 } => {
2482 let mut err = err;
2483 let mut path = vec![id.ident()];
2489
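// Unroll nested record row mismatches to build the full path of the offending field
// (e.g. `foo.bar.baz`), so that the diagnostic reports the innermost mismatch.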
2490 while let TypecheckErrorData::RecordRowMismatch {
2491 id: id_next,
2492 cause: next,
2493 ..
2494 } = &**err
2495 {
2496 path.push(id_next.ident());
2497 err = next;
2498 }
2499
2500 let path_str: Vec<String> = path
2501 .clone()
2502 .into_iter()
2503 .map(|ident| format!("{ident}"))
2504 .collect();
2505 let field = path_str.join(".");
2506
2507 let mk_expected_row_msg = |field, ty| {
2508 format!("Expected an expression of a record type with the row `{field}: {ty}`")
2509 };
2510 let mk_inferred_row_msg = |field, ty| {
2511 format!("Found an expression of a record type with the row `{field}: {ty}`")
2512 };
2513
2514 let note1 = if let TypeF::Record(rrows) = &expected.typ {
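// Look up the type of the field at the reconstructed path; if it can't be found, fall back
// to the generic expected-type message.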
2517 match rrows.find_path(path.as_slice()) {
2518 Some(row) => mk_expected_row_msg(&field, row.typ),
2519 None => mk_expected_msg(&expected),
2520 }
2521 } else {
2522 mk_expected_msg(&expected)
2523 };
2524
2525 let note2 = if let TypeF::Record(rrows) = &inferred.typ {
2526 match rrows.find_path(path.as_slice()) {
2527 Some(row) => mk_inferred_row_msg(&field, row.typ),
2528 None => mk_inferred_msg(&inferred),
2529 }
2530 } else {
2531 mk_inferred_msg(inferred)
2532 };
2533
2534 let mut diags = vec![Diagnostic::error()
2535 .with_message("incompatible record row declarations")
2536 .with_labels(mk_expr_label(pos))
2537 .with_notes(vec![
2538 note1,
2539 note2,
2540 format!("Could not match the two declarations of `{field}`"),
2541 ])];
2542
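// Append the diagnostics of the underlying cause, prefixing each message with the field path
// for context.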
2543 diags.extend(err.into_diagnostics(files).into_iter().map(|mut diag| {
2547 diag.message = format!("while typing field `{}`: {}", field, diag.message);
2548 diag
2549 }));
2550 diags
2551 }
2552 TypecheckErrorData::EnumRowMismatch {
2553 id,
2554 expected,
2555 inferred,
2556 cause,
2557 pos,
2558 } => {
2559 let mk_expected_row_msg = |row| {
2560 format!("Expected an expression of an enum type with the enum row `{row}`")
2561 };
2562 let mk_inferred_row_msg =
2563 |row| format!("Found an expression of an enum type with the enum row `{row}`");
2564
2565 let note1 = if let TypeF::Enum(erows) = &expected.typ {
2568 if let Some(row) = erows.find_row(id.ident()) {
2569 mk_expected_row_msg(row)
2570 } else {
2571 mk_expected_msg(expected)
2572 }
2573 } else {
2574 mk_expected_msg(expected)
2575 };
2576
2577 let note2 = if let TypeF::Enum(erows) = &inferred.typ {
2578 if let Some(row) = erows.find_row(id.ident()) {
2579 mk_inferred_row_msg(row)
2580 } else {
2581 mk_inferred_msg(inferred)
2582 }
2583 } else {
2584 mk_inferred_msg(inferred)
2585 };
2586
2587 let mut diags = vec![Diagnostic::error()
2588 .with_message("incompatible enum row declarations")
2589 .with_labels(mk_expr_label(pos))
2590 .with_notes(vec![
2591 note1,
2592 note2,
2593 format!("Could not match the two declarations of `{id}`"),
2594 ])];
2595
2596 if let Some(err) = cause {
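// When a cause is attached, append its diagnostics, prefixing each message with the
// offending enum row for context.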
2600 diags.extend((*err).into_diagnostics(files).into_iter().map(|mut diag| {
2601 diag.message = format!("while typing enum row `{id}`: {}", diag.message);
2602 diag
2603 }));
2604 }
2605
2606 diags
2607 }
2608 TypecheckErrorData::RecordRowConflict {
2609 row,
2610 expected,
2611 inferred,
2612 pos,
2613 } => {
2614 let mut diags = Vec::new();
2615
2616 diags.push(
2617 Diagnostic::error()
2618 .with_message("multiple record row declarations")
2619 .with_labels(mk_expr_label(pos))
2620 .with_notes(vec![
2621 format!("Found an expression with the row `{row}`"),
2622 format!(
2623 "But this row appears inside another record type, \
2624 which already has a different declaration for the field `{}`",
2625 row.id
2626 ),
2627 String::from(
2628 "A type cannot have two conflicting declarations for the same row",
2629 ),
2630 ]),
2631 );
2632
2633 diags.push(
2634 Diagnostic::note()
2635 .with_message("while matching types")
2636 .with_notes(vec![
2637 format!("Expected type {expected}"),
2638 format!("With inferred type {inferred}"),
2639 ]),
2640 );
2641
2642 diags
2643 }
2644 TypecheckErrorData::EnumRowConflict {
2645 row,
2646 expected,
2647 inferred,
2648 pos,
2649 } => {
2650 let mut diags = Vec::new();
2651
2652 diags.push(
2653 Diagnostic::error()
2654 .with_message("multiple enum row declarations")
2655 .with_labels(mk_expr_label(pos))
2656 .with_notes(vec![
2657 format!("Found an expression with the row `{row}`"),
2658 format!(
2659 "But this row appears inside another enum type, \
2660 which already has a different declaration for the tag `{}`",
2661 row.id
2662 ),
2663 String::from(
2664 "A type cannot have two conflicting declarations for the same row",
2665 ),
2666 ]),
2667 );
2668
2669 diags.push(
2670 Diagnostic::note()
2671 .with_message("while matching types")
2672 .with_notes(vec![
2673 format!("Expected type {expected}"),
2674 format!("With inferred type {inferred}"),
2675 ]),
2676 );
2677
2678 diags
2679 }
2680 TypecheckErrorData::ArrowTypeMismatch {
2681 expected,
2682 inferred,
2683 type_path,
2684 cause,
2685 pos,
2686 } => {
2687 let PathSpan {
2688 span: expd_span, ..
2689 } = blame_error::path_span(files, type_path, &expected.to_mainline());
2690 let PathSpan {
2691 span: actual_span, ..
2692 } = blame_error::path_span(files, type_path, &inferred.to_mainline());
2693
2694 let mut labels = vec![
2695 secondary(&expd_span).with_message("this part of the expected type"),
2696 secondary(&actual_span)
2697 .with_message("does not match this part of the inferred type"),
2698 ];
2699 labels.extend(mk_expr_label(pos));
2700
2701 let mut diags = vec![Diagnostic::error()
2702 .with_message("function types mismatch")
2703 .with_labels(labels)
2704 .with_notes(vec![
2705 mk_expected_msg(expected),
2706 mk_inferred_msg(inferred),
2707 String::from("Could not match the two function types"),
2708 ])];
2709
2710 match &**cause {
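// A bare type mismatch as the cause isn't repeated: the expected and inferred function
// types are already shown in the notes above.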
2714 TypecheckErrorData::TypeMismatch { .. } => (),
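// Any other cause gets its diagnostics appended, with a prefix recalling that they arose
// while matching the two function types.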
2717 error => {
2718 diags.extend(error.into_diagnostics(files).into_iter().map(|mut diag| {
2719 diag.message =
2720 format!("while matching function types: {}", diag.message);
2721 diag
2722 }));
2723 }
2724 }
2725
2726 diags
2727 }
2728 TypecheckErrorData::ForallParametricityViolation {
2729 kind,
2730 tail,
2731 violating_type,
2732 pos,
2733 } => {
2734 let tail_kind = match kind {
2735 VarKindDiscriminant::Type => "type",
2736 VarKindDiscriminant::EnumRows => "enum tail",
2737 VarKindDiscriminant::RecordRows => "record tail",
2738 };
2739 vec![Diagnostic::error()
2740 .with_message(format!(
2741 "values of type `{violating_type}` are not guaranteed to be compatible \
2742 with polymorphic {tail_kind} `{tail}`"
2743 ))
2744 .with_labels(mk_expr_label(pos))
2745 .with_notes(vec![
2746 "Type variables introduced in a `forall` range over all possible types."
2747 .to_owned(),
2748 ])]
2749 }
2750 TypecheckErrorData::CtrTypeInTermPos { contract, pos } => {
2751 vec![Diagnostic::error()
2752 .with_message(
2753 "types containing user-defined contracts cannot be converted into contracts"
2754 )
2755 .with_labels(
2756 pos.as_opt_ref()
2757 .map(|span| {
2758 primary(span).with_message("This type (in contract position)")
2759 })
2760 .into_iter()
2761 .chain(contract.pos.as_opt_ref().map(|span| {
2762 secondary(span).with_message("contains this user-defined contract")
2763 }))
2764 .collect(),
2765 )]
2766 }
2767 TypecheckErrorData::VarLevelMismatch {
2768 type_var: constant,
2769 pos,
2770 } => {
2771 let mut labels = mk_expr_label(pos);
2772
2773 if let Some(span) = constant.pos.as_opt_ref() {
2774 labels.push(secondary(span).with_message("this polymorphic type variable"));
2775 }
2776
2777 vec![Diagnostic::error()
2778 .with_message("invalid polymorphic generalization".to_string())
2779 .with_labels(labels)
2780 .with_notes(vec![
2781 "While the type of this expression is still undetermined, it appears \
2782 indirectly in the type of another expression introduced before \
2783 the `forall` block."
2784 .into(),
2785 format!(
2786 "The type of this expression escapes the scope of the \
2787 corresponding `forall` and can't be generalized to the \
2788 polymorphic type variable `{constant}`"
2789 ),
2790 ])]
2791 }
2792 TypecheckErrorData::InhomogeneousRecord {
2793 pos,
2794 row_a: expected,
2795 row_b: inferred,
2796 } => {
2797 vec![Diagnostic::error()
2798 .with_message("incompatible types")
2799 .with_labels(mk_expr_label(pos))
2800 .with_notes(vec![
2801 "Expected a dictionary type".into(),
2802 format!("Found a record with a field of type {expected} and a field of type {inferred}"),
2803 "Records are compatible with dicts only if all their fields have the same type".into(),
2804 ])]
2805 }
2806 TypecheckErrorData::OrPatternVarsMismatch { var, pos } => {
2807 let mut labels = vec![primary_alt(var.pos.into_opt(), var.into_label(), files)
2808 .with_message("this variable must occur in all branches")];
2809
2810 if let Some(span) = pos.as_opt_ref() {
2811 labels.push(secondary(span).with_message("in this or-pattern"));
2812 }
2813
2814 vec![Diagnostic::error()
2815 .with_message("or-pattern variable mismatch".to_string())
2816 .with_labels(labels)
2817 .with_notes(vec![
2818 "All branches of an or-pattern must bind exactly the same set of variables"
2819 .into(),
2820 ])]
2821 }
2822 TypecheckErrorData::ImportError(err) => err.clone().into_diagnostics(files),
2828 }
2829 }
2830}
2831
2832impl IntoDiagnostics for ImportError {
2833 fn into_diagnostics(self, files: &mut Files) -> Vec<Diagnostic<FileId>> {
2834 match self {
2835 ImportError::IOError(path, error, span_opt) => {
2836 let labels = span_opt
2837 .as_opt_ref()
2838 .map(|span| vec![secondary(span).with_message("imported here")])
2839 .unwrap_or_default();
2840
2841 vec![Diagnostic::error()
2842 .with_message(format!("import of {path} failed: {error}"))
2843 .with_labels(labels)]
2844 }
2845 ImportError::ParseErrors(error, span_opt) => {
2846 let mut diagnostic: Vec<Diagnostic<FileId>> = error
2847 .errors
2848 .into_iter()
2849 .flat_map(|e| e.into_diagnostics(files))
2850 .collect();
2851
2852 if let Some(span) = span_opt.as_opt_ref() {
2853 diagnostic[0]
2854 .labels
2855 .push(secondary(span).with_message("imported here"));
2856 }
2857
2858 diagnostic
2859 }
2860 ImportError::MissingDependency {
2861 parent,
2862 missing,
2863 pos,
2864 } => {
2865 let labels = pos
2866 .as_opt_ref()
2867 .map(|span| vec![primary(span).with_message("imported here")])
2868 .unwrap_or_default();
2869 let msg = if let Some(parent_path) = parent.as_deref() {
2870 format!(
2871 "unknown package {missing}, imported from package {}",
2872 parent_path.display()
2873 )
2874 } else {
2875 format!("unknown package {missing}")
2876 };
2877
2878 vec![Diagnostic::error().with_message(msg).with_labels(labels)]
2879 }
2880 ImportError::NoPackageMap { pos } => {
2881 let labels = pos
2882 .as_opt_ref()
2883 .map(|span| vec![primary(span).with_message("imported here")])
2884 .unwrap_or_default();
2885 vec![Diagnostic::error()
2886 .with_message("tried to import from a package, but no package manifest found")
2887 .with_labels(labels)
2888 .with_notes(vec!["did you forget a --manifest-path argument?".to_owned()])]
2889 }
2890 }
2891 }
2892}
2893
2894impl IntoDiagnostics for ExportError {
2895 fn into_diagnostics(self, files: &mut Files) -> Vec<Diagnostic<FileId>> {
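// When the error occurred while exporting a nested field, lead the notes with the path of
// that field.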
2896 let mut notes = if !self.path.0.is_empty() {
2897 vec![format!("When exporting field `{}`", self.path)]
2898 } else {
2899 vec![]
2900 };
2901
2902 match self.data {
2903 ExportErrorData::NotAString(rt) => vec![Diagnostic::error()
2904 .with_message(format!(
2905 "raw export expects a String value, but got {}",
2906 rt.as_ref()
2907 .type_of()
2908 .unwrap_or_else(|| String::from("<unevaluated>"))
2909 ))
2910 .with_labels(vec![primary_term(&rt, files)])
2911 .with_notes(notes)],
2912 ExportErrorData::UnsupportedNull(format, rt) => vec![Diagnostic::error()
2913 .with_message(format!("{format} format doesn't support null values"))
2914 .with_labels(vec![primary_term(&rt, files)])
2915 .with_notes(notes)],
2916 ExportErrorData::NonSerializable(rt) => {
2917 notes.extend([
2918 "Nickel only supports serializing to and from strings, booleans, numbers, \
2919 enum tags, `null` (depending on the format), as well as records and arrays \
2920 of serializable values."
2921 .into(),
2922 "Functions and special values (such as contract labels) aren't \
2923 serializable."
2924 .into(),
2925 "If you want serialization to ignore a specific value, please use the \
2926 `not_exported` metadata."
2927 .into(),
2928 ]);
2929
2930 vec![Diagnostic::error()
2931 .with_message("non-serializable term")
2932 .with_labels(vec![primary_term(&rt, files)])
2933 .with_notes(notes)]
2934 }
2935 ExportErrorData::NoDocumentation(rt) => {
2936 notes.push("documentation can only be collected from a record.".to_owned());
2937
2938 vec![Diagnostic::error()
2939 .with_message("no documentation found")
2940 .with_labels(vec![primary_term(&rt, files)])
2941 .with_notes(notes)]
2942 }
2943 ExportErrorData::NumberOutOfRange { term, value } => {
2944 notes.push(format!(
2945 "Only numbers in the range {:e} to {:e} can be portably serialized",
2946 f64::MIN,
2947 f64::MAX
2948 ));
2949
2950 vec![Diagnostic::error()
2951 .with_message(format!(
2952 "The number {} is too large (in absolute value) to be serialized.",
2953 value.to_sci()
2954 ))
2955 .with_labels(vec![primary_term(&term, files)])
2956 .with_notes(notes)]
2957 }
2958 ExportErrorData::Other(msg) => {
2959 notes.push(msg);
2960
2961 vec![Diagnostic::error()
2962 .with_message("serialization failed")
2963 .with_notes(notes)]
2964 }
2965 }
2966 }
2967}
2968
2969impl IntoDiagnostics for IOError {
2970 fn into_diagnostics(self, _files: &mut Files) -> Vec<Diagnostic<FileId>> {
2971 match self {
2972 IOError(msg) => vec![Diagnostic::error().with_message(msg)],
2973 }
2974 }
2975}
2976
2977impl IntoDiagnostics for ReplError {
2978 fn into_diagnostics(self, files: &mut Files) -> Vec<Diagnostic<FileId>> {
2979 match self {
2980 ReplError::UnknownCommand(s) => vec![Diagnostic::error()
2981 .with_message(format!("unknown command `{s}`"))
2982 .with_notes(vec![String::from(
2983 "type `:?` or `:help` for a list of available commands.",
2984 )])],
2985 ReplError::InvalidQueryPath(err) => err.into_diagnostics(files),
2986 ReplError::MissingArg { cmd, msg_opt } => {
2987 let mut notes = msg_opt
2988 .as_ref()
2989 .map(|msg| vec![msg.clone()])
2990 .unwrap_or_default();
2991 notes.push(format!(
2992 "type `:? {cmd}` or `:help {cmd}` for more information."
2993 ));
2994
2995 vec![Diagnostic::error()
2996 .with_message(format!("{cmd}: missing argument"))
2997 .with_notes(notes)]
2998 }
2999 }
3000 }
3001}
3002
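// Deep-copies typechecking error data into another AST allocator, re-allocating every embedded
// type, row, contract and nested cause.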
3003impl CloneTo for TypecheckErrorData<'_> {
3004 type Data<'ast> = TypecheckErrorData<'ast>;
3005
3006 fn clone_to<'to>(data: Self::Data<'_>, dest: &'to AstAlloc) -> Self::Data<'to> {
3007 match data {
3008 TypecheckErrorData::UnboundIdentifier(loc_ident) => {
3009 TypecheckErrorData::UnboundIdentifier(loc_ident)
3010 }
3011 TypecheckErrorData::MissingRow {
3012 id,
3013 expected,
3014 inferred,
3015 pos,
3016 } => TypecheckErrorData::MissingRow {
3017 id,
3018 expected: Type::clone_to(expected, dest),
3019 inferred: Type::clone_to(inferred, dest),
3020 pos,
3021 },
3022 TypecheckErrorData::MissingDynTail {
3023 expected,
3024 inferred,
3025 pos,
3026 } => TypecheckErrorData::MissingDynTail {
3027 expected: Type::clone_to(expected, dest),
3028 inferred: Type::clone_to(inferred, dest),
3029 pos,
3030 },
3031 TypecheckErrorData::ExtraRow {
3032 id,
3033 expected,
3034 inferred,
3035 pos,
3036 } => TypecheckErrorData::ExtraRow {
3037 id,
3038 expected: Type::clone_to(expected, dest),
3039 inferred: Type::clone_to(inferred, dest),
3040 pos,
3041 },
3042 TypecheckErrorData::ExtraDynTail {
3043 expected,
3044 inferred,
3045 pos,
3046 } => TypecheckErrorData::ExtraDynTail {
3047 expected: Type::clone_to(expected, dest),
3048 inferred: Type::clone_to(inferred, dest),
3049 pos,
3050 },
3051 TypecheckErrorData::ForallParametricityViolation {
3052 kind,
3053 tail,
3054 violating_type,
3055 pos,
3056 } => TypecheckErrorData::ForallParametricityViolation {
3057 kind,
3058 tail: Type::clone_to(tail, dest),
3059 violating_type: Type::clone_to(violating_type, dest),
3060 pos,
3061 },
3062 TypecheckErrorData::UnboundTypeVariable(loc_ident) => {
3063 TypecheckErrorData::UnboundTypeVariable(loc_ident)
3064 }
3065 TypecheckErrorData::TypeMismatch {
3066 expected,
3067 inferred,
3068 pos,
3069 } => TypecheckErrorData::TypeMismatch {
3070 expected: Type::clone_to(expected, dest),
3071 inferred: Type::clone_to(inferred, dest),
3072 pos,
3073 },
3074 TypecheckErrorData::RecordRowMismatch {
3075 id,
3076 expected,
3077 inferred,
3078 cause,
3079 pos,
3080 } => TypecheckErrorData::RecordRowMismatch {
3081 id,
3082 expected: Type::clone_to(expected, dest),
3083 inferred: Type::clone_to(inferred, dest),
3084 cause: Box::new(TypecheckErrorData::clone_to(*cause, dest)),
3085 pos,
3086 },
3087 TypecheckErrorData::EnumRowMismatch {
3088 id,
3089 expected,
3090 inferred,
3091 cause,
3092 pos,
3093 } => TypecheckErrorData::EnumRowMismatch {
3094 id,
3095 expected: Type::clone_to(expected, dest),
3096 inferred: Type::clone_to(inferred, dest),
3097 cause: cause.map(|cause| Box::new(TypecheckErrorData::clone_to(*cause, dest))),
3098 pos,
3099 },
3100 TypecheckErrorData::RecordRowConflict {
3101 row,
3102 expected,
3103 inferred,
3104 pos,
3105 } => TypecheckErrorData::RecordRowConflict {
3106 row: RecordRow::clone_to(row, dest),
3107 expected: Type::clone_to(expected, dest),
3108 inferred: Type::clone_to(inferred, dest),
3109 pos,
3110 },
3111 TypecheckErrorData::EnumRowConflict {
3112 row,
3113 expected,
3114 inferred,
3115 pos,
3116 } => TypecheckErrorData::EnumRowConflict {
3117 row: EnumRow::clone_to(row, dest),
3118 expected: Type::clone_to(expected, dest),
3119 inferred: Type::clone_to(inferred, dest),
3120 pos,
3121 },
3122 TypecheckErrorData::ArrowTypeMismatch {
3123 expected,
3124 inferred,
3125 type_path,
3126 cause,
3127 pos,
3128 } => TypecheckErrorData::ArrowTypeMismatch {
3129 expected: Type::clone_to(expected, dest),
3130 inferred: Type::clone_to(inferred, dest),
3131 type_path,
3132 cause: Box::new(TypecheckErrorData::clone_to(*cause, dest)),
3133 pos,
3134 },
3135 TypecheckErrorData::CtrTypeInTermPos { contract, pos } => {
3136 TypecheckErrorData::CtrTypeInTermPos {
3137 contract: Ast::clone_to(contract, dest),
3138 pos,
3139 }
3140 }
3141 TypecheckErrorData::VarLevelMismatch { type_var, pos } => {
3142 TypecheckErrorData::VarLevelMismatch { type_var, pos }
3143 }
3144 TypecheckErrorData::InhomogeneousRecord { row_a, row_b, pos } => {
3145 TypecheckErrorData::InhomogeneousRecord {
3146 row_a: Type::clone_to(row_a, dest),
3147 row_b: Type::clone_to(row_b, dest),
3148 pos,
3149 }
3150 }
3151 TypecheckErrorData::OrPatternVarsMismatch { var, pos } => {
3152 TypecheckErrorData::OrPatternVarsMismatch { var, pos }
3153 }
3154 TypecheckErrorData::ImportError(import_error) => {
3155 TypecheckErrorData::ImportError(import_error)
3156 }
3157 }
3158 }
3159}