sway_core/lib.rs

#![recursion_limit = "256"]

#[macro_use]
pub mod error;

#[macro_use]
pub mod engine_threading;

pub mod abi_generation;
pub mod asm_generation;
mod asm_lang;
mod build_config;
pub mod compiler_generated;
mod concurrent_slab;
mod control_flow_analysis;
mod debug_generation;
pub mod decl_engine;
pub mod ir_generation;
pub mod language;
pub mod marker_traits;
mod metadata;
pub mod query_engine;
pub mod semantic_analysis;
pub mod source_map;
pub mod transform;
pub mod type_system;

use crate::ir_generation::check_function_purity;
use crate::query_engine::ModuleCacheEntry;
use crate::source_map::SourceMap;
pub use asm_generation::from_ir::compile_ir_context_to_finalized_asm;
use asm_generation::FinalizedAsm;
pub use asm_generation::{CompiledBytecode, FinalizedEntry};
pub use build_config::DbgGeneration;
pub use build_config::{
    Backtrace, BuildConfig, BuildTarget, LspConfig, OptLevel, PrintAsm, PrintIr,
};
use control_flow_analysis::ControlFlowGraph;
pub use debug_generation::write_dwarf;
use itertools::Itertools;
use metadata::MetadataManager;
use query_engine::{ModuleCacheKey, ModuleCommonInfo, ParsedModuleInfo, ProgramsCacheEntry};
use semantic_analysis::program::TypeCheckFailed;
use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use sway_ast::AttributeDecl;
use sway_error::convert_parse_tree_error::ConvertParseTreeError;
use sway_error::handler::{ErrorEmitted, Handler};
use sway_error::warning::{CompileWarning, Warning};
use sway_features::ExperimentalFeatures;
use sway_ir::{
    create_o1_pass_group, register_known_passes, Context, Kind, Module, PassGroup, PassManager,
    PrintPassesOpts, ARG_DEMOTION_NAME, CONST_DEMOTION_NAME, DCE_NAME, FN_DEDUP_DEBUG_PROFILE_NAME,
    FN_INLINE_NAME, GLOBALS_DCE_NAME, MEM2REG_NAME, MEMCPYOPT_NAME, MISC_DEMOTION_NAME,
    RET_DEMOTION_NAME, SIMPLIFY_CFG_NAME, SROA_NAME,
};
use sway_types::span::Source;
use sway_types::{SourceEngine, SourceLocation, Span};
use sway_utils::{time_expr, PerformanceData, PerformanceMetric};
use transform::{ArgsExpectValues, Attribute, AttributeKind, Attributes, ExpectedArgs};
use types::{CollectTypesMetadata, CollectTypesMetadataContext, LogId, TypeMetadata};

pub use semantic_analysis::namespace::{self, Namespace};
pub mod types;

use sway_error::error::CompileError;
use sway_types::{ident::Ident, span, Spanned};
pub use type_system::*;

pub use language::Programs;
use language::{lexed, parsed, ty, Visibility};
use transform::to_parsed_lang::{self, convert_module_kind};

pub mod fuel_prelude {
    pub use fuel_vm::{self, fuel_asm, fuel_crypto, fuel_tx, fuel_types};
}

pub use engine_threading::Engines;

/// Given an input [Source] and an optional [BuildConfig], parse the input into a [lexed::LexedProgram] and [parsed::ParseProgram].
///
/// # Example
/// ```ignore
/// # use sway_core::{parse, Engines};
/// # use sway_error::handler::Handler;
/// # use sway_features::ExperimentalFeatures;
/// # fn main() {
///     let (handler, engines) = (Handler::default(), Engines::default());
///     let input = "script; fn main() -> bool { true }";
///     let result = parse(input.into(), &handler, &engines, None, ExperimentalFeatures::default(), "my_package");
/// # }
/// ```
///
/// # Panics
/// Panics if the parser panics.
pub fn parse(
    src: Source,
    handler: &Handler,
    engines: &Engines,
    config: Option<&BuildConfig>,
    experimental: ExperimentalFeatures,
    package_name: &str,
) -> Result<(lexed::LexedProgram, parsed::ParseProgram), ErrorEmitted> {
    match config {
        None => parse_in_memory(
            handler,
            engines,
            src,
            experimental,
            DbgGeneration::None,
            package_name,
        ),
        // When a `BuildConfig` is given,
        // the module source may declare `mod`s that must be parsed from other files.
        Some(config) => parse_module_tree(
            handler,
            engines,
            src,
            config.canonical_root_module(),
            None,
            config.build_target,
            config.dbg_generation,
            config.include_tests,
            experimental,
            config.lsp_mode.as_ref(),
            package_name,
        )
        .map(
            |ParsedModuleTree {
                 tree_type: kind,
                 lexed_module,
                 parse_module,
             }| {
                let lexed = lexed::LexedProgram {
                    kind,
                    root: lexed_module,
                };
                let parsed = parsed::ParseProgram {
                    kind,
                    root: parse_module,
                };
                (lexed, parsed)
            },
        ),
    }
}

/// Parses the tree kind in the input provided.
///
/// This will lex the entire input, but parses only the module kind.
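///
/// # Example
/// A minimal usage sketch (the input literal and `Handler` setup are illustrative, not taken from a real build):
/// ```ignore
/// # use sway_core::parse_tree_type;
/// # use sway_error::handler::Handler;
/// let handler = Handler::default();
/// let tree_type = parse_tree_type(&handler, "library;".into());
/// assert!(tree_type.is_ok());
/// ```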
pub fn parse_tree_type(handler: &Handler, src: Source) -> Result<parsed::TreeType, ErrorEmitted> {
    // Parsing only the module kind does not depend on any
    // experimental feature. So, we can just pass the default
    // experimental features here.
    let experimental = ExperimentalFeatures::default();
    sway_parse::parse_module_kind(handler, src, None, experimental)
        .map(|kind| convert_module_kind(&kind))
}

/// Converts `attribute_decls` to [Attributes].
///
/// This function always returns [Attributes], even if the attributes are erroneous.
/// Errors and warnings are returned via the [Handler]. Callers should ignore any errors
/// in attributes and proceed with the compilation. [Attributes] are tolerant of erroneous
/// attributes and follow the last-wins principle, which allows annotated elements to
/// proceed with compilation. After their successful compilation, callers still need to
/// inspect the [Handler] and emit errors if there were any.
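///
/// A minimal usage sketch, mirroring how this function is called elsewhere in this module
/// (the surrounding `handler` and `module` values are assumed to exist):
/// ```ignore
/// let (attributes_handler, attributes) = attr_decls_to_attributes(
///     &module.attributes,
///     |attr| attr.can_annotate_module_kind(),
///     module.value.kind.friendly_name(),
/// );
/// // Proceed with `attributes`, then surface any attribute errors afterwards.
/// let attributes_error_emitted = handler.append(attributes_handler);
/// ```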
pub(crate) fn attr_decls_to_attributes(
    attribute_decls: &[AttributeDecl],
    can_annotate: impl Fn(&Attribute) -> bool,
    target_friendly_name: &'static str,
) -> (Handler, Attributes) {
    let handler = Handler::default();
    // Check if an attribute is an unsupported inner attribute (`#!`).
    // Note that we do this before creating the flattened `attributes`,
    // because we want the error to point at the `#!` token.
    // Note also that we still include those attributes in `attributes`.
    // There are cases, e.g. LSP, where having the complete list of attributes is needed.
    // In the analysis below, though, we ignore inner attributes,
    // meaning we do not check their content.
    for attr_decl in attribute_decls
        .iter()
        .filter(|attr| !attr.is_doc_comment() && attr.is_inner())
    {
        handler.emit_err(CompileError::Unimplemented {
            span: attr_decl.hash_kind.span(),
            feature: "Using inner attributes (`#!`)".to_string(),
            help: vec![],
        });
    }

    let attributes = Attributes::new(attribute_decls);

    // Check for unknown attributes.
    for attribute in attributes.unknown().filter(|attr| attr.is_outer()) {
        handler.emit_warn(CompileWarning {
            span: attribute.name.span(),
            warning_content: Warning::UnknownAttribute {
                attribute: (&attribute.name).into(),
                known_attributes: attributes.known_attribute_names(),
            },
        });
    }

    // Check for attributes annotating invalid targets.
    for ((attribute_kind, _attribute_direction), mut attributes) in &attributes
        .all()
        .filter(|attr| attr.is_doc_comment() || attr.is_outer())
        .chunk_by(|attr| (attr.kind, attr.direction))
    {
        // For doc comments, we want to show the error on a complete doc comment,
        // and not on every documentation line.
        if attribute_kind == AttributeKind::DocComment {
            let first_doc_line = attributes
                .next()
                .expect("`chunk_by` guarantees existence of at least one element in the chunk");
            if !can_annotate(first_doc_line) {
                let last_doc_line = match attributes.last() {
                    Some(last_attr) => last_attr,
                    // There is only one doc line in the complete doc comment.
                    None => first_doc_line,
                };
                handler.emit_err(
                    ConvertParseTreeError::InvalidAttributeTarget {
                        span: Span::join(
                            first_doc_line.span.clone(),
                            &last_doc_line.span.start_span(),
                        ),
                        attribute: first_doc_line.name.clone(),
                        target_friendly_name,
                        can_only_annotate_help: first_doc_line
                            .can_only_annotate_help(target_friendly_name),
                    }
                    .into(),
                );
            }
        } else {
            // For other attributes, the error is shown for every individual attribute.
            for attribute in attributes {
                if !can_annotate(attribute) {
                    handler.emit_err(
                        ConvertParseTreeError::InvalidAttributeTarget {
                            span: attribute.name.span(),
                            attribute: attribute.name.clone(),
                            target_friendly_name,
                            can_only_annotate_help: attribute
                                .can_only_annotate_help(target_friendly_name),
                        }
                        .into(),
                    );
                }
            }
        }
    }

    // In all the subsequent checks we check only non-doc-comment attributes,
    // and only those that didn't produce invalid-target or unsupported-inner-attribute errors.
    let should_be_checked =
        |attr: &&Attribute| !attr.is_doc_comment() && attr.is_outer() && can_annotate(attr);

    // Check for attributes multiplicity.
    for (_attribute_kind, attributes_of_kind) in
        attributes.all_by_kind(|attr| should_be_checked(attr) && !attr.kind.allows_multiple())
    {
        if attributes_of_kind.len() > 1 {
            let (last_attribute, previous_attributes) = attributes_of_kind
                .split_last()
                .expect("`attributes_of_kind` has more than one element");
            handler.emit_err(
                ConvertParseTreeError::InvalidAttributeMultiplicity {
                    last_occurrence: (&last_attribute.name).into(),
                    previous_occurrences: previous_attributes
                        .iter()
                        .map(|attr| (&attr.name).into())
                        .collect(),
                }
                .into(),
            );
        }
    }

    // Check for arguments multiplicity.
    // For attributes that can be applied only once but are applied several times
    // we will still check arguments in every attribute occurrence.
    for attribute in attributes.all().filter(should_be_checked) {
        let _ = attribute.check_args_multiplicity(&handler);
    }

    // Check for expected arguments.
    // For attributes that can be applied only once but are applied more than once
    // we will check arguments of every attribute occurrence.
    // If an attribute does not expect any arguments, we will not check them,
    // but emit only the above error about invalid number of arguments.
    for attribute in attributes
        .all()
        .filter(|attr| should_be_checked(attr) && attr.can_have_arguments())
    {
        match attribute.expected_args() {
            ExpectedArgs::None => unreachable!("`attribute` can have arguments"),
            ExpectedArgs::Any => {}
            ExpectedArgs::MustBeIn(expected_args) => {
                for arg in attribute.args.iter() {
                    if !expected_args.contains(&arg.name.as_str()) {
                        handler.emit_err(
                            ConvertParseTreeError::InvalidAttributeArg {
                                attribute: attribute.name.clone(),
                                arg: (&arg.name).into(),
                                expected_args: expected_args.clone(),
                            }
                            .into(),
                        );
                    }
                }
            }
            ExpectedArgs::ShouldBeIn(expected_args) => {
                for arg in attribute.args.iter() {
                    if !expected_args.contains(&arg.name.as_str()) {
                        handler.emit_warn(CompileWarning {
                            span: arg.name.span(),
                            warning_content: Warning::UnknownAttributeArg {
                                attribute: attribute.name.clone(),
                                arg: (&arg.name).into(),
                                expected_args: expected_args.clone(),
                            },
                        });
                    }
                }
            }
        }
    }

    // Check for expected argument values.
    // We use here the same logic for what to check, as in the above check
    // for expected arguments.
    for attribute in attributes
        .all()
        .filter(|attr| should_be_checked(attr) && attr.can_have_arguments())
    {
        // In addition, if an argument **must** be in expected args but is not,
        // we will not check it, but only emit the error above.
        // But if it **should** be in expected args and is not,
        // we still impose on it the expectation coming from its attribute.
        fn check_value_expected(handler: &Handler, attribute: &Attribute, is_value_expected: bool) {
            for arg in attribute.args.iter() {
                if let ExpectedArgs::MustBeIn(expected_args) = attribute.expected_args() {
                    if !expected_args.contains(&arg.name.as_str()) {
                        continue;
                    }
                }

                if (is_value_expected && arg.value.is_none())
                    || (!is_value_expected && arg.value.is_some())
                {
                    handler.emit_err(
                        ConvertParseTreeError::InvalidAttributeArgExpectsValue {
                            attribute: attribute.name.clone(),
                            arg: (&arg.name).into(),
                            value_span: arg.value.as_ref().map(|literal| literal.span()),
                        }
                        .into(),
                    );
                }
            }
        }

        match attribute.args_expect_values() {
            ArgsExpectValues::Yes => check_value_expected(&handler, attribute, true),
            ArgsExpectValues::No => check_value_expected(&handler, attribute, false),
            ArgsExpectValues::Maybe => {}
        }
    }

    (handler, attributes)
}

/// When no `BuildConfig` is given, we're assumed to be parsing in-memory with no submodules.
fn parse_in_memory(
    handler: &Handler,
    engines: &Engines,
    src: Source,
    experimental: ExperimentalFeatures,
    dbg_generation: DbgGeneration,
    package_name: &str,
) -> Result<(lexed::LexedProgram, parsed::ParseProgram), ErrorEmitted> {
    let mut hasher = DefaultHasher::new();
    src.text.hash(&mut hasher);
    let hash = hasher.finish();
    let module = sway_parse::parse_file(handler, src, None, experimental)?;

    let (attributes_handler, attributes) = attr_decls_to_attributes(
        &module.attributes,
        |attr| attr.can_annotate_module_kind(),
        module.value.kind.friendly_name(),
    );
    let attributes_error_emitted = handler.append(attributes_handler);

    let (kind, tree) = to_parsed_lang::convert_parse_tree(
        &mut to_parsed_lang::Context::new(
            BuildTarget::EVM,
            dbg_generation,
            experimental,
            package_name,
        ),
        handler,
        engines,
        module.value.clone(),
    )?;

    match attributes_error_emitted {
        Some(err) => Err(err),
        None => {
            let root = parsed::ParseModule {
                span: span::Span::dummy(),
                module_kind_span: module.value.kind.span(),
                module_eval_order: vec![],
                tree,
                submodules: vec![],
                attributes,
                hash,
            };
            let lexed_program = lexed::LexedProgram::new(
                kind,
                lexed::LexedModule {
                    tree: module,
                    submodules: vec![],
                },
            );
            Ok((lexed_program, parsed::ParseProgram { kind, root }))
        }
    }
}

pub struct Submodule {
    name: Ident,
    path: Arc<PathBuf>,
    lexed: lexed::LexedSubmodule,
    parsed: parsed::ParseSubmodule,
}

/// Contains the lexed and parsed submodules 'deps' of a module.
pub type Submodules = Vec<Submodule>;

/// Parse all dependencies `deps` as submodules.
#[allow(clippy::too_many_arguments)]
fn parse_submodules(
    handler: &Handler,
    engines: &Engines,
    module_name: Option<&str>,
    module: &sway_ast::Module,
    module_dir: &Path,
    build_target: BuildTarget,
    dbg_generation: DbgGeneration,
    include_tests: bool,
    experimental: ExperimentalFeatures,
    lsp_mode: Option<&LspConfig>,
    package_name: &str,
) -> Submodules {
    // Assume the happy path, so there'll be as many submodules as dependencies, but no more.
    let mut submods = Vec::with_capacity(module.submodules().count());
    module.submodules().for_each(|submod| {
        // Read the source code from the dependency.
        // If we cannot, record an error, but continue with other files.
        let submod_path = Arc::new(module_path(module_dir, module_name, submod));
        let submod_src: Source = match std::fs::read_to_string(&*submod_path) {
            Ok(s) => s.as_str().into(),
            Err(e) => {
                handler.emit_err(CompileError::FileCouldNotBeRead {
                    span: submod.name.span(),
                    file_path: submod_path.to_string_lossy().to_string(),
                    stringified_error: e.to_string(),
                });
                return;
            }
        };
        if let Ok(ParsedModuleTree {
            tree_type: kind,
            lexed_module,
            parse_module,
        }) = parse_module_tree(
            handler,
            engines,
            submod_src.clone(),
            submod_path.clone(),
            Some(submod.name.as_str()),
            build_target,
            dbg_generation,
            include_tests,
            experimental,
            lsp_mode,
            package_name,
        ) {
            if !matches!(kind, parsed::TreeType::Library) {
                let source_id = engines.se().get_source_id(submod_path.as_ref());
                let span = span::Span::new(submod_src, 0, 0, Some(source_id)).unwrap();
                handler.emit_err(CompileError::ImportMustBeLibrary { span });
                return;
            }

            let parse_submodule = parsed::ParseSubmodule {
                module: parse_module,
                visibility: match submod.visibility {
                    Some(..) => Visibility::Public,
                    None => Visibility::Private,
                },
                mod_name_span: submod.name.span(),
            };
            let lexed_submodule = lexed::LexedSubmodule {
                module: lexed_module,
            };
            let submodule = Submodule {
                name: submod.name.clone(),
                path: submod_path,
                lexed: lexed_submodule,
                parsed: parse_submodule,
            };
            submods.push(submodule);
        }
    });
    submods
}

pub type SourceHash = u64;

#[derive(Clone, Debug)]
pub struct ParsedModuleTree {
    pub tree_type: parsed::TreeType,
    pub lexed_module: lexed::LexedModule,
    pub parse_module: parsed::ParseModule,
}

/// Given the source of the module along with its path,
/// parse this module including all of its submodules.
#[allow(clippy::too_many_arguments)]
fn parse_module_tree(
    handler: &Handler,
    engines: &Engines,
    src: Source,
    path: Arc<PathBuf>,
    module_name: Option<&str>,
    build_target: BuildTarget,
    dbg_generation: DbgGeneration,
    include_tests: bool,
    experimental: ExperimentalFeatures,
    lsp_mode: Option<&LspConfig>,
    package_name: &str,
) -> Result<ParsedModuleTree, ErrorEmitted> {
    let query_engine = engines.qe();

    // Parse this module first.
    let module_dir = path.parent().expect("module file has no parent directory");
    let source_id = engines.se().get_source_id(&path.clone());
    let module = sway_parse::parse_file(handler, src.clone(), Some(source_id), experimental)?;

    // Parse all submodules before converting to the `ParseTree`.
    // This always recovers on parse errors for the file itself by skipping that file.
    let submodules = parse_submodules(
        handler,
        engines,
        module_name,
        &module.value,
        module_dir,
        build_target,
        dbg_generation,
        include_tests,
        experimental,
        lsp_mode,
        package_name,
    );

    let (attributes_handler, attributes) = attr_decls_to_attributes(
        &module.attributes,
        |attr| attr.can_annotate_module_kind(),
        module.value.kind.friendly_name(),
    );
    let attributes_error_emitted = handler.append(attributes_handler);

    // Convert from the raw parsed module to the `ParseTree` ready for type-check.
    let (kind, tree) = to_parsed_lang::convert_parse_tree(
        &mut to_parsed_lang::Context::new(build_target, dbg_generation, experimental, package_name),
        handler,
        engines,
        module.value.clone(),
    )?;

    if let Some(err) = attributes_error_emitted {
        return Err(err);
    }

    let module_kind_span = module.value.kind.span();
    let lexed_submodules = submodules
        .iter()
        .map(|s| (s.name.clone(), s.lexed.clone()))
        .collect::<Vec<_>>();
    let lexed = lexed::LexedModule {
        tree: module,
        submodules: lexed_submodules,
    };

    let mut hasher = DefaultHasher::new();
    src.text.hash(&mut hasher);
    let hash = hasher.finish();

    let parsed_submodules = submodules
        .iter()
        .map(|s| (s.name.clone(), s.parsed.clone()))
        .collect::<Vec<_>>();
    let parsed = parsed::ParseModule {
        span: span::Span::new(src, 0, 0, Some(source_id)).unwrap(),
        module_kind_span,
        module_eval_order: vec![],
        tree,
        submodules: parsed_submodules,
        attributes,
        hash,
    };

    // Let's prime the cache with the module dependency and hash data.
    let modified_time = std::fs::metadata(path.as_path())
        .ok()
        .and_then(|m| m.modified().ok());
    let dependencies = submodules.into_iter().map(|s| s.path).collect::<Vec<_>>();
    let version = lsp_mode
        .and_then(|lsp| lsp.file_versions.get(path.as_ref()).copied())
        .unwrap_or(None);

    let common_info = ModuleCommonInfo {
        path: path.clone(),
        include_tests,
        dependencies,
        hash,
    };
    let parsed_info = ParsedModuleInfo {
        modified_time,
        version,
    };
    let cache_entry = ModuleCacheEntry::new(common_info, parsed_info);
    query_engine.update_or_insert_parsed_module_cache_entry(cache_entry);

    Ok(ParsedModuleTree {
        tree_type: kind,
        lexed_module: lexed,
        parse_module: parsed,
    })
}

/// Checks if the typed module cache for a given path is up to date.
///
/// This function determines whether the cached typed representation of a module
/// is still valid based on file versions and dependencies.
///
/// Note: This functionality is currently only supported when the compiler is
/// initiated from the language server.
pub(crate) fn is_ty_module_cache_up_to_date(
    engines: &Engines,
    path: &Arc<PathBuf>,
    include_tests: bool,
    build_config: Option<&BuildConfig>,
) -> bool {
    let cache = engines.qe().module_cache.read();
    let key = ModuleCacheKey::new(path.clone(), include_tests);
    cache.get(&key).is_some_and(|entry| {
        entry.typed.as_ref().is_some_and(|typed| {
            // Check if the cache is up to date based on file versions
            let cache_up_to_date = build_config
                .and_then(|x| x.lsp_mode.as_ref())
                .and_then(|lsp| lsp.file_versions.get(path.as_ref()))
                .is_none_or(|version| {
                    version.is_none_or(|v| typed.version.is_some_and(|tv| v <= tv))
                });

            // If the cache is up to date, recursively check all dependencies
            cache_up_to_date
                && entry.common.dependencies.iter().all(|dep_path| {
                    is_ty_module_cache_up_to_date(engines, dep_path, include_tests, build_config)
                })
        })
    })
}

/// Checks if the parsed module cache for a given path is up to date.
///
/// This function determines whether the cached parsed representation of a module
/// is still valid based on file versions, modification times, or content hashes.
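///
/// A usage sketch, mirroring the check performed in [compile_to_ast]
/// (the `engines`, `path`, `include_tests`, and `build_config` values are assumed to come from the caller):
/// ```ignore
/// if is_parse_module_cache_up_to_date(engines, &path, include_tests, build_config) {
///     // Reuse the cached programs instead of re-parsing the module tree.
/// }
/// ```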
pub(crate) fn is_parse_module_cache_up_to_date(
    engines: &Engines,
    path: &Arc<PathBuf>,
    include_tests: bool,
    build_config: Option<&BuildConfig>,
) -> bool {
    let cache = engines.qe().module_cache.read();
    let key = ModuleCacheKey::new(path.clone(), include_tests);
    cache.get(&key).is_some_and(|entry| {
        // Determine if the cached dependency information is still valid
        let cache_up_to_date = build_config
            .and_then(|x| x.lsp_mode.as_ref())
            .and_then(|lsp| lsp.file_versions.get(path.as_ref()))
            .map_or_else(
                || {
                    // If LSP mode is not active or file version is unavailable, fall back to filesystem checks.
                    let modified_time = std::fs::metadata(path.as_path())
                        .ok()
                        .and_then(|m| m.modified().ok());
                    // Check if modification time matches, or if not, compare file content hash
                    entry.parsed.modified_time == modified_time || {
                        let src = std::fs::read_to_string(path.as_path()).unwrap();
                        let mut hasher = DefaultHasher::new();
                        src.hash(&mut hasher);
                        hasher.finish() == entry.common.hash
                    }
                },
                |version| {
                    // Determine if the parse cache is up-to-date in LSP mode:
                    // - If there's no LSP file version (version is None), consider the cache up-to-date.
                    // - If there is an LSP file version:
                    //   - If there's no cached version (entry.parsed.version is None), the cache is outdated.
                    //   - If there's a cached version, compare them: cache is up-to-date if the LSP file version
                    //     is not greater than the cached version.
                    version.is_none_or(|v| entry.parsed.version.is_some_and(|ev| v <= ev))
                },
            );

        // If the cache is up to date, recursively check all dependencies to make sure
        // they have not been modified either.
        cache_up_to_date
            && entry.common.dependencies.iter().all(|dep_path| {
                is_parse_module_cache_up_to_date(engines, dep_path, include_tests, build_config)
            })
    })
}

fn module_path(
    parent_module_dir: &Path,
    parent_module_name: Option<&str>,
    submod: &sway_ast::Submodule,
) -> PathBuf {
    if let Some(parent_name) = parent_module_name {
        parent_module_dir
            .join(parent_name)
            .join(submod.name.to_string())
            .with_extension(sway_types::constants::DEFAULT_FILE_EXTENSION)
    } else {
        // top level module
        parent_module_dir
            .join(submod.name.to_string())
            .with_extension(sway_types::constants::DEFAULT_FILE_EXTENSION)
    }
}

pub fn build_module_dep_graph(
    handler: &Handler,
    parse_module: &mut parsed::ParseModule,
) -> Result<(), ErrorEmitted> {
    let module_dep_graph = ty::TyModule::build_dep_graph(handler, parse_module)?;
    parse_module.module_eval_order = module_dep_graph.compute_order(handler)?;

    for (_, submodule) in &mut parse_module.submodules {
        build_module_dep_graph(handler, &mut submodule.module)?;
    }
    Ok(())
}

/// A possible occurrence of a `panic` expression that is located in code at [PanicOccurrence::loc].
///
/// Note that a single `panic` expression can have multiple [PanicOccurrence]s related to it.
///
/// For example:
/// - `panic "Some message.";` will have just a single occurrence, with `msg` containing the message.
/// - `panic some_value_of_a_concrete_type;` will have just a single occurrence, with `log_id` containing the [LogId] of the concrete type.
/// - `panic some_value_of_a_generic_type;` will have multiple occurrences, one with `log_id` for every monomorphized type.
///
/// **Every [PanicOccurrence] has exactly one revert code assigned to it.**
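///
/// A minimal sketch of how occurrences map to revert codes (the `loc` value and the
/// revert code shown are illustrative assumptions):
/// ```ignore
/// let mut panic_occurrences = PanicOccurrences::default();
/// panic_occurrences.insert(
///     PanicOccurrence {
///         loc: source_location, // a `SourceLocation` of the `panic` expression
///         log_id: None,
///         msg: Some("Some message.".to_string()),
///     },
///     42, // the revert code assigned to this occurrence
/// );
/// ```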
#[derive(Default, Debug, Clone, PartialEq, Eq, Hash)]
pub struct PanicOccurrence {
    pub loc: SourceLocation,
    pub log_id: Option<LogId>,
    pub msg: Option<String>,
}

/// [PanicOccurrence]s mapped to their corresponding revert codes.
pub type PanicOccurrences = HashMap<PanicOccurrence, u64>;

pub struct CompiledAsm {
    pub finalized_asm: FinalizedAsm,
    pub panic_occurrences: PanicOccurrences,
}

#[allow(clippy::too_many_arguments)]
pub fn parsed_to_ast(
    handler: &Handler,
    engines: &Engines,
    parse_program: &mut parsed::ParseProgram,
    initial_namespace: namespace::Package,
    build_config: Option<&BuildConfig>,
    package_name: &str,
    retrigger_compilation: Option<Arc<AtomicBool>>,
    experimental: ExperimentalFeatures,
) -> Result<ty::TyProgram, TypeCheckFailed> {
    let lsp_config = build_config.map(|x| x.lsp_mode.clone()).unwrap_or_default();

    // Build the dependency graph for the submodules.
    build_module_dep_graph(handler, &mut parse_program.root).map_err(|error| TypeCheckFailed {
        root_module: None,
        namespace: initial_namespace.clone(),
        error,
    })?;

    let collection_namespace = Namespace::new(handler, engines, initial_namespace.clone(), true)
        .map_err(|error| TypeCheckFailed {
            root_module: None,
            namespace: initial_namespace.clone(),
            error,
        })?;

    // Collect the program symbols.
    let mut collection_ctx =
        ty::TyProgram::collect(handler, engines, parse_program, collection_namespace).map_err(
            |error| TypeCheckFailed {
                root_module: None,
                namespace: initial_namespace.clone(),
                error,
            },
        )?;

    let typecheck_namespace =
        Namespace::new(handler, engines, initial_namespace, true).map_err(|error| {
            TypeCheckFailed {
                root_module: None,
                namespace: collection_ctx.namespace().current_package_ref().clone(),
                error,
            }
        })?;
    // Type check the program.
    let typed_program_opt = ty::TyProgram::type_check(
        handler,
        engines,
        parse_program,
        &mut collection_ctx,
        typecheck_namespace,
        package_name,
        build_config,
        experimental,
    );

    let mut typed_program = typed_program_opt?;

    check_should_abort(handler, retrigger_compilation.clone()).map_err(|error| {
        TypeCheckFailed {
            root_module: Some(Arc::new(typed_program.root_module.clone())),
            namespace: typed_program.namespace.current_package_ref().clone(),
            error,
        }
    })?;
    // Only clear the parsed AST nodes if we are running a regular compilation pipeline.
    // LSP needs these to build its token map, and they are cleared by `clear_program` as
    // part of the LSP garbage collection functionality instead.
    if lsp_config.is_none() {
        engines.pe().clear();
    }

    typed_program.check_deprecated(engines, handler);

    match typed_program.check_recursive(engines, handler) {
        Ok(()) => {}
        Err(error) => {
            handler.dedup();
            return Err(TypeCheckFailed {
                root_module: Some(Arc::new(typed_program.root_module.clone())),
                namespace: typed_program.namespace.current_package().clone(),
                error,
            });
        }
    };

    // Skip collecting metadata if we triggered an optimised build from LSP.
    let types_metadata = if !lsp_config.as_ref().is_some_and(|lsp| lsp.optimized_build) {
        // Collect information about the types used in this program
        let types_metadata_result = typed_program.collect_types_metadata(
            handler,
            &mut CollectTypesMetadataContext::new(engines, experimental, package_name.to_string()),
        );
        let types_metadata = match types_metadata_result {
            Ok(types_metadata) => types_metadata,
            Err(error) => {
                handler.dedup();
                return Err(TypeCheckFailed {
                    root_module: Some(Arc::new(typed_program.root_module.clone())),
                    namespace: typed_program.namespace.current_package().clone(),
                    error,
                });
            }
        };

        typed_program
            .logged_types
            .extend(types_metadata.iter().filter_map(|m| match m {
                TypeMetadata::LoggedType(log_id, type_id) => Some((*log_id, *type_id)),
                _ => None,
            }));

        typed_program
            .messages_types
            .extend(types_metadata.iter().filter_map(|m| match m {
                TypeMetadata::MessageType(message_id, type_id) => Some((*message_id, *type_id)),
                _ => None,
            }));

        let (print_graph, print_graph_url_format) = match build_config {
            Some(cfg) => (
                cfg.print_dca_graph.clone(),
                cfg.print_dca_graph_url_format.clone(),
            ),
            None => (None, None),
        };

        check_should_abort(handler, retrigger_compilation.clone()).map_err(|error| {
            TypeCheckFailed {
                root_module: Some(Arc::new(typed_program.root_module.clone())),
                namespace: typed_program.namespace.current_package_ref().clone(),
                error,
            }
        })?;

        // Perform control flow analysis and extend with any errors.
        let _ = perform_control_flow_analysis(
            handler,
            engines,
            &typed_program,
            print_graph,
            print_graph_url_format,
        );

        types_metadata
    } else {
        vec![]
    };

    // Evaluate const declarations, to allow storage slots initialization with consts.
    let mut ctx = Context::new(engines.se(), experimental);
    let module = Module::new(&mut ctx, Kind::Contract);
    if let Err(errs) = ir_generation::compile::compile_constants_for_package(
        engines,
        &mut ctx,
        module,
        &typed_program.namespace,
    ) {
        errs.into_iter().for_each(|err| {
            handler.emit_err(err.clone());
        });
    }

    // CEI pattern analysis
    let cei_analysis_warnings =
        semantic_analysis::cei_pattern_analysis::analyze_program(engines, &typed_program);
    for warn in cei_analysis_warnings {
        handler.emit_warn(warn);
    }

    let mut md_mgr = MetadataManager::default();
    // Check that all storage initializers can be evaluated at compile time.
    typed_program
        .get_typed_program_with_initialized_storage_slots(
            handler,
            engines,
            &mut ctx,
            &mut md_mgr,
            module,
        )
        .map_err(|error: ErrorEmitted| {
            handler.dedup();
            TypeCheckFailed {
                root_module: Some(Arc::new(typed_program.root_module.clone())),
                namespace: typed_program.namespace.current_package_ref().clone(),
                error,
            }
        })?;

    // All unresolved types lead to compile errors.
    for err in types_metadata.iter().filter_map(|m| match m {
        TypeMetadata::UnresolvedType(name, call_site_span_opt) => {
            Some(CompileError::UnableToInferGeneric {
                ty: name.as_str().to_string(),
                span: call_site_span_opt.clone().unwrap_or_else(|| name.span()),
            })
        }
        _ => None,
    }) {
        handler.emit_err(err);
    }

    Ok(typed_program)
}

#[allow(clippy::too_many_arguments)]
pub fn compile_to_ast(
    handler: &Handler,
    engines: &Engines,
    src: Source,
    initial_namespace: namespace::Package,
    build_config: Option<&BuildConfig>,
    package_name: &str,
    retrigger_compilation: Option<Arc<AtomicBool>>,
    experimental: ExperimentalFeatures,
) -> Result<Programs, ErrorEmitted> {
    check_should_abort(handler, retrigger_compilation.clone())?;

    let query_engine = engines.qe();
    let mut metrics = PerformanceData::default();
    if let Some(config) = build_config {
        let path = config.canonical_root_module();
        let include_tests = config.include_tests;
        // Check if we can re-use the data in the cache.
        if is_parse_module_cache_up_to_date(engines, &path, include_tests, build_config) {
            let mut entry = query_engine.get_programs_cache_entry(&path).unwrap();
            entry.programs.metrics.reused_programs += 1;

            let (warnings, errors) = entry.handler_data;
            let new_handler = Handler::from_parts(warnings, errors);
            handler.append(new_handler);
            return Ok(entry.programs);
        };
    }

    // Parse the program to a concrete syntax tree (CST).
    let parse_program_opt = time_expr!(
        package_name,
        "parse the program to a concrete syntax tree (CST)",
        "parse_cst",
        parse(
            src,
            handler,
            engines,
            build_config,
            experimental,
            package_name
        ),
        build_config,
        metrics
    );

    check_should_abort(handler, retrigger_compilation.clone())?;

    let (lexed_program, mut parsed_program) = match parse_program_opt {
        Ok(modules) => modules,
        Err(e) => {
            handler.dedup();
            return Err(e);
        }
    };

    // If tests are not enabled, exclude them from `parsed_program`.
    if build_config.is_none_or(|config| !config.include_tests) {
        parsed_program.exclude_tests(engines);
    }

    // Type check (+ other static analysis) the CST to a typed AST.
    let program = time_expr!(
        package_name,
        "parse the concrete syntax tree (CST) to a typed AST",
        "parse_ast",
        parsed_to_ast(
            handler,
            engines,
            &mut parsed_program,
            initial_namespace,
            build_config,
            package_name,
            retrigger_compilation.clone(),
            experimental
        ),
        build_config,
        metrics
    );

    check_should_abort(handler, retrigger_compilation.clone())?;

    handler.dedup();

    let programs = Programs::new(
        Arc::new(lexed_program),
        Arc::new(parsed_program),
        program.map(Arc::new),
        metrics,
    );

    if let Some(config) = build_config {
        let path = config.canonical_root_module();
        let cache_entry = ProgramsCacheEntry {
            path,
            programs: programs.clone(),
            handler_data: handler.clone().consume(),
        };
        query_engine.insert_programs_cache_entry(cache_entry);
    }

    check_should_abort(handler, retrigger_compilation.clone())?;

    Ok(programs)
}

/// Given input Sway source code, try compiling to a `CompiledAsm`,
/// containing the asm in opcode form (not raw bytes/bytecode).
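///
/// A minimal sketch (the `initial_namespace` and `build_config` values are assumptions
/// normally provided by the caller, e.g. the build system):
/// ```ignore
/// # use sway_core::{compile_to_asm, Engines};
/// # use sway_error::handler::Handler;
/// # use sway_features::ExperimentalFeatures;
/// let engines = Engines::default();
/// let handler = Handler::default();
/// let asm = compile_to_asm(
///     &handler,
///     &engines,
///     "script; fn main() {}".into(),
///     initial_namespace,
///     &build_config,
///     "my_package",
///     ExperimentalFeatures::default(),
/// )?;
/// ```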
pub fn compile_to_asm(
    handler: &Handler,
    engines: &Engines,
    src: Source,
    initial_namespace: namespace::Package,
    build_config: &BuildConfig,
    package_name: &str,
    experimental: ExperimentalFeatures,
) -> Result<CompiledAsm, ErrorEmitted> {
    let ast_res = compile_to_ast(
        handler,
        engines,
        src,
        initial_namespace,
        Some(build_config),
        package_name,
        None,
        experimental,
    )?;

    ast_to_asm(handler, engines, &ast_res, build_config, experimental)
}

/// Given an AST compilation result, try compiling to a `CompiledAsm`,
/// containing the asm in opcode form (not raw bytes/bytecode).
pub fn ast_to_asm(
    handler: &Handler,
    engines: &Engines,
    programs: &Programs,
    build_config: &BuildConfig,
    experimental: ExperimentalFeatures,
) -> Result<CompiledAsm, ErrorEmitted> {
    let typed_program = match &programs.typed {
        Ok(typed_program) => typed_program,
        Err(err) => return Err(err.error),
    };

    let mut panic_occurrences = PanicOccurrences::default();

    let asm = match compile_ast_to_ir_to_asm(
        handler,
        engines,
        typed_program,
        &mut panic_occurrences,
        build_config,
        experimental,
    ) {
        Ok(res) => res,
        Err(err) => {
            handler.dedup();
            return Err(err);
        }
    };

    Ok(CompiledAsm {
        finalized_asm: asm,
        panic_occurrences,
    })
}

pub(crate) fn compile_ast_to_ir_to_asm(
    handler: &Handler,
    engines: &Engines,
    program: &ty::TyProgram,
    panic_occurrences: &mut PanicOccurrences,
    build_config: &BuildConfig,
    experimental: ExperimentalFeatures,
) -> Result<FinalizedAsm, ErrorEmitted> {
    // The IR pipeline relies on type information being fully resolved.
    // If type information is found to still be generic or unresolved inside of
    // IR, this is considered an internal compiler error. To resolve this situation,
    // we need to explicitly ensure all types are resolved before going into IR.
    //
    // We _could_ introduce a new type here that uses TypeInfo instead of TypeId and throw away
    // the engine, since we don't need inference for IR. That'd be a _lot_ of copy-pasted code,
    // though, so instead, we are just going to do a pass and throw any unresolved generics as
    // errors and then hold as a runtime invariant that none of the types will be unresolved in the
    // IR phase.

    let mut ir = match ir_generation::compile_program(
        program,
        panic_occurrences,
        build_config.include_tests,
        engines,
        experimental,
    ) {
        Ok(ir) => ir,
        Err(errors) => {
            let mut last = None;
            for e in errors {
                last = Some(handler.emit_err(e));
            }
            return Err(last.unwrap());
        }
    };

    // Find all the entry points for purity checking and DCE.
    let entry_point_functions: Vec<::sway_ir::Function> = ir
        .module_iter()
        .flat_map(|module| module.function_iter(&ir))
        .filter(|func| func.is_entry(&ir))
        .collect();

    // Do a purity check on the _unoptimised_ IR.
    {
        let mut env = ir_generation::PurityEnv::default();
        let mut md_mgr = metadata::MetadataManager::default();
        for entry_point in &entry_point_functions {
            check_function_purity(handler, &mut env, &ir, &mut md_mgr, entry_point);
        }
    }

    // Initialize the pass manager and register known passes.
    let mut pass_mgr = PassManager::default();
    register_known_passes(&mut pass_mgr);
    let mut pass_group = PassGroup::default();

    match build_config.optimization_level {
        OptLevel::Opt1 => {
            pass_group.append_group(create_o1_pass_group());
        }
        OptLevel::Opt0 => {
            // We run a function deduplication pass that only removes duplicate
            // functions when everything, including the metadata, is identical.
            pass_group.append_pass(FN_DEDUP_DEBUG_PROFILE_NAME);

            // Inlining is necessary until #4899 is resolved.
            pass_group.append_pass(FN_INLINE_NAME);

            // Do DCE so other optimizations run faster.
            pass_group.append_pass(GLOBALS_DCE_NAME);
            pass_group.append_pass(DCE_NAME);
        }
    }

    // Target-specific transforms should be moved into something more configurable.
    if build_config.build_target == BuildTarget::Fuel {
        // FuelVM target specific transforms.
        //
        // Demote large by-value constants, arguments and return values to by-reference values
        // using temporaries.
        pass_group.append_pass(CONST_DEMOTION_NAME);
        pass_group.append_pass(ARG_DEMOTION_NAME);
        pass_group.append_pass(RET_DEMOTION_NAME);
        pass_group.append_pass(MISC_DEMOTION_NAME);

        // Convert loads and stores to mem_copies where possible.
        pass_group.append_pass(MEMCPYOPT_NAME);

        // Run a DCE and simplify-cfg to clean up any obsolete instructions.
        pass_group.append_pass(DCE_NAME);
        pass_group.append_pass(SIMPLIFY_CFG_NAME);

        match build_config.optimization_level {
            OptLevel::Opt1 => {
                pass_group.append_pass(SROA_NAME);
                pass_group.append_pass(MEM2REG_NAME);
                pass_group.append_pass(DCE_NAME);
            }
            OptLevel::Opt0 => {}
        }
    }

    // Run the passes.
    let print_passes_opts: PrintPassesOpts = (&build_config.print_ir).into();
    let res =
        if let Err(ir_error) = pass_mgr.run_with_print(&mut ir, &pass_group, &print_passes_opts) {
            Err(handler.emit_err(CompileError::InternalOwned(
                ir_error.to_string(),
                span::Span::dummy(),
            )))
        } else {
            Ok(())
        };
    res?;

    compile_ir_context_to_finalized_asm(handler, &ir, Some(build_config))
}

/// Given input Sway source code, compile to [CompiledBytecode], containing the asm in bytecode form.
#[allow(clippy::too_many_arguments)]
pub fn compile_to_bytecode(
    handler: &Handler,
    engines: &Engines,
    src: Source,
    initial_namespace: namespace::Package,
    build_config: &BuildConfig,
    source_map: &mut SourceMap,
    package_name: &str,
    experimental: ExperimentalFeatures,
) -> Result<CompiledBytecode, ErrorEmitted> {
    let mut asm_res = compile_to_asm(
        handler,
        engines,
        src,
        initial_namespace,
        build_config,
        package_name,
        experimental,
    )?;
    asm_to_bytecode(
        handler,
        &mut asm_res,
        source_map,
        engines.se(),
        build_config,
    )
}

/// Size of the prelude's CONFIGURABLES_OFFSET section, in bytes.
pub const PRELUDE_CONFIGURABLES_SIZE_IN_BYTES: usize = 8;
/// Offset (in bytes) of the CONFIGURABLES_OFFSET section in the prelude.
pub const PRELUDE_CONFIGURABLES_OFFSET_IN_BYTES: usize = 16;
/// Total size of the prelude in bytes. Instructions start right after.
pub const PRELUDE_SIZE_IN_BYTES: usize = 32;
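
// A sketch of the prelude layout implied by the constants above (only the
// configurables offset section is spelled out; the remaining bytes are opaque here):
//
//   bytes  0..16 : prelude words preceding the configurables offset
//   bytes 16..24 : CONFIGURABLES_OFFSET (8 bytes)
//   bytes 24..32 : remaining prelude bytes; instructions start at byte 32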

/// Given bytecode, overwrite the existing configurables offset in the prelude with the given one.
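///
/// A minimal sketch (the 8-byte `md` value is an assumption; its encoding is up to the caller):
/// ```ignore
/// let md: [u8; PRELUDE_CONFIGURABLES_SIZE_IN_BYTES] = new_offset_bytes;
/// set_bytecode_configurables_offset(&mut compiled_bytecode, &md);
/// ```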
pub fn set_bytecode_configurables_offset(
    compiled_bytecode: &mut CompiledBytecode,
    md: &[u8; PRELUDE_CONFIGURABLES_SIZE_IN_BYTES],
) {
    assert!(
        compiled_bytecode.bytecode.len()
            >= PRELUDE_CONFIGURABLES_OFFSET_IN_BYTES + PRELUDE_CONFIGURABLES_SIZE_IN_BYTES
    );
    let code = &mut compiled_bytecode.bytecode;
    for (index, byte) in md.iter().enumerate() {
        code[index + PRELUDE_CONFIGURABLES_OFFSET_IN_BYTES] = *byte;
    }
}

/// Given the assembly (opcodes), compile to [CompiledBytecode], containing the asm in bytecode form.
pub fn asm_to_bytecode(
    handler: &Handler,
    asm: &mut CompiledAsm,
    source_map: &mut SourceMap,
    source_engine: &SourceEngine,
    build_config: &BuildConfig,
) -> Result<CompiledBytecode, ErrorEmitted> {
    let compiled_bytecode =
        asm.finalized_asm
            .to_bytecode_mut(handler, source_map, source_engine, build_config)?;
    Ok(compiled_bytecode)
}

/// Given a [ty::TyProgram], which is type-checked Sway source, construct a graph to analyze
/// control flow and determine if it is valid.
fn perform_control_flow_analysis(
    handler: &Handler,
    engines: &Engines,
    program: &ty::TyProgram,
    print_graph: Option<String>,
    print_graph_url_format: Option<String>,
) -> Result<(), ErrorEmitted> {
    let dca_res = dead_code_analysis(handler, engines, program);
    let rpa_errors = return_path_analysis(engines, program);
    let rpa_res = handler.scope(|handler| {
        for err in rpa_errors {
            handler.emit_err(err);
        }
        Ok(())
    });

    if let Ok(graph) = dca_res.clone() {
        graph.visualize(engines, print_graph, print_graph_url_format);
    }
    dca_res?;
    rpa_res
}

/// Constructs a dead code graph from all modules within the program and then attempts to find
/// dead code.
///
/// Returns the graph that was used for analysis.
fn dead_code_analysis<'a>(
    handler: &Handler,
    engines: &'a Engines,
    program: &ty::TyProgram,
) -> Result<ControlFlowGraph<'a>, ErrorEmitted> {
    let decl_engine = engines.de();
    let mut dead_code_graph = ControlFlowGraph::new(engines);
    let tree_type = program.kind.tree_type();
    module_dead_code_analysis(
        handler,
        engines,
        &program.root_module,
        &tree_type,
        &mut dead_code_graph,
    )?;
    let warnings = dead_code_graph.find_dead_code(decl_engine);
    for warn in warnings {
        handler.emit_warn(warn);
    }
    Ok(dead_code_graph)
}

/// Recursively collect modules into the given `ControlFlowGraph` ready for dead code analysis.
fn module_dead_code_analysis<'eng: 'cfg, 'cfg>(
    handler: &Handler,
    engines: &'eng Engines,
    module: &ty::TyModule,
    tree_type: &parsed::TreeType,
    graph: &mut ControlFlowGraph<'cfg>,
) -> Result<(), ErrorEmitted> {
    module
        .submodules
        .iter()
        .try_fold((), |(), (_, submodule)| {
            let tree_type = parsed::TreeType::Library;
            module_dead_code_analysis(handler, engines, &submodule.module, &tree_type, graph)
        })?;
    let res = {
        ControlFlowGraph::append_module_to_dead_code_graph(
            engines,
            &module.all_nodes,
            tree_type,
            graph,
        )
        .map_err(|err| handler.emit_err(err))
    };
    graph.connect_pending_entry_edges();
    res
}

fn return_path_analysis(engines: &Engines, program: &ty::TyProgram) -> Vec<CompileError> {
    let mut errors = vec![];
    module_return_path_analysis(engines, &program.root_module, &mut errors);
    errors
}

fn module_return_path_analysis(
    engines: &Engines,
    module: &ty::TyModule,
    errors: &mut Vec<CompileError>,
) {
    for (_, submodule) in &module.submodules {
        module_return_path_analysis(engines, &submodule.module, errors);
    }
    let graph = ControlFlowGraph::construct_return_path_graph(engines, &module.all_nodes);
    match graph {
        Ok(graph) => errors.extend(graph.analyze_return_paths(engines)),
        Err(mut error) => errors.append(&mut error),
    }
}

/// Check if the retrigger compilation flag has been set to true in the language server.
/// If it has, there is a new compilation request, so we should abort the current compilation.
fn check_should_abort(
    handler: &Handler,
    retrigger_compilation: Option<Arc<AtomicBool>>,
) -> Result<(), ErrorEmitted> {
    if let Some(ref retrigger_compilation) = retrigger_compilation {
        if retrigger_compilation.load(Ordering::SeqCst) {
            return Err(handler.cancel());
        }
    }
    Ok(())
}

#[test]
fn test_basic_prog() {
    let handler = Handler::default();
    let engines = Engines::default();
    let prog = parse(
        r#"
        contract;

    enum yo
    <T>
    where
    T: IsAThing
    {
        x: u32,
        y: MyStruct<u32>
    }

    enum  MyOtherSumType
    {
        x: u32,
        y: MyStruct<u32>
    }
        struct MyStruct<T> {
            field_name: u64,
            other_field: T,
        }


    fn generic_function
    <T>
    (arg1: u64,
    arg2: T)
    ->
    T
    where T: Display,
          T: Debug {
          let x: MyStruct =
          MyStruct
          {
              field_name:
              5
          };
          return
          match
            arg1
          {
               1
               => true,
               _ => { return false; },
          };
    }

    struct MyStruct {
        test: string,
    }



    use stdlib::println;

    trait MyTrait {
        // interface points
        fn myfunc(x: int) -> unit;
        } {
        // methods
        fn calls_interface_fn(x: int) -> unit {
            // declare a byte
            let x = 0b10101111;
            let mut y = 0b11111111;
            self.interface_fn(x);
        }
    }

    pub fn prints_number_five() -> u8 {
        let x: u8 = 5;
        println(x);
         x.to_string();
         let some_list = [
         5,
         10 + 3 / 2,
         func_app(my_args, (so_many_args))];
        return 5;
    }
    "#
        .into(),
        &handler,
        &engines,
        None,
        ExperimentalFeatures::default(),
        "test",
    );
    prog.unwrap();
}
#[test]
fn test_parenthesized() {
    let handler = Handler::default();
    let engines = Engines::default();
    let prog = parse(
        r#"
        contract;
        pub fn some_abi_func() -> unit {
            let x = (5 + 6 / (1 + (2 / 1) + 4));
            return;
        }
    "#
        .into(),
        &handler,
        &engines,
        None,
        ExperimentalFeatures::default(),
        "test",
    );
    prog.unwrap();
}

#[test]
fn test_unary_ordering() {
    use crate::language::{self, parsed};
    let handler = Handler::default();
    let engines = Engines::default();
    let prog = parse(
        r#"
    script;
    fn main() -> bool {
        let a = true;
        let b = true;
        !a && b;
    }"#
        .into(),
        &handler,
        &engines,
        None,
        ExperimentalFeatures::default(),
        "test",
    );
    let (.., prog) = prog.unwrap();
    // this should parse as `(!a) && b`, not `!(a && b)`. So, the top level
    // expression should be `&&`
    if let parsed::AstNode {
        content:
            parsed::AstNodeContent::Declaration(parsed::Declaration::FunctionDeclaration(decl_id)),
        ..
    } = &prog.root.tree.root_nodes[0]
    {
        let fn_decl = engines.pe().get_function(decl_id);
        if let parsed::AstNode {
            content:
                parsed::AstNodeContent::Expression(parsed::Expression {
                    kind:
                        parsed::ExpressionKind::LazyOperator(parsed::LazyOperatorExpression {
                            op, ..
                        }),
                    ..
                }),
            ..
        } = &fn_decl.body.contents[2]
        {
            assert_eq!(op, &language::LazyOp::And)
        } else {
            panic!("Was not lazy operator.")
        }
    } else {
        panic!("Was not ast node")
    };
}

#[test]
fn test_parser_recovery() {
    let handler = Handler::default();
    let engines = Engines::default();
    let prog = parse(
        r#"
    script;
    fn main() -> bool {
        let
        let a = true;
        true
    }"#
        .into(),
        &handler,
        &engines,
        None,
        ExperimentalFeatures::default(),
        "test",
    );
    let (_, _) = prog.unwrap();
    assert!(handler.has_errors());
    dbg!(handler);
}