sway_core/lib.rs

#![recursion_limit = "256"]

#[macro_use]
pub mod error;

#[macro_use]
pub mod engine_threading;

pub mod abi_generation;
pub mod asm_generation;
mod asm_lang;
mod build_config;
pub mod compiler_generated;
mod concurrent_slab;
mod control_flow_analysis;
mod debug_generation;
pub mod decl_engine;
pub mod ir_generation;
pub mod language;
pub mod marker_traits;
mod metadata;
pub mod obs_engine;
pub mod query_engine;
pub mod semantic_analysis;
pub mod source_map;
pub mod transform;
pub mod type_system;

use crate::ir_generation::check_function_purity;
use crate::language::{CallPath, CallPathType};
use crate::query_engine::ModuleCacheEntry;
use crate::semantic_analysis::namespace::ResolvedDeclaration;
use crate::semantic_analysis::type_resolve::{resolve_call_path, VisibilityCheck};
use crate::source_map::SourceMap;
pub use asm_generation::from_ir::compile_ir_context_to_finalized_asm;
use asm_generation::FinalizedAsm;
pub use asm_generation::{CompiledBytecode, FinalizedEntry};
pub use build_config::DbgGeneration;
pub use build_config::{Backtrace, BuildConfig, BuildTarget, IrCli, LspConfig, OptLevel, PrintAsm};
use control_flow_analysis::ControlFlowGraph;
pub use debug_generation::write_dwarf;
use itertools::Itertools;
use metadata::MetadataManager;
use query_engine::{ModuleCacheKey, ModuleCommonInfo, ParsedModuleInfo, ProgramsCacheEntry};
use semantic_analysis::program::TypeCheckFailed;
use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use sway_ast::AttributeDecl;
use sway_error::convert_parse_tree_error::ConvertParseTreeError;
use sway_error::handler::{ErrorEmitted, Handler};
use sway_error::warning::{CollectedTraitImpl, CompileInfo, CompileWarning, Info, Warning};
use sway_features::ExperimentalFeatures;
use sway_ir::{
    create_o1_pass_group, register_known_passes, Context, Kind, Module, PassGroup, PassManager,
    PrintPassesOpts, VerifyPassesOpts, ARG_DEMOTION_NAME, ARG_POINTEE_MUTABILITY_TAGGER_NAME,
    CONST_DEMOTION_NAME, DCE_NAME, FN_DEDUP_DEBUG_PROFILE_NAME, FN_INLINE_NAME, GLOBALS_DCE_NAME,
    MEM2REG_NAME, MEMCPYOPT_NAME, MEMCPYPROP_REVERSE_NAME, MISC_DEMOTION_NAME, RET_DEMOTION_NAME,
    SIMPLIFY_CFG_NAME, SROA_NAME,
};
use sway_types::span::Source;
use sway_types::{SourceEngine, SourceLocation, Span};
use sway_utils::{time_expr, PerformanceData, PerformanceMetric};
use transform::{ArgsExpectValues, Attribute, AttributeKind, Attributes, ExpectedArgs};
use types::{CollectTypesMetadata, CollectTypesMetadataContext, LogId, TypeMetadata};

pub use semantic_analysis::namespace::{self, Namespace};
pub mod types;

use sway_error::error::CompileError;
use sway_types::{ident::Ident, span, Spanned};
pub use type_system::*;

pub use language::Programs;
use language::{lexed, parsed, ty, Visibility};
use transform::to_parsed_lang::{self, convert_module_kind};

pub mod fuel_prelude {
    pub use fuel_vm::{self, fuel_asm, fuel_crypto, fuel_tx, fuel_types};
}

pub use engine_threading::Engines;
pub use obs_engine::{ObservabilityEngine, Observer};

/// Given an input [Source] and an optional [BuildConfig], parse the input into a [lexed::LexedProgram] and [parsed::ParseProgram].
///
/// # Example
/// ```ignore
/// # use sway_core::{parse, Engines};
/// # use sway_error::handler::Handler;
/// # use sway_features::ExperimentalFeatures;
/// let input = "script; fn main() -> bool { true }";
/// let result = parse(
///     input.into(),
///     &Handler::default(),
///     &Engines::default(),
///     None,
///     ExperimentalFeatures::default(),
///     "my_package",
/// );
/// ```
///
/// # Panics
/// Panics if the parser panics.
pub fn parse(
    src: Source,
    handler: &Handler,
    engines: &Engines,
    config: Option<&BuildConfig>,
    experimental: ExperimentalFeatures,
    package_name: &str,
) -> Result<(lexed::LexedProgram, parsed::ParseProgram), ErrorEmitted> {
    match config {
        None => parse_in_memory(
            handler,
            engines,
            src,
            experimental,
            DbgGeneration::None,
            package_name,
        ),
        // When a `BuildConfig` is given,
        // the module source may declare `mod`s that must be parsed from other files.
        Some(config) => parse_module_tree(
            handler,
            engines,
            src,
            config.canonical_root_module(),
            None,
            config.build_target,
            config.dbg_generation,
            config.include_tests,
            experimental,
            config.lsp_mode.as_ref(),
            package_name,
        )
        .map(
            |ParsedModuleTree {
                 tree_type: kind,
                 lexed_module,
                 parse_module,
             }| {
                let lexed = lexed::LexedProgram {
                    kind,
                    root: lexed_module,
                };
                let parsed = parsed::ParseProgram {
                    kind,
                    root: parse_module,
                };
                (lexed, parsed)
            },
        ),
    }
}

/// Parses the tree kind of the provided input.
///
/// This will lex the entire input but parse only the module kind.
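///
/// # Example
/// A minimal usage sketch, in the style of the crate's other `ignore`d doc examples:
/// ```ignore
/// # use sway_core::parse_tree_type;
/// # use sway_error::handler::Handler;
/// let handler = Handler::default();
/// // A library module starts with the `library;` kind.
/// let kind = parse_tree_type(&handler, "library;".into());
/// assert!(kind.is_ok());
/// ```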
pub fn parse_tree_type(handler: &Handler, src: Source) -> Result<parsed::TreeType, ErrorEmitted> {
    // Parsing only the module kind does not depend on any
    // experimental feature. So, we can just pass the default
    // experimental features here.
    let experimental = ExperimentalFeatures::default();
    sway_parse::parse_module_kind(handler, src, None, experimental)
        .map(|kind| convert_module_kind(&kind))
}

/// Converts `attribute_decls` to [Attributes].
///
/// This function always returns [Attributes], even if the attributes are erroneous.
/// Errors and warnings are returned via the returned [Handler]. Callers should ignore any errors
/// in attributes and proceed with the compilation. [Attributes] are tolerant of erroneous
/// attributes and follow the last-wins principle, which allows annotated elements to
/// proceed with compilation. After their successful compilation, callers still need to inspect
/// the [Handler] and emit errors if there were any.
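///
/// # Example
/// A minimal sketch of the intended calling pattern, mirroring how `parse_in_memory` below
/// uses this function: convert the declarations, keep compiling the annotated element, and
/// only afterwards forward the collected diagnostics to the outer `handler`.
/// ```ignore
/// let (attributes_handler, attributes) = attr_decls_to_attributes(
///     &module.attributes,
///     |attr| attr.can_annotate_module_kind(),
///     module.value.kind.friendly_name(),
/// );
/// // ... continue compiling using `attributes` ...
/// let attributes_error_emitted = handler.append(attributes_handler);
/// if let Some(err) = attributes_error_emitted {
///     return Err(err);
/// }
/// ```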
pub(crate) fn attr_decls_to_attributes(
    attribute_decls: &[AttributeDecl],
    can_annotate: impl Fn(&Attribute) -> bool,
    target_friendly_name: &'static str,
) -> (Handler, Attributes) {
    let handler = Handler::default();
    // Check if an attribute is an unsupported inner attribute (`#!`).
    // Note that we are doing that before creating the flattened `attributes`,
    // because we want the error to point at the `#!` token.
    // Note also that we still include those attributes in `attributes`.
    // There are cases, e.g. in LSP, where having the complete list of
    // attributes is needed.
    // In the analysis below, though, we ignore inner attributes,
    // i.e., we do not check their content.
    for attr_decl in attribute_decls
        .iter()
        .filter(|attr| !attr.is_doc_comment() && attr.is_inner())
    {
        handler.emit_err(CompileError::Unimplemented {
            span: attr_decl.hash_kind.span(),
            feature: "Using inner attributes (`#!`)".to_string(),
            help: vec![],
        });
    }

    let attributes = Attributes::new(attribute_decls);

    // Check for unknown attributes.
    for attribute in attributes.unknown().filter(|attr| attr.is_outer()) {
        handler.emit_warn(CompileWarning {
            span: attribute.name.span(),
            warning_content: Warning::UnknownAttribute {
                attribute: (&attribute.name).into(),
                known_attributes: attributes.known_attribute_names(),
            },
        });
    }

    // Check for attributes annotating invalid targets.
    for ((attribute_kind, _attribute_direction), mut attributes) in &attributes
        .all()
        .filter(|attr| attr.is_doc_comment() || attr.is_outer())
        .chunk_by(|attr| (attr.kind, attr.direction))
    {
        // For doc comments, we want to show the error on a complete doc comment,
        // and not on every documentation line.
        if attribute_kind == AttributeKind::DocComment {
            let first_doc_line = attributes
                .next()
                .expect("`chunk_by` guarantees existence of at least one element in the chunk");
            if !can_annotate(first_doc_line) {
                let last_doc_line = match attributes.last() {
                    Some(last_attr) => last_attr,
                    // There is only one doc line in the complete doc comment.
                    None => first_doc_line,
                };
                handler.emit_err(
                    ConvertParseTreeError::InvalidAttributeTarget {
                        span: Span::join(
                            first_doc_line.span.clone(),
                            &last_doc_line.span.start_span(),
                        ),
                        attribute: first_doc_line.name.clone(),
                        target_friendly_name,
                        can_only_annotate_help: first_doc_line
                            .can_only_annotate_help(target_friendly_name),
                    }
                    .into(),
                );
            }
        } else {
            // For other attributes, the error is shown for every individual attribute.
            for attribute in attributes {
                if !can_annotate(attribute) {
                    handler.emit_err(
                        ConvertParseTreeError::InvalidAttributeTarget {
                            span: attribute.name.span(),
                            attribute: attribute.name.clone(),
                            target_friendly_name,
                            can_only_annotate_help: attribute
                                .can_only_annotate_help(target_friendly_name),
                        }
                        .into(),
                    );
                }
            }
        }
    }

    // In all the subsequent checks we consider only non-doc-comment attributes,
    // and only those that didn't produce invalid-target or unsupported-inner-attribute errors.
    let should_be_checked =
        |attr: &&Attribute| !attr.is_doc_comment() && attr.is_outer() && can_annotate(attr);

    // Check for attribute multiplicity.
    for (_attribute_kind, attributes_of_kind) in
        attributes.all_by_kind(|attr| should_be_checked(attr) && !attr.kind.allows_multiple())
    {
        if attributes_of_kind.len() > 1 {
            let (last_attribute, previous_attributes) = attributes_of_kind
                .split_last()
                .expect("`attributes_of_kind` has more than one element");
            handler.emit_err(
                ConvertParseTreeError::InvalidAttributeMultiplicity {
                    last_occurrence: (&last_attribute.name).into(),
                    previous_occurrences: previous_attributes
                        .iter()
                        .map(|attr| (&attr.name).into())
                        .collect(),
                }
                .into(),
            );
        }
    }

    // Check for argument multiplicity.
    // For attributes that can be applied only once but are applied several times,
    // we still check the arguments of every attribute occurrence.
    for attribute in attributes.all().filter(should_be_checked) {
        let _ = attribute.check_args_multiplicity(&handler);
    }

    // Check for expected arguments.
    // For attributes that can be applied only once but are applied several times,
    // we check the arguments of every attribute occurrence.
    // If an attribute does not expect any arguments, we do not check them,
    // but emit only the above error about the invalid number of arguments.
    for attribute in attributes
        .all()
        .filter(|attr| should_be_checked(attr) && attr.can_have_arguments())
    {
        match attribute.expected_args() {
            ExpectedArgs::None => unreachable!("`attribute` can have arguments"),
            ExpectedArgs::Any => {}
            ExpectedArgs::MustBeIn(expected_args) => {
                for arg in attribute.args.iter() {
                    if !expected_args.contains(&arg.name.as_str()) {
                        handler.emit_err(
                            ConvertParseTreeError::InvalidAttributeArg {
                                attribute: attribute.name.clone(),
                                arg: (&arg.name).into(),
                                expected_args: expected_args.clone(),
                            }
                            .into(),
                        );
                    }
                }
            }
            ExpectedArgs::ShouldBeIn(expected_args) => {
                for arg in attribute.args.iter() {
                    if !expected_args.contains(&arg.name.as_str()) {
                        handler.emit_warn(CompileWarning {
                            span: arg.name.span(),
                            warning_content: Warning::UnknownAttributeArg {
                                attribute: attribute.name.clone(),
                                arg: (&arg.name).into(),
                                expected_args: expected_args.clone(),
                            },
                        });
                    }
                }
            }
        }
    }

    // Check for expected argument values.
    // Here we use the same logic for what to check as in the expected-arguments check above.
    for attribute in attributes
        .all()
        .filter(|attr| should_be_checked(attr) && attr.can_have_arguments())
    {
        // In addition, if an argument **must** be in the expected args but is not,
        // we do not check it, and only emit the error above.
        // But if it **should** be in the expected args and is not,
        // we still impose on it the expectation coming from its attribute.
        fn check_value_expected(handler: &Handler, attribute: &Attribute, is_value_expected: bool) {
            for arg in attribute.args.iter() {
                if let ExpectedArgs::MustBeIn(expected_args) = attribute.expected_args() {
                    if !expected_args.contains(&arg.name.as_str()) {
                        continue;
                    }
                }

                if (is_value_expected && arg.value.is_none())
                    || (!is_value_expected && arg.value.is_some())
                {
                    handler.emit_err(
                        ConvertParseTreeError::InvalidAttributeArgExpectsValue {
                            attribute: attribute.name.clone(),
                            arg: (&arg.name).into(),
                            value_span: arg.value.as_ref().map(|literal| literal.span()),
                        }
                        .into(),
                    );
                }
            }
        }

        match attribute.args_expect_values() {
            ArgsExpectValues::Yes => check_value_expected(&handler, attribute, true),
            ArgsExpectValues::No => check_value_expected(&handler, attribute, false),
            ArgsExpectValues::Maybe => {}
        }
    }

    (handler, attributes)
}

/// When no `BuildConfig` is given, we're assumed to be parsing in-memory with no submodules.
fn parse_in_memory(
    handler: &Handler,
    engines: &Engines,
    src: Source,
    experimental: ExperimentalFeatures,
    dbg_generation: DbgGeneration,
    package_name: &str,
) -> Result<(lexed::LexedProgram, parsed::ParseProgram), ErrorEmitted> {
    let mut hasher = DefaultHasher::new();
    src.text.hash(&mut hasher);
    let hash = hasher.finish();
    let module = sway_parse::parse_file(handler, src, None, experimental)?;

    let (attributes_handler, attributes) = attr_decls_to_attributes(
        &module.attributes,
        |attr| attr.can_annotate_module_kind(),
        module.value.kind.friendly_name(),
    );
    let attributes_error_emitted = handler.append(attributes_handler);

    let (kind, tree) = to_parsed_lang::convert_parse_tree(
        &mut to_parsed_lang::Context::new(
            BuildTarget::EVM,
            dbg_generation,
            experimental,
            package_name,
        ),
        handler,
        engines,
        module.value.clone(),
    )?;

    match attributes_error_emitted {
        Some(err) => Err(err),
        None => {
            let root = parsed::ParseModule {
                span: span::Span::dummy(),
                module_kind_span: module.value.kind.span(),
                module_eval_order: vec![],
                tree,
                submodules: vec![],
                attributes,
                hash,
            };
            let lexed_program = lexed::LexedProgram::new(
                kind,
                lexed::LexedModule {
                    tree: module,
                    submodules: vec![],
                },
            );
            Ok((lexed_program, parsed::ParseProgram { kind, root }))
        }
    }
}

pub struct Submodule {
    name: Ident,
    path: Arc<PathBuf>,
    lexed: lexed::LexedSubmodule,
    parsed: parsed::ParseSubmodule,
}

/// Contains the lexed and parsed submodules 'deps' of a module.
pub type Submodules = Vec<Submodule>;

/// Parse all dependencies `deps` as submodules.
#[allow(clippy::too_many_arguments)]
fn parse_submodules(
    handler: &Handler,
    engines: &Engines,
    module_name: Option<&str>,
    module: &sway_ast::Module,
    module_dir: &Path,
    build_target: BuildTarget,
    dbg_generation: DbgGeneration,
    include_tests: bool,
    experimental: ExperimentalFeatures,
    lsp_mode: Option<&LspConfig>,
    package_name: &str,
) -> Submodules {
    // Assume the happy path, so there'll be as many submodules as dependencies, but no more.
    let mut submods = Vec::with_capacity(module.submodules().count());
    module.submodules().for_each(|submod| {
        // Read the source code from the dependency.
        // If we cannot, record an error, but continue with the other files.
        let submod_path = Arc::new(module_path(module_dir, module_name, submod));
        let submod_src: Source = match std::fs::read_to_string(&*submod_path) {
            Ok(s) => s.as_str().into(),
            Err(e) => {
                handler.emit_err(CompileError::FileCouldNotBeRead {
                    span: submod.name.span(),
                    file_path: submod_path.to_string_lossy().to_string(),
                    stringified_error: e.to_string(),
                });
                return;
            }
        };
        if let Ok(ParsedModuleTree {
            tree_type: kind,
            lexed_module,
            parse_module,
        }) = parse_module_tree(
            handler,
            engines,
            submod_src.clone(),
            submod_path.clone(),
            Some(submod.name.as_str()),
            build_target,
            dbg_generation,
            include_tests,
            experimental,
            lsp_mode,
            package_name,
        ) {
            if !matches!(kind, parsed::TreeType::Library) {
                let source_id = engines.se().get_source_id(submod_path.as_ref());
                let span = span::Span::new(submod_src, 0, 0, Some(source_id)).unwrap();
                handler.emit_err(CompileError::ImportMustBeLibrary { span });
                return;
            }

            let parse_submodule = parsed::ParseSubmodule {
                module: parse_module,
                visibility: match submod.visibility {
                    Some(..) => Visibility::Public,
                    None => Visibility::Private,
                },
                mod_name_span: submod.name.span(),
            };
            let lexed_submodule = lexed::LexedSubmodule {
                module: lexed_module,
            };
            let submodule = Submodule {
                name: submod.name.clone(),
                path: submod_path,
                lexed: lexed_submodule,
                parsed: parse_submodule,
            };
            submods.push(submodule);
        }
    });
    submods
}

pub type SourceHash = u64;

#[derive(Clone, Debug)]
pub struct ParsedModuleTree {
    pub tree_type: parsed::TreeType,
    pub lexed_module: lexed::LexedModule,
    pub parse_module: parsed::ParseModule,
}

/// Given the source of the module along with its path,
/// parse this module including all of its submodules.
#[allow(clippy::too_many_arguments)]
fn parse_module_tree(
    handler: &Handler,
    engines: &Engines,
    src: Source,
    path: Arc<PathBuf>,
    module_name: Option<&str>,
    build_target: BuildTarget,
    dbg_generation: DbgGeneration,
    include_tests: bool,
    experimental: ExperimentalFeatures,
    lsp_mode: Option<&LspConfig>,
    package_name: &str,
) -> Result<ParsedModuleTree, ErrorEmitted> {
    let query_engine = engines.qe();

    // Parse this module first.
    let module_dir = path.parent().expect("module file has no parent directory");
    let source_id = engines.se().get_source_id(&path.clone());
    // Don't use the reloaded file if we already have it in memory; that way, new spans will still point to the same string.
    let src = engines.se().get_or_create_source_buffer(&source_id, src);
    let module = sway_parse::parse_file(handler, src.clone(), Some(source_id), experimental)?;

    // Parse all submodules before converting to the `ParseTree`.
    // This always recovers on parse errors for the file itself by skipping that file.
    let submodules = parse_submodules(
        handler,
        engines,
        module_name,
        &module.value,
        module_dir,
        build_target,
        dbg_generation,
        include_tests,
        experimental,
        lsp_mode,
        package_name,
    );

    let (attributes_handler, attributes) = attr_decls_to_attributes(
        &module.attributes,
        |attr| attr.can_annotate_module_kind(),
        module.value.kind.friendly_name(),
    );
    let attributes_error_emitted = handler.append(attributes_handler);

    // Convert from the raw parsed module to the `ParseTree` ready for type-check.
    let (kind, tree) = to_parsed_lang::convert_parse_tree(
        &mut to_parsed_lang::Context::new(build_target, dbg_generation, experimental, package_name),
        handler,
        engines,
        module.value.clone(),
    )?;

    if let Some(err) = attributes_error_emitted {
        return Err(err);
    }

    let module_kind_span = module.value.kind.span();
    let lexed_submodules = submodules
        .iter()
        .map(|s| (s.name.clone(), s.lexed.clone()))
        .collect::<Vec<_>>();
    let lexed = lexed::LexedModule {
        tree: module,
        submodules: lexed_submodules,
    };

    let mut hasher = DefaultHasher::new();
    src.text.hash(&mut hasher);
    let hash = hasher.finish();

    let parsed_submodules = submodules
        .iter()
        .map(|s| (s.name.clone(), s.parsed.clone()))
        .collect::<Vec<_>>();
    let parsed = parsed::ParseModule {
        span: span::Span::new(src, 0, 0, Some(source_id)).unwrap(),
        module_kind_span,
        module_eval_order: vec![],
        tree,
        submodules: parsed_submodules,
        attributes,
        hash,
    };

    // Let's prime the cache with the module dependency and hash data.
    let modified_time = std::fs::metadata(path.as_path())
        .ok()
        .and_then(|m| m.modified().ok());
    let dependencies = submodules.into_iter().map(|s| s.path).collect::<Vec<_>>();
    let version = lsp_mode
        .and_then(|lsp| lsp.file_versions.get(path.as_ref()).copied())
        .unwrap_or(None);

    let common_info = ModuleCommonInfo {
        path: path.clone(),
        include_tests,
        dependencies,
        hash,
    };
    let parsed_info = ParsedModuleInfo {
        modified_time,
        version,
    };
    let cache_entry = ModuleCacheEntry::new(common_info, parsed_info);
    query_engine.update_or_insert_parsed_module_cache_entry(cache_entry);

    Ok(ParsedModuleTree {
        tree_type: kind,
        lexed_module: lexed,
        parse_module: parsed,
    })
}

/// Checks if the typed module cache for a given path is up to date.
///
/// This function determines whether the cached typed representation of a module
/// is still valid based on file versions and dependencies.
///
/// Note: This functionality is currently only supported when the compiler is
/// initiated from the language server.
pub(crate) fn is_ty_module_cache_up_to_date(
    engines: &Engines,
    path: &Arc<PathBuf>,
    include_tests: bool,
    build_config: Option<&BuildConfig>,
) -> bool {
    let cache = engines.qe().module_cache.read();
    let key = ModuleCacheKey::new(path.clone(), include_tests);
    cache.get(&key).is_some_and(|entry| {
        entry.typed.as_ref().is_some_and(|typed| {
            // Check if the cache is up to date based on file versions
            let cache_up_to_date = build_config
                .and_then(|x| x.lsp_mode.as_ref())
                .and_then(|lsp| lsp.file_versions.get(path.as_ref()))
                .is_none_or(|version| {
                    version.is_none_or(|v| typed.version.is_some_and(|tv| v <= tv))
                });

            // If the cache is up to date, recursively check all dependencies
            cache_up_to_date
                && entry.common.dependencies.iter().all(|dep_path| {
                    is_ty_module_cache_up_to_date(engines, dep_path, include_tests, build_config)
                })
        })
    })
}

/// Checks if the parsed module cache for a given path is up to date.
///
/// This function determines whether the cached parsed representation of a module
/// is still valid based on file versions, modification times, or content hashes.
pub(crate) fn is_parse_module_cache_up_to_date(
    engines: &Engines,
    path: &Arc<PathBuf>,
    include_tests: bool,
    build_config: Option<&BuildConfig>,
) -> bool {
    let cache = engines.qe().module_cache.read();
    let key = ModuleCacheKey::new(path.clone(), include_tests);
    cache.get(&key).is_some_and(|entry| {
        // Determine if the cached dependency information is still valid
        let cache_up_to_date = build_config
            .and_then(|x| x.lsp_mode.as_ref())
            .and_then(|lsp| lsp.file_versions.get(path.as_ref()))
            .map_or_else(
                || {
                    // If LSP mode is not active or file version is unavailable, fall back to filesystem checks.
                    let modified_time = std::fs::metadata(path.as_path())
                        .ok()
                        .and_then(|m| m.modified().ok());
                    // Check if modification time matches, or if not, compare file content hash
                    entry.parsed.modified_time == modified_time || {
                        let src = std::fs::read_to_string(path.as_path()).unwrap();
                        let mut hasher = DefaultHasher::new();
                        src.hash(&mut hasher);
                        hasher.finish() == entry.common.hash
                    }
                },
                |version| {
                    // Determine if the parse cache is up-to-date in LSP mode:
                    // - If there's no LSP file version (version is None), consider the cache up-to-date.
                    // - If there is an LSP file version:
                    //   - If there's no cached version (entry.parsed.version is None), the cache is outdated.
                    //   - If there's a cached version, compare them: cache is up-to-date if the LSP file version
                    //     is not greater than the cached version.
                    version.is_none_or(|v| entry.parsed.version.is_some_and(|ev| v <= ev))
                },
            );

        // If the cache is up to date, recursively check all dependencies to make sure they have not
        // been modified either.
        cache_up_to_date
            && entry.common.dependencies.iter().all(|dep_path| {
                is_parse_module_cache_up_to_date(engines, dep_path, include_tests, build_config)
            })
    })
}

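/// Computes the path of the file that contains the submodule `submod`,
/// relative to the directory of its parent module.
///
/// A sketch of the resulting paths (`foo_submod` is a hypothetical
/// `sway_ast::Submodule` standing for a `mod foo;` declaration):
/// ```ignore
/// // Declared in the package's top-level module: "src/foo.sw".
/// module_path(Path::new("src"), None, &foo_submod);
/// // Declared in the submodule `bar`: "src/bar/foo.sw".
/// module_path(Path::new("src"), Some("bar"), &foo_submod);
/// ```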
fn module_path(
    parent_module_dir: &Path,
    parent_module_name: Option<&str>,
    submod: &sway_ast::Submodule,
) -> PathBuf {
    if let Some(parent_name) = parent_module_name {
        parent_module_dir
            .join(parent_name)
            .join(submod.name.to_string())
            .with_extension(sway_types::constants::DEFAULT_FILE_EXTENSION)
    } else {
        // top level module
        parent_module_dir
            .join(submod.name.to_string())
            .with_extension(sway_types::constants::DEFAULT_FILE_EXTENSION)
    }
}

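/// Builds the module dependency graph for `parse_module` and all of its submodules,
/// and stores the computed evaluation order in each module's `module_eval_order`.
///
/// A minimal sketch of how a caller might inspect the computed order (the printing
/// is illustrative only):
/// ```ignore
/// build_module_dep_graph(&handler, &mut parse_program.root)?;
/// for module_name in &parse_program.root.module_eval_order {
///     println!("evaluate `{module_name}` before the modules that depend on it");
/// }
/// ```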
pub fn build_module_dep_graph(
    handler: &Handler,
    parse_module: &mut parsed::ParseModule,
) -> Result<(), ErrorEmitted> {
    let module_dep_graph = ty::TyModule::build_dep_graph(handler, parse_module)?;
    parse_module.module_eval_order = module_dep_graph.compute_order(handler)?;

    for (_, submodule) in &mut parse_module.submodules {
        build_module_dep_graph(handler, &mut submodule.module)?;
    }
    Ok(())
}

/// A possible occurrence of a `panic` expression that is located in code at [PanicOccurrence::loc].
///
/// Note that a single `panic` expression can have multiple [PanicOccurrence]s related to it.
///
/// For example:
/// - `panic "Some message.";` will have just a single occurrence, with `msg` containing the message.
/// - `panic some_value_of_a_concrete_type;` will have just a single occurrence, with `log_id` containing the [LogId] of the concrete type.
/// - `panic some_value_of_a_generic_type;` will have multiple occurrences, one with `log_id` for every monomorphized type.
///
/// **Every [PanicOccurrence] has exactly one revert code assigned to it.**
#[derive(Default, Debug, Clone, PartialEq, Eq, Hash)]
pub struct PanicOccurrence {
    pub function: String,
    pub loc: SourceLocation,
    pub log_id: Option<LogId>,
    pub msg: Option<String>,
}

/// Represents a function call that could panic during execution.
/// E.g., for the following code:
///
/// ```ignore
/// fn some_function() {
///     let _ = this_function_might_panic(42);
/// }
/// ```
///
/// the `function` field will contain the name of the function that might panic:
///   `function: "some_other_package::module::this_function_might_panic"`
///
/// and the `caller_function` and `loc` fields will identify the caller and the source location
/// of the call to the `function` that might panic:
///
/// ```ignore
///     caller_function: "some_package::some_module::some_function",
///     pkg: "some_package@0.1.0",
///     file: "src/some_module.sw",
///     ...
/// ```
///
/// Note that, if the panicking function or the caller function is generic,
/// a single panicking call can have multiple [PanickingCallOccurrence]s
/// related to it.
///
/// For example:
/// - `this_function_might_panic(42);` will have a single occurrence,
///   with `function` containing the full name of the function that might panic.
/// - `this_generic_function_might_panic::<u64>(42);` will have a single occurrence,
///   with `function` containing the full name of the function that might panic,
///   but with the generic type parameter `u64` included in the name.
/// - `this_generic_function_might_panic::<T>(42);` will have multiple occurrences,
///   one for every monomorphized type.
///
/// The same holds for a generic caller function.
///
/// **Every [PanickingCallOccurrence] has exactly one panicking call code assigned to it.**
#[derive(Default, Debug, Clone, PartialEq, Eq, Hash)]
pub struct PanickingCallOccurrence {
    pub function: String,
    pub caller_function: String,
    pub loc: SourceLocation,
}

/// [PanicOccurrence]s mapped to their corresponding panic error codes.
pub type PanicOccurrences = HashMap<PanicOccurrence, u64>;

/// [PanickingCallOccurrence]s mapped to their corresponding panicking call codes.
pub type PanickingCallOccurrences = HashMap<PanickingCallOccurrence, u64>;

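/// The result of compiling a typed program to ASM, together with the panic-related
/// bookkeeping collected during IR generation.
///
/// A sketch of how the collected occurrences can be inspected after ASM generation
/// (the printing is illustrative only):
/// ```ignore
/// let asm = ast_to_asm(&handler, &engines, &programs, &build_config, experimental)?;
/// for (occurrence, revert_code) in &asm.panic_occurrences {
///     println!("`panic` in {} maps to revert code {revert_code}", occurrence.function);
/// }
/// ```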
pub struct CompiledAsm {
    pub finalized_asm: FinalizedAsm,
    pub panic_occurrences: PanicOccurrences,
    pub panicking_call_occurrences: PanickingCallOccurrences,
}

#[allow(clippy::result_large_err)]
#[allow(clippy::too_many_arguments)]
pub fn parsed_to_ast(
    handler: &Handler,
    engines: &Engines,
    parse_program: &mut parsed::ParseProgram,
    initial_namespace: namespace::Package,
    build_config: Option<&BuildConfig>,
    package_name: &str,
    retrigger_compilation: Option<Arc<AtomicBool>>,
    experimental: ExperimentalFeatures,
    backtrace: Backtrace,
) -> Result<ty::TyProgram, TypeCheckFailed> {
    let lsp_config = build_config.map(|x| x.lsp_mode.clone()).unwrap_or_default();

    // Build the dependency graph for the submodules.
    build_module_dep_graph(handler, &mut parse_program.root).map_err(|error| TypeCheckFailed {
        root_module: None,
        namespace: initial_namespace.clone(),
        error,
    })?;

    let collection_namespace = Namespace::new(handler, engines, initial_namespace.clone(), true)
        .map_err(|error| TypeCheckFailed {
            root_module: None,
            namespace: initial_namespace.clone(),
            error,
        })?;
    // Collect the program symbols.

    let mut collection_ctx =
        ty::TyProgram::collect(handler, engines, parse_program, collection_namespace).map_err(
            |error| TypeCheckFailed {
                root_module: None,
                namespace: initial_namespace.clone(),
                error,
            },
        )?;

    let typecheck_namespace =
        Namespace::new(handler, engines, initial_namespace, true).map_err(|error| {
            TypeCheckFailed {
                root_module: None,
                namespace: collection_ctx.namespace().current_package_ref().clone(),
                error,
            }
        })?;
    // Type check the program.
    let typed_program_opt = ty::TyProgram::type_check(
        handler,
        engines,
        parse_program,
        &mut collection_ctx,
        typecheck_namespace,
        package_name,
        build_config,
        experimental,
    );

    let mut typed_program = typed_program_opt?;

    check_should_abort(handler, retrigger_compilation.clone()).map_err(|error| {
        TypeCheckFailed {
            root_module: Some(Arc::new(typed_program.root_module.clone())),
            namespace: typed_program.namespace.current_package_ref().clone(),
            error,
        }
    })?;
    // Only clear the parsed AST nodes if we are running a regular compilation pipeline.
    // LSP needs these to build its token map, and they are cleared by `clear_program` as
    // part of the LSP garbage collection functionality instead.
    if lsp_config.is_none() {
        engines.pe().clear();
    }

    typed_program.check_deprecated(engines, handler);

    match typed_program.check_recursive(engines, handler) {
        Ok(()) => {}
        Err(error) => {
            handler.dedup();
            return Err(TypeCheckFailed {
                root_module: Some(Arc::new(typed_program.root_module.clone())),
                namespace: typed_program.namespace.current_package().clone(),
                error,
            });
        }
    };

    // Skip collecting metadata if we triggered an optimised build from LSP.
    let types_metadata = if !lsp_config.as_ref().is_some_and(|lsp| lsp.optimized_build) {
        // Collect information about the types used in this program
        let types_metadata_result = typed_program.collect_types_metadata(
            handler,
            &mut CollectTypesMetadataContext::new(engines, experimental, package_name.to_string()),
        );
        let types_metadata = match types_metadata_result {
            Ok(types_metadata) => types_metadata,
            Err(error) => {
                handler.dedup();
                return Err(TypeCheckFailed {
                    root_module: Some(Arc::new(typed_program.root_module.clone())),
                    namespace: typed_program.namespace.current_package().clone(),
                    error,
                });
            }
        };

        typed_program
            .logged_types
            .extend(types_metadata.iter().filter_map(|m| match m {
                TypeMetadata::LoggedType(log_id, type_id) => Some((*log_id, *type_id)),
                _ => None,
            }));

        typed_program
            .messages_types
            .extend(types_metadata.iter().filter_map(|m| match m {
                TypeMetadata::MessageType(message_id, type_id) => Some((*message_id, *type_id)),
                _ => None,
            }));

        let (print_graph, print_graph_url_format) = match build_config {
            Some(cfg) => (
                cfg.print_dca_graph.clone(),
                cfg.print_dca_graph_url_format.clone(),
            ),
            None => (None, None),
        };

        check_should_abort(handler, retrigger_compilation.clone()).map_err(|error| {
            TypeCheckFailed {
                root_module: Some(Arc::new(typed_program.root_module.clone())),
                namespace: typed_program.namespace.current_package_ref().clone(),
                error,
            }
        })?;

        // Perform control flow analysis and extend with any errors.
        let _ = perform_control_flow_analysis(
            handler,
            engines,
            &typed_program,
            print_graph,
            print_graph_url_format,
        );

        types_metadata
    } else {
        vec![]
    };

    // Evaluate const declarations, to allow storage slot initialization with consts.
    let mut ctx = Context::new(engines.se(), experimental, backtrace.into());
    let module = Module::new(&mut ctx, Kind::Contract);
    if let Err(errs) = ir_generation::compile::compile_constants_for_package(
        engines,
        &mut ctx,
        module,
        &typed_program.namespace,
    ) {
        errs.into_iter().for_each(|err| {
            handler.emit_err(err.clone());
        });
    }

    // CEI pattern analysis
    let cei_analysis_warnings =
        semantic_analysis::cei_pattern_analysis::analyze_program(engines, &typed_program);
    for warn in cei_analysis_warnings {
        handler.emit_warn(warn);
    }

    let mut md_mgr = MetadataManager::default();
    // Check that all storage initializers can be evaluated at compile time.
    typed_program
        .get_typed_program_with_initialized_storage_slots(
            handler,
            engines,
            &mut ctx,
            &mut md_mgr,
            module,
        )
        .map_err(|error: ErrorEmitted| {
            handler.dedup();
            TypeCheckFailed {
                root_module: Some(Arc::new(typed_program.root_module.clone())),
                namespace: typed_program.namespace.current_package_ref().clone(),
                error,
            }
        })?;

    // All unresolved types lead to compile errors.
    for err in types_metadata.iter().filter_map(|m| match m {
        TypeMetadata::UnresolvedType(name, call_site_span_opt) => {
            Some(CompileError::UnableToInferGeneric {
                ty: name.as_str().to_string(),
                span: call_site_span_opt.clone().unwrap_or_else(|| name.span()),
            })
        }
        _ => None,
    }) {
        handler.emit_err(err);
    }

    Ok(typed_program)
}

#[allow(clippy::too_many_arguments)]
pub fn compile_to_ast(
    handler: &Handler,
    engines: &Engines,
    src: Source,
    initial_namespace: namespace::Package,
    build_config: Option<&BuildConfig>,
    package_name: &str,
    retrigger_compilation: Option<Arc<AtomicBool>>,
    experimental: ExperimentalFeatures,
) -> Result<Programs, ErrorEmitted> {
    check_should_abort(handler, retrigger_compilation.clone())?;

    let query_engine = engines.qe();
    let mut metrics = PerformanceData::default();
    if let Some(config) = build_config {
        let path = config.canonical_root_module();
        let include_tests = config.include_tests;
        // Check if we can re-use the data in the cache.
        if is_parse_module_cache_up_to_date(engines, &path, include_tests, build_config) {
            let mut entry = query_engine.get_programs_cache_entry(&path).unwrap();
            entry.programs.metrics.reused_programs += 1;

            let (warnings, errors, infos) = entry.handler_data;
            let new_handler = Handler::from_parts(warnings, errors, infos);
            handler.append(new_handler);
            return Ok(entry.programs);
        };
    }

    // Parse the program to a concrete syntax tree (CST).
    let parse_program_opt = time_expr!(
        package_name,
        "parse the program to a concrete syntax tree (CST)",
        "parse_cst",
        parse(
            src,
            handler,
            engines,
            build_config,
            experimental,
            package_name
        ),
        build_config,
        metrics
    );

    check_should_abort(handler, retrigger_compilation.clone())?;

    let (lexed_program, mut parsed_program) = match parse_program_opt {
        Ok(modules) => modules,
        Err(e) => {
            handler.dedup();
            return Err(e);
        }
    };

    // If tests are not enabled, exclude them from `parsed_program`.
    if build_config.is_none_or(|config| !config.include_tests) {
        parsed_program.exclude_tests(engines);
    }

    // Type check (+ other static analysis) the CST to a typed AST.
    let program = time_expr!(
        package_name,
        "parse the concrete syntax tree (CST) to a typed AST",
        "parse_ast",
        parsed_to_ast(
            handler,
            engines,
            &mut parsed_program,
            initial_namespace,
            build_config,
            package_name,
            retrigger_compilation.clone(),
            experimental,
            build_config.map(|cfg| cfg.backtrace).unwrap_or_default()
        ),
        build_config,
        metrics
    );

    check_should_abort(handler, retrigger_compilation.clone())?;

    handler.dedup();

    let programs = Programs::new(
        Arc::new(lexed_program),
        Arc::new(parsed_program),
        program.map(Arc::new),
        metrics,
    );

    if let Some(config) = build_config {
        let path = config.canonical_root_module();
        let cache_entry = ProgramsCacheEntry {
            path,
            programs: programs.clone(),
            handler_data: handler.clone().consume(),
        };
        query_engine.insert_programs_cache_entry(cache_entry);
    }

    check_should_abort(handler, retrigger_compilation.clone())?;

    Ok(programs)
}

/// Given input Sway source code, try compiling to a `CompiledAsm`,
/// containing the asm in opcode form (not raw bytes/bytecode).
pub fn compile_to_asm(
    handler: &Handler,
    engines: &Engines,
    src: Source,
    initial_namespace: namespace::Package,
    build_config: &BuildConfig,
    package_name: &str,
    experimental: ExperimentalFeatures,
) -> Result<CompiledAsm, ErrorEmitted> {
    let ast_res = compile_to_ast(
        handler,
        engines,
        src,
        initial_namespace,
        Some(build_config),
        package_name,
        None,
        experimental,
    )?;

    ast_to_asm(handler, engines, &ast_res, build_config, experimental)
}

/// Given an AST compilation result, try compiling to a `CompiledAsm`,
/// containing the asm in opcode form (not raw bytes/bytecode).
pub fn ast_to_asm(
    handler: &Handler,
    engines: &Engines,
    programs: &Programs,
    build_config: &BuildConfig,
    experimental: ExperimentalFeatures,
) -> Result<CompiledAsm, ErrorEmitted> {
    let typed_program = match &programs.typed {
        Ok(typed_program) => typed_program,
        Err(err) => return Err(err.error),
    };

    let mut panic_occurrences = PanicOccurrences::default();
    let mut panicking_call_occurrences = PanickingCallOccurrences::default();

    let asm = match compile_ast_to_ir_to_asm(
        handler,
        engines,
        typed_program,
        &mut panic_occurrences,
        &mut panicking_call_occurrences,
        build_config,
        experimental,
    ) {
        Ok(res) => res,
        Err(err) => {
            handler.dedup();
            return Err(err);
        }
    };

    Ok(CompiledAsm {
        finalized_asm: asm,
        panic_occurrences,
        panicking_call_occurrences,
    })
}

pub(crate) fn compile_ast_to_ir_to_asm(
    handler: &Handler,
    engines: &Engines,
    program: &ty::TyProgram,
    panic_occurrences: &mut PanicOccurrences,
    panicking_call_occurrences: &mut PanickingCallOccurrences,
    build_config: &BuildConfig,
    experimental: ExperimentalFeatures,
) -> Result<FinalizedAsm, ErrorEmitted> {
    // The IR pipeline relies on type information being fully resolved.
    // If type information is found to still be generic or unresolved inside of
    // IR, this is considered an internal compiler error. To resolve this situation,
    // we need to explicitly ensure all types are resolved before going into IR.
    //
    // We _could_ introduce a new type here that uses TypeInfo instead of TypeId and throw away
    // the engine, since we don't need inference for IR. That'd be a _lot_ of copy-pasted code,
    // though, so instead, we are just going to do a pass and throw any unresolved generics as
    // errors and then hold as a runtime invariant that none of the types will be unresolved in the
    // IR phase.

    let mut ir = match ir_generation::compile_program(
        program,
        panic_occurrences,
        panicking_call_occurrences,
        build_config.include_tests,
        engines,
        experimental,
        build_config.backtrace.into(),
    ) {
        Ok(ir) => ir,
        Err(errors) => {
            let mut last = None;
            for e in errors {
                last = Some(handler.emit_err(e));
            }
            return Err(last.unwrap());
        }
    };

    // Find all the entry points for purity checking and DCE.
    let entry_point_functions: Vec<::sway_ir::Function> = ir
        .module_iter()
        .flat_map(|module| module.function_iter(&ir))
        .filter(|func| func.is_entry(&ir))
        .collect();

    // Do a purity check on the _unoptimised_ IR.
    {
        let mut env = ir_generation::PurityEnv::default();
        let mut md_mgr = metadata::MetadataManager::default();
        for entry_point in &entry_point_functions {
            check_function_purity(handler, &mut env, &ir, &mut md_mgr, entry_point);
        }
    }

    // Initialize the pass manager and register known passes.
    let mut pass_mgr = PassManager::default();
    register_known_passes(&mut pass_mgr);
    let mut pass_group = PassGroup::default();

    match build_config.optimization_level {
        OptLevel::Opt1 => {
            pass_group.append_group(create_o1_pass_group());
        }
        OptLevel::Opt0 => {
            // We run a function deduplication pass that only removes duplicate
            // functions when everything, including the metadata, is identical.
            pass_group.append_pass(FN_DEDUP_DEBUG_PROFILE_NAME);

            // Inlining is necessary until #4899 is resolved.
            pass_group.append_pass(FN_INLINE_NAME);

            // Do DCE so other optimizations run faster.
            pass_group.append_pass(GLOBALS_DCE_NAME);
            pass_group.append_pass(DCE_NAME);
        }
    }

    // Target-specific transforms should be moved into something more configurable.
    if build_config.build_target == BuildTarget::Fuel {
        // FuelVM target specific transforms.
        //
        // Demote large by-value constants, arguments and return values to by-reference values
        // using temporaries.
        pass_group.append_pass(CONST_DEMOTION_NAME);
        pass_group.append_pass(ARG_DEMOTION_NAME);
        pass_group.append_pass(RET_DEMOTION_NAME);
        pass_group.append_pass(MISC_DEMOTION_NAME);

        // Convert loads and stores to mem_copies where possible.
        pass_group.append_pass(ARG_POINTEE_MUTABILITY_TAGGER_NAME);
        pass_group.append_pass(MEMCPYOPT_NAME);

        // Run a DCE and simplify-cfg to clean up any obsolete instructions.
        pass_group.append_pass(DCE_NAME);
        pass_group.append_pass(SIMPLIFY_CFG_NAME);

        match build_config.optimization_level {
            OptLevel::Opt1 => {
                pass_group.append_pass(MEMCPYPROP_REVERSE_NAME);
                pass_group.append_pass(SROA_NAME);
                pass_group.append_pass(MEM2REG_NAME);
                pass_group.append_pass(DCE_NAME);
            }
            OptLevel::Opt0 => {}
        }
    }

    // Run the passes.
    let print_passes_opts: PrintPassesOpts = (&build_config.print_ir).into();
    let verify_passes_opts: VerifyPassesOpts = (&build_config.verify_ir).into();
    let res = if let Err(ir_error) = pass_mgr.run_with_print_verify(
        &mut ir,
        &pass_group,
        &print_passes_opts,
        &verify_passes_opts,
    ) {
        Err(handler.emit_err(CompileError::InternalOwned(
            ir_error.to_string(),
            span::Span::dummy(),
        )))
    } else {
        Ok(())
    };
    res?;

    compile_ir_context_to_finalized_asm(handler, &ir, Some(build_config))
}

/// Given input Sway source code, compile to [CompiledBytecode], containing the asm in bytecode form.
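///
/// A minimal end-to-end sketch; the constructors shown and the `package_namespace`/`build_config`
/// values are assumptions about what the caller has already set up:
/// ```ignore
/// let engines = Engines::default();
/// let handler = Handler::default();
/// let mut source_map = SourceMap::new();
/// let bytecode = compile_to_bytecode(
///     &handler,
///     &engines,
///     "script; fn main() {}".into(),
///     package_namespace,
///     &build_config,
///     &mut source_map,
///     "my_package",
///     ExperimentalFeatures::default(),
/// )?;
/// ```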
#[allow(clippy::too_many_arguments)]
pub fn compile_to_bytecode(
    handler: &Handler,
    engines: &Engines,
    src: Source,
    initial_namespace: namespace::Package,
    build_config: &BuildConfig,
    source_map: &mut SourceMap,
    package_name: &str,
    experimental: ExperimentalFeatures,
) -> Result<CompiledBytecode, ErrorEmitted> {
    let mut asm_res = compile_to_asm(
        handler,
        engines,
        src,
        initial_namespace,
        build_config,
        package_name,
        experimental,
    )?;
    asm_to_bytecode(
        handler,
        &mut asm_res,
        source_map,
        engines.se(),
        build_config,
    )
}

/// Size of the prelude's CONFIGURABLES_OFFSET section, in bytes.
pub const PRELUDE_CONFIGURABLES_SIZE_IN_BYTES: usize = 8;
/// Offset (in bytes) of the CONFIGURABLES_OFFSET section in the prelude.
pub const PRELUDE_CONFIGURABLES_OFFSET_IN_BYTES: usize = 16;
/// Total size of the prelude in bytes. Instructions start right after.
pub const PRELUDE_SIZE_IN_BYTES: usize = 32;

/// Given bytecode, overwrite the existing configurables offset in the prelude with the given one.
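///
/// A minimal sketch; the offset value and its big-endian encoding are assumptions made
/// for illustration only:
/// ```ignore
/// let new_offset: u64 = 1024;
/// set_bytecode_configurables_offset(&mut compiled_bytecode, &new_offset.to_be_bytes());
/// ```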
pub fn set_bytecode_configurables_offset(
    compiled_bytecode: &mut CompiledBytecode,
    md: &[u8; PRELUDE_CONFIGURABLES_SIZE_IN_BYTES],
) {
    assert!(
        compiled_bytecode.bytecode.len()
            >= PRELUDE_CONFIGURABLES_OFFSET_IN_BYTES + PRELUDE_CONFIGURABLES_SIZE_IN_BYTES
    );
    let code = &mut compiled_bytecode.bytecode;
    for (index, byte) in md.iter().enumerate() {
        code[index + PRELUDE_CONFIGURABLES_OFFSET_IN_BYTES] = *byte;
    }
}

/// Given the assembly (opcodes), compile to [CompiledBytecode], containing the asm in bytecode form.
pub fn asm_to_bytecode(
    handler: &Handler,
    asm: &mut CompiledAsm,
    source_map: &mut SourceMap,
    source_engine: &SourceEngine,
    build_config: &BuildConfig,
) -> Result<CompiledBytecode, ErrorEmitted> {
    let compiled_bytecode =
        asm.finalized_asm
            .to_bytecode_mut(handler, source_map, source_engine, build_config)?;
    Ok(compiled_bytecode)
}

/// Given a [ty::TyProgram], which is type-checked Sway source, construct a graph to analyze
/// control flow and determine if it is valid.
fn perform_control_flow_analysis(
    handler: &Handler,
    engines: &Engines,
    program: &ty::TyProgram,
    print_graph: Option<String>,
    print_graph_url_format: Option<String>,
) -> Result<(), ErrorEmitted> {
    let dca_res = dead_code_analysis(handler, engines, program);
    let rpa_errors = return_path_analysis(engines, program);
    let rpa_res = handler.scope(|handler| {
        for err in rpa_errors {
            handler.emit_err(err);
        }
        Ok(())
    });

    if let Ok(graph) = dca_res.clone() {
        graph.visualize(engines, print_graph, print_graph_url_format);
    }
    dca_res?;
    rpa_res
}

/// Constructs a dead code graph from all modules within the program and then attempts to find
/// dead code.
///
/// Returns the graph that was used for analysis.
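///
/// # Example
/// A minimal sketch (not compiled); dead-code warnings are emitted through `handler`, and the
/// returned graph can optionally be visualized:
/// ```ignore
/// let graph = dead_code_analysis(&handler, &engines, &program)?;
/// graph.visualize(&engines, None, None);
/// ```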
fn dead_code_analysis<'a>(
    handler: &Handler,
    engines: &'a Engines,
    program: &ty::TyProgram,
) -> Result<ControlFlowGraph<'a>, ErrorEmitted> {
    let decl_engine = engines.de();
    let mut dead_code_graph = ControlFlowGraph::new(engines);
    let tree_type = program.kind.tree_type();
    module_dead_code_analysis(
        handler,
        engines,
        &program.root_module,
        &tree_type,
        &mut dead_code_graph,
    )?;
    let warnings = dead_code_graph.find_dead_code(decl_engine);
    for warn in warnings {
        handler.emit_warn(warn);
    }
    Ok(dead_code_graph)
}

/// Recursively collect modules into the given `ControlFlowGraph` ready for dead code analysis.
fn module_dead_code_analysis<'eng: 'cfg, 'cfg>(
    handler: &Handler,
    engines: &'eng Engines,
    module: &ty::TyModule,
    tree_type: &parsed::TreeType,
    graph: &mut ControlFlowGraph<'cfg>,
) -> Result<(), ErrorEmitted> {
    module
        .submodules
        .iter()
        .try_fold((), |(), (_, submodule)| {
            let tree_type = parsed::TreeType::Library;
            module_dead_code_analysis(handler, engines, &submodule.module, &tree_type, graph)
        })?;
    let res = {
        ControlFlowGraph::append_module_to_dead_code_graph(
            engines,
            &module.all_nodes,
            tree_type,
            graph,
        )
        .map_err(|err| handler.emit_err(err))
    };
    graph.connect_pending_entry_edges();
    res
}

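/// Runs return path analysis over the root module and, recursively, all of its submodules,
/// collecting the resulting [CompileError]s instead of emitting them directly.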
fn return_path_analysis(engines: &Engines, program: &ty::TyProgram) -> Vec<CompileError> {
    let mut errors = vec![];
    module_return_path_analysis(engines, &program.root_module, &mut errors);
    errors
}

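/// Builds the return path graph for a single module (recursing into its submodules first) and
/// appends any analysis errors to `errors`.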
fn module_return_path_analysis(
    engines: &Engines,
    module: &ty::TyModule,
    errors: &mut Vec<CompileError>,
) {
    for (_, submodule) in &module.submodules {
        module_return_path_analysis(engines, &submodule.module, errors);
    }
    let graph = ControlFlowGraph::construct_return_path_graph(engines, &module.all_nodes);
    match graph {
        Ok(graph) => errors.extend(graph.analyze_return_paths(engines)),
        Err(mut error) => errors.append(&mut error),
    }
}

/// Check if the retrigger compilation flag has been set to true in the language server.
/// If it has, there is a new compilation request, so we should abort the current compilation.
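///
/// # Example
/// A minimal sketch (not compiled) of how a caller such as the language server might wire up
/// the flag:
/// ```ignore
/// # use std::sync::{atomic::{AtomicBool, Ordering}, Arc};
/// let retrigger = Arc::new(AtomicBool::new(false));
/// // Another thread requests cancellation with `retrigger.store(true, Ordering::SeqCst)`.
/// check_should_abort(&handler, Some(retrigger.clone()))?;
/// ```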
fn check_should_abort(
    handler: &Handler,
    retrigger_compilation: Option<Arc<AtomicBool>>,
) -> Result<(), ErrorEmitted> {
    if let Some(ref retrigger_compilation) = retrigger_compilation {
        if retrigger_compilation.load(Ordering::SeqCst) {
            return Err(handler.cancel());
        }
    }
    Ok(())
}

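/// Resolves `typename` (a `::`-separated path such as a hypothetical `"my_pkg::MyStruct"`) in
/// the given namespace and, if the path resolves, emits a [CompileInfo] listing the trait
/// implementations found for the resolved type across the current package and its external
/// packages.
///
/// # Example
/// A minimal sketch (not compiled); `namespace` is assumed to come from an already
/// type-checked program, and the type path is a placeholder:
/// ```ignore
/// dump_trait_impls_for_typename(&handler, &engines, &namespace, "my_pkg::MyStruct")?;
/// ```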
pub fn dump_trait_impls_for_typename(
    handler: &Handler,
    engines: &Engines,
    namespace: &namespace::Namespace,
    typename: &str,
) -> Result<(), ErrorEmitted> {
    let path: Vec<&str> = typename.split("::").collect();
    let mut call_path = CallPath::fullpath(&path);
    call_path.callpath_type = CallPathType::Ambiguous;

    let pkg_namespace = namespace.current_package_ref();
    let mod_path = [pkg_namespace.root_module().name().clone()];

    let resolve_handler = Handler::default();
    let resolved = resolve_call_path(
        &resolve_handler,
        engines,
        namespace,
        &mod_path,
        &call_path,
        None,
        VisibilityCheck::No,
    );

    if let Ok(resolved) = resolved {
        let module = &pkg_namespace.root_module();

        let mut impls = Vec::new();
        find_trait_impls_for_type(engines, namespace, &resolved, module, &mut impls);

        for ext_pkg in pkg_namespace.external_packages.iter() {
            let ext_module = ext_pkg.1.root_module();
            find_trait_impls_for_type(engines, namespace, &resolved, ext_module, &mut impls);
        }

        let unique_impls = impls
            .iter()
            .unique_by(|i| i.impl_span.clone())
            .cloned()
            .collect::<Vec<_>>();
        handler.emit_info(CompileInfo {
            span: resolved.span(engines).subset_first_of("{").unwrap(),
            content: Info::ImplTraitsForType {
                impls: unique_impls,
            },
        });
    }

    Ok(())
}

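/// Walks the scope chain of `module` (and all of its submodules) and collects into `impls`
/// every trait implementation whose self type is the struct or enum named by `resolved_decl`.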
fn find_trait_impls_for_type(
    engines: &Engines,
    namespace: &namespace::Namespace,
    resolved_decl: &ResolvedDeclaration,
    module: &namespace::Module,
    impls: &mut Vec<CollectedTraitImpl>,
) {
    let handler = Handler::default();
    let struct_decl_source_id = resolved_decl
        .to_struct_decl(&handler, engines)
        .map(|d| d.expect_typed())
        .and_then(|decl| decl.to_struct_decl(&handler, engines))
        .map(|decl_id| engines.de().get_struct(&decl_id).span.source_id().cloned())
        .ok()
        .flatten();

    let enum_decl_source_id = resolved_decl
        .to_enum_decl(&handler, engines)
        .map(|d| d.expect_typed())
        .and_then(|decl| decl.to_enum_id(&handler, engines))
        .map(|decl_id| engines.de().get_enum(&decl_id).span.source_id().cloned())
        .ok()
        .flatten();

    module.walk_scope_chain(|lexical_scope| {
        module.submodules().iter().for_each(|(_, sub)| {
            find_trait_impls_for_type(engines, namespace, resolved_decl, sub, impls);
        });

        let trait_map = &lexical_scope.items.implemented_traits;

        for key in trait_map.trait_impls.keys() {
            for trait_entry in trait_map.trait_impls[key].iter() {
                let trait_type = engines.te().get(trait_entry.inner.key.type_id);

                let matched = match *trait_type {
                    TypeInfo::Enum(decl_id) => {
                        let trait_enum = engines.de().get_enum(&decl_id);
                        enum_decl_source_id == trait_enum.span.source_id().cloned()
                    }
                    TypeInfo::Struct(decl_id) => {
                        let trait_struct = engines.de().get_struct(&decl_id);
                        struct_decl_source_id == trait_struct.span.source_id().cloned()
                    }
                    _ => false,
                };

                if matched {
                    let trait_callpath = trait_entry.inner.key.name.to_fullpath(engines, namespace);
                    impls.push(CollectedTraitImpl {
                        impl_span: trait_entry
                            .inner
                            .value
                            .impl_span
                            .subset_first_of("{")
                            .unwrap(),
                        trait_name: engines.help_out(trait_callpath).to_string(),
                    });
                }
            }
        }
    });
}

#[test]
fn test_basic_prog() {
    let handler = Handler::default();
    let engines = Engines::default();
    let prog = parse(
        r#"
        contract;

    enum yo
    <T>
    where
    T: IsAThing
    {
        x: u32,
        y: MyStruct<u32>
    }

    enum  MyOtherSumType
    {
        x: u32,
        y: MyStruct<u32>
    }
        struct MyStruct<T> {
            field_name: u64,
            other_field: T,
        }


    fn generic_function
    <T>
    (arg1: u64,
    arg2: T)
    ->
    T
    where T: Display,
          T: Debug {
          let x: MyStruct =
          MyStruct
          {
              field_name:
              5
          };
          return
          match
            arg1
          {
               1
               => true,
               _ => { return false; },
          };
    }

    struct MyStruct {
        test: string,
    }



    use stdlib::println;

    trait MyTrait {
        // interface points
        fn myfunc(x: int) -> unit;
        } {
        // methods
        fn calls_interface_fn(x: int) -> unit {
            // declare a byte
            let x = 0b10101111;
            let mut y = 0b11111111;
            self.interface_fn(x);
        }
    }

    pub fn prints_number_five() -> u8 {
        let x: u8 = 5;
        println(x);
         x.to_string();
         let some_list = [
         5,
         10 + 3 / 2,
         func_app(my_args, (so_many_args))];
        return 5;
    }
    "#
        .into(),
        &handler,
        &engines,
        None,
        ExperimentalFeatures::default(),
        "test",
    );
    prog.unwrap();
}

#[test]
fn test_parenthesized() {
    let handler = Handler::default();
    let engines = Engines::default();
    let prog = parse(
        r#"
        contract;
        pub fn some_abi_func() -> unit {
            let x = (5 + 6 / (1 + (2 / 1) + 4));
            return;
        }
    "#
        .into(),
        &handler,
        &engines,
        None,
        ExperimentalFeatures::default(),
        "test",
    );
    prog.unwrap();
}

#[test]
fn test_unary_ordering() {
    use crate::language::{self, parsed};
    let handler = Handler::default();
    let engines = Engines::default();
    let prog = parse(
        r#"
    script;
    fn main() -> bool {
        let a = true;
        let b = true;
        !a && b;
    }"#
        .into(),
        &handler,
        &engines,
        None,
        ExperimentalFeatures::default(),
        "test",
    );
    let (.., prog) = prog.unwrap();
    // this should parse as `(!a) && b`, not `!(a && b)`. So, the top level
    // expression should be `&&`
    if let parsed::AstNode {
        content:
            parsed::AstNodeContent::Declaration(parsed::Declaration::FunctionDeclaration(decl_id)),
        ..
    } = &prog.root.tree.root_nodes[0]
    {
        let fn_decl = engines.pe().get_function(decl_id);
        if let parsed::AstNode {
            content:
                parsed::AstNodeContent::Expression(parsed::Expression {
                    kind:
                        parsed::ExpressionKind::LazyOperator(parsed::LazyOperatorExpression {
                            op, ..
                        }),
                    ..
                }),
            ..
        } = &fn_decl.body.contents[2]
        {
            assert_eq!(op, &language::LazyOp::And)
        } else {
            panic!("Was not lazy operator.")
        }
    } else {
        panic!("Was not ast node")
    };
}

#[test]
fn test_parser_recovery() {
    let handler = Handler::default();
    let engines = Engines::default();
    let prog = parse(
        r#"
    script;
    fn main() -> bool {
        let
        let a = true;
        true
    }"#
        .into(),
        &handler,
        &engines,
        None,
        ExperimentalFeatures::default(),
        "test",
    );
    let (_, _) = prog.unwrap();
    assert!(handler.has_errors());
    dbg!(handler);
}