sway_core/lib.rs

1#![recursion_limit = "256"]
2
3#[macro_use]
4pub mod error;
5
6#[macro_use]
7pub mod engine_threading;
8
9pub mod abi_generation;
10pub mod asm_generation;
11mod asm_lang;
12mod build_config;
13pub mod compiler_generated;
14mod concurrent_slab;
15mod control_flow_analysis;
16mod debug_generation;
17pub mod decl_engine;
18pub mod ir_generation;
19pub mod language;
20pub mod marker_traits;
21mod metadata;
22pub mod obs_engine;
23pub mod query_engine;
24pub mod semantic_analysis;
25pub mod source_map;
26pub mod transform;
27pub mod type_system;
28
29use crate::ir_generation::check_function_purity;
30use crate::language::{CallPath, CallPathType};
31use crate::query_engine::ModuleCacheEntry;
32use crate::semantic_analysis::namespace::ResolvedDeclaration;
33use crate::semantic_analysis::type_resolve::{resolve_call_path, VisibilityCheck};
34use crate::source_map::SourceMap;
35pub use asm_generation::from_ir::compile_ir_context_to_finalized_asm;
36use asm_generation::FinalizedAsm;
37pub use asm_generation::{CompiledBytecode, FinalizedEntry};
38pub use build_config::DbgGeneration;
39pub use build_config::{
40    Backtrace, BuildConfig, BuildTarget, LspConfig, OptLevel, PrintAsm, PrintIr,
41};
42use control_flow_analysis::ControlFlowGraph;
43pub use debug_generation::write_dwarf;
44use itertools::Itertools;
45use metadata::MetadataManager;
46use query_engine::{ModuleCacheKey, ModuleCommonInfo, ParsedModuleInfo, ProgramsCacheEntry};
47use semantic_analysis::program::TypeCheckFailed;
48use std::collections::hash_map::DefaultHasher;
49use std::collections::HashMap;
50use std::hash::{Hash, Hasher};
51use std::path::{Path, PathBuf};
52use std::sync::atomic::{AtomicBool, Ordering};
53use std::sync::Arc;
54use sway_ast::AttributeDecl;
55use sway_error::convert_parse_tree_error::ConvertParseTreeError;
56use sway_error::handler::{ErrorEmitted, Handler};
57use sway_error::warning::{CollectedTraitImpl, CompileInfo, CompileWarning, Info, Warning};
58use sway_features::ExperimentalFeatures;
59use sway_ir::{
60    create_o1_pass_group, register_known_passes, Context, Kind, Module, PassGroup, PassManager,
61    PrintPassesOpts, ARG_DEMOTION_NAME, ARG_POINTEE_MUTABILITY_TAGGER_NAME, CONST_DEMOTION_NAME,
62    DCE_NAME, FN_DEDUP_DEBUG_PROFILE_NAME, FN_INLINE_NAME, GLOBALS_DCE_NAME, MEM2REG_NAME,
63    MEMCPYOPT_NAME, MISC_DEMOTION_NAME, RET_DEMOTION_NAME, SIMPLIFY_CFG_NAME, SROA_NAME,
64};
65use sway_types::span::Source;
66use sway_types::{SourceEngine, SourceLocation, Span};
67use sway_utils::{time_expr, PerformanceData, PerformanceMetric};
68use transform::{ArgsExpectValues, Attribute, AttributeKind, Attributes, ExpectedArgs};
69use types::{CollectTypesMetadata, CollectTypesMetadataContext, LogId, TypeMetadata};
70
71pub use semantic_analysis::namespace::{self, Namespace};
72pub mod types;
73
74use sway_error::error::CompileError;
75use sway_types::{ident::Ident, span, Spanned};
76pub use type_system::*;
77
78pub use language::Programs;
79use language::{lexed, parsed, ty, Visibility};
80use transform::to_parsed_lang::{self, convert_module_kind};
81
82pub mod fuel_prelude {
83    pub use fuel_vm::{self, fuel_asm, fuel_crypto, fuel_tx, fuel_types};
84}
85
86pub use engine_threading::Engines;
87pub use obs_engine::{ObservabilityEngine, Observer};
88
/// Given an input [Source] and an optional [BuildConfig], parse the input into a [lexed::LexedProgram] and [parsed::ParseProgram].
///
/// # Example
/// ```ignore
/// # use sway_core::{parse, Engines};
/// # use sway_error::handler::Handler;
/// # use sway_features::ExperimentalFeatures;
/// let handler = Handler::default();
/// let engines = Engines::default();
/// let input = "script; fn main() -> bool { true }";
/// let result = parse(
///     input.into(),
///     &handler,
///     &engines,
///     None,
///     ExperimentalFeatures::default(),
///     "my_package",
/// );
/// ```
///
/// # Panics
/// Panics if the parser panics.
102pub fn parse(
103    src: Source,
104    handler: &Handler,
105    engines: &Engines,
106    config: Option<&BuildConfig>,
107    experimental: ExperimentalFeatures,
108    package_name: &str,
109) -> Result<(lexed::LexedProgram, parsed::ParseProgram), ErrorEmitted> {
110    match config {
111        None => parse_in_memory(
112            handler,
113            engines,
114            src,
115            experimental,
116            DbgGeneration::None,
117            package_name,
118        ),
119        // When a `BuildConfig` is given,
120        // the module source may declare `mod`s that must be parsed from other files.
121        Some(config) => parse_module_tree(
122            handler,
123            engines,
124            src,
125            config.canonical_root_module(),
126            None,
127            config.build_target,
128            config.dbg_generation,
129            config.include_tests,
130            experimental,
131            config.lsp_mode.as_ref(),
132            package_name,
133        )
134        .map(
135            |ParsedModuleTree {
136                 tree_type: kind,
137                 lexed_module,
138                 parse_module,
139             }| {
140                let lexed = lexed::LexedProgram {
141                    kind,
142                    root: lexed_module,
143                };
144                let parsed = parsed::ParseProgram {
145                    kind,
146                    root: parse_module,
147                };
148                (lexed, parsed)
149            },
150        ),
151    }
152}
153
/// Parses the tree type of the input provided.
///
/// This lexes the entire input but parses only the module kind.
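///
/// A minimal usage sketch (not doctested; the inline source string is illustrative):
/// ```ignore
/// # use sway_core::parse_tree_type;
/// # use sway_error::handler::Handler;
/// let handler = Handler::default();
/// let tree_type = parse_tree_type(&handler, "library;".into());
/// ```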
157pub fn parse_tree_type(handler: &Handler, src: Source) -> Result<parsed::TreeType, ErrorEmitted> {
158    // Parsing only the module kind does not depend on any
159    // experimental feature. So, we can just pass the default
160    // experimental features here.
161    let experimental = ExperimentalFeatures::default();
162    sway_parse::parse_module_kind(handler, src, None, experimental)
163        .map(|kind| convert_module_kind(&kind))
164}
165
/// Converts `attribute_decls` to [Attributes].
///
/// This function always returns [Attributes], even if the attributes are erroneous.
/// Errors and warnings are returned via the [Handler]. Callers should ignore any errors
/// in attributes and proceed with the compilation. [Attributes] are tolerant of erroneous
/// attributes and follow the last-wins principle, which allows annotated elements to
/// proceed with compilation. After their successful compilation, callers still need to
/// inspect the [Handler] and emit errors if there were any.
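///
/// A minimal sketch of the intended call pattern, mirroring its use in `parse_in_memory`
/// (the `module` and `handler` bindings are assumed to be in scope):
/// ```ignore
/// let (attributes_handler, attributes) = attr_decls_to_attributes(
///     &module.attributes,
///     |attr| attr.can_annotate_module_kind(),
///     module.value.kind.friendly_name(),
/// );
/// // Merge attribute diagnostics into the main handler; the returned `Option<ErrorEmitted>`
/// // tells the caller whether an error was emitted.
/// let attributes_error_emitted = handler.append(attributes_handler);
/// ```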
174pub(crate) fn attr_decls_to_attributes(
175    attribute_decls: &[AttributeDecl],
176    can_annotate: impl Fn(&Attribute) -> bool,
177    target_friendly_name: &'static str,
178) -> (Handler, Attributes) {
179    let handler = Handler::default();
    // Check if an attribute is an unsupported inner attribute (`#!`).
    // Note that we do this before creating the flattened `attributes`,
    // because we want the error to point at the `#!` token.
    // Note also that we still include those attributes in `attributes`.
    // There are cases, e.g., LSP, where having the complete list of
    // attributes is needed.
    // In the analysis below, though, we ignore inner attributes,
    // i.e., we do not check their content.
188    for attr_decl in attribute_decls
189        .iter()
190        .filter(|attr| !attr.is_doc_comment() && attr.is_inner())
191    {
192        handler.emit_err(CompileError::Unimplemented {
193            span: attr_decl.hash_kind.span(),
194            feature: "Using inner attributes (`#!`)".to_string(),
195            help: vec![],
196        });
197    }
198
199    let attributes = Attributes::new(attribute_decls);
200
201    // Check for unknown attributes.
202    for attribute in attributes.unknown().filter(|attr| attr.is_outer()) {
203        handler.emit_warn(CompileWarning {
204            span: attribute.name.span(),
205            warning_content: Warning::UnknownAttribute {
206                attribute: (&attribute.name).into(),
207                known_attributes: attributes.known_attribute_names(),
208            },
209        });
210    }
211
212    // Check for attributes annotating invalid targets.
213    for ((attribute_kind, _attribute_direction), mut attributes) in &attributes
214        .all()
215        .filter(|attr| attr.is_doc_comment() || attr.is_outer())
216        .chunk_by(|attr| (attr.kind, attr.direction))
217    {
218        // For doc comments, we want to show the error on a complete doc comment,
219        // and not on every documentation line.
220        if attribute_kind == AttributeKind::DocComment {
221            let first_doc_line = attributes
222                .next()
223                .expect("`chunk_by` guarantees existence of at least one element in the chunk");
224            if !can_annotate(first_doc_line) {
225                let last_doc_line = match attributes.last() {
226                    Some(last_attr) => last_attr,
227                    // There is only one doc line in the complete doc comment.
228                    None => first_doc_line,
229                };
230                handler.emit_err(
231                    ConvertParseTreeError::InvalidAttributeTarget {
232                        span: Span::join(
233                            first_doc_line.span.clone(),
234                            &last_doc_line.span.start_span(),
235                        ),
236                        attribute: first_doc_line.name.clone(),
237                        target_friendly_name,
238                        can_only_annotate_help: first_doc_line
239                            .can_only_annotate_help(target_friendly_name),
240                    }
241                    .into(),
242                );
243            }
244        } else {
245            // For other attributes, the error is shown for every individual attribute.
246            for attribute in attributes {
247                if !can_annotate(attribute) {
248                    handler.emit_err(
249                        ConvertParseTreeError::InvalidAttributeTarget {
250                            span: attribute.name.span(),
251                            attribute: attribute.name.clone(),
252                            target_friendly_name,
253                            can_only_annotate_help: attribute
254                                .can_only_annotate_help(target_friendly_name),
255                        }
256                        .into(),
257                    );
258                }
259            }
260        }
261    }
262
    // In all the subsequent checks we consider only non-doc-comment attributes,
    // and only those that didn't produce invalid-target or unsupported-inner-attribute errors.
265    let should_be_checked =
266        |attr: &&Attribute| !attr.is_doc_comment() && attr.is_outer() && can_annotate(attr);
267
268    // Check for attributes multiplicity.
269    for (_attribute_kind, attributes_of_kind) in
270        attributes.all_by_kind(|attr| should_be_checked(attr) && !attr.kind.allows_multiple())
271    {
272        if attributes_of_kind.len() > 1 {
273            let (last_attribute, previous_attributes) = attributes_of_kind
274                .split_last()
275                .expect("`attributes_of_kind` has more than one element");
276            handler.emit_err(
277                ConvertParseTreeError::InvalidAttributeMultiplicity {
278                    last_occurrence: (&last_attribute.name).into(),
279                    previous_occurrences: previous_attributes
280                        .iter()
281                        .map(|attr| (&attr.name).into())
282                        .collect(),
283                }
284                .into(),
285            );
286        }
287    }
288
    // Check for argument multiplicity.
    // For attributes that can be applied only once but are applied several times,
    // we still check the arguments of every attribute occurrence.
292    for attribute in attributes.all().filter(should_be_checked) {
293        let _ = attribute.check_args_multiplicity(&handler);
294    }
295
    // Check for expected arguments.
    // For attributes that can be applied only once but are applied several times,
    // we check the arguments of every attribute occurrence.
    // If an attribute does not expect any arguments, we do not check them,
    // and emit only the above error about the invalid number of arguments.
301    for attribute in attributes
302        .all()
303        .filter(|attr| should_be_checked(attr) && attr.can_have_arguments())
304    {
305        match attribute.expected_args() {
306            ExpectedArgs::None => unreachable!("`attribute` can have arguments"),
307            ExpectedArgs::Any => {}
308            ExpectedArgs::MustBeIn(expected_args) => {
309                for arg in attribute.args.iter() {
310                    if !expected_args.contains(&arg.name.as_str()) {
311                        handler.emit_err(
312                            ConvertParseTreeError::InvalidAttributeArg {
313                                attribute: attribute.name.clone(),
314                                arg: (&arg.name).into(),
315                                expected_args: expected_args.clone(),
316                            }
317                            .into(),
318                        );
319                    }
320                }
321            }
322            ExpectedArgs::ShouldBeIn(expected_args) => {
323                for arg in attribute.args.iter() {
324                    if !expected_args.contains(&arg.name.as_str()) {
325                        handler.emit_warn(CompileWarning {
326                            span: arg.name.span(),
327                            warning_content: Warning::UnknownAttributeArg {
328                                attribute: attribute.name.clone(),
329                                arg: (&arg.name).into(),
330                                expected_args: expected_args.clone(),
331                            },
332                        });
333                    }
334                }
335            }
336        }
337    }
338
    // Check for expected argument values.
    // We use the same logic for what to check as in the above check
    // for expected arguments.
342    for attribute in attributes
343        .all()
344        .filter(|attr| should_be_checked(attr) && attr.can_have_arguments())
345    {
346        // In addition, if an argument **must** be in expected args but is not,
347        // we will not be checking it, but only emit the error above.
348        // But if it **should** be in expected args and is not,
349        // we still impose on it the expectation coming from its attribute.
350        fn check_value_expected(handler: &Handler, attribute: &Attribute, is_value_expected: bool) {
351            for arg in attribute.args.iter() {
352                if let ExpectedArgs::MustBeIn(expected_args) = attribute.expected_args() {
353                    if !expected_args.contains(&arg.name.as_str()) {
354                        continue;
355                    }
356                }
357
358                if (is_value_expected && arg.value.is_none())
359                    || (!is_value_expected && arg.value.is_some())
360                {
361                    handler.emit_err(
362                        ConvertParseTreeError::InvalidAttributeArgExpectsValue {
363                            attribute: attribute.name.clone(),
364                            arg: (&arg.name).into(),
365                            value_span: arg.value.as_ref().map(|literal| literal.span()),
366                        }
367                        .into(),
368                    );
369                }
370            }
371        }
372
373        match attribute.args_expect_values() {
374            ArgsExpectValues::Yes => check_value_expected(&handler, attribute, true),
375            ArgsExpectValues::No => check_value_expected(&handler, attribute, false),
376            ArgsExpectValues::Maybe => {}
377        }
378    }
379
380    (handler, attributes)
381}
382
383/// When no `BuildConfig` is given, we're assumed to be parsing in-memory with no submodules.
384fn parse_in_memory(
385    handler: &Handler,
386    engines: &Engines,
387    src: Source,
388    experimental: ExperimentalFeatures,
389    dbg_generation: DbgGeneration,
390    package_name: &str,
391) -> Result<(lexed::LexedProgram, parsed::ParseProgram), ErrorEmitted> {
392    let mut hasher = DefaultHasher::new();
393    src.text.hash(&mut hasher);
394    let hash = hasher.finish();
395    let module = sway_parse::parse_file(handler, src, None, experimental)?;
396
397    let (attributes_handler, attributes) = attr_decls_to_attributes(
398        &module.attributes,
399        |attr| attr.can_annotate_module_kind(),
400        module.value.kind.friendly_name(),
401    );
402    let attributes_error_emitted = handler.append(attributes_handler);
403
404    let (kind, tree) = to_parsed_lang::convert_parse_tree(
405        &mut to_parsed_lang::Context::new(
406            BuildTarget::EVM,
407            dbg_generation,
408            experimental,
409            package_name,
410        ),
411        handler,
412        engines,
413        module.value.clone(),
414    )?;
415
416    match attributes_error_emitted {
417        Some(err) => Err(err),
418        None => {
419            let root = parsed::ParseModule {
420                span: span::Span::dummy(),
421                module_kind_span: module.value.kind.span(),
422                module_eval_order: vec![],
423                tree,
424                submodules: vec![],
425                attributes,
426                hash,
427            };
428            let lexed_program = lexed::LexedProgram::new(
429                kind,
430                lexed::LexedModule {
431                    tree: module,
432                    submodules: vec![],
433                },
434            );
435            Ok((lexed_program, parsed::ParseProgram { kind, root }))
436        }
437    }
438}
439
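/// A parsed submodule: its name, the path of its source file, and its lexed and parsed
/// representations.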
440pub struct Submodule {
441    name: Ident,
442    path: Arc<PathBuf>,
443    lexed: lexed::LexedSubmodule,
444    parsed: parsed::ParseSubmodule,
445}
446
447/// Contains the lexed and parsed submodules 'deps' of a module.
448pub type Submodules = Vec<Submodule>;
449
450/// Parse all dependencies `deps` as submodules.
451#[allow(clippy::too_many_arguments)]
452fn parse_submodules(
453    handler: &Handler,
454    engines: &Engines,
455    module_name: Option<&str>,
456    module: &sway_ast::Module,
457    module_dir: &Path,
458    build_target: BuildTarget,
459    dbg_generation: DbgGeneration,
460    include_tests: bool,
461    experimental: ExperimentalFeatures,
462    lsp_mode: Option<&LspConfig>,
463    package_name: &str,
464) -> Submodules {
465    // Assume the happy path, so there'll be as many submodules as dependencies, but no more.
466    let mut submods = Vec::with_capacity(module.submodules().count());
467    module.submodules().for_each(|submod| {
468        // Read the source code from the dependency.
469        // If we cannot, record as an error, but continue with other files.
470        let submod_path = Arc::new(module_path(module_dir, module_name, submod));
471        let submod_src: Source = match std::fs::read_to_string(&*submod_path) {
472            Ok(s) => s.as_str().into(),
473            Err(e) => {
474                handler.emit_err(CompileError::FileCouldNotBeRead {
475                    span: submod.name.span(),
476                    file_path: submod_path.to_string_lossy().to_string(),
477                    stringified_error: e.to_string(),
478                });
479                return;
480            }
481        };
482        if let Ok(ParsedModuleTree {
483            tree_type: kind,
484            lexed_module,
485            parse_module,
486        }) = parse_module_tree(
487            handler,
488            engines,
489            submod_src.clone(),
490            submod_path.clone(),
491            Some(submod.name.as_str()),
492            build_target,
493            dbg_generation,
494            include_tests,
495            experimental,
496            lsp_mode,
497            package_name,
498        ) {
499            if !matches!(kind, parsed::TreeType::Library) {
500                let source_id = engines.se().get_source_id(submod_path.as_ref());
501                let span = span::Span::new(submod_src, 0, 0, Some(source_id)).unwrap();
502                handler.emit_err(CompileError::ImportMustBeLibrary { span });
503                return;
504            }
505
506            let parse_submodule = parsed::ParseSubmodule {
507                module: parse_module,
508                visibility: match submod.visibility {
509                    Some(..) => Visibility::Public,
510                    None => Visibility::Private,
511                },
512                mod_name_span: submod.name.span(),
513            };
514            let lexed_submodule = lexed::LexedSubmodule {
515                module: lexed_module,
516            };
517            let submodule = Submodule {
518                name: submod.name.clone(),
519                path: submod_path,
520                lexed: lexed_submodule,
521                parsed: parse_submodule,
522            };
523            submods.push(submodule);
524        }
525    });
526    submods
527}
528
529pub type SourceHash = u64;
530
531#[derive(Clone, Debug)]
532pub struct ParsedModuleTree {
533    pub tree_type: parsed::TreeType,
534    pub lexed_module: lexed::LexedModule,
535    pub parse_module: parsed::ParseModule,
536}
537
538/// Given the source of the module along with its path,
539/// parse this module including all of its submodules.
540#[allow(clippy::too_many_arguments)]
541fn parse_module_tree(
542    handler: &Handler,
543    engines: &Engines,
544    src: Source,
545    path: Arc<PathBuf>,
546    module_name: Option<&str>,
547    build_target: BuildTarget,
548    dbg_generation: DbgGeneration,
549    include_tests: bool,
550    experimental: ExperimentalFeatures,
551    lsp_mode: Option<&LspConfig>,
552    package_name: &str,
553) -> Result<ParsedModuleTree, ErrorEmitted> {
554    let query_engine = engines.qe();
555
556    // Parse this module first.
557    let module_dir = path.parent().expect("module file has no parent directory");
558    let source_id = engines.se().get_source_id(&path.clone());
559    // don't use reloaded file if we already have it in memory, that way new spans will still point to the same string
560    let src = engines.se().get_or_create_source_buffer(&source_id, src);
561    let module = sway_parse::parse_file(handler, src.clone(), Some(source_id), experimental)?;
562
563    // Parse all submodules before converting to the `ParseTree`.
564    // This always recovers on parse errors for the file itself by skipping that file.
565    let submodules = parse_submodules(
566        handler,
567        engines,
568        module_name,
569        &module.value,
570        module_dir,
571        build_target,
572        dbg_generation,
573        include_tests,
574        experimental,
575        lsp_mode,
576        package_name,
577    );
578
579    let (attributes_handler, attributes) = attr_decls_to_attributes(
580        &module.attributes,
581        |attr| attr.can_annotate_module_kind(),
582        module.value.kind.friendly_name(),
583    );
584    let attributes_error_emitted = handler.append(attributes_handler);
585
586    // Convert from the raw parsed module to the `ParseTree` ready for type-check.
587    let (kind, tree) = to_parsed_lang::convert_parse_tree(
588        &mut to_parsed_lang::Context::new(build_target, dbg_generation, experimental, package_name),
589        handler,
590        engines,
591        module.value.clone(),
592    )?;
593
594    if let Some(err) = attributes_error_emitted {
595        return Err(err);
596    }
597
598    let module_kind_span = module.value.kind.span();
599    let lexed_submodules = submodules
600        .iter()
601        .map(|s| (s.name.clone(), s.lexed.clone()))
602        .collect::<Vec<_>>();
603    let lexed = lexed::LexedModule {
604        tree: module,
605        submodules: lexed_submodules,
606    };
607
608    let mut hasher = DefaultHasher::new();
609    src.text.hash(&mut hasher);
610    let hash = hasher.finish();
611
612    let parsed_submodules = submodules
613        .iter()
614        .map(|s| (s.name.clone(), s.parsed.clone()))
615        .collect::<Vec<_>>();
616    let parsed = parsed::ParseModule {
617        span: span::Span::new(src, 0, 0, Some(source_id)).unwrap(),
618        module_kind_span,
619        module_eval_order: vec![],
620        tree,
621        submodules: parsed_submodules,
622        attributes,
623        hash,
624    };
625
626    // Let's prime the cache with the module dependency and hash data.
627    let modified_time = std::fs::metadata(path.as_path())
628        .ok()
629        .and_then(|m| m.modified().ok());
630    let dependencies = submodules.into_iter().map(|s| s.path).collect::<Vec<_>>();
631    let version = lsp_mode
632        .and_then(|lsp| lsp.file_versions.get(path.as_ref()).copied())
633        .unwrap_or(None);
634
635    let common_info = ModuleCommonInfo {
636        path: path.clone(),
637        include_tests,
638        dependencies,
639        hash,
640    };
641    let parsed_info = ParsedModuleInfo {
642        modified_time,
643        version,
644    };
645    let cache_entry = ModuleCacheEntry::new(common_info, parsed_info);
646    query_engine.update_or_insert_parsed_module_cache_entry(cache_entry);
647
648    Ok(ParsedModuleTree {
649        tree_type: kind,
650        lexed_module: lexed,
651        parse_module: parsed,
652    })
653}
654
655/// Checks if the typed module cache for a given path is up to date.
656///
657/// This function determines whether the cached typed representation of a module
658/// is still valid based on file versions and dependencies.
659///
660/// Note: This functionality is currently only supported when the compiler is
661/// initiated from the language server.
662pub(crate) fn is_ty_module_cache_up_to_date(
663    engines: &Engines,
664    path: &Arc<PathBuf>,
665    include_tests: bool,
666    build_config: Option<&BuildConfig>,
667) -> bool {
668    let cache = engines.qe().module_cache.read();
669    let key = ModuleCacheKey::new(path.clone(), include_tests);
670    cache.get(&key).is_some_and(|entry| {
671        entry.typed.as_ref().is_some_and(|typed| {
672            // Check if the cache is up to date based on file versions
673            let cache_up_to_date = build_config
674                .and_then(|x| x.lsp_mode.as_ref())
675                .and_then(|lsp| lsp.file_versions.get(path.as_ref()))
676                .is_none_or(|version| {
677                    version.is_none_or(|v| typed.version.is_some_and(|tv| v <= tv))
678                });
679
680            // If the cache is up to date, recursively check all dependencies
681            cache_up_to_date
682                && entry.common.dependencies.iter().all(|dep_path| {
683                    is_ty_module_cache_up_to_date(engines, dep_path, include_tests, build_config)
684                })
685        })
686    })
687}
688
689/// Checks if the parsed module cache for a given path is up to date.
690///
691/// This function determines whether the cached parsed representation of a module
692/// is still valid based on file versions, modification times, or content hashes.
693pub(crate) fn is_parse_module_cache_up_to_date(
694    engines: &Engines,
695    path: &Arc<PathBuf>,
696    include_tests: bool,
697    build_config: Option<&BuildConfig>,
698) -> bool {
699    let cache = engines.qe().module_cache.read();
700    let key = ModuleCacheKey::new(path.clone(), include_tests);
701    cache.get(&key).is_some_and(|entry| {
702        // Determine if the cached dependency information is still valid
703        let cache_up_to_date = build_config
704            .and_then(|x| x.lsp_mode.as_ref())
705            .and_then(|lsp| lsp.file_versions.get(path.as_ref()))
706            .map_or_else(
707                || {
708                    // If LSP mode is not active or file version is unavailable, fall back to filesystem checks.
709                    let modified_time = std::fs::metadata(path.as_path())
710                        .ok()
711                        .and_then(|m| m.modified().ok());
712                    // Check if modification time matches, or if not, compare file content hash
713                    entry.parsed.modified_time == modified_time || {
714                        let src = std::fs::read_to_string(path.as_path()).unwrap();
715                        let mut hasher = DefaultHasher::new();
716                        src.hash(&mut hasher);
717                        hasher.finish() == entry.common.hash
718                    }
719                },
720                |version| {
721                    // Determine if the parse cache is up-to-date in LSP mode:
722                    // - If there's no LSP file version (version is None), consider the cache up-to-date.
723                    // - If there is an LSP file version:
724                    //   - If there's no cached version (entry.parsed.version is None), the cache is outdated.
725                    //   - If there's a cached version, compare them: cache is up-to-date if the LSP file version
726                    //     is not greater than the cached version.
727                    version.is_none_or(|v| entry.parsed.version.is_some_and(|ev| v <= ev))
728                },
729            );
730
        // If the cache is up to date, recursively check all dependencies to make sure they
        // have not been modified either.
733        cache_up_to_date
734            && entry.common.dependencies.iter().all(|dep_path| {
735                is_parse_module_cache_up_to_date(engines, dep_path, include_tests, build_config)
736            })
737    })
738}
739
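/// Resolves the on-disk path of a submodule's source file relative to its parent module's
/// directory. A sketch of the resulting layout (module names are hypothetical; assumes the
/// default `.sw` source file extension):
/// ```ignore
/// // top-level module `src/main.sw` declaring `mod foo;`   -> `src/foo.sw`
/// // submodule `foo` (`src/foo.sw`) declaring `mod bar;`   -> `src/foo/bar.sw`
/// ```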
740fn module_path(
741    parent_module_dir: &Path,
742    parent_module_name: Option<&str>,
743    submod: &sway_ast::Submodule,
744) -> PathBuf {
745    if let Some(parent_name) = parent_module_name {
746        parent_module_dir
747            .join(parent_name)
748            .join(submod.name.to_string())
749            .with_extension(sway_types::constants::DEFAULT_FILE_EXTENSION)
750    } else {
751        // top level module
752        parent_module_dir
753            .join(submod.name.to_string())
754            .with_extension(sway_types::constants::DEFAULT_FILE_EXTENSION)
755    }
756}
757
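/// Builds the module dependency graph for `parse_module` and all of its submodules, and
/// stores the computed evaluation order in each module's `module_eval_order`.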
758pub fn build_module_dep_graph(
759    handler: &Handler,
760    parse_module: &mut parsed::ParseModule,
761) -> Result<(), ErrorEmitted> {
762    let module_dep_graph = ty::TyModule::build_dep_graph(handler, parse_module)?;
763    parse_module.module_eval_order = module_dep_graph.compute_order(handler)?;
764
765    for (_, submodule) in &mut parse_module.submodules {
766        build_module_dep_graph(handler, &mut submodule.module)?;
767    }
768    Ok(())
769}
770
771/// A possible occurrence of a `panic` expression that is located in code at [PanicOccurrence::loc].
772///
773/// Note that a single `panic` expression can have multiple [PanicOccurrence]s related to it.
774///
775/// For example:
776/// - `panic "Some message.";` will have just a single occurrence, with `msg` containing the message.
777/// - `panic some_value_of_a_concrete_type;` will have just a single occurrence, with `log_id` containing the [LogId] of the concrete type.
778/// - `panic some_value_of_a_generic_type;` will have multiple occurrences, one with `log_id` for every monomorphized type.
779///
780/// **Every [PanicOccurrence] has exactly one revert code assigned to it.**
781#[derive(Default, Debug, Clone, PartialEq, Eq, Hash)]
782pub struct PanicOccurrence {
783    pub function: String,
784    pub loc: SourceLocation,
785    pub log_id: Option<LogId>,
786    pub msg: Option<String>,
787}
788
789/// Represents a function call that could panic during execution.
790/// E.g., for the following code:
791///
792/// ```ignore
793/// fn some_function() {
///     let _ = this_function_might_panic(42);
/// }
796/// ```
797///
798/// the `function` field will contain the name of the function that might panic:
799///   `function: "some_other_package::module::this_function_might_panic"`
800///
801/// and the `loc` and `caller_function` fields will contain the source location of the call to the `function`
802/// that might panic:
803///
804/// ```ignore
805///     caller_function: "some_package::some_module::some_function",
806///     pkg: "some_package@0.1.0",
807///     file: "src/some_module.sw",
808///     ...
809/// ```
810///
/// Note that, if the panicking function or the caller function is
/// a generic function, a single panicking call can have multiple
/// [PanickingCallOccurrence]s related to it.
814///
815/// For example:
816/// - `this_function_might_panic(42);` will have a single occurrence,
817///   with `function` containing the full name of the function that might panic.
818/// - `this_generic_function_might_panic::<u64>(42);` will have a single occurrence,
819///   with `function` containing the full name of the function that might panic,
820///   but with the generic type parameter `u64` included in the name.
821/// - `this_generic_function_might_panic::<T>(42);` will have multiple occurrences,
822///   one for every monomorphized type.
823///
/// The same applies to a generic caller function.
825///
826/// **Every [PanickingCallOccurrence] has exactly one panicking call code assigned to it.**
827#[derive(Default, Debug, Clone, PartialEq, Eq, Hash)]
828pub struct PanickingCallOccurrence {
829    pub function: String,
830    pub caller_function: String,
831    pub loc: SourceLocation,
832}
833
834/// [PanicOccurrence]s mapped to their corresponding panic error codes.
835pub type PanicOccurrences = HashMap<PanicOccurrence, u64>;
836
837/// [PanickingCallOccurrence]s mapped to their corresponding panicking call codes.
838pub type PanickingCallOccurrences = HashMap<PanickingCallOccurrence, u64>;
839
840pub struct CompiledAsm {
841    pub finalized_asm: FinalizedAsm,
842    pub panic_occurrences: PanicOccurrences,
843    pub panicking_call_occurrences: PanickingCallOccurrences,
844}
845
846#[allow(clippy::result_large_err)]
847#[allow(clippy::too_many_arguments)]
848pub fn parsed_to_ast(
849    handler: &Handler,
850    engines: &Engines,
851    parse_program: &mut parsed::ParseProgram,
852    initial_namespace: namespace::Package,
853    build_config: Option<&BuildConfig>,
854    package_name: &str,
855    retrigger_compilation: Option<Arc<AtomicBool>>,
856    experimental: ExperimentalFeatures,
857    backtrace: Backtrace,
858) -> Result<ty::TyProgram, TypeCheckFailed> {
859    let lsp_config = build_config.map(|x| x.lsp_mode.clone()).unwrap_or_default();
860
861    // Build the dependency graph for the submodules.
862    build_module_dep_graph(handler, &mut parse_program.root).map_err(|error| TypeCheckFailed {
863        root_module: None,
864        namespace: initial_namespace.clone(),
865        error,
866    })?;
867
868    let collection_namespace = Namespace::new(handler, engines, initial_namespace.clone(), true)
869        .map_err(|error| TypeCheckFailed {
870            root_module: None,
871            namespace: initial_namespace.clone(),
872            error,
873        })?;
874    // Collect the program symbols.
875
876    let mut collection_ctx =
877        ty::TyProgram::collect(handler, engines, parse_program, collection_namespace).map_err(
878            |error| TypeCheckFailed {
879                root_module: None,
880                namespace: initial_namespace.clone(),
881                error,
882            },
883        )?;
884
885    let typecheck_namespace =
886        Namespace::new(handler, engines, initial_namespace, true).map_err(|error| {
887            TypeCheckFailed {
888                root_module: None,
889                namespace: collection_ctx.namespace().current_package_ref().clone(),
890                error,
891            }
892        })?;
893    // Type check the program.
894    let typed_program_opt = ty::TyProgram::type_check(
895        handler,
896        engines,
897        parse_program,
898        &mut collection_ctx,
899        typecheck_namespace,
900        package_name,
901        build_config,
902        experimental,
903    );
904
905    let mut typed_program = typed_program_opt?;
906
907    check_should_abort(handler, retrigger_compilation.clone()).map_err(|error| {
908        TypeCheckFailed {
909            root_module: Some(Arc::new(typed_program.root_module.clone())),
910            namespace: typed_program.namespace.current_package_ref().clone(),
911            error,
912        }
913    })?;
914    // Only clear the parsed AST nodes if we are running a regular compilation pipeline.
915    // LSP needs these to build its token map, and they are cleared by `clear_program` as
916    // part of the LSP garbage collection functionality instead.
917    if lsp_config.is_none() {
918        engines.pe().clear();
919    }
920
921    typed_program.check_deprecated(engines, handler);
922
923    match typed_program.check_recursive(engines, handler) {
924        Ok(()) => {}
925        Err(error) => {
926            handler.dedup();
927            return Err(TypeCheckFailed {
928                root_module: Some(Arc::new(typed_program.root_module.clone())),
929                namespace: typed_program.namespace.current_package().clone(),
930                error,
931            });
932        }
933    };
934
935    // Skip collecting metadata if we triggered an optimised build from LSP.
936    let types_metadata = if !lsp_config.as_ref().is_some_and(|lsp| lsp.optimized_build) {
937        // Collect information about the types used in this program
938        let types_metadata_result = typed_program.collect_types_metadata(
939            handler,
940            &mut CollectTypesMetadataContext::new(engines, experimental, package_name.to_string()),
941        );
942        let types_metadata = match types_metadata_result {
943            Ok(types_metadata) => types_metadata,
944            Err(error) => {
945                handler.dedup();
946                return Err(TypeCheckFailed {
947                    root_module: Some(Arc::new(typed_program.root_module.clone())),
948                    namespace: typed_program.namespace.current_package().clone(),
949                    error,
950                });
951            }
952        };
953
954        typed_program
955            .logged_types
956            .extend(types_metadata.iter().filter_map(|m| match m {
957                TypeMetadata::LoggedType(log_id, type_id) => Some((*log_id, *type_id)),
958                _ => None,
959            }));
960
961        typed_program
962            .messages_types
963            .extend(types_metadata.iter().filter_map(|m| match m {
964                TypeMetadata::MessageType(message_id, type_id) => Some((*message_id, *type_id)),
965                _ => None,
966            }));
967
968        let (print_graph, print_graph_url_format) = match build_config {
969            Some(cfg) => (
970                cfg.print_dca_graph.clone(),
971                cfg.print_dca_graph_url_format.clone(),
972            ),
973            None => (None, None),
974        };
975
976        check_should_abort(handler, retrigger_compilation.clone()).map_err(|error| {
977            TypeCheckFailed {
978                root_module: Some(Arc::new(typed_program.root_module.clone())),
979                namespace: typed_program.namespace.current_package_ref().clone(),
980                error,
981            }
982        })?;
983
984        // Perform control flow analysis and extend with any errors.
985        let _ = perform_control_flow_analysis(
986            handler,
987            engines,
988            &typed_program,
989            print_graph,
990            print_graph_url_format,
991        );
992
993        types_metadata
994    } else {
995        vec![]
996    };
997
998    // Evaluate const declarations, to allow storage slots initialization with consts.
999    let mut ctx = Context::new(engines.se(), experimental, backtrace.into());
1000    let module = Module::new(&mut ctx, Kind::Contract);
1001    if let Err(errs) = ir_generation::compile::compile_constants_for_package(
1002        engines,
1003        &mut ctx,
1004        module,
1005        &typed_program.namespace,
1006    ) {
1007        errs.into_iter().for_each(|err| {
1008            handler.emit_err(err.clone());
1009        });
1010    }
1011
1012    // CEI pattern analysis
1013    let cei_analysis_warnings =
1014        semantic_analysis::cei_pattern_analysis::analyze_program(engines, &typed_program);
1015    for warn in cei_analysis_warnings {
1016        handler.emit_warn(warn);
1017    }
1018
1019    let mut md_mgr = MetadataManager::default();
1020    // Check that all storage initializers can be evaluated at compile time.
1021    typed_program
1022        .get_typed_program_with_initialized_storage_slots(
1023            handler,
1024            engines,
1025            &mut ctx,
1026            &mut md_mgr,
1027            module,
1028        )
1029        .map_err(|error: ErrorEmitted| {
1030            handler.dedup();
1031            TypeCheckFailed {
1032                root_module: Some(Arc::new(typed_program.root_module.clone())),
1033                namespace: typed_program.namespace.current_package_ref().clone(),
1034                error,
1035            }
1036        })?;
1037
1038    // All unresolved types lead to compile errors.
1039    for err in types_metadata.iter().filter_map(|m| match m {
1040        TypeMetadata::UnresolvedType(name, call_site_span_opt) => {
1041            Some(CompileError::UnableToInferGeneric {
1042                ty: name.as_str().to_string(),
1043                span: call_site_span_opt.clone().unwrap_or_else(|| name.span()),
1044            })
1045        }
1046        _ => None,
1047    }) {
1048        handler.emit_err(err);
1049    }
1050
1051    Ok(typed_program)
1052}
1053
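/// Parses and type-checks the given source into a [Programs] bundle (lexed, parsed, and
/// typed programs, plus performance metrics). When a [BuildConfig] is given and the parsed
/// module cache is up to date, a previously cached [Programs] entry is reused instead.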
1054#[allow(clippy::too_many_arguments)]
1055pub fn compile_to_ast(
1056    handler: &Handler,
1057    engines: &Engines,
1058    src: Source,
1059    initial_namespace: namespace::Package,
1060    build_config: Option<&BuildConfig>,
1061    package_name: &str,
1062    retrigger_compilation: Option<Arc<AtomicBool>>,
1063    experimental: ExperimentalFeatures,
1064) -> Result<Programs, ErrorEmitted> {
1065    check_should_abort(handler, retrigger_compilation.clone())?;
1066
1067    let query_engine = engines.qe();
1068    let mut metrics = PerformanceData::default();
1069    if let Some(config) = build_config {
1070        let path = config.canonical_root_module();
1071        let include_tests = config.include_tests;
1072        // Check if we can re-use the data in the cache.
1073        if is_parse_module_cache_up_to_date(engines, &path, include_tests, build_config) {
1074            let mut entry = query_engine.get_programs_cache_entry(&path).unwrap();
1075            entry.programs.metrics.reused_programs += 1;
1076
1077            let (warnings, errors, infos) = entry.handler_data;
1078            let new_handler = Handler::from_parts(warnings, errors, infos);
1079            handler.append(new_handler);
1080            return Ok(entry.programs);
1081        };
1082    }
1083
1084    // Parse the program to a concrete syntax tree (CST).
1085    let parse_program_opt = time_expr!(
1086        package_name,
1087        "parse the program to a concrete syntax tree (CST)",
1088        "parse_cst",
1089        parse(
1090            src,
1091            handler,
1092            engines,
1093            build_config,
1094            experimental,
1095            package_name
1096        ),
1097        build_config,
1098        metrics
1099    );
1100
1101    check_should_abort(handler, retrigger_compilation.clone())?;
1102
1103    let (lexed_program, mut parsed_program) = match parse_program_opt {
1104        Ok(modules) => modules,
1105        Err(e) => {
1106            handler.dedup();
1107            return Err(e);
1108        }
1109    };
1110
1111    // If tests are not enabled, exclude them from `parsed_program`.
1112    if build_config.is_none_or(|config| !config.include_tests) {
1113        parsed_program.exclude_tests(engines);
1114    }
1115
1116    // Type check (+ other static analysis) the CST to a typed AST.
1117    let program = time_expr!(
1118        package_name,
1119        "parse the concrete syntax tree (CST) to a typed AST",
1120        "parse_ast",
1121        parsed_to_ast(
1122            handler,
1123            engines,
1124            &mut parsed_program,
1125            initial_namespace,
1126            build_config,
1127            package_name,
1128            retrigger_compilation.clone(),
1129            experimental,
1130            build_config.map(|cfg| cfg.backtrace).unwrap_or_default()
1131        ),
1132        build_config,
1133        metrics
1134    );
1135
1136    check_should_abort(handler, retrigger_compilation.clone())?;
1137
1138    handler.dedup();
1139
1140    let programs = Programs::new(
1141        Arc::new(lexed_program),
1142        Arc::new(parsed_program),
1143        program.map(Arc::new),
1144        metrics,
1145    );
1146
1147    if let Some(config) = build_config {
1148        let path = config.canonical_root_module();
1149        let cache_entry = ProgramsCacheEntry {
1150            path,
1151            programs: programs.clone(),
1152            handler_data: handler.clone().consume(),
1153        };
1154        query_engine.insert_programs_cache_entry(cache_entry);
1155    }
1156
1157    check_should_abort(handler, retrigger_compilation.clone())?;
1158
1159    Ok(programs)
1160}
1161
1162/// Given input Sway source code, try compiling to a `CompiledAsm`,
1163/// containing the asm in opcode form (not raw bytes/bytecode).
1164pub fn compile_to_asm(
1165    handler: &Handler,
1166    engines: &Engines,
1167    src: Source,
1168    initial_namespace: namespace::Package,
1169    build_config: &BuildConfig,
1170    package_name: &str,
1171    experimental: ExperimentalFeatures,
1172) -> Result<CompiledAsm, ErrorEmitted> {
1173    let ast_res = compile_to_ast(
1174        handler,
1175        engines,
1176        src,
1177        initial_namespace,
1178        Some(build_config),
1179        package_name,
1180        None,
1181        experimental,
1182    )?;
1183
1184    ast_to_asm(handler, engines, &ast_res, build_config, experimental)
1185}
1186
1187/// Given an AST compilation result, try compiling to a `CompiledAsm`,
1188/// containing the asm in opcode form (not raw bytes/bytecode).
1189pub fn ast_to_asm(
1190    handler: &Handler,
1191    engines: &Engines,
1192    programs: &Programs,
1193    build_config: &BuildConfig,
1194    experimental: ExperimentalFeatures,
1195) -> Result<CompiledAsm, ErrorEmitted> {
1196    let typed_program = match &programs.typed {
1197        Ok(typed_program) => typed_program,
1198        Err(err) => return Err(err.error),
1199    };
1200
1201    let mut panic_occurrences = PanicOccurrences::default();
1202    let mut panicking_call_occurrences = PanickingCallOccurrences::default();
1203
1204    let asm = match compile_ast_to_ir_to_asm(
1205        handler,
1206        engines,
1207        typed_program,
1208        &mut panic_occurrences,
1209        &mut panicking_call_occurrences,
1210        build_config,
1211        experimental,
1212    ) {
1213        Ok(res) => res,
1214        Err(err) => {
1215            handler.dedup();
1216            return Err(err);
1217        }
1218    };
1219
1220    Ok(CompiledAsm {
1221        finalized_asm: asm,
1222        panic_occurrences,
1223        panicking_call_occurrences,
1224    })
1225}
1226
1227pub(crate) fn compile_ast_to_ir_to_asm(
1228    handler: &Handler,
1229    engines: &Engines,
1230    program: &ty::TyProgram,
1231    panic_occurrences: &mut PanicOccurrences,
1232    panicking_call_occurrences: &mut PanickingCallOccurrences,
1233    build_config: &BuildConfig,
1234    experimental: ExperimentalFeatures,
1235) -> Result<FinalizedAsm, ErrorEmitted> {
1236    // The IR pipeline relies on type information being fully resolved.
1237    // If type information is found to still be generic or unresolved inside of
1238    // IR, this is considered an internal compiler error. To resolve this situation,
1239    // we need to explicitly ensure all types are resolved before going into IR.
1240    //
1241    // We _could_ introduce a new type here that uses TypeInfo instead of TypeId and throw away
1242    // the engine, since we don't need inference for IR. That'd be a _lot_ of copy-pasted code,
1243    // though, so instead, we are just going to do a pass and throw any unresolved generics as
1244    // errors and then hold as a runtime invariant that none of the types will be unresolved in the
1245    // IR phase.
1246
1247    let mut ir = match ir_generation::compile_program(
1248        program,
1249        panic_occurrences,
1250        panicking_call_occurrences,
1251        build_config.include_tests,
1252        engines,
1253        experimental,
1254        build_config.backtrace.into(),
1255    ) {
1256        Ok(ir) => ir,
1257        Err(errors) => {
1258            let mut last = None;
1259            for e in errors {
1260                last = Some(handler.emit_err(e));
1261            }
1262            return Err(last.unwrap());
1263        }
1264    };
1265
1266    // Find all the entry points for purity checking and DCE.
1267    let entry_point_functions: Vec<::sway_ir::Function> = ir
1268        .module_iter()
1269        .flat_map(|module| module.function_iter(&ir))
1270        .filter(|func| func.is_entry(&ir))
1271        .collect();
1272
1273    // Do a purity check on the _unoptimised_ IR.
1274    {
1275        let mut env = ir_generation::PurityEnv::default();
1276        let mut md_mgr = metadata::MetadataManager::default();
1277        for entry_point in &entry_point_functions {
1278            check_function_purity(handler, &mut env, &ir, &mut md_mgr, entry_point);
1279        }
1280    }
1281
1282    // Initialize the pass manager and register known passes.
1283    let mut pass_mgr = PassManager::default();
1284    register_known_passes(&mut pass_mgr);
1285    let mut pass_group = PassGroup::default();
1286
1287    match build_config.optimization_level {
1288        OptLevel::Opt1 => {
1289            pass_group.append_group(create_o1_pass_group());
1290        }
1291        OptLevel::Opt0 => {
            // We run a function deduplication pass that only removes duplicate
            // functions when everything, including the metadata, is identical.
1294            pass_group.append_pass(FN_DEDUP_DEBUG_PROFILE_NAME);
1295
1296            // Inlining is necessary until #4899 is resolved.
1297            pass_group.append_pass(FN_INLINE_NAME);
1298
1299            // Do DCE so other optimizations run faster.
1300            pass_group.append_pass(GLOBALS_DCE_NAME);
1301            pass_group.append_pass(DCE_NAME);
1302        }
1303    }
1304
    // Target-specific transforms should be moved into something more configurable.
1306    if build_config.build_target == BuildTarget::Fuel {
1307        // FuelVM target specific transforms.
1308        //
1309        // Demote large by-value constants, arguments and return values to by-reference values
1310        // using temporaries.
1311        pass_group.append_pass(CONST_DEMOTION_NAME);
1312        pass_group.append_pass(ARG_DEMOTION_NAME);
1313        pass_group.append_pass(RET_DEMOTION_NAME);
1314        pass_group.append_pass(MISC_DEMOTION_NAME);
1315
1316        // Convert loads and stores to mem_copies where possible.
1317        pass_group.append_pass(ARG_POINTEE_MUTABILITY_TAGGER_NAME);
1318        pass_group.append_pass(MEMCPYOPT_NAME);
1319
1320        // Run a DCE and simplify-cfg to clean up any obsolete instructions.
1321        pass_group.append_pass(DCE_NAME);
1322        pass_group.append_pass(SIMPLIFY_CFG_NAME);
1323
1324        match build_config.optimization_level {
1325            OptLevel::Opt1 => {
1326                pass_group.append_pass(SROA_NAME);
1327                pass_group.append_pass(MEM2REG_NAME);
1328                pass_group.append_pass(DCE_NAME);
1329            }
1330            OptLevel::Opt0 => {}
1331        }
1332    }
1333
1334    // Run the passes.
1335    let print_passes_opts: PrintPassesOpts = (&build_config.print_ir).into();
1336    let res =
1337        if let Err(ir_error) = pass_mgr.run_with_print(&mut ir, &pass_group, &print_passes_opts) {
1338            Err(handler.emit_err(CompileError::InternalOwned(
1339                ir_error.to_string(),
1340                span::Span::dummy(),
1341            )))
1342        } else {
1343            Ok(())
1344        };
1345    res?;
1346
1347    compile_ir_context_to_finalized_asm(handler, &ir, Some(build_config))
1348}
1349
1350/// Given input Sway source code, compile to [CompiledBytecode], containing the asm in bytecode form.
1351#[allow(clippy::too_many_arguments)]
1352pub fn compile_to_bytecode(
1353    handler: &Handler,
1354    engines: &Engines,
1355    src: Source,
1356    initial_namespace: namespace::Package,
1357    build_config: &BuildConfig,
1358    source_map: &mut SourceMap,
1359    package_name: &str,
1360    experimental: ExperimentalFeatures,
1361) -> Result<CompiledBytecode, ErrorEmitted> {
1362    let mut asm_res = compile_to_asm(
1363        handler,
1364        engines,
1365        src,
1366        initial_namespace,
1367        build_config,
1368        package_name,
1369        experimental,
1370    )?;
1371    asm_to_bytecode(
1372        handler,
1373        &mut asm_res,
1374        source_map,
1375        engines.se(),
1376        build_config,
1377    )
1378}
1379
1380/// Size of the prelude's CONFIGURABLES_OFFSET section, in bytes.
1381pub const PRELUDE_CONFIGURABLES_SIZE_IN_BYTES: usize = 8;
1382/// Offset (in bytes) of the CONFIGURABLES_OFFSET section in the prelude.
1383pub const PRELUDE_CONFIGURABLES_OFFSET_IN_BYTES: usize = 16;
1384/// Total size of the prelude in bytes. Instructions start right after.
1385pub const PRELUDE_SIZE_IN_BYTES: usize = 32;
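
// A sketch of the prelude layout implied by the constants above (half-open byte ranges;
// only the CONFIGURABLES_OFFSET word is described in this module):
//
//   [ 0, 16)  leading words of the prelude
//   [16, 24)  CONFIGURABLES_OFFSET (8 bytes)
//   [24, 32)  remainder of the prelude
//   [32, ..)  instructions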
1386
1387/// Given bytecode, overwrite the existing offset to configurables offset in the prelude with the given one.
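///
/// A minimal usage sketch (not doctested; encoding the offset as a big-endian `u64` is an
/// assumption of this example, not something this function prescribes):
/// ```ignore
/// let new_offset: u64 = 1024;
/// set_bytecode_configurables_offset(&mut compiled_bytecode, &new_offset.to_be_bytes());
/// ```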
1388pub fn set_bytecode_configurables_offset(
1389    compiled_bytecode: &mut CompiledBytecode,
1390    md: &[u8; PRELUDE_CONFIGURABLES_SIZE_IN_BYTES],
1391) {
1392    assert!(
1393        compiled_bytecode.bytecode.len()
1394            >= PRELUDE_CONFIGURABLES_OFFSET_IN_BYTES + PRELUDE_CONFIGURABLES_SIZE_IN_BYTES
1395    );
1396    let code = &mut compiled_bytecode.bytecode;
1397    for (index, byte) in md.iter().enumerate() {
1398        code[index + PRELUDE_CONFIGURABLES_OFFSET_IN_BYTES] = *byte;
1399    }
1400}
1401
1402/// Given the assembly (opcodes), compile to [CompiledBytecode], containing the asm in bytecode form.
1403pub fn asm_to_bytecode(
1404    handler: &Handler,
1405    asm: &mut CompiledAsm,
1406    source_map: &mut SourceMap,
1407    source_engine: &SourceEngine,
1408    build_config: &BuildConfig,
1409) -> Result<CompiledBytecode, ErrorEmitted> {
1410    let compiled_bytecode =
1411        asm.finalized_asm
1412            .to_bytecode_mut(handler, source_map, source_engine, build_config)?;
1413    Ok(compiled_bytecode)
1414}
1415
1416/// Given a [ty::TyProgram], which is type-checked Sway source, construct a graph to analyze
1417/// control flow and determine if it is valid.
1418fn perform_control_flow_analysis(
1419    handler: &Handler,
1420    engines: &Engines,
1421    program: &ty::TyProgram,
1422    print_graph: Option<String>,
1423    print_graph_url_format: Option<String>,
1424) -> Result<(), ErrorEmitted> {
1425    let dca_res = dead_code_analysis(handler, engines, program);
1426    let rpa_errors = return_path_analysis(engines, program);
1427    let rpa_res = handler.scope(|handler| {
1428        for err in rpa_errors {
1429            handler.emit_err(err);
1430        }
1431        Ok(())
1432    });
1433
1434    if let Ok(graph) = dca_res.clone() {
1435        graph.visualize(engines, print_graph, print_graph_url_format);
1436    }
1437    dca_res?;
1438    rpa_res
1439}
1440
/// Constructs a dead code graph from all modules within the program and then attempts to find dead
/// code.
1443///
1444/// Returns the graph that was used for analysis.
1445fn dead_code_analysis<'a>(
1446    handler: &Handler,
1447    engines: &'a Engines,
1448    program: &ty::TyProgram,
1449) -> Result<ControlFlowGraph<'a>, ErrorEmitted> {
1450    let decl_engine = engines.de();
1451    let mut dead_code_graph = ControlFlowGraph::new(engines);
1452    let tree_type = program.kind.tree_type();
1453    module_dead_code_analysis(
1454        handler,
1455        engines,
1456        &program.root_module,
1457        &tree_type,
1458        &mut dead_code_graph,
1459    )?;
1460    let warnings = dead_code_graph.find_dead_code(decl_engine);
1461    for warn in warnings {
1462        handler.emit_warn(warn);
1463    }
1464    Ok(dead_code_graph)
1465}
1466
1467/// Recursively collects modules into the given `ControlFlowGraph`, ready for dead code analysis.
1468fn module_dead_code_analysis<'eng: 'cfg, 'cfg>(
1469    handler: &Handler,
1470    engines: &'eng Engines,
1471    module: &ty::TyModule,
1472    tree_type: &parsed::TreeType,
1473    graph: &mut ControlFlowGraph<'cfg>,
1474) -> Result<(), ErrorEmitted> {
1475    module
1476        .submodules
1477        .iter()
1478        .try_fold((), |(), (_, submodule)| {
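            // Submodules are always analyzed as libraries, regardless of the parent tree type.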
1479            let tree_type = parsed::TreeType::Library;
1480            module_dead_code_analysis(handler, engines, &submodule.module, &tree_type, graph)
1481        })?;
1482    let res = {
1483        ControlFlowGraph::append_module_to_dead_code_graph(
1484            engines,
1485            &module.all_nodes,
1486            tree_type,
1487            graph,
1488        )
1489        .map_err(|err| handler.emit_err(err))
1490    };
1491    graph.connect_pending_entry_edges();
1492    res
1493}
1494
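/// Collects return-path errors for the program's root module and, recursively, for all of its submodules.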
1495fn return_path_analysis(engines: &Engines, program: &ty::TyProgram) -> Vec<CompileError> {
1496    let mut errors = vec![];
1497    module_return_path_analysis(engines, &program.root_module, &mut errors);
1498    errors
1499}
1500
1501fn module_return_path_analysis(
1502    engines: &Engines,
1503    module: &ty::TyModule,
1504    errors: &mut Vec<CompileError>,
1505) {
1506    for (_, submodule) in &module.submodules {
1507        module_return_path_analysis(engines, &submodule.module, errors);
1508    }
1509    let graph = ControlFlowGraph::construct_return_path_graph(engines, &module.all_nodes);
1510    match graph {
1511        Ok(graph) => errors.extend(graph.analyze_return_paths(engines)),
1512        Err(mut error) => errors.append(&mut error),
1513    }
1514}
1515
1516/// Check if the retrigger compilation flag has been set to true in the language server.
1517/// If it has, there is a new compilation request, so we should abort the current compilation.
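///
/// # Example
/// A sketch of how a caller might wire this up; the flag itself is set elsewhere (e.g. by the
/// language server when a new compilation request arrives):
/// ```ignore
/// let retrigger_compilation = Arc::new(AtomicBool::new(false));
/// // ... hand a clone of `retrigger_compilation` to whatever may cancel this compilation ...
/// check_should_abort(&handler, Some(retrigger_compilation.clone()))?;
/// ```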
1518fn check_should_abort(
1519    handler: &Handler,
1520    retrigger_compilation: Option<Arc<AtomicBool>>,
1521) -> Result<(), ErrorEmitted> {
1522    if let Some(ref retrigger_compilation) = retrigger_compilation {
1523        if retrigger_compilation.load(Ordering::SeqCst) {
1524            return Err(handler.cancel());
1525        }
1526    }
1527    Ok(())
1528}
1529
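/// Resolves `typename` (a `::`-separated path) in the given namespace and, on success, emits a
/// [CompileInfo] listing the trait impls found for that type in the current package and in all
/// external packages.
///
/// # Example
/// A sketch; `my_lib::MyStruct` is purely an illustrative path:
/// ```ignore
/// dump_trait_impls_for_typename(&handler, &engines, &namespace, "my_lib::MyStruct")?;
/// ```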
1530pub fn dump_trait_impls_for_typename(
1531    handler: &Handler,
1532    engines: &Engines,
1533    namespace: &namespace::Namespace,
1534    typename: &str,
1535) -> Result<(), ErrorEmitted> {
1536    let path: Vec<&str> = typename.split("::").collect();
1537    let mut call_path = CallPath::fullpath(&path);
1538    call_path.callpath_type = CallPathType::Ambiguous;
1539
1540    let pkg_namespace = namespace.current_package_ref();
1541    let mod_path = [pkg_namespace.root_module().name().clone()];
1542
1543    let resolve_handler = Handler::default();
1544    let resolved = resolve_call_path(
1545        &resolve_handler,
1546        engines,
1547        namespace,
1548        &mod_path,
1549        &call_path,
1550        None,
1551        VisibilityCheck::No,
1552    );
1553
1554    if let Ok(resolved) = resolved {
1555        let module = &pkg_namespace.root_module();
1556
1557        let mut impls = Vec::new();
1558        find_trait_impls_for_type(engines, namespace, &resolved, module, &mut impls);
1559
1560        for ext_pkg in pkg_namespace.external_packages.iter() {
1561            let ext_module = ext_pkg.1.root_module();
1562            find_trait_impls_for_type(engines, namespace, &resolved, ext_module, &mut impls);
1563        }
1564
1565        let unique_impls = impls
1566            .iter()
1567            .unique_by(|i| i.impl_span.clone())
1568            .cloned()
1569            .collect::<Vec<_>>();
1570        handler.emit_info(CompileInfo {
1571            span: resolved.span(engines).subset_first_of("{").unwrap(),
1572            content: Info::ImplTraitsForType {
1573                impls: unique_impls,
1574            },
1575        });
1576    }
1577
1578    Ok(())
1579}
1580
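/// Collects into `impls` the trait impls found in `module` (and, recursively, in its submodules)
/// whose implementing type is a struct or enum originating from the same source as `resolved_decl`.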
1581fn find_trait_impls_for_type(
1582    engines: &Engines,
1583    namespace: &namespace::Namespace,
1584    resolved_decl: &ResolvedDeclaration,
1585    module: &namespace::Module,
1586    impls: &mut Vec<CollectedTraitImpl>,
1587) {
1588    let handler = Handler::default();
1589    let struct_decl_source_id = resolved_decl
1590        .to_struct_decl(&handler, engines)
1591        .map(|d| d.expect_typed())
1592        .and_then(|decl| decl.to_struct_decl(&handler, engines))
1593        .map(|decl_id| engines.de().get_struct(&decl_id).span.source_id().cloned())
1594        .ok()
1595        .flatten();
1596
1597    let enum_decl_source_id = resolved_decl
1598        .to_enum_decl(&handler, engines)
1599        .map(|d| d.expect_typed())
1600        .and_then(|decl| decl.to_enum_id(&handler, engines))
1601        .map(|decl_id| engines.de().get_enum(&decl_id).span.source_id().cloned())
1602        .ok()
1603        .flatten();
1604
1605    module.walk_scope_chain(|lexical_scope| {
1606        module.submodules().iter().for_each(|(_, sub)| {
1607            find_trait_impls_for_type(engines, namespace, resolved_decl, sub, impls);
1608        });
1609
1610        let trait_map = &lexical_scope.items.implemented_traits;
1611
1612        for key in trait_map.trait_impls.keys() {
1613            for trait_entry in trait_map.trait_impls[key].iter() {
1614                let trait_type = engines.te().get(trait_entry.inner.key.type_id);
1615
1616                let matched = match *trait_type {
1617                    TypeInfo::Enum(decl_id) => {
1618                        let trait_enum = engines.de().get_enum(&decl_id);
1619                        enum_decl_source_id == trait_enum.span.source_id().cloned()
1620                    }
1621                    TypeInfo::Struct(decl_id) => {
1622                        let trait_struct = engines.de().get_struct(&decl_id);
1623                        struct_decl_source_id == trait_struct.span.source_id().cloned()
1624                    }
1625                    _ => false,
1626                };
1627
1628                if matched {
1629                    let trait_callpath = trait_entry.inner.key.name.to_fullpath(engines, namespace);
1630                    impls.push(CollectedTraitImpl {
1631                        impl_span: trait_entry
1632                            .inner
1633                            .value
1634                            .impl_span
1635                            .subset_first_of("{")
1636                            .unwrap(),
1637                        trait_name: engines.help_out(trait_callpath).to_string(),
1638                    });
1639                }
1640            }
1641        }
1642    });
1643}
1644
1645#[test]
1646fn test_basic_prog() {
1647    let handler = Handler::default();
1648    let engines = Engines::default();
1649    let prog = parse(
1650        r#"
1651        contract;
1652
1653    enum yo
1654    <T>
1655    where
1656    T: IsAThing
1657    {
1658        x: u32,
1659        y: MyStruct<u32>
1660    }
1661
1662    enum  MyOtherSumType
1663    {
1664        x: u32,
1665        y: MyStruct<u32>
1666    }
1667        struct MyStruct<T> {
1668            field_name: u64,
1669            other_field: T,
1670        }
1671
1672
1673    fn generic_function
1674    <T>
1675    (arg1: u64,
1676    arg2: T)
1677    ->
1678    T
1679    where T: Display,
1680          T: Debug {
1681          let x: MyStruct =
1682          MyStruct
1683          {
1684              field_name:
1685              5
1686          };
1687          return
1688          match
1689            arg1
1690          {
1691               1
1692               => true,
1693               _ => { return false; },
1694          };
1695    }
1696
1697    struct MyStruct {
1698        test: string,
1699    }
1700
1701
1702
1703    use stdlib::println;
1704
1705    trait MyTrait {
1706        // interface points
1707        fn myfunc(x: int) -> unit;
1708        } {
1709        // methods
1710        fn calls_interface_fn(x: int) -> unit {
1711            // declare a byte
1712            let x = 0b10101111;
1713            let mut y = 0b11111111;
1714            self.interface_fn(x);
1715        }
1716    }
1717
1718    pub fn prints_number_five() -> u8 {
1719        let x: u8 = 5;
1720        println(x);
1721         x.to_string();
1722         let some_list = [
1723         5,
1724         10 + 3 / 2,
1725         func_app(my_args, (so_many_args))];
1726        return 5;
1727    }
1728    "#
1729        .into(),
1730        &handler,
1731        &engines,
1732        None,
1733        ExperimentalFeatures::default(),
1734        "test",
1735    );
1736    prog.unwrap();
1737}
1738#[test]
1739fn test_parenthesized() {
1740    let handler = Handler::default();
1741    let engines = Engines::default();
1742    let prog = parse(
1743        r#"
1744        contract;
1745        pub fn some_abi_func() -> unit {
1746            let x = (5 + 6 / (1 + (2 / 1) + 4));
1747            return;
1748        }
1749    "#
1750        .into(),
1751        &handler,
1752        &engines,
1753        None,
1754        ExperimentalFeatures::default(),
1755        "test",
1756    );
1757    prog.unwrap();
1758}
1759
1760#[test]
1761fn test_unary_ordering() {
1762    use crate::language::{self, parsed};
1763    let handler = Handler::default();
1764    let engines = Engines::default();
1765    let prog = parse(
1766        r#"
1767    script;
1768    fn main() -> bool {
1769        let a = true;
1770        let b = true;
1771        !a && b;
1772    }"#
1773        .into(),
1774        &handler,
1775        &engines,
1776        None,
1777        ExperimentalFeatures::default(),
1778        "test",
1779    );
1780    let (.., prog) = prog.unwrap();
1781    // this should parse as `(!a) && b`, not `!(a && b)`. So, the top level
1782    // expression should be `&&`
1783    if let parsed::AstNode {
1784        content:
1785            parsed::AstNodeContent::Declaration(parsed::Declaration::FunctionDeclaration(decl_id)),
1786        ..
1787    } = &prog.root.tree.root_nodes[0]
1788    {
1789        let fn_decl = engines.pe().get_function(decl_id);
1790        if let parsed::AstNode {
1791            content:
1792                parsed::AstNodeContent::Expression(parsed::Expression {
1793                    kind:
1794                        parsed::ExpressionKind::LazyOperator(parsed::LazyOperatorExpression {
1795                            op, ..
1796                        }),
1797                    ..
1798                }),
1799            ..
1800        } = &fn_decl.body.contents[2]
1801        {
1802            assert_eq!(op, &language::LazyOp::And)
1803        } else {
1804            panic!("Was not lazy operator.")
1805        }
1806    } else {
1807        panic!("Was not ast node")
1808    };
1809}
1810
1811#[test]
1812fn test_parser_recovery() {
1813    let handler = Handler::default();
1814    let engines = Engines::default();
1815    let prog = parse(
1816        r#"
1817    script;
1818    fn main() -> bool {
1819        let
1820        let a = true;
1821        true
1822    }"#
1823        .into(),
1824        &handler,
1825        &engines,
1826        None,
1827        ExperimentalFeatures::default(),
1828        "test",
1829    );
1830    let (_, _) = prog.unwrap();
1831    assert!(handler.has_errors());
1832    dbg!(handler);
1833}