// wit_parser/ast.rs
1use crate::ast::error::ParseError;
2use crate::{ParseResult, UnresolvedPackage, UnresolvedPackageGroup};
3use alloc::borrow::Cow;
4use alloc::boxed::Box;
5use alloc::format;
6use alloc::string::{String, ToString};
7use alloc::vec::Vec;
8#[cfg(feature = "std")]
9use anyhow::Context as _;
10use core::fmt;
11use core::mem;
12use core::result::Result;
13use lex::{Span, Token, Tokenizer};
14use semver::Version;
15#[cfg(feature = "std")]
16use std::path::Path;
17
18pub mod error;
19pub mod lex;
20
21pub use resolve::Resolver;
22mod resolve;
23pub mod toposort;
24
25pub use lex::validate_id;
26
/// Representation of a single WIT `*.wit` file and nested packages.
///
/// Nested `package foo:bar { ... }` blocks appear as items inside
/// `decl_list` (see [`AstItem::Package`]).
struct PackageFile<'a> {
    /// Optional `package foo:bar;` header
    package_id: Option<PackageName<'a>>,
    /// Other AST items.
    decl_list: DeclList<'a>,
}
34
35impl<'a> PackageFile<'a> {
36    /// Parse a standalone file represented by `tokens`.
37    ///
38    /// This will optionally start with `package foo:bar;` and then will have a
39    /// list of ast items after it.
40    fn parse(tokens: &mut Tokenizer<'a>) -> ParseResult<Self> {
41        let mut package_name_tokens_peek = tokens.clone();
42        let docs = parse_docs(&mut package_name_tokens_peek)?;
43
44        // Parse `package foo:bar;` but throw it out if it's actually
45        // `package foo:bar { ... }` since that's an ast item instead.
46        let package_id = if package_name_tokens_peek.eat(Token::Package)? {
47            let name = PackageName::parse(&mut package_name_tokens_peek, docs)?;
48            if package_name_tokens_peek.eat(Token::Semicolon)? {
49                *tokens = package_name_tokens_peek;
50                Some(name)
51            } else {
52                None
53            }
54        } else {
55            None
56        };
57        let decl_list = DeclList::parse_until(tokens, None)?;
58        Ok(PackageFile {
59            package_id,
60            decl_list,
61        })
62    }
63
64    /// Parse a nested package of the form `package foo:bar { ... }`
65    fn parse_nested(
66        tokens: &mut Tokenizer<'a>,
67        docs: Docs<'a>,
68        attributes: Vec<Attribute<'a>>,
69    ) -> ParseResult<Self> {
70        let span = tokens.expect(Token::Package)?;
71        if !attributes.is_empty() {
72            return Err(ParseError::new_syntax(
73                span,
74                format!("cannot place attributes on nested packages"),
75            ));
76        }
77        let package_id = PackageName::parse(tokens, docs)?;
78        tokens.expect(Token::LeftBrace)?;
79        let decl_list = DeclList::parse_until(tokens, Some(Token::RightBrace))?;
80        Ok(PackageFile {
81            package_id: Some(package_id),
82            decl_list,
83        })
84    }
85}
86
/// Stores all of the declarations in a package's scope. In AST terms, this
/// means everything except the `package` declaration that demarcates a package
/// scope. In the traditional implicit format, these are all of the
/// non-`package` declarations in the file:
///
/// ```wit
/// package foo:name;
///
/// /* START DECL LIST */
/// // Some comment...
/// interface i {}
/// world w {}
/// /* END DECL LIST */
/// ```
///
/// In the nested package style, a [`DeclList`] is everything inside of each
/// `package` element's brackets:
///
/// ```wit
/// package foo:name {
///   /* START FIRST DECL LIST */
///   // Some comment...
///   interface i {}
///   world w {}
///   /* END FIRST DECL LIST */
/// }
///
/// package bar:name {
///   /* START SECOND DECL LIST */
///   // Some comment...
///   interface i {}
///   world w {}
///   /* END SECOND DECL LIST */
/// }
/// ```
#[derive(Default)]
pub struct DeclList<'a> {
    items: Vec<AstItem<'a>>,
}
126
impl<'a> DeclList<'a> {
    /// Parses AST items until `end` is eaten (e.g. `}` for nested packages)
    /// or, when `end` is `None`, until the token stream is exhausted.
    fn parse_until(tokens: &mut Tokenizer<'a>, end: Option<Token>) -> ParseResult<DeclList<'a>> {
        let mut items = Vec::new();
        // Docs are parsed before the end-of-list check so that trailing
        // comments before `end`/EOF are consumed.
        let mut docs = parse_docs(tokens)?;
        loop {
            match end {
                Some(end) => {
                    if tokens.eat(end)? {
                        break;
                    }
                }
                None => {
                    // Peek on a clone; `None` from the tokenizer means EOF.
                    if tokens.clone().next()?.is_none() {
                        break;
                    }
                }
            }
            items.push(AstItem::parse(tokens, docs)?);
            docs = parse_docs(tokens)?;
        }
        Ok(DeclList { items })
    }

    /// Invokes `f` for every `use`/`include`/extern path contained
    /// (transitively, including nested packages) in this declaration list.
    ///
    /// The callback receives the enclosing item's name (when there is one),
    /// the attributes and path of the statement, the list of imported names
    /// (for `use` only), and whether the path targets a world or an
    /// interface.
    fn for_each_path<'b>(
        &'b self,
        f: &mut dyn FnMut(
            Option<&'b Id<'a>>,
            &'b [Attribute<'a>],
            &'b UsePath<'a>,
            Option<&'b [UseName<'a>]>,
            WorldOrInterface,
        ) -> ParseResult<()>,
    ) -> ParseResult<()> {
        for item in self.items.iter() {
            match item {
                AstItem::World(world) => {
                    // Visit imports here first before exports to help preserve
                    // round-tripping of documents because printing a world puts
                    // imports first but textually they can be listed with
                    // exports first.
                    let mut imports = Vec::new();
                    let mut exports = Vec::new();
                    for item in world.items.iter() {
                        match item {
                            WorldItem::Use(u) => f(
                                None,
                                &u.attributes,
                                &u.from,
                                Some(&u.names),
                                WorldOrInterface::Interface,
                            )?,
                            WorldItem::Include(i) => f(
                                Some(&world.name),
                                &i.attributes,
                                &i.from,
                                None,
                                WorldOrInterface::World,
                            )?,
                            // Inline type definitions contain no paths.
                            WorldItem::Type(_) => {}
                            WorldItem::Import(Import {
                                kind, attributes, ..
                            }) => imports.push((kind, attributes)),
                            WorldItem::Export(Export {
                                kind, attributes, ..
                            }) => exports.push((kind, attributes)),
                        }
                    }

                    // Inline `interface { ... }` externs may themselves
                    // contain `use` statements; plain paths are visited
                    // directly, and function externs have no paths.
                    let mut visit_kind =
                        |kind: &'b ExternKind<'a>, attrs: &'b [Attribute<'a>]| match kind {
                            ExternKind::Interface(_, items) => {
                                for item in items {
                                    match item {
                                        InterfaceItem::Use(u) => f(
                                            None,
                                            &u.attributes,
                                            &u.from,
                                            Some(&u.names),
                                            WorldOrInterface::Interface,
                                        )?,
                                        _ => {}
                                    }
                                }
                                Ok(())
                            }
                            ExternKind::Path(path) => {
                                f(None, attrs, path, None, WorldOrInterface::Interface)
                            }
                            ExternKind::Func(..) => Ok(()),
                        };

                    for (kind, attrs) in imports {
                        visit_kind(kind, attrs)?;
                    }
                    for (kind, attrs) in exports {
                        visit_kind(kind, attrs)?;
                    }
                }
                AstItem::Interface(i) => {
                    for item in i.items.iter() {
                        match item {
                            InterfaceItem::Use(u) => f(
                                Some(&i.name),
                                &u.attributes,
                                &u.from,
                                Some(&u.names),
                                WorldOrInterface::Interface,
                            )?,
                            _ => {}
                        }
                    }
                }
                AstItem::Use(u) => {
                    // At the top-level, we don't know if this is a world or an interface
                    // It is up to the resolver to decide how to handle this ambiguity.
                    f(
                        None,
                        &u.attributes,
                        &u.item,
                        None,
                        WorldOrInterface::Unknown,
                    )?;
                }

                // Nested packages are visited recursively.
                AstItem::Package(pkg) => pkg.decl_list.for_each_path(f)?,
            }
        }
        Ok(())
    }
}
257
/// A single top-level item within a package's declaration list.
enum AstItem<'a> {
    /// An `interface foo { ... }` definition.
    Interface(Interface<'a>),
    /// A `world foo { ... }` definition.
    World(World<'a>),
    /// A top-level `use ...;` statement.
    Use(ToplevelUse<'a>),
    /// A nested `package foo:bar { ... }` block.
    Package(PackageFile<'a>),
}
264
265impl<'a> AstItem<'a> {
266    fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> ParseResult<Self> {
267        let attributes = Attribute::parse_list(tokens)?;
268        match tokens.clone().next()? {
269            Some((_span, Token::Interface)) => {
270                Interface::parse(tokens, docs, attributes).map(Self::Interface)
271            }
272            Some((_span, Token::World)) => World::parse(tokens, docs, attributes).map(Self::World),
273            Some((_span, Token::Use)) => ToplevelUse::parse(tokens, attributes).map(Self::Use),
274            Some((_span, Token::Package)) => {
275                PackageFile::parse_nested(tokens, docs, attributes).map(Self::Package)
276            }
277            other => Err(err_expected(tokens, "`world`, `interface` or `use`", other).into()),
278        }
279    }
280}
281
/// A `namespace:name@version` package identifier as written in source.
#[derive(Debug, Clone)]
struct PackageName<'a> {
    /// Doc comments attached to the package declaration.
    docs: Docs<'a>,
    /// Span covering the namespace through the name (or version, if present).
    span: Span,
    /// The `foo` in `foo:bar`.
    namespace: Id<'a>,
    /// The `bar` in `foo:bar`.
    name: Id<'a>,
    /// Optional semver version plus the span it was parsed from.
    version: Option<(Span, Version)>,
}
290
291impl<'a> PackageName<'a> {
292    fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> ParseResult<Self> {
293        let namespace = parse_id(tokens)?;
294        tokens.expect(Token::Colon)?;
295        let name = parse_id(tokens)?;
296        let version = parse_opt_version(tokens)?;
297        Ok(PackageName {
298            docs,
299            span: Span::new(
300                namespace.span.start(),
301                version
302                    .as_ref()
303                    .map(|(s, _)| s.end())
304                    .unwrap_or(name.span.end()),
305            ),
306            namespace,
307            name,
308            version,
309        })
310    }
311
312    fn package_name(&self) -> crate::PackageName {
313        crate::PackageName {
314            namespace: self.namespace.name.to_string(),
315            name: self.name.name.to_string(),
316            version: self.version.as_ref().map(|(_, v)| v.clone()),
317        }
318    }
319}
320
/// A top-level `use foo:bar/baz as quux;` statement.
struct ToplevelUse<'a> {
    /// Span of the `use` keyword.
    span: Span,
    attributes: Vec<Attribute<'a>>,
    /// The path being imported.
    item: UsePath<'a>,
    /// Optional `as` rename.
    as_: Option<Id<'a>>,
}
327
328impl<'a> ToplevelUse<'a> {
329    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> ParseResult<Self> {
330        let span = tokens.expect(Token::Use)?;
331        let item = UsePath::parse(tokens)?;
332        let as_ = if tokens.eat(Token::As)? {
333            Some(parse_id(tokens)?)
334        } else {
335            None
336        };
337        tokens.expect_semicolon()?;
338        Ok(ToplevelUse {
339            span,
340            attributes,
341            item,
342            as_,
343        })
344    }
345}
346
/// A `world foo { ... }` definition.
struct World<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    items: Vec<WorldItem<'a>>,
}
353
354impl<'a> World<'a> {
355    fn parse(
356        tokens: &mut Tokenizer<'a>,
357        docs: Docs<'a>,
358        attributes: Vec<Attribute<'a>>,
359    ) -> ParseResult<Self> {
360        tokens.expect(Token::World)?;
361        let name = parse_id(tokens)?;
362        let items = Self::parse_items(tokens)?;
363        Ok(World {
364            docs,
365            attributes,
366            name,
367            items,
368        })
369    }
370
371    fn parse_items(tokens: &mut Tokenizer<'a>) -> ParseResult<Vec<WorldItem<'a>>> {
372        tokens.expect(Token::LeftBrace)?;
373        let mut items = Vec::new();
374        loop {
375            let docs = parse_docs(tokens)?;
376            if tokens.eat(Token::RightBrace)? {
377                break;
378            }
379            let attributes = Attribute::parse_list(tokens)?;
380            items.push(WorldItem::parse(tokens, docs, attributes)?);
381        }
382        Ok(items)
383    }
384}
385
/// A single item within a `world` body.
enum WorldItem<'a> {
    /// `import ...`
    Import(Import<'a>),
    /// `export ...`
    Export(Export<'a>),
    /// `use path.{...};`
    Use(Use<'a>),
    /// An inline type definition.
    Type(TypeDef<'a>),
    /// `include path ...;`
    Include(Include<'a>),
}
393
impl<'a> WorldItem<'a> {
    /// Parses a single item within a `world` body by peeking at the leading
    /// keyword and dispatching to the corresponding sub-parser.
    ///
    /// `docs` and `attributes` were parsed by the caller and are forwarded
    /// to the chosen item parser.
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> ParseResult<WorldItem<'a>> {
        // The clone only peeks; each parser below re-consumes its keyword.
        match tokens.clone().next()? {
            Some((_span, Token::Import)) => {
                Import::parse(tokens, docs, attributes).map(WorldItem::Import)
            }
            Some((_span, Token::Export)) => {
                Export::parse(tokens, docs, attributes).map(WorldItem::Export)
            }
            Some((_span, Token::Use)) => Use::parse(tokens, attributes).map(WorldItem::Use),
            Some((_span, Token::Type)) => {
                TypeDef::parse(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Flags)) => {
                TypeDef::parse_flags(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Resource)) => {
                TypeDef::parse_resource(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Record)) => {
                TypeDef::parse_record(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Variant)) => {
                TypeDef::parse_variant(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Enum)) => {
                TypeDef::parse_enum(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Include)) => {
                Include::parse(tokens, attributes).map(WorldItem::Include)
            }
            other => Err(err_expected(
                tokens,
                "`import`, `export`, `include`, `use`, or type definition",
                other,
            )
            .into()),
        }
    }
}
438
/// An `import` item within a world.
struct Import<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    /// What is imported: a path, an inline interface, or a function.
    kind: ExternKind<'a>,
}
444
445impl<'a> Import<'a> {
446    fn parse(
447        tokens: &mut Tokenizer<'a>,
448        docs: Docs<'a>,
449        attributes: Vec<Attribute<'a>>,
450    ) -> ParseResult<Import<'a>> {
451        tokens.expect(Token::Import)?;
452        let kind = ExternKind::parse(tokens)?;
453        Ok(Import {
454            docs,
455            attributes,
456            kind,
457        })
458    }
459}
460
/// An `export` item within a world.
struct Export<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    /// What is exported: a path, an inline interface, or a function.
    kind: ExternKind<'a>,
}
466
467impl<'a> Export<'a> {
468    fn parse(
469        tokens: &mut Tokenizer<'a>,
470        docs: Docs<'a>,
471        attributes: Vec<Attribute<'a>>,
472    ) -> ParseResult<Export<'a>> {
473        tokens.expect(Token::Export)?;
474        let kind = ExternKind::parse(tokens)?;
475        Ok(Export {
476            docs,
477            attributes,
478            kind,
479        })
480    }
481}
482
/// The target of an `import` or `export` within a world.
enum ExternKind<'a> {
    /// `name: interface { ... }` — an inline interface.
    Interface(Id<'a>, Vec<InterfaceItem<'a>>),
    /// A (possibly fully-qualified) path to an interface.
    Path(UsePath<'a>),
    /// `name: func(...)` — a bare function.
    Func(Id<'a>, Func<'a>),
}
488
impl<'a> ExternKind<'a> {
    /// Parses the right-hand side of an `import`/`export`: either
    /// `name: func(...)`, `name: interface { ... }`, or a bare use path.
    fn parse(tokens: &mut Tokenizer<'a>) -> ParseResult<ExternKind<'a>> {
        // Create a copy of the token stream to test out if this is a function
        // or an interface import. In those situations the token stream gets
        // reset to the state of the clone and we continue down those paths.
        //
        // If neither a function nor an interface appears here though then the
        // clone is thrown away and the original token stream is parsed for an
        // interface. This will redo the original ID parse and the original
        // colon parse, but that shouldn't be too bad perf-wise.
        let mut clone = tokens.clone();
        let id = parse_id(&mut clone)?;
        if clone.eat(Token::Colon)? {
            // import foo: async? func(...)
            if clone.clone().eat(Token::Func)? || clone.clone().eat(Token::Async)? {
                *tokens = clone;
                let ret = ExternKind::Func(id, Func::parse(tokens)?);
                tokens.expect_semicolon()?;
                return Ok(ret);
            }

            // import foo: interface { ... }
            if clone.eat(Token::Interface)? {
                *tokens = clone;
                return Ok(ExternKind::Interface(id, Interface::parse_items(tokens)?));
            }
        }

        // import foo
        // import foo/bar
        // import foo:bar/baz
        let ret = ExternKind::Path(UsePath::parse(tokens)?);
        tokens.expect_semicolon()?;
        Ok(ret)
    }

    /// Returns the span of the name identifying this extern.
    fn span(&self) -> Span {
        match self {
            ExternKind::Interface(id, _) => id.span,
            ExternKind::Path(UsePath::Id(id)) => id.span,
            ExternKind::Path(UsePath::Package { name, .. }) => name.span,
            ExternKind::Func(id, _) => id.span,
        }
    }
}
534
/// An `interface foo { ... }` definition.
struct Interface<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    items: Vec<InterfaceItem<'a>>,
}
541
542impl<'a> Interface<'a> {
543    fn parse(
544        tokens: &mut Tokenizer<'a>,
545        docs: Docs<'a>,
546        attributes: Vec<Attribute<'a>>,
547    ) -> ParseResult<Self> {
548        tokens.expect(Token::Interface)?;
549        let name = parse_id(tokens)?;
550        let items = Self::parse_items(tokens)?;
551        Ok(Interface {
552            docs,
553            attributes,
554            name,
555            items,
556        })
557    }
558
559    pub(super) fn parse_items(tokens: &mut Tokenizer<'a>) -> ParseResult<Vec<InterfaceItem<'a>>> {
560        tokens.expect(Token::LeftBrace)?;
561        let mut items = Vec::new();
562        loop {
563            let docs = parse_docs(tokens)?;
564            if tokens.eat(Token::RightBrace)? {
565                break;
566            }
567            let attributes = Attribute::parse_list(tokens)?;
568            items.push(InterfaceItem::parse(tokens, docs, attributes)?);
569        }
570        Ok(items)
571    }
572}
573
/// Disambiguates whether a use path refers to a world or an interface.
///
/// `Unknown` is used for top-level `use` statements where the target kind
/// cannot be determined syntactically; the resolver decides later.
#[derive(Debug)]
pub enum WorldOrInterface {
    World,
    Interface,
    Unknown,
}
580
/// A single item within an `interface` body.
enum InterfaceItem<'a> {
    /// A type definition (`type`, `record`, `flags`, `enum`, ...).
    TypeDef(TypeDef<'a>),
    /// A named function.
    Func(NamedFunc<'a>),
    /// A `use path.{...};` statement.
    Use(Use<'a>),
}
586
/// A `use path.{ a, b as c };` statement within a world or interface.
struct Use<'a> {
    attributes: Vec<Attribute<'a>>,
    /// The path the names are imported from.
    from: UsePath<'a>,
    /// The names being imported, each with an optional rename.
    names: Vec<UseName<'a>>,
}
592
/// A path referenced by `use`/`import`/`export`/`include`.
#[derive(Debug)]
enum UsePath<'a> {
    /// A bare name, e.g. `foo`.
    Id(Id<'a>),
    /// A fully-qualified path, e.g. `foo:bar/baz@1.0`.
    Package { id: PackageName<'a>, name: Id<'a> },
}
598
599impl<'a> UsePath<'a> {
600    fn parse(tokens: &mut Tokenizer<'a>) -> ParseResult<Self> {
601        let id = parse_id(tokens)?;
602        if tokens.eat(Token::Colon)? {
603            // `foo:bar/baz@1.0`
604            let namespace = id;
605            let pkg_name = parse_id(tokens)?;
606            tokens.expect(Token::Slash)?;
607            let name = parse_id(tokens)?;
608            let version = parse_opt_version(tokens)?;
609            Ok(UsePath::Package {
610                id: PackageName {
611                    docs: Default::default(),
612                    span: Span::new(namespace.span.start(), pkg_name.span.end()),
613                    namespace,
614                    name: pkg_name,
615                    version,
616                },
617                name,
618            })
619        } else {
620            // `foo`
621            Ok(UsePath::Id(id))
622        }
623    }
624
625    fn name(&self) -> &Id<'a> {
626        match self {
627            UsePath::Id(id) => id,
628            UsePath::Package { name, .. } => name,
629        }
630    }
631}
632
/// A single imported name with an optional `as` rename.
struct UseName<'a> {
    name: Id<'a>,
    as_: Option<Id<'a>>,
}
637
638impl<'a> Use<'a> {
639    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> ParseResult<Self> {
640        tokens.expect(Token::Use)?;
641        let from = UsePath::parse(tokens)?;
642        tokens.expect(Token::Period)?;
643        tokens.expect(Token::LeftBrace)?;
644
645        let mut names = Vec::new();
646        while !tokens.eat(Token::RightBrace)? {
647            let mut name = UseName {
648                name: parse_id(tokens)?,
649                as_: None,
650            };
651            if tokens.eat(Token::As)? {
652                name.as_ = Some(parse_id(tokens)?);
653            }
654            names.push(name);
655            if !tokens.eat(Token::Comma)? {
656                tokens.expect(Token::RightBrace)?;
657                break;
658            }
659        }
660        tokens.expect_semicolon()?;
661        Ok(Use {
662            attributes,
663            from,
664            names,
665        })
666    }
667}
668
/// An `include path with { a as b, ... }` statement within a world.
struct Include<'a> {
    /// The world being included.
    from: UsePath<'a>,
    attributes: Vec<Attribute<'a>>,
    /// Renames from the `with` block (empty when there is no `with`).
    names: Vec<IncludeName<'a>>,
}

/// A single `name as rename` entry in an include's `with` block.
struct IncludeName<'a> {
    name: Id<'a>,
    as_: Id<'a>,
}
679
680impl<'a> Include<'a> {
681    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> ParseResult<Self> {
682        tokens.expect(Token::Include)?;
683        let from = UsePath::parse(tokens)?;
684
685        let names = if tokens.eat(Token::With)? {
686            parse_list(
687                tokens,
688                Token::LeftBrace,
689                Token::RightBrace,
690                |_docs, tokens| {
691                    let name = parse_id(tokens)?;
692                    tokens.expect(Token::As)?;
693                    let as_ = parse_id(tokens)?;
694                    Ok(IncludeName { name, as_ })
695                },
696            )?
697        } else {
698            tokens.expect_semicolon()?;
699            Vec::new()
700        };
701
702        Ok(Include {
703            attributes,
704            from,
705            names,
706        })
707    }
708}
709
/// An identifier along with the source span it was parsed from.
#[derive(Debug, Clone)]
pub struct Id<'a> {
    // Borrowed directly from the source text.
    name: &'a str,
    span: Span,
}
715
716impl<'a> From<&'a str> for Id<'a> {
717    fn from(s: &'a str) -> Id<'a> {
718        Id {
719            name: s.into(),
720            span: Default::default(),
721        }
722    }
723}
724
/// Doc comments attached to an item.
#[derive(Debug, Clone)]
pub struct Docs<'a> {
    /// The text of each doc comment; `Cow` since the text may be borrowed
    /// from the source or owned.
    docs: Vec<Cow<'a, str>>,
    /// Span covering the doc comments.
    span: Span,
}
730
731impl<'a> Default for Docs<'a> {
732    fn default() -> Self {
733        Self {
734            docs: Default::default(),
735            span: Default::default(),
736        }
737    }
738}
739
/// A named type definition (`type`, `record`, `flags`, `enum`, etc.).
struct TypeDef<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    ty: Type<'a>,
}
746
/// The AST of every type expressible in WIT.
enum Type<'a> {
    // Primitive types carry only the span they were parsed from.
    Bool(Span),
    U8(Span),
    U16(Span),
    U32(Span),
    U64(Span),
    S8(Span),
    S16(Span),
    S32(Span),
    S64(Span),
    F32(Span),
    F64(Span),
    Char(Span),
    String(Span),
    /// A reference to a named type.
    Name(Id<'a>),
    List(List<'a>),
    Map(Map<'a>),
    FixedLengthList(FixedLengthList<'a>),
    /// An `own`/`borrow` resource handle.
    Handle(Handle<'a>),
    Resource(Resource<'a>),
    Record(Record<'a>),
    Flags(Flags<'a>),
    Variant(Variant<'a>),
    Tuple(Tuple<'a>),
    Enum(Enum<'a>),
    Option(Option_<'a>),
    Result(Result_<'a>),
    Future(Future<'a>),
    Stream(Stream<'a>),
    ErrorContext(Span),
}
778
/// A handle to a resource: either owned or borrowed.
enum Handle<'a> {
    Own { resource: Id<'a> },
    Borrow { resource: Id<'a> },
}
783
784impl Handle<'_> {
785    fn span(&self) -> Span {
786        match self {
787            Handle::Own { resource } | Handle::Borrow { resource } => resource.span,
788        }
789    }
790}
791
/// A `resource { ... }` type: its span plus member functions.
struct Resource<'a> {
    span: Span,
    funcs: Vec<ResourceFunc<'a>>,
}

/// A function declared within a `resource` block.
enum ResourceFunc<'a> {
    /// `name: func(...)`
    Method(NamedFunc<'a>),
    /// `name: static func(...)`
    Static(NamedFunc<'a>),
    /// `constructor(...)`
    Constructor(NamedFunc<'a>),
}
802
impl<'a> ResourceFunc<'a> {
    /// Parses one member of a `resource` block: either `constructor(...)` or
    /// a named function optionally marked `static`.
    fn parse(
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
        tokens: &mut Tokenizer<'a>,
    ) -> ParseResult<Self> {
        match tokens.clone().next()? {
            Some((span, Token::Constructor)) => {
                tokens.expect(Token::Constructor)?;
                tokens.expect(Token::LeftParen)?;
                let params = parse_list_trailer(tokens, Token::RightParen, |_docs, tokens| {
                    let name = parse_id(tokens)?;
                    tokens.expect(Token::Colon)?;
                    let ty = Type::parse(tokens)?;
                    Ok((name, ty))
                })?;
                // NOTE(review): a `-> ty` result is accepted on constructors
                // here — confirm this is intended by the WIT grammar.
                let result = if tokens.eat(Token::RArrow)? {
                    let ty = Type::parse(tokens)?;
                    Some(ty)
                } else {
                    None
                };
                tokens.expect_semicolon()?;
                // Constructors have no written name, so synthesize one using
                // the span of the `constructor` keyword itself.
                Ok(ResourceFunc::Constructor(NamedFunc {
                    docs,
                    attributes,
                    name: Id {
                        span,
                        name: "constructor",
                    },
                    func: Func {
                        span,
                        async_: false,
                        params,
                        result,
                    },
                }))
            }
            Some((_span, Token::Id | Token::ExplicitId)) => {
                let name = parse_id(tokens)?;
                tokens.expect(Token::Colon)?;
                // A leading `static` selects the variant constructor to use.
                let ctor = if tokens.eat(Token::Static)? {
                    ResourceFunc::Static
                } else {
                    ResourceFunc::Method
                };
                let func = Func::parse(tokens)?;
                tokens.expect_semicolon()?;
                Ok(ctor(NamedFunc {
                    docs,
                    attributes,
                    name,
                    func,
                }))
            }
            other => Err(err_expected(tokens, "`constructor` or identifier", other).into()),
        }
    }

    /// Returns the underlying `NamedFunc` regardless of the function kind.
    fn named_func(&self) -> &NamedFunc<'a> {
        use ResourceFunc::*;
        match self {
            Method(f) | Static(f) | Constructor(f) => f,
        }
    }
}
869
/// A `record { ... }` type: named, typed fields.
struct Record<'a> {
    span: Span,
    fields: Vec<Field<'a>>,
}

/// A single field of a record.
struct Field<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
    ty: Type<'a>,
}

/// A `flags { ... }` type: a set of named flags.
struct Flags<'a> {
    span: Span,
    flags: Vec<Flag<'a>>,
}

/// A single flag within a `flags` type.
struct Flag<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
}

/// A `variant { ... }` type: named cases with optional payloads.
struct Variant<'a> {
    span: Span,
    cases: Vec<Case<'a>>,
}

/// A single case of a variant, with an optional payload type.
struct Case<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
    ty: Option<Type<'a>>,
}
901
/// An `enum { ... }` type: named cases without payloads.
struct Enum<'a> {
    span: Span,
    cases: Vec<EnumCase<'a>>,
}

/// A single case of an enum.
struct EnumCase<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
}

/// An `option<T>` type.
struct Option_<'a> {
    span: Span,
    ty: Box<Type<'a>>,
}

/// A `list<T>` type.
struct List<'a> {
    span: Span,
    ty: Box<Type<'a>>,
}

/// A map type with key and value types.
struct Map<'a> {
    span: Span,
    key: Box<Type<'a>>,
    value: Box<Type<'a>>,
}

/// A fixed-length list type with an element type and a size.
struct FixedLengthList<'a> {
    span: Span,
    ty: Box<Type<'a>>,
    size: u32,
}

/// A `future<T>` type; the payload type is optional.
struct Future<'a> {
    span: Span,
    ty: Option<Box<Type<'a>>>,
}

/// A `tuple<...>` type.
struct Tuple<'a> {
    span: Span,
    types: Vec<Type<'a>>,
}

/// A `result<ok, err>` type; both sides are optional.
struct Result_<'a> {
    span: Span,
    ok: Option<Box<Type<'a>>>,
    err: Option<Box<Type<'a>>>,
}

/// A `stream<T>` type; the payload type is optional.
struct Stream<'a> {
    span: Span,
    ty: Option<Box<Type<'a>>>,
}

/// A named function within an interface or resource.
struct NamedFunc<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    func: Func<'a>,
}

/// A list of `(name, type)` parameters.
type ParamList<'a> = Vec<(Id<'a>, Type<'a>)>;

/// A function signature: optional `async`, parameters, optional result.
struct Func<'a> {
    span: Span,
    async_: bool,
    params: ParamList<'a>,
    result: Option<Type<'a>>,
}
970
971impl<'a> Func<'a> {
972    fn parse(tokens: &mut Tokenizer<'a>) -> ParseResult<Func<'a>> {
973        fn parse_params<'a>(
974            tokens: &mut Tokenizer<'a>,
975            left_paren: bool,
976        ) -> ParseResult<ParamList<'a>> {
977            if left_paren {
978                tokens.expect(Token::LeftParen)?;
979            };
980            parse_list_trailer(tokens, Token::RightParen, |_docs, tokens| {
981                let name = parse_id(tokens)?;
982                tokens.expect(Token::Colon)?;
983                let ty = Type::parse(tokens)?;
984                Ok((name, ty))
985            })
986        }
987
988        let async_ = tokens.eat(Token::Async)?;
989        let span = tokens.expect(Token::Func)?;
990        let params = parse_params(tokens, true)?;
991        let result = if tokens.eat(Token::RArrow)? {
992            let ty = Type::parse(tokens)?;
993            Some(ty)
994        } else {
995            None
996        };
997        Ok(Func {
998            span,
999            async_,
1000            params,
1001            result,
1002        })
1003    }
1004}
1005
impl<'a> InterfaceItem<'a> {
    /// Parses a single item within an `interface` body by peeking at the
    /// leading token and dispatching to the corresponding sub-parser.
    ///
    /// A bare identifier (or explicit `%id`) begins a named function.
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> ParseResult<InterfaceItem<'a>> {
        // The clone only peeks; each parser below re-consumes its token.
        match tokens.clone().next()? {
            Some((_span, Token::Type)) => {
                TypeDef::parse(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Flags)) => {
                TypeDef::parse_flags(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Enum)) => {
                TypeDef::parse_enum(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Variant)) => {
                TypeDef::parse_variant(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Resource)) => {
                TypeDef::parse_resource(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Record)) => {
                TypeDef::parse_record(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Id)) | Some((_span, Token::ExplicitId)) => {
                NamedFunc::parse(tokens, docs, attributes).map(InterfaceItem::Func)
            }
            Some((_span, Token::Use)) => Use::parse(tokens, attributes).map(InterfaceItem::Use),
            other => Err(err_expected(tokens, "`type`, `resource` or `func`", other).into()),
        }
    }
}
1039
impl<'a> TypeDef<'a> {
    /// Parses a type alias: `type <id> = <ty>;`.
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> ParseResult<Self> {
        tokens.expect(Token::Type)?;
        let name = parse_id(tokens)?;
        tokens.expect(Token::Equals)?;
        let ty = Type::parse(tokens)?;
        tokens.expect_semicolon()?;
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses a flags definition: `flags <id> { <flag-name>, ... }`.
    fn parse_flags(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> ParseResult<Self> {
        tokens.expect(Token::Flags)?;
        let name = parse_id(tokens)?;
        let ty = Type::Flags(Flags {
            // The aggregate type's span points at the definition's name.
            span: name.span,
            flags: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    Ok(Flag { docs, name })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses a resource definition, either the bare form `resource <id>;`
    /// or the braced form `resource <id> { <funcs...> }`.
    fn parse_resource(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> ParseResult<Self> {
        tokens.expect(Token::Resource)?;
        let name = parse_id(tokens)?;
        let mut funcs = Vec::new();
        if tokens.eat(Token::LeftBrace)? {
            // Braced form: member functions (each with its own docs and
            // attributes) until the closing brace.
            while !tokens.eat(Token::RightBrace)? {
                let docs = parse_docs(tokens)?;
                let attributes = Attribute::parse_list(tokens)?;
                funcs.push(ResourceFunc::parse(docs, attributes, tokens)?);
            }
        } else {
            // Bare form: no function list, just a terminating semicolon.
            tokens.expect_semicolon()?;
        }
        let ty = Type::Resource(Resource {
            span: name.span,
            funcs,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses a record definition: `record <id> { <field>: <ty>, ... }`.
    fn parse_record(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> ParseResult<Self> {
        tokens.expect(Token::Record)?;
        let name = parse_id(tokens)?;
        let ty = Type::Record(Record {
            span: name.span,
            fields: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    tokens.expect(Token::Colon)?;
                    let ty = Type::parse(tokens)?;
                    Ok(Field { docs, name, ty })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses a variant definition: `variant <id> { <case>(<ty>)?, ... }`.
    fn parse_variant(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> ParseResult<Self> {
        tokens.expect(Token::Variant)?;
        let name = parse_id(tokens)?;
        let ty = Type::Variant(Variant {
            span: name.span,
            cases: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    // A case optionally carries a single payload type in
                    // parentheses.
                    let ty = if tokens.eat(Token::LeftParen)? {
                        let ty = Type::parse(tokens)?;
                        tokens.expect(Token::RightParen)?;
                        Some(ty)
                    } else {
                        None
                    };
                    Ok(Case { docs, name, ty })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses an enum definition: `enum <id> { <case-name>, ... }`.
    fn parse_enum(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> ParseResult<Self> {
        tokens.expect(Token::Enum)?;
        let name = parse_id(tokens)?;
        let ty = Type::Enum(Enum {
            span: name.span,
            cases: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    Ok(EnumCase { docs, name })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }
}
1205
1206impl<'a> NamedFunc<'a> {
1207    fn parse(
1208        tokens: &mut Tokenizer<'a>,
1209        docs: Docs<'a>,
1210        attributes: Vec<Attribute<'a>>,
1211    ) -> ParseResult<Self> {
1212        let name = parse_id(tokens)?;
1213        tokens.expect(Token::Colon)?;
1214        let func = Func::parse(tokens)?;
1215        tokens.expect_semicolon()?;
1216        Ok(NamedFunc {
1217            docs,
1218            attributes,
1219            name,
1220            func,
1221        })
1222    }
1223}
1224
1225fn parse_id<'a>(tokens: &mut Tokenizer<'a>) -> ParseResult<Id<'a>> {
1226    match tokens.next()? {
1227        Some((span, Token::Id)) => Ok(Id {
1228            name: tokens.parse_id(span)?,
1229            span,
1230        }),
1231        Some((span, Token::ExplicitId)) => Ok(Id {
1232            name: tokens.parse_explicit_id(span)?,
1233            span,
1234        }),
1235        other => Err(err_expected(tokens, "an identifier or string", other)),
1236    }
1237}
1238
1239fn parse_opt_version(tokens: &mut Tokenizer<'_>) -> ParseResult<Option<(Span, Version)>> {
1240    if tokens.eat(Token::At)? {
1241        parse_version(tokens).map(Some)
1242    } else {
1243        Ok(None)
1244    }
1245}
1246
/// Parses a semver `<major>.<minor>.<patch>` version, optionally followed by
/// `-<pre-release>` and/or `+<build>` identifiers, returning the covered span
/// and the parsed [`Version`].
fn parse_version(tokens: &mut Tokenizer<'_>) -> ParseResult<(Span, Version)> {
    // The required `major.minor.patch` core.
    let start = tokens.expect(Token::Integer)?.start();
    tokens.expect(Token::Period)?;
    tokens.expect(Token::Integer)?;
    tokens.expect(Token::Period)?;
    let end = tokens.expect(Token::Integer)?.end();
    let mut span = Span::new(start, end);
    // Optionally extend the span over pre-release (`-`) and build (`+`)
    // metadata; see the grammar notes below for what's chomped here.
    eat_ids(tokens, Token::Minus, &mut span)?;
    eat_ids(tokens, Token::Plus, &mut span)?;
    // Hand the raw text covered by `span` to the `semver` crate for the real
    // validation; its error becomes a syntax error at this span.
    let string = tokens.get_span(span);
    let version =
        Version::parse(string).map_err(|e| ParseError::new_syntax(span, e.to_string()))?;
    return Ok((span, version));

    // According to `semver.org` this is what we're parsing:
    //
    // ```ebnf
    // <pre-release> ::= <dot-separated pre-release identifiers>
    //
    // <dot-separated pre-release identifiers> ::= <pre-release identifier>
    //                                           | <pre-release identifier> "." <dot-separated pre-release identifiers>
    //
    // <build> ::= <dot-separated build identifiers>
    //
    // <dot-separated build identifiers> ::= <build identifier>
    //                                     | <build identifier> "." <dot-separated build identifiers>
    //
    // <pre-release identifier> ::= <alphanumeric identifier>
    //                            | <numeric identifier>
    //
    // <build identifier> ::= <alphanumeric identifier>
    //                      | <digits>
    //
    // <alphanumeric identifier> ::= <non-digit>
    //                             | <non-digit> <identifier characters>
    //                             | <identifier characters> <non-digit>
    //                             | <identifier characters> <non-digit> <identifier characters>
    //
    // <numeric identifier> ::= "0"
    //                        | <positive digit>
    //                        | <positive digit> <digits>
    //
    // <identifier characters> ::= <identifier character>
    //                           | <identifier character> <identifier characters>
    //
    // <identifier character> ::= <digit>
    //                          | <non-digit>
    //
    // <non-digit> ::= <letter>
    //               | "-"
    //
    // <digits> ::= <digit>
    //            | <digit> <digits>
    // ```
    //
    // This is loosely based on WIT syntax and an approximation is parsed here:
    //
    // * This function starts by parsing the optional leading `-` and `+` which
    //   indicates pre-release and build metadata.
    // * Afterwards all of $id, $integer, `-`, and `.` are chomped. The only
    //   exception here is that if `.` isn't followed by $id, $integer, or `-`
    //   then it's assumed that it's something like `use a:b@1.0.0-a.{...}`
    //   where the `.` is part of WIT syntax, not semver.
    //
    // Note that this additionally doesn't try to return any first-class errors.
    // Instead this bails out on something unrecognized for something else in
    // the system to return an error.
    fn eat_ids(
        tokens: &mut Tokenizer<'_>,
        prefix: Token,
        end: &mut Span,
    ) -> Result<(), lex::Error> {
        if !tokens.eat(prefix)? {
            return Ok(());
        }
        loop {
            // Peek via a clone; `tokens` only advances when the lookahead is
            // accepted as part of the version text.
            let mut clone = tokens.clone();
            match clone.next()? {
                Some((span, Token::Id | Token::Integer | Token::Minus)) => {
                    end.set_end(span.end());
                    *tokens = clone;
                }
                Some((_span, Token::Period)) => match clone.next()? {
                    Some((span, Token::Id | Token::Integer | Token::Minus)) => {
                        end.set_end(span.end());
                        *tokens = clone;
                    }
                    _ => break Ok(()),
                },
                _ => break Ok(()),
            }
        }
    }
}
1341
1342fn parse_docs<'a>(tokens: &mut Tokenizer<'a>) -> Result<Docs<'a>, lex::Error> {
1343    let mut docs = Docs::default();
1344    let mut clone = tokens.clone();
1345    let mut started = false;
1346    while let Some((span, token)) = clone.next_raw()? {
1347        match token {
1348            Token::Whitespace => {}
1349            Token::Comment => {
1350                let comment = tokens.get_span(span);
1351                if !started {
1352                    docs.span.set_start(span.start());
1353                    started = true;
1354                }
1355                let trailing_ws = comment
1356                    .bytes()
1357                    .rev()
1358                    .take_while(|ch| ch.is_ascii_whitespace())
1359                    .count();
1360                docs.span.set_end(span.end() - (trailing_ws as u32));
1361                docs.docs.push(comment.into());
1362            }
1363            _ => break,
1364        };
1365        *tokens = clone.clone();
1366    }
1367    Ok(docs)
1368}
1369
impl<'a> Type<'a> {
    /// Parses a single use of a WIT type, either a primitive, a
    /// parameterized container (`list<..>`, `option<..>`, ...), a handle
    /// (`own<..>`/`borrow<..>`), or a reference to a named type.
    fn parse(tokens: &mut Tokenizer<'a>) -> ParseResult<Self> {
        match tokens.next()? {
            Some((span, Token::U8)) => Ok(Type::U8(span)),
            Some((span, Token::U16)) => Ok(Type::U16(span)),
            Some((span, Token::U32)) => Ok(Type::U32(span)),
            Some((span, Token::U64)) => Ok(Type::U64(span)),
            Some((span, Token::S8)) => Ok(Type::S8(span)),
            Some((span, Token::S16)) => Ok(Type::S16(span)),
            Some((span, Token::S32)) => Ok(Type::S32(span)),
            Some((span, Token::S64)) => Ok(Type::S64(span)),
            Some((span, Token::F32)) => Ok(Type::F32(span)),
            Some((span, Token::F64)) => Ok(Type::F64(span)),
            Some((span, Token::Char)) => Ok(Type::Char(span)),

            // tuple<T, U, ...>
            Some((span, Token::Tuple)) => {
                let types = parse_list(
                    tokens,
                    Token::LessThan,
                    Token::GreaterThan,
                    |_docs, tokens| Type::parse(tokens),
                )?;
                Ok(Type::Tuple(Tuple { span, types }))
            }

            Some((span, Token::Bool)) => Ok(Type::Bool(span)),
            Some((span, Token::String_)) => Ok(Type::String(span)),

            // list<T>
            // list<T, N>
            Some((span, Token::List)) => {
                tokens.expect(Token::LessThan)?;
                let ty = Type::parse(tokens)?;
                // An optional `, N` makes this a fixed-length list; `N` must
                // be an integer literal that fits in a `u32`.
                let size = if tokens.eat(Token::Comma)? {
                    let number = tokens.next()?;
                    if let Some((span, Token::Integer)) = number {
                        let size: u32 = tokens.get_span(span).parse().map_err(|e| {
                            ParseError::new_syntax(span, format!("invalid list size: {e}"))
                        })?;
                        Some(size)
                    } else {
                        return Err(err_expected(tokens, "fixed-length", number).into());
                    }
                } else {
                    None
                };
                tokens.expect(Token::GreaterThan)?;
                if let Some(size) = size {
                    Ok(Type::FixedLengthList(FixedLengthList {
                        span,
                        ty: Box::new(ty),
                        size,
                    }))
                } else {
                    Ok(Type::List(List {
                        span,
                        ty: Box::new(ty),
                    }))
                }
            }

            // map<K, V>
            Some((span, Token::Map)) => {
                tokens.expect(Token::LessThan)?;
                let key = Type::parse(tokens)?;
                tokens.expect(Token::Comma)?;
                let value = Type::parse(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::Map(Map {
                    span,
                    key: Box::new(key),
                    value: Box::new(value),
                }))
            }

            // option<T>
            Some((span, Token::Option_)) => {
                tokens.expect(Token::LessThan)?;
                let ty = Type::parse(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::Option(Option_ {
                    span,
                    ty: Box::new(ty),
                }))
            }

            // result<T, E>
            // result<_, E>
            // result<T>
            // result
            Some((span, Token::Result_)) => {
                let mut ok = None;
                let mut err = None;

                // All type parameters are optional; `_` explicitly skips the
                // ok type while still allowing an error type.
                if tokens.eat(Token::LessThan)? {
                    if tokens.eat(Token::Underscore)? {
                        tokens.expect(Token::Comma)?;
                        err = Some(Box::new(Type::parse(tokens)?));
                    } else {
                        ok = Some(Box::new(Type::parse(tokens)?));
                        if tokens.eat(Token::Comma)? {
                            err = Some(Box::new(Type::parse(tokens)?));
                        }
                    };
                    tokens.expect(Token::GreaterThan)?;
                };
                Ok(Type::Result(Result_ { span, ok, err }))
            }

            // future<T>
            // future
            Some((span, Token::Future)) => {
                let mut ty = None;

                if tokens.eat(Token::LessThan)? {
                    ty = Some(Box::new(Type::parse(tokens)?));
                    tokens.expect(Token::GreaterThan)?;
                };
                Ok(Type::Future(Future { span, ty }))
            }

            // stream<T>
            // stream
            Some((span, Token::Stream)) => {
                let mut ty = None;

                if tokens.eat(Token::LessThan)? {
                    ty = Some(Box::new(Type::parse(tokens)?));
                    tokens.expect(Token::GreaterThan)?;
                };
                Ok(Type::Stream(Stream { span, ty }))
            }

            // error-context
            Some((span, Token::ErrorContext)) => Ok(Type::ErrorContext(span)),

            // own<T>
            Some((_span, Token::Own)) => {
                tokens.expect(Token::LessThan)?;
                let resource = parse_id(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::Handle(Handle::Own { resource }))
            }

            // borrow<T>
            Some((_span, Token::Borrow)) => {
                tokens.expect(Token::LessThan)?;
                let resource = parse_id(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::Handle(Handle::Borrow { resource }))
            }

            // `foo`
            Some((span, Token::Id)) => Ok(Type::Name(Id {
                name: tokens.parse_id(span)?.into(),
                span,
            })),
            // `%foo`
            Some((span, Token::ExplicitId)) => Ok(Type::Name(Id {
                name: tokens.parse_explicit_id(span)?.into(),
                span,
            })),

            other => Err(err_expected(tokens, "a type", other).into()),
        }
    }

    /// Returns the source span where this type was written.
    fn span(&self) -> Span {
        match self {
            Type::Bool(span)
            | Type::U8(span)
            | Type::U16(span)
            | Type::U32(span)
            | Type::U64(span)
            | Type::S8(span)
            | Type::S16(span)
            | Type::S32(span)
            | Type::S64(span)
            | Type::F32(span)
            | Type::F64(span)
            | Type::Char(span)
            | Type::String(span)
            | Type::ErrorContext(span) => *span,
            Type::Name(id) => id.span,
            Type::List(l) => l.span,
            Type::Map(m) => m.span,
            Type::FixedLengthList(l) => l.span,
            Type::Handle(h) => h.span(),
            Type::Resource(r) => r.span,
            Type::Record(r) => r.span,
            Type::Flags(f) => f.span,
            Type::Variant(v) => v.span,
            Type::Tuple(t) => t.span,
            Type::Enum(e) => e.span,
            Type::Option(o) => o.span,
            Type::Result(r) => r.span,
            Type::Future(f) => f.span,
            Type::Stream(s) => s.span,
        }
    }
}
1572
1573fn parse_list<'a, T>(
1574    tokens: &mut Tokenizer<'a>,
1575    start: Token,
1576    end: Token,
1577    parse: impl FnMut(Docs<'a>, &mut Tokenizer<'a>) -> ParseResult<T>,
1578) -> ParseResult<Vec<T>> {
1579    tokens.expect(start)?;
1580    parse_list_trailer(tokens, end, parse)
1581}
1582
1583fn parse_list_trailer<'a, T>(
1584    tokens: &mut Tokenizer<'a>,
1585    end: Token,
1586    mut parse: impl FnMut(Docs<'a>, &mut Tokenizer<'a>) -> ParseResult<T>,
1587) -> ParseResult<Vec<T>> {
1588    let mut items = Vec::new();
1589    loop {
1590        // get docs before we skip them to try to eat the end token
1591        let docs = parse_docs(tokens)?;
1592
1593        // if we found an end token then we're done
1594        if tokens.eat(end)? {
1595            break;
1596        }
1597
1598        let item = parse(docs, tokens)?;
1599        items.push(item);
1600
1601        // if there's no trailing comma then this is required to be the end,
1602        // otherwise we go through the loop to try to get another item
1603        if !tokens.eat(Token::Comma)? {
1604            tokens.expect(end)?;
1605            break;
1606        }
1607    }
1608    Ok(items)
1609}
1610
1611fn err_expected(
1612    tokens: &Tokenizer<'_>,
1613    expected: &'static str,
1614    found: Option<(Span, Token)>,
1615) -> ParseError {
1616    match found {
1617        Some((span, token)) => ParseError::new_syntax(
1618            span,
1619            format!("expected {}, found {}", expected, token.describe()),
1620        ),
1621        None => {
1622            ParseError::new_syntax(tokens.eof_span(), format!("expected {expected}, found eof"))
1623        }
1624    }
1625}
1626
/// A parsed `@...` attribute preceding an item.
enum Attribute<'a> {
    /// Parsed from `@since(version = <semver>)`.
    Since { span: Span, version: Version },
    /// Parsed from `@unstable(feature = <id>)`.
    Unstable { span: Span, feature: Id<'a> },
    /// Parsed from `@deprecated(version = <semver>)`.
    Deprecated { span: Span, version: Version },
}
1632
1633impl<'a> Attribute<'a> {
1634    fn parse_list(tokens: &mut Tokenizer<'a>) -> ParseResult<Vec<Attribute<'a>>> {
1635        let mut ret = Vec::new();
1636        while tokens.eat(Token::At)? {
1637            let id = parse_id(tokens)?;
1638            let attr = match id.name {
1639                "since" => {
1640                    tokens.expect(Token::LeftParen)?;
1641                    eat_id(tokens, "version")?;
1642                    tokens.expect(Token::Equals)?;
1643                    let (_span, version) = parse_version(tokens)?;
1644                    tokens.expect(Token::RightParen)?;
1645                    Attribute::Since {
1646                        span: id.span,
1647                        version,
1648                    }
1649                }
1650                "unstable" => {
1651                    tokens.expect(Token::LeftParen)?;
1652                    eat_id(tokens, "feature")?;
1653                    tokens.expect(Token::Equals)?;
1654                    let feature = parse_id(tokens)?;
1655                    tokens.expect(Token::RightParen)?;
1656                    Attribute::Unstable {
1657                        span: id.span,
1658                        feature,
1659                    }
1660                }
1661                "deprecated" => {
1662                    tokens.expect(Token::LeftParen)?;
1663                    eat_id(tokens, "version")?;
1664                    tokens.expect(Token::Equals)?;
1665                    let (_span, version) = parse_version(tokens)?;
1666                    tokens.expect(Token::RightParen)?;
1667                    Attribute::Deprecated {
1668                        span: id.span,
1669                        version,
1670                    }
1671                }
1672                other => {
1673                    return Err(ParseError::new_syntax(
1674                        id.span,
1675                        format!("unknown attribute `{other}`"),
1676                    ));
1677                }
1678            };
1679            ret.push(attr);
1680        }
1681        Ok(ret)
1682    }
1683
1684    fn span(&self) -> Span {
1685        match self {
1686            Attribute::Since { span, .. }
1687            | Attribute::Unstable { span, .. }
1688            | Attribute::Deprecated { span, .. } => *span,
1689        }
1690    }
1691}
1692
1693fn eat_id(tokens: &mut Tokenizer<'_>, expected: &str) -> ParseResult<Span> {
1694    let id = parse_id(tokens)?;
1695    if id.name != expected {
1696        return Err(ParseError::new_syntax(
1697            id.span,
1698            format!("expected `{expected}`, found `{}`", id.name),
1699        ));
1700    }
1701    Ok(id.span)
1702}
1703
/// A listing of source files which are used to get parsed into an
/// [`UnresolvedPackage`].
///
/// [`UnresolvedPackage`]: crate::UnresolvedPackage
#[derive(Clone, Default, Debug, PartialEq, Eq)]
pub struct SourceMap {
    /// All files added so far, each carrying its own base offset.
    sources: Vec<Source>,
    /// Running total of bytes pushed so far; serves as the base offset for
    /// the next source added.
    offset: u32,
}
1713
/// A single file's contents plus its base offset within the owning
/// [`SourceMap`]'s global span space.
#[derive(Clone, Debug, PartialEq, Eq)]
struct Source {
    /// Byte offset where this source begins in the overall map.
    offset: u32,
    /// Path (or synthetic name) used when rendering error messages.
    path: String,
    /// The file contents; `push_str` appends a trailing `\n`.
    contents: String,
}
1720
1721impl SourceMap {
1722    /// Creates a new empty source map.
1723    pub fn new() -> SourceMap {
1724        SourceMap::default()
1725    }
1726
1727    /// Reads the file `path` on the filesystem and appends its contents to this
1728    /// [`SourceMap`].
1729    #[cfg(feature = "std")]
1730    pub fn push_file(&mut self, path: &Path) -> anyhow::Result<()> {
1731        let contents = std::fs::read_to_string(path)
1732            .with_context(|| format!("failed to read file {path:?}"))?;
1733        self.push(path, contents);
1734        Ok(())
1735    }
1736
1737    /// Appends the given contents with the given path into this source map.
1738    ///
1739    /// The `path` provided is not read from the filesystem and is instead only
1740    /// used during error messages. Each file added to a [`SourceMap`] is
1741    /// used to create the final parsed package namely by unioning all the
1742    /// interfaces and worlds defined together. Note that each file has its own
1743    /// personal namespace, however, for top-level `use` and such.
1744    #[cfg(feature = "std")]
1745    pub fn push(&mut self, path: &Path, contents: impl Into<String>) {
1746        self.push_str(&path.display().to_string(), contents);
1747    }
1748
1749    /// Appends the given contents with the given source name into this source map.
1750    ///
1751    /// The `path` provided is not read from the filesystem and is instead only
1752    /// used during error messages. Each file added to a [`SourceMap`] is
1753    /// used to create the final parsed package namely by unioning all the
1754    /// interfaces and worlds defined together. Note that each file has its own
1755    /// personal namespace, however, for top-level `use` and such.
1756    pub fn push_str(&mut self, path: &str, contents: impl Into<String>) {
1757        let mut contents = contents.into();
1758        // Guarantee that there's at least one character in these contents by
1759        // appending a single newline to the end. This is excluded from
1760        // tokenization below so it's only here to ensure that spans which point
1761        // one byte beyond the end of a file (eof) point to the same original
1762        // file.
1763        contents.push('\n');
1764        let new_offset = self.offset + u32::try_from(contents.len()).unwrap();
1765        self.sources.push(Source {
1766            offset: self.offset,
1767            path: path.to_string(),
1768            contents,
1769        });
1770        self.offset = new_offset;
1771    }
1772
1773    /// Appends all sources from another `SourceMap` into this one.
1774    ///
1775    /// Returns the byte offset that should be added to all `Span.start` and
1776    /// `Span.end` values from the appended source map to make them valid
1777    /// in the combined source map.
1778    pub fn append(&mut self, other: SourceMap) -> u32 {
1779        let base = self.offset;
1780        for mut source in other.sources {
1781            source.offset += base;
1782            self.sources.push(source);
1783        }
1784        self.offset += other.offset;
1785        base
1786    }
1787
1788    /// Parses the files added to this source map into a
1789    /// [`UnresolvedPackageGroup`].
1790    ///
1791    /// On failure returns `Err((self, e))` so the caller can use the source
1792    /// map for error formatting if needed.
1793    pub fn parse(self) -> Result<UnresolvedPackageGroup, (Self, ParseError)> {
1794        match self.parse_inner() {
1795            Ok((main, nested)) => Ok(UnresolvedPackageGroup {
1796                main,
1797                nested,
1798                source_map: self,
1799            }),
1800            Err(e) => Err((self, e)),
1801        }
1802    }
1803
    /// Parses every source in this map, producing the "main" package plus any
    /// nested `package ... { ... }` packages encountered along the way.
    fn parse_inner(&self) -> ParseResult<(UnresolvedPackage, Vec<UnresolvedPackage>)> {
        let mut nested = Vec::new();
        let mut resolver = Resolver::default();
        // Process files in deterministic (path-sorted) order.
        let mut srcs = self.sources.iter().collect::<Vec<_>>();
        srcs.sort_by_key(|src| &src.path);

        // Parse each source file individually. A tokenizer is created here
        // from settings and then `PackageFile` is used to parse the whole
        // stream of tokens.
        for src in srcs {
            let mut tokens = Tokenizer::new(
                // chop off the forcibly appended `\n` character when
                // passing through the source to get tokenized.
                &src.contents[..src.contents.len() - 1],
                src.offset,
            )?;
            let mut file = PackageFile::parse(&mut tokens)?;

            // Filter out any nested packages and resolve them separately.
            // Nested packages have only a single "file" so only one item
            // is pushed into a `Resolver`. Note that a nested `Resolver`
            // is used here, not the outer one.
            //
            // Note that filtering out `Package` items is required due to
            // how the implementation of disallowing nested packages in
            // nested packages currently works.
            for item in mem::take(&mut file.decl_list.items) {
                match item {
                    AstItem::Package(nested_pkg) => {
                        let mut resolve = Resolver::default();
                        resolve.push(nested_pkg)?;
                        nested.push(resolve.resolve()?);
                    }
                    other => file.decl_list.items.push(other),
                }
            }

            // With nested packages handled push this file into the resolver.
            resolver.push(file)?;
        }

        Ok((resolver.resolve()?, nested))
    }
1847
1848    /// Runs `f` and, on error, attempts to add source highlighting to resolver
1849    /// error types that still use `anyhow`. Only needed until the resolver is
1850    /// migrated to structured errors.
1851    pub(crate) fn rewrite_error<F, T>(&self, f: F) -> anyhow::Result<T>
1852    where
1853        F: FnOnce() -> anyhow::Result<T>,
1854    {
1855        let mut err = match f() {
1856            Ok(t) => return Ok(t),
1857            Err(e) => e,
1858        };
1859        if let Some(e) = err.downcast_mut::<crate::Error>() {
1860            e.highlight(self);
1861        } else if let Some(e) = err.downcast_mut::<crate::PackageNotFoundError>() {
1862            e.highlight(self);
1863        }
1864        Err(err)
1865    }
1866
1867    pub(crate) fn highlight_span(&self, span: Span, err: impl fmt::Display) -> Option<String> {
1868        if !span.is_known() {
1869            return None;
1870        }
1871        Some(self.highlight_err(span.start(), Some(span.end()), err))
1872    }
1873
1874    fn highlight_err(&self, start: u32, end: Option<u32>, err: impl fmt::Display) -> String {
1875        let src = self.source_for_offset(start);
1876        let start = src.to_relative_offset(start);
1877        let end = end.map(|end| src.to_relative_offset(end));
1878        let (line, col) = src.linecol(start);
1879        let snippet = src.contents.lines().nth(line).unwrap_or("");
1880        let line = line + 1;
1881        let col = col + 1;
1882
1883        // If the snippet is too large then don't overload output on a terminal
1884        // for example and instead just print the error. This also sidesteps
1885        // Rust's restriction that `>0$` below has to be less than `u16::MAX`.
1886        if snippet.len() > 500 {
1887            return format!("{}:{line}:{col}: {err}", src.path);
1888        }
1889        let mut msg = format!(
1890            "\
1891{err}
1892     --> {file}:{line}:{col}
1893      |
1894 {line:4} | {snippet}
1895      | {marker:>0$}",
1896            col,
1897            file = src.path,
1898            marker = "^",
1899        );
1900        if let Some(end) = end {
1901            if let Some(s) = src.contents.get(start..end) {
1902                for _ in s.chars().skip(1) {
1903                    msg.push('-');
1904                }
1905            }
1906        }
1907        return msg;
1908    }
1909
1910    /// Renders a span as a human-readable location string (e.g., "file.wit:10:5").
1911    pub fn render_location(&self, span: Span) -> String {
1912        if !span.is_known() {
1913            return "<unknown>".to_string();
1914        }
1915        let start = span.start();
1916        let src = self.source_for_offset(start);
1917        let rel_start = src.to_relative_offset(start);
1918        let (line, col) = src.linecol(rel_start);
1919        format!(
1920            "{file}:{line}:{col}",
1921            file = src.path,
1922            line = line + 1,
1923            col = col + 1,
1924        )
1925    }
1926
1927    fn source_for_offset(&self, start: u32) -> &Source {
1928        let i = match self.sources.binary_search_by_key(&start, |src| src.offset) {
1929            Ok(i) => i,
1930            Err(i) => i - 1,
1931        };
1932        &self.sources[i]
1933    }
1934
1935    /// Returns an iterator over all filenames added to this source map.
1936    #[cfg(feature = "std")]
1937    pub fn source_files(&self) -> impl Iterator<Item = &Path> {
1938        self.sources.iter().map(|src| Path::new(&src.path))
1939    }
1940
1941    /// Returns an iterator over all source names added to this source map.
1942    pub fn source_names(&self) -> impl Iterator<Item = &str> {
1943        self.sources.iter().map(|src| src.path.as_str())
1944    }
1945}
1946
1947impl Source {
1948    fn to_relative_offset(&self, offset: u32) -> usize {
1949        usize::try_from(offset - self.offset).unwrap()
1950    }
1951
1952    fn linecol(&self, relative_offset: usize) -> (usize, usize) {
1953        let mut cur = 0;
1954        // Use split_terminator instead of lines so that if there is a `\r`,
1955        // it is included in the offset calculation. The `+1` values below
1956        // account for the `\n`.
1957        for (i, line) in self.contents.split_terminator('\n').enumerate() {
1958            if cur + line.len() + 1 > relative_offset {
1959                return (i, relative_offset - cur);
1960            }
1961            cur += line.len() + 1;
1962        }
1963        (self.contents.lines().count(), 0)
1964    }
1965}
1966
/// Structured result of [`parse_use_path`]: a use path is either a bare name
/// or a package-qualified name.
pub enum ParsedUsePath {
    /// A bare identifier, carried as its string form.
    Name(String),
    /// A package-qualified path: the package name plus the item name within
    /// that package.
    Package(crate::PackageName, String),
}
1971
1972pub fn parse_use_path(s: &str) -> anyhow::Result<ParsedUsePath> {
1973    let mut tokens = Tokenizer::new(s, 0)?;
1974    let path = UsePath::parse(&mut tokens)?;
1975    if tokens.next()?.is_some() {
1976        anyhow::bail!("trailing tokens in path specifier");
1977    }
1978    Ok(match path {
1979        UsePath::Id(id) => ParsedUsePath::Name(id.name.to_string()),
1980        UsePath::Package { id, name } => {
1981            ParsedUsePath::Package(id.package_name(), name.name.to_string())
1982        }
1983    })
1984}