// wit_parser/ast.rs
//
// AST definitions and recursive-descent parser for WIT (`*.wit`) files.
1use crate::{Error, PackageNotFoundError, UnresolvedPackageGroup};
2use alloc::borrow::Cow;
3use alloc::boxed::Box;
4use alloc::format;
5use alloc::string::{String, ToString};
6use alloc::vec::Vec;
7use anyhow::{Context, Result, bail};
8use core::fmt;
9use core::mem;
10use lex::{Span, Token, Tokenizer};
11use semver::Version;
12#[cfg(feature = "std")]
13use std::path::Path;
14
15pub mod lex;
16
17pub use resolve::Resolver;
18mod resolve;
19pub mod toposort;
20
21pub use lex::validate_id;
22
/// Representation of a single WIT `*.wit` file and nested packages.
struct PackageFile<'a> {
    /// Optional `package foo:bar;` header at the start of the file.
    package_id: Option<PackageName<'a>>,
    /// Other AST items (interfaces, worlds, uses, nested packages).
    decl_list: DeclList<'a>,
}
30
31impl<'a> PackageFile<'a> {
32    /// Parse a standalone file represented by `tokens`.
33    ///
34    /// This will optionally start with `package foo:bar;` and then will have a
35    /// list of ast items after it.
36    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
37        let mut package_name_tokens_peek = tokens.clone();
38        let docs = parse_docs(&mut package_name_tokens_peek)?;
39
40        // Parse `package foo:bar;` but throw it out if it's actually
41        // `package foo:bar { ... }` since that's an ast item instead.
42        let package_id = if package_name_tokens_peek.eat(Token::Package)? {
43            let name = PackageName::parse(&mut package_name_tokens_peek, docs)?;
44            if package_name_tokens_peek.eat(Token::Semicolon)? {
45                *tokens = package_name_tokens_peek;
46                Some(name)
47            } else {
48                None
49            }
50        } else {
51            None
52        };
53        let decl_list = DeclList::parse_until(tokens, None)?;
54        Ok(PackageFile {
55            package_id,
56            decl_list,
57        })
58    }
59
60    /// Parse a nested package of the form `package foo:bar { ... }`
61    fn parse_nested(
62        tokens: &mut Tokenizer<'a>,
63        docs: Docs<'a>,
64        attributes: Vec<Attribute<'a>>,
65    ) -> Result<Self> {
66        let span = tokens.expect(Token::Package)?;
67        if !attributes.is_empty() {
68            bail!(Error::new(
69                span,
70                format!("cannot place attributes on nested packages"),
71            ));
72        }
73        let package_id = PackageName::parse(tokens, docs)?;
74        tokens.expect(Token::LeftBrace)?;
75        let decl_list = DeclList::parse_until(tokens, Some(Token::RightBrace))?;
76        Ok(PackageFile {
77            package_id: Some(package_id),
78            decl_list,
79        })
80    }
81}
82
/// Stores all of the declarations in a package's scope. In AST terms, this
/// means everything except the `package` declaration that demarcates a package
/// scope. In the traditional implicit format, these are all of the
/// non-`package` declarations in the file:
///
/// ```wit
/// package foo:name;
///
/// /* START DECL LIST */
/// // Some comment...
/// interface i {}
/// world w {}
/// /* END DECL LIST */
/// ```
///
/// In the nested package style, a [`DeclList`] is everything inside of each
/// `package` element's brackets:
///
/// ```wit
/// package foo:name {
///   /* START FIRST DECL LIST */
///   // Some comment...
///   interface i {}
///   world w {}
///   /* END FIRST DECL LIST */
/// }
///
/// package bar:name {
///   /* START SECOND DECL LIST */
///   // Some comment...
///   interface i {}
///   world w {}
///   /* END SECOND DECL LIST */
/// }
/// ```
#[derive(Default)]
pub struct DeclList<'a> {
    items: Vec<AstItem<'a>>,
}
122
impl<'a> DeclList<'a> {
    /// Parses declarations until `end` is eaten, or until end-of-input when
    /// `end` is `None`. Doc comments preceding each item are attached to it.
    fn parse_until(tokens: &mut Tokenizer<'a>, end: Option<Token>) -> Result<DeclList<'a>> {
        let mut items = Vec::new();
        let mut docs = parse_docs(tokens)?;
        loop {
            match end {
                Some(end) => {
                    if tokens.eat(end)? {
                        break;
                    }
                }
                None => {
                    // Peek on a clone so no token is consumed while checking
                    // for end-of-input.
                    if tokens.clone().next()?.is_none() {
                        break;
                    }
                }
            }
            items.push(AstItem::parse(tokens, docs)?);
            docs = parse_docs(tokens)?;
        }
        Ok(DeclList { items })
    }

    /// Invokes `f` for every dependency path mentioned by items in this list
    /// (`use`s, `include`s, and extern paths), recursing into nested packages.
    ///
    /// The callback receives: the enclosing item's name (if any), the
    /// attributes on the referencing item, the path itself, the imported
    /// names (for `use`), and whether the path should resolve to a world or
    /// an interface.
    fn for_each_path<'b>(
        &'b self,
        f: &mut dyn FnMut(
            Option<&'b Id<'a>>,
            &'b [Attribute<'a>],
            &'b UsePath<'a>,
            Option<&'b [UseName<'a>]>,
            WorldOrInterface,
        ) -> Result<()>,
    ) -> Result<()> {
        for item in self.items.iter() {
            match item {
                AstItem::World(world) => {
                    // Visit imports here first before exports to help preserve
                    // round-tripping of documents because printing a world puts
                    // imports first but textually they can be listed with
                    // exports first.
                    let mut imports = Vec::new();
                    let mut exports = Vec::new();
                    for item in world.items.iter() {
                        match item {
                            WorldItem::Use(u) => f(
                                None,
                                &u.attributes,
                                &u.from,
                                Some(&u.names),
                                WorldOrInterface::Interface,
                            )?,
                            WorldItem::Include(i) => f(
                                Some(&world.name),
                                &i.attributes,
                                &i.from,
                                None,
                                WorldOrInterface::World,
                            )?,
                            WorldItem::Type(_) => {}
                            WorldItem::Import(Import {
                                kind, attributes, ..
                            }) => imports.push((kind, attributes)),
                            WorldItem::Export(Export {
                                kind, attributes, ..
                            }) => exports.push((kind, attributes)),
                        }
                    }

                    let mut visit_kind =
                        |kind: &'b ExternKind<'a>, attrs: &'b [Attribute<'a>]| match kind {
                            ExternKind::Interface(_, items) => {
                                // Inline interfaces may themselves contain
                                // `use` items whose paths must be visited.
                                for item in items {
                                    match item {
                                        InterfaceItem::Use(u) => f(
                                            None,
                                            &u.attributes,
                                            &u.from,
                                            Some(&u.names),
                                            WorldOrInterface::Interface,
                                        )?,
                                        _ => {}
                                    }
                                }
                                Ok(())
                            }
                            ExternKind::Path(path) => {
                                f(None, attrs, path, None, WorldOrInterface::Interface)
                            }
                            ExternKind::Func(..) => Ok(()),
                        };

                    for (kind, attrs) in imports {
                        visit_kind(kind, attrs)?;
                    }
                    for (kind, attrs) in exports {
                        visit_kind(kind, attrs)?;
                    }
                }
                AstItem::Interface(i) => {
                    for item in i.items.iter() {
                        match item {
                            InterfaceItem::Use(u) => f(
                                Some(&i.name),
                                &u.attributes,
                                &u.from,
                                Some(&u.names),
                                WorldOrInterface::Interface,
                            )?,
                            _ => {}
                        }
                    }
                }
                AstItem::Use(u) => {
                    // At the top-level, we don't know if this is a world or an interface
                    // It is up to the resolver to decide how to handle this ambiguity.
                    f(
                        None,
                        &u.attributes,
                        &u.item,
                        None,
                        WorldOrInterface::Unknown,
                    )?;
                }

                AstItem::Package(pkg) => pkg.decl_list.for_each_path(f)?,
            }
        }
        Ok(())
    }
}
253
/// A single top-level declaration within a package scope.
enum AstItem<'a> {
    Interface(Interface<'a>),
    World(World<'a>),
    Use(ToplevelUse<'a>),
    /// A nested `package foo:bar { ... }` item.
    Package(PackageFile<'a>),
}
260
261impl<'a> AstItem<'a> {
262    fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
263        let attributes = Attribute::parse_list(tokens)?;
264        match tokens.clone().next()? {
265            Some((_span, Token::Interface)) => {
266                Interface::parse(tokens, docs, attributes).map(Self::Interface)
267            }
268            Some((_span, Token::World)) => World::parse(tokens, docs, attributes).map(Self::World),
269            Some((_span, Token::Use)) => ToplevelUse::parse(tokens, attributes).map(Self::Use),
270            Some((_span, Token::Package)) => {
271                PackageFile::parse_nested(tokens, docs, attributes).map(Self::Package)
272            }
273            other => Err(err_expected(tokens, "`world`, `interface` or `use`", other).into()),
274        }
275    }
276}
277
/// An AST-level package name: `namespace:name` with an optional `@version`.
#[derive(Debug, Clone)]
struct PackageName<'a> {
    docs: Docs<'a>,
    /// Span covering the whole `namespace:name@version` text.
    span: Span,
    namespace: Id<'a>,
    name: Id<'a>,
    /// Optional semver version together with the span it was parsed from.
    version: Option<(Span, Version)>,
}
286
287impl<'a> PackageName<'a> {
288    fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
289        let namespace = parse_id(tokens)?;
290        tokens.expect(Token::Colon)?;
291        let name = parse_id(tokens)?;
292        let version = parse_opt_version(tokens)?;
293        Ok(PackageName {
294            docs,
295            span: Span::new(
296                namespace.span.start(),
297                version
298                    .as_ref()
299                    .map(|(s, _)| s.end())
300                    .unwrap_or(name.span.end()),
301            ),
302            namespace,
303            name,
304            version,
305        })
306    }
307
308    fn package_name(&self) -> crate::PackageName {
309        crate::PackageName {
310            namespace: self.namespace.name.to_string(),
311            name: self.name.name.to_string(),
312            version: self.version.as_ref().map(|(_, v)| v.clone()),
313        }
314    }
315}
316
/// A top-level `use foo:bar/baz (as qux)?;` item.
struct ToplevelUse<'a> {
    /// Span of the `use` keyword.
    span: Span,
    attributes: Vec<Attribute<'a>>,
    item: UsePath<'a>,
    /// Optional rename introduced by an `as` clause.
    as_: Option<Id<'a>>,
}
323
324impl<'a> ToplevelUse<'a> {
325    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> {
326        let span = tokens.expect(Token::Use)?;
327        let item = UsePath::parse(tokens)?;
328        let as_ = if tokens.eat(Token::As)? {
329            Some(parse_id(tokens)?)
330        } else {
331            None
332        };
333        tokens.expect_semicolon()?;
334        Ok(ToplevelUse {
335            span,
336            attributes,
337            item,
338            as_,
339        })
340    }
341}
342
/// A `world <name> { ... }` declaration.
struct World<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    items: Vec<WorldItem<'a>>,
}
349
350impl<'a> World<'a> {
351    fn parse(
352        tokens: &mut Tokenizer<'a>,
353        docs: Docs<'a>,
354        attributes: Vec<Attribute<'a>>,
355    ) -> Result<Self> {
356        tokens.expect(Token::World)?;
357        let name = parse_id(tokens)?;
358        let items = Self::parse_items(tokens)?;
359        Ok(World {
360            docs,
361            attributes,
362            name,
363            items,
364        })
365    }
366
367    fn parse_items(tokens: &mut Tokenizer<'a>) -> Result<Vec<WorldItem<'a>>> {
368        tokens.expect(Token::LeftBrace)?;
369        let mut items = Vec::new();
370        loop {
371            let docs = parse_docs(tokens)?;
372            if tokens.eat(Token::RightBrace)? {
373                break;
374            }
375            let attributes = Attribute::parse_list(tokens)?;
376            items.push(WorldItem::parse(tokens, docs, attributes)?);
377        }
378        Ok(items)
379    }
380}
381
/// A single item appearing inside a `world { ... }` body.
enum WorldItem<'a> {
    Import(Import<'a>),
    Export(Export<'a>),
    Use(Use<'a>),
    Type(TypeDef<'a>),
    Include(Include<'a>),
}
389
390impl<'a> WorldItem<'a> {
391    fn parse(
392        tokens: &mut Tokenizer<'a>,
393        docs: Docs<'a>,
394        attributes: Vec<Attribute<'a>>,
395    ) -> Result<WorldItem<'a>> {
396        match tokens.clone().next()? {
397            Some((_span, Token::Import)) => {
398                Import::parse(tokens, docs, attributes).map(WorldItem::Import)
399            }
400            Some((_span, Token::Export)) => {
401                Export::parse(tokens, docs, attributes).map(WorldItem::Export)
402            }
403            Some((_span, Token::Use)) => Use::parse(tokens, attributes).map(WorldItem::Use),
404            Some((_span, Token::Type)) => {
405                TypeDef::parse(tokens, docs, attributes).map(WorldItem::Type)
406            }
407            Some((_span, Token::Flags)) => {
408                TypeDef::parse_flags(tokens, docs, attributes).map(WorldItem::Type)
409            }
410            Some((_span, Token::Resource)) => {
411                TypeDef::parse_resource(tokens, docs, attributes).map(WorldItem::Type)
412            }
413            Some((_span, Token::Record)) => {
414                TypeDef::parse_record(tokens, docs, attributes).map(WorldItem::Type)
415            }
416            Some((_span, Token::Variant)) => {
417                TypeDef::parse_variant(tokens, docs, attributes).map(WorldItem::Type)
418            }
419            Some((_span, Token::Enum)) => {
420                TypeDef::parse_enum(tokens, docs, attributes).map(WorldItem::Type)
421            }
422            Some((_span, Token::Include)) => {
423                Include::parse(tokens, attributes).map(WorldItem::Include)
424            }
425            other => Err(err_expected(
426                tokens,
427                "`import`, `export`, `include`, `use`, or type definition",
428                other,
429            )
430            .into()),
431        }
432    }
433}
434
/// An `import ...` item inside a world.
struct Import<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    kind: ExternKind<'a>,
}
440
441impl<'a> Import<'a> {
442    fn parse(
443        tokens: &mut Tokenizer<'a>,
444        docs: Docs<'a>,
445        attributes: Vec<Attribute<'a>>,
446    ) -> Result<Import<'a>> {
447        tokens.expect(Token::Import)?;
448        let kind = ExternKind::parse(tokens)?;
449        Ok(Import {
450            docs,
451            attributes,
452            kind,
453        })
454    }
455}
456
/// An `export ...` item inside a world.
struct Export<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    kind: ExternKind<'a>,
}
462
463impl<'a> Export<'a> {
464    fn parse(
465        tokens: &mut Tokenizer<'a>,
466        docs: Docs<'a>,
467        attributes: Vec<Attribute<'a>>,
468    ) -> Result<Export<'a>> {
469        tokens.expect(Token::Export)?;
470        let kind = ExternKind::parse(tokens)?;
471        Ok(Export {
472            docs,
473            attributes,
474            kind,
475        })
476    }
477}
478
/// The right-hand side of an `import`/`export` item.
enum ExternKind<'a> {
    /// An inline interface: `import foo: interface { ... }`.
    Interface(Id<'a>, Vec<InterfaceItem<'a>>),
    /// A reference to a named interface: `import foo:bar/baz`.
    Path(UsePath<'a>),
    /// A bare function: `import foo: func(...)`.
    Func(Id<'a>, Func<'a>),
}
484
impl<'a> ExternKind<'a> {
    /// Parses the payload of an `import`/`export` item, distinguishing
    /// functions, inline interfaces, and plain paths via backtracking.
    fn parse(tokens: &mut Tokenizer<'a>) -> Result<ExternKind<'a>> {
        // Create a copy of the token stream to test out if this is a function
        // or an interface import. In those situations the token stream gets
        // reset to the state of the clone and we continue down those paths.
        //
        // If neither a function nor an interface appears here though then the
        // clone is thrown away and the original token stream is parsed for an
        // interface. This will redo the original ID parse and the original
        // colon parse, but that shouldn't be too bad perf-wise.
        let mut clone = tokens.clone();
        let id = parse_id(&mut clone)?;
        if clone.eat(Token::Colon)? {
            // import foo: async? func(...)
            // (Nested clones peek for `func`/`async` without consuming.)
            if clone.clone().eat(Token::Func)? || clone.clone().eat(Token::Async)? {
                *tokens = clone;
                let ret = ExternKind::Func(id, Func::parse(tokens)?);
                tokens.expect_semicolon()?;
                return Ok(ret);
            }

            // import foo: interface { ... }
            if clone.eat(Token::Interface)? {
                *tokens = clone;
                return Ok(ExternKind::Interface(id, Interface::parse_items(tokens)?));
            }
        }

        // import foo
        // import foo/bar
        // import foo:bar/baz
        let ret = ExternKind::Path(UsePath::parse(tokens)?);
        tokens.expect_semicolon()?;
        Ok(ret)
    }

    /// Returns the span of the name associated with this extern item.
    fn span(&self) -> Span {
        match self {
            ExternKind::Interface(id, _) => id.span,
            ExternKind::Path(UsePath::Id(id)) => id.span,
            ExternKind::Path(UsePath::Package { name, .. }) => name.span,
            ExternKind::Func(id, _) => id.span,
        }
    }
}
530
/// An `interface <name> { ... }` declaration.
struct Interface<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    items: Vec<InterfaceItem<'a>>,
}
537
538impl<'a> Interface<'a> {
539    fn parse(
540        tokens: &mut Tokenizer<'a>,
541        docs: Docs<'a>,
542        attributes: Vec<Attribute<'a>>,
543    ) -> Result<Self> {
544        tokens.expect(Token::Interface)?;
545        let name = parse_id(tokens)?;
546        let items = Self::parse_items(tokens)?;
547        Ok(Interface {
548            docs,
549            attributes,
550            name,
551            items,
552        })
553    }
554
555    pub(super) fn parse_items(tokens: &mut Tokenizer<'a>) -> Result<Vec<InterfaceItem<'a>>> {
556        tokens.expect(Token::LeftBrace)?;
557        let mut items = Vec::new();
558        loop {
559            let docs = parse_docs(tokens)?;
560            if tokens.eat(Token::RightBrace)? {
561                break;
562            }
563            let attributes = Attribute::parse_list(tokens)?;
564            items.push(InterfaceItem::parse(tokens, docs, attributes)?);
565        }
566        Ok(items)
567    }
568}
569
/// Whether a path is expected to resolve to a world, an interface, or is
/// ambiguous at parse time (resolved later).
#[derive(Debug)]
pub enum WorldOrInterface {
    World,
    Interface,
    Unknown,
}
576
/// A single item appearing inside an `interface { ... }` body.
enum InterfaceItem<'a> {
    TypeDef(TypeDef<'a>),
    Func(NamedFunc<'a>),
    Use(Use<'a>),
}
582
/// A `use <path>.{ name1, name2 as alias, ... };` item.
struct Use<'a> {
    attributes: Vec<Attribute<'a>>,
    from: UsePath<'a>,
    names: Vec<UseName<'a>>,
}
588
/// A path referenced by `use`/`import`/`export`/`include`.
#[derive(Debug)]
enum UsePath<'a> {
    /// A bare local identifier: `foo`.
    Id(Id<'a>),
    /// A fully-qualified reference: `namespace:pkg/name(@version)?`.
    Package { id: PackageName<'a>, name: Id<'a> },
}
594
595impl<'a> UsePath<'a> {
596    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
597        let id = parse_id(tokens)?;
598        if tokens.eat(Token::Colon)? {
599            // `foo:bar/baz@1.0`
600            let namespace = id;
601            let pkg_name = parse_id(tokens)?;
602            tokens.expect(Token::Slash)?;
603            let name = parse_id(tokens)?;
604            let version = parse_opt_version(tokens)?;
605            Ok(UsePath::Package {
606                id: PackageName {
607                    docs: Default::default(),
608                    span: Span::new(namespace.span.start(), pkg_name.span.end()),
609                    namespace,
610                    name: pkg_name,
611                    version,
612                },
613                name,
614            })
615        } else {
616            // `foo`
617            Ok(UsePath::Id(id))
618        }
619    }
620
621    fn name(&self) -> &Id<'a> {
622        match self {
623            UsePath::Id(id) => id,
624            UsePath::Package { name, .. } => name,
625        }
626    }
627}
628
/// One imported name in a `use` list, with an optional `as` rename.
struct UseName<'a> {
    name: Id<'a>,
    as_: Option<Id<'a>>,
}
633
634impl<'a> Use<'a> {
635    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> {
636        tokens.expect(Token::Use)?;
637        let from = UsePath::parse(tokens)?;
638        tokens.expect(Token::Period)?;
639        tokens.expect(Token::LeftBrace)?;
640
641        let mut names = Vec::new();
642        while !tokens.eat(Token::RightBrace)? {
643            let mut name = UseName {
644                name: parse_id(tokens)?,
645                as_: None,
646            };
647            if tokens.eat(Token::As)? {
648                name.as_ = Some(parse_id(tokens)?);
649            }
650            names.push(name);
651            if !tokens.eat(Token::Comma)? {
652                tokens.expect(Token::RightBrace)?;
653                break;
654            }
655        }
656        tokens.expect_semicolon()?;
657        Ok(Use {
658            attributes,
659            from,
660            names,
661        })
662    }
663}
664
/// An `include <path> (with { a as b, ... })?;` item inside a world.
struct Include<'a> {
    from: UsePath<'a>,
    attributes: Vec<Attribute<'a>>,
    names: Vec<IncludeName<'a>>,
}
670
/// One `name as alias` rename in an `include ... with { ... }` clause.
struct IncludeName<'a> {
    name: Id<'a>,
    as_: Id<'a>,
}
675
676impl<'a> Include<'a> {
677    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> {
678        tokens.expect(Token::Include)?;
679        let from = UsePath::parse(tokens)?;
680
681        let names = if tokens.eat(Token::With)? {
682            parse_list(
683                tokens,
684                Token::LeftBrace,
685                Token::RightBrace,
686                |_docs, tokens| {
687                    let name = parse_id(tokens)?;
688                    tokens.expect(Token::As)?;
689                    let as_ = parse_id(tokens)?;
690                    Ok(IncludeName { name, as_ })
691                },
692            )?
693        } else {
694            tokens.expect_semicolon()?;
695            Vec::new()
696        };
697
698        Ok(Include {
699            attributes,
700            from,
701            names,
702        })
703    }
704}
705
/// An identifier together with the span it was parsed from.
#[derive(Debug, Clone)]
pub struct Id<'a> {
    name: &'a str,
    span: Span,
}
711
712impl<'a> From<&'a str> for Id<'a> {
713    fn from(s: &'a str) -> Id<'a> {
714        Id {
715            name: s.into(),
716            span: Default::default(),
717        }
718    }
719}
720
/// Doc comments collected ahead of an item, plus their combined span.
#[derive(Debug, Clone)]
pub struct Docs<'a> {
    docs: Vec<Cow<'a, str>>,
    span: Span,
}
726
727impl<'a> Default for Docs<'a> {
728    fn default() -> Self {
729        Self {
730            docs: Default::default(),
731            span: Default::default(),
732        }
733    }
734}
735
/// A named type definition (`type`, `record`, `flags`, `resource`, etc.).
struct TypeDef<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    ty: Type<'a>,
}
742
/// A WIT type expression; primitive variants carry the span of the keyword.
enum Type<'a> {
    Bool(Span),
    U8(Span),
    U16(Span),
    U32(Span),
    U64(Span),
    S8(Span),
    S16(Span),
    S32(Span),
    S64(Span),
    F32(Span),
    F64(Span),
    Char(Span),
    String(Span),
    /// A reference to a named type.
    Name(Id<'a>),
    List(List<'a>),
    Map(Map<'a>),
    FixedLengthList(FixedLengthList<'a>),
    Handle(Handle<'a>),
    Resource(Resource<'a>),
    Record(Record<'a>),
    Flags(Flags<'a>),
    Variant(Variant<'a>),
    Tuple(Tuple<'a>),
    Enum(Enum<'a>),
    Option(Option_<'a>),
    Result(Result_<'a>),
    Future(Future<'a>),
    Stream(Stream<'a>),
    ErrorContext(Span),
}
774
/// A handle to a resource: owned (`own<r>`) or borrowed (`borrow<r>`).
enum Handle<'a> {
    Own { resource: Id<'a> },
    Borrow { resource: Id<'a> },
}
779
780impl Handle<'_> {
781    fn span(&self) -> Span {
782        match self {
783            Handle::Own { resource } | Handle::Borrow { resource } => resource.span,
784        }
785    }
786}
787
/// A `resource { ... }` type with its associated functions.
struct Resource<'a> {
    span: Span,
    funcs: Vec<ResourceFunc<'a>>,
}
792
/// A function item within a resource: method, static function, or constructor.
enum ResourceFunc<'a> {
    Method(NamedFunc<'a>),
    Static(NamedFunc<'a>),
    Constructor(NamedFunc<'a>),
}
798
impl<'a> ResourceFunc<'a> {
    /// Parses one function-like item inside a `resource { ... }` block:
    /// a `constructor(...)`, a `static` function, or a method.
    fn parse(
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
        tokens: &mut Tokenizer<'a>,
    ) -> Result<Self> {
        match tokens.clone().next()? {
            Some((span, Token::Constructor)) => {
                tokens.expect(Token::Constructor)?;
                tokens.expect(Token::LeftParen)?;
                // Constructor parameters: comma-separated `name: type` pairs.
                let params = parse_list_trailer(tokens, Token::RightParen, |_docs, tokens| {
                    let name = parse_id(tokens)?;
                    tokens.expect(Token::Colon)?;
                    let ty = Type::parse(tokens)?;
                    Ok((name, ty))
                })?;
                // Optional `-> ty` result. NOTE(review): constructors usually
                // implicitly return the resource itself — confirm an explicit
                // arrow result is intended to be accepted here.
                let result = if tokens.eat(Token::RArrow)? {
                    let ty = Type::parse(tokens)?;
                    Some(ty)
                } else {
                    None
                };
                tokens.expect_semicolon()?;
                // Constructors are modeled as a function literally named
                // "constructor", never async.
                Ok(ResourceFunc::Constructor(NamedFunc {
                    docs,
                    attributes,
                    name: Id {
                        span,
                        name: "constructor",
                    },
                    func: Func {
                        span,
                        async_: false,
                        params,
                        result,
                    },
                }))
            }
            Some((_span, Token::Id | Token::ExplicitId)) => {
                let name = parse_id(tokens)?;
                tokens.expect(Token::Colon)?;
                // A leading `static` distinguishes a static function from a
                // method; `ctor` is the variant constructor to apply below.
                let ctor = if tokens.eat(Token::Static)? {
                    ResourceFunc::Static
                } else {
                    ResourceFunc::Method
                };
                let func = Func::parse(tokens)?;
                tokens.expect_semicolon()?;
                Ok(ctor(NamedFunc {
                    docs,
                    attributes,
                    name,
                    func,
                }))
            }
            other => Err(err_expected(tokens, "`constructor` or identifier", other).into()),
        }
    }

    /// Returns the underlying named function regardless of variant.
    fn named_func(&self) -> &NamedFunc<'a> {
        use ResourceFunc::*;
        match self {
            Method(f) | Static(f) | Constructor(f) => f,
        }
    }
}
865
/// A `record { ... }` type: a list of named, typed fields.
struct Record<'a> {
    span: Span,
    fields: Vec<Field<'a>>,
}
870
/// A single field of a `record`.
struct Field<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
    ty: Type<'a>,
}
876
/// A `flags { ... }` type: a set of named bit-flags.
struct Flags<'a> {
    span: Span,
    flags: Vec<Flag<'a>>,
}
881
/// A single named flag within a `flags` type.
struct Flag<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
}
886
/// A `variant { ... }` type: a tagged union of cases.
struct Variant<'a> {
    span: Span,
    cases: Vec<Case<'a>>,
}
891
/// One case of a `variant`, with an optional payload type.
struct Case<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
    ty: Option<Type<'a>>,
}
897
/// An `enum { ... }` type: a variant whose cases carry no payload.
struct Enum<'a> {
    span: Span,
    cases: Vec<EnumCase<'a>>,
}
902
/// One case of an `enum`.
struct EnumCase<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
}
907
/// An `option<T>` type.
struct Option_<'a> {
    span: Span,
    ty: Box<Type<'a>>,
}
912
/// A `list<T>` type.
struct List<'a> {
    span: Span,
    ty: Box<Type<'a>>,
}
917
/// A `map<K, V>` type.
struct Map<'a> {
    span: Span,
    key: Box<Type<'a>>,
    value: Box<Type<'a>>,
}
923
/// A fixed-length `list<T, N>` type with a compile-time element count.
struct FixedLengthList<'a> {
    span: Span,
    ty: Box<Type<'a>>,
    size: u32,
}
929
/// A `future` or `future<T>` type; `ty` is `None` for the bare form.
struct Future<'a> {
    span: Span,
    ty: Option<Box<Type<'a>>>,
}
934
/// A `tuple<T1, T2, ...>` type.
struct Tuple<'a> {
    span: Span,
    types: Vec<Type<'a>>,
}
939
/// A `result`, `result<T>`, or `result<T, E>` type; absent positions are
/// `None`.
struct Result_<'a> {
    span: Span,
    ok: Option<Box<Type<'a>>>,
    err: Option<Box<Type<'a>>>,
}
945
/// A `stream` or `stream<T>` type; `ty` is `None` for the bare form.
struct Stream<'a> {
    span: Span,
    ty: Option<Box<Type<'a>>>,
}
950
/// A function together with the name it is declared under.
struct NamedFunc<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    func: Func<'a>,
}
957
958type ParamList<'a> = Vec<(Id<'a>, Type<'a>)>;
959
/// A function signature: `async? func(params) (-> result)?`.
struct Func<'a> {
    /// Span of the `func` keyword.
    span: Span,
    async_: bool,
    params: ParamList<'a>,
    result: Option<Type<'a>>,
}
966
967impl<'a> Func<'a> {
968    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Func<'a>> {
969        fn parse_params<'a>(tokens: &mut Tokenizer<'a>, left_paren: bool) -> Result<ParamList<'a>> {
970            if left_paren {
971                tokens.expect(Token::LeftParen)?;
972            };
973            parse_list_trailer(tokens, Token::RightParen, |_docs, tokens| {
974                let name = parse_id(tokens)?;
975                tokens.expect(Token::Colon)?;
976                let ty = Type::parse(tokens)?;
977                Ok((name, ty))
978            })
979        }
980
981        let async_ = tokens.eat(Token::Async)?;
982        let span = tokens.expect(Token::Func)?;
983        let params = parse_params(tokens, true)?;
984        let result = if tokens.eat(Token::RArrow)? {
985            let ty = Type::parse(tokens)?;
986            Some(ty)
987        } else {
988            None
989        };
990        Ok(Func {
991            span,
992            async_,
993            params,
994            result,
995        })
996    }
997}
998
999impl<'a> InterfaceItem<'a> {
1000    fn parse(
1001        tokens: &mut Tokenizer<'a>,
1002        docs: Docs<'a>,
1003        attributes: Vec<Attribute<'a>>,
1004    ) -> Result<InterfaceItem<'a>> {
1005        match tokens.clone().next()? {
1006            Some((_span, Token::Type)) => {
1007                TypeDef::parse(tokens, docs, attributes).map(InterfaceItem::TypeDef)
1008            }
1009            Some((_span, Token::Flags)) => {
1010                TypeDef::parse_flags(tokens, docs, attributes).map(InterfaceItem::TypeDef)
1011            }
1012            Some((_span, Token::Enum)) => {
1013                TypeDef::parse_enum(tokens, docs, attributes).map(InterfaceItem::TypeDef)
1014            }
1015            Some((_span, Token::Variant)) => {
1016                TypeDef::parse_variant(tokens, docs, attributes).map(InterfaceItem::TypeDef)
1017            }
1018            Some((_span, Token::Resource)) => {
1019                TypeDef::parse_resource(tokens, docs, attributes).map(InterfaceItem::TypeDef)
1020            }
1021            Some((_span, Token::Record)) => {
1022                TypeDef::parse_record(tokens, docs, attributes).map(InterfaceItem::TypeDef)
1023            }
1024            Some((_span, Token::Id)) | Some((_span, Token::ExplicitId)) => {
1025                NamedFunc::parse(tokens, docs, attributes).map(InterfaceItem::Func)
1026            }
1027            Some((_span, Token::Use)) => Use::parse(tokens, attributes).map(InterfaceItem::Use),
1028            other => Err(err_expected(tokens, "`type`, `resource` or `func`", other).into()),
1029        }
1030    }
1031}
1032
1033impl<'a> TypeDef<'a> {
1034    fn parse(
1035        tokens: &mut Tokenizer<'a>,
1036        docs: Docs<'a>,
1037        attributes: Vec<Attribute<'a>>,
1038    ) -> Result<Self> {
1039        tokens.expect(Token::Type)?;
1040        let name = parse_id(tokens)?;
1041        tokens.expect(Token::Equals)?;
1042        let ty = Type::parse(tokens)?;
1043        tokens.expect_semicolon()?;
1044        Ok(TypeDef {
1045            docs,
1046            attributes,
1047            name,
1048            ty,
1049        })
1050    }
1051
1052    fn parse_flags(
1053        tokens: &mut Tokenizer<'a>,
1054        docs: Docs<'a>,
1055        attributes: Vec<Attribute<'a>>,
1056    ) -> Result<Self> {
1057        tokens.expect(Token::Flags)?;
1058        let name = parse_id(tokens)?;
1059        let ty = Type::Flags(Flags {
1060            span: name.span,
1061            flags: parse_list(
1062                tokens,
1063                Token::LeftBrace,
1064                Token::RightBrace,
1065                |docs, tokens| {
1066                    let name = parse_id(tokens)?;
1067                    Ok(Flag { docs, name })
1068                },
1069            )?,
1070        });
1071        Ok(TypeDef {
1072            docs,
1073            attributes,
1074            name,
1075            ty,
1076        })
1077    }
1078
1079    fn parse_resource(
1080        tokens: &mut Tokenizer<'a>,
1081        docs: Docs<'a>,
1082        attributes: Vec<Attribute<'a>>,
1083    ) -> Result<Self> {
1084        tokens.expect(Token::Resource)?;
1085        let name = parse_id(tokens)?;
1086        let mut funcs = Vec::new();
1087        if tokens.eat(Token::LeftBrace)? {
1088            while !tokens.eat(Token::RightBrace)? {
1089                let docs = parse_docs(tokens)?;
1090                let attributes = Attribute::parse_list(tokens)?;
1091                funcs.push(ResourceFunc::parse(docs, attributes, tokens)?);
1092            }
1093        } else {
1094            tokens.expect_semicolon()?;
1095        }
1096        let ty = Type::Resource(Resource {
1097            span: name.span,
1098            funcs,
1099        });
1100        Ok(TypeDef {
1101            docs,
1102            attributes,
1103            name,
1104            ty,
1105        })
1106    }
1107
1108    fn parse_record(
1109        tokens: &mut Tokenizer<'a>,
1110        docs: Docs<'a>,
1111        attributes: Vec<Attribute<'a>>,
1112    ) -> Result<Self> {
1113        tokens.expect(Token::Record)?;
1114        let name = parse_id(tokens)?;
1115        let ty = Type::Record(Record {
1116            span: name.span,
1117            fields: parse_list(
1118                tokens,
1119                Token::LeftBrace,
1120                Token::RightBrace,
1121                |docs, tokens| {
1122                    let name = parse_id(tokens)?;
1123                    tokens.expect(Token::Colon)?;
1124                    let ty = Type::parse(tokens)?;
1125                    Ok(Field { docs, name, ty })
1126                },
1127            )?,
1128        });
1129        Ok(TypeDef {
1130            docs,
1131            attributes,
1132            name,
1133            ty,
1134        })
1135    }
1136
1137    fn parse_variant(
1138        tokens: &mut Tokenizer<'a>,
1139        docs: Docs<'a>,
1140        attributes: Vec<Attribute<'a>>,
1141    ) -> Result<Self> {
1142        tokens.expect(Token::Variant)?;
1143        let name = parse_id(tokens)?;
1144        let ty = Type::Variant(Variant {
1145            span: name.span,
1146            cases: parse_list(
1147                tokens,
1148                Token::LeftBrace,
1149                Token::RightBrace,
1150                |docs, tokens| {
1151                    let name = parse_id(tokens)?;
1152                    let ty = if tokens.eat(Token::LeftParen)? {
1153                        let ty = Type::parse(tokens)?;
1154                        tokens.expect(Token::RightParen)?;
1155                        Some(ty)
1156                    } else {
1157                        None
1158                    };
1159                    Ok(Case { docs, name, ty })
1160                },
1161            )?,
1162        });
1163        Ok(TypeDef {
1164            docs,
1165            attributes,
1166            name,
1167            ty,
1168        })
1169    }
1170
1171    fn parse_enum(
1172        tokens: &mut Tokenizer<'a>,
1173        docs: Docs<'a>,
1174        attributes: Vec<Attribute<'a>>,
1175    ) -> Result<Self> {
1176        tokens.expect(Token::Enum)?;
1177        let name = parse_id(tokens)?;
1178        let ty = Type::Enum(Enum {
1179            span: name.span,
1180            cases: parse_list(
1181                tokens,
1182                Token::LeftBrace,
1183                Token::RightBrace,
1184                |docs, tokens| {
1185                    let name = parse_id(tokens)?;
1186                    Ok(EnumCase { docs, name })
1187                },
1188            )?,
1189        });
1190        Ok(TypeDef {
1191            docs,
1192            attributes,
1193            name,
1194            ty,
1195        })
1196    }
1197}
1198
1199impl<'a> NamedFunc<'a> {
1200    fn parse(
1201        tokens: &mut Tokenizer<'a>,
1202        docs: Docs<'a>,
1203        attributes: Vec<Attribute<'a>>,
1204    ) -> Result<Self> {
1205        let name = parse_id(tokens)?;
1206        tokens.expect(Token::Colon)?;
1207        let func = Func::parse(tokens)?;
1208        tokens.expect_semicolon()?;
1209        Ok(NamedFunc {
1210            docs,
1211            attributes,
1212            name,
1213            func,
1214        })
1215    }
1216}
1217
1218fn parse_id<'a>(tokens: &mut Tokenizer<'a>) -> Result<Id<'a>> {
1219    match tokens.next()? {
1220        Some((span, Token::Id)) => Ok(Id {
1221            name: tokens.parse_id(span)?,
1222            span,
1223        }),
1224        Some((span, Token::ExplicitId)) => Ok(Id {
1225            name: tokens.parse_explicit_id(span)?,
1226            span,
1227        }),
1228        other => Err(err_expected(tokens, "an identifier or string", other).into()),
1229    }
1230}
1231
1232fn parse_opt_version(tokens: &mut Tokenizer<'_>) -> Result<Option<(Span, Version)>> {
1233    if tokens.eat(Token::At)? {
1234        parse_version(tokens).map(Some)
1235    } else {
1236        Ok(None)
1237    }
1238}
1239
/// Parses a semver version: the `major.minor.patch` core plus optional
/// pre-release (`-...`) and build-metadata (`+...`) suffixes, returning the
/// span covering the version text together with the parsed [`Version`].
fn parse_version(tokens: &mut Tokenizer<'_>) -> Result<(Span, Version)> {
    // The `major.minor.patch` core: three integers separated by periods.
    let start = tokens.expect(Token::Integer)?.start();
    tokens.expect(Token::Period)?;
    tokens.expect(Token::Integer)?;
    tokens.expect(Token::Period)?;
    let end = tokens.expect(Token::Integer)?.end();
    let mut span = Span::new(start, end);
    // Optionally extend the span over `-<pre-release>` and `+<build>`.
    eat_ids(tokens, Token::Minus, &mut span)?;
    eat_ids(tokens, Token::Plus, &mut span)?;
    // The raw text of the span is handed to the `semver` crate for the real
    // validation and parsing.
    let string = tokens.get_span(span);
    let version = Version::parse(string).map_err(|e| Error::new(span, e.to_string()))?;
    return Ok((span, version));

    // According to `semver.org` this is what we're parsing:
    //
    // ```ebnf
    // <pre-release> ::= <dot-separated pre-release identifiers>
    //
    // <dot-separated pre-release identifiers> ::= <pre-release identifier>
    //                                           | <pre-release identifier> "." <dot-separated pre-release identifiers>
    //
    // <build> ::= <dot-separated build identifiers>
    //
    // <dot-separated build identifiers> ::= <build identifier>
    //                                     | <build identifier> "." <dot-separated build identifiers>
    //
    // <pre-release identifier> ::= <alphanumeric identifier>
    //                            | <numeric identifier>
    //
    // <build identifier> ::= <alphanumeric identifier>
    //                      | <digits>
    //
    // <alphanumeric identifier> ::= <non-digit>
    //                             | <non-digit> <identifier characters>
    //                             | <identifier characters> <non-digit>
    //                             | <identifier characters> <non-digit> <identifier characters>
    //
    // <numeric identifier> ::= "0"
    //                        | <positive digit>
    //                        | <positive digit> <digits>
    //
    // <identifier characters> ::= <identifier character>
    //                           | <identifier character> <identifier characters>
    //
    // <identifier character> ::= <digit>
    //                          | <non-digit>
    //
    // <non-digit> ::= <letter>
    //               | "-"
    //
    // <digits> ::= <digit>
    //            | <digit> <digits>
    // ```
    //
    // This is loosely based on WIT syntax and an approximation is parsed here:
    //
    // * This function starts by parsing the optional leading `-` and `+` which
    //   indicates pre-release and build metadata.
    // * Afterwards all of $id, $integer, `-`, and `.` are chomped. The only
    //   exception here is that if `.` isn't followed by $id, $integer, or `-`
    //   then it's assumed that it's something like `use a:b@1.0.0-a.{...}`
    //   where the `.` is part of WIT syntax, not semver.
    //
    // Note that this additionally doesn't try to return any first-class errors.
    // Instead this bails out on something unrecognized for something else in
    // the system to return an error.
    fn eat_ids(tokens: &mut Tokenizer<'_>, prefix: Token, end: &mut Span) -> Result<()> {
        if !tokens.eat(prefix)? {
            return Ok(());
        }
        loop {
            // Lex speculatively on a clone; `tokens` is only advanced once a
            // token is known to belong to the version string.
            let mut clone = tokens.clone();
            match clone.next()? {
                Some((span, Token::Id | Token::Integer | Token::Minus)) => {
                    end.set_end(span.end());
                    *tokens = clone;
                }
                // A `.` only belongs to the version if followed by another
                // identifier-like token; otherwise it's WIT syntax.
                Some((_span, Token::Period)) => match clone.next()? {
                    Some((span, Token::Id | Token::Integer | Token::Minus)) => {
                        end.set_end(span.end());
                        *tokens = clone;
                    }
                    _ => break Ok(()),
                },
                _ => break Ok(()),
            }
        }
    }
}
1329
1330fn parse_docs<'a>(tokens: &mut Tokenizer<'a>) -> Result<Docs<'a>> {
1331    let mut docs = Docs::default();
1332    let mut clone = tokens.clone();
1333    let mut started = false;
1334    while let Some((span, token)) = clone.next_raw()? {
1335        match token {
1336            Token::Whitespace => {}
1337            Token::Comment => {
1338                let comment = tokens.get_span(span);
1339                if !started {
1340                    docs.span.set_start(span.start());
1341                    started = true;
1342                }
1343                let trailing_ws = comment
1344                    .bytes()
1345                    .rev()
1346                    .take_while(|ch| ch.is_ascii_whitespace())
1347                    .count();
1348                docs.span.set_end(span.end() - (trailing_ws as u32));
1349                docs.docs.push(comment.into());
1350            }
1351            _ => break,
1352        };
1353        *tokens = clone.clone();
1354    }
1355    Ok(docs)
1356}
1357
impl<'a> Type<'a> {
    /// Parses a single type from `tokens`, consuming all tokens that make it
    /// up. Compound types (`tuple`, `list`, `result`, ...) recurse into
    /// `Type::parse` for their payloads.
    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
        match tokens.next()? {
            // Primitive types record only the span of their keyword.
            Some((span, Token::U8)) => Ok(Type::U8(span)),
            Some((span, Token::U16)) => Ok(Type::U16(span)),
            Some((span, Token::U32)) => Ok(Type::U32(span)),
            Some((span, Token::U64)) => Ok(Type::U64(span)),
            Some((span, Token::S8)) => Ok(Type::S8(span)),
            Some((span, Token::S16)) => Ok(Type::S16(span)),
            Some((span, Token::S32)) => Ok(Type::S32(span)),
            Some((span, Token::S64)) => Ok(Type::S64(span)),
            Some((span, Token::F32)) => Ok(Type::F32(span)),
            Some((span, Token::F64)) => Ok(Type::F64(span)),
            Some((span, Token::Char)) => Ok(Type::Char(span)),

            // tuple<T, U, ...>
            Some((span, Token::Tuple)) => {
                let types = parse_list(
                    tokens,
                    Token::LessThan,
                    Token::GreaterThan,
                    |_docs, tokens| Type::parse(tokens),
                )?;
                Ok(Type::Tuple(Tuple { span, types }))
            }

            Some((span, Token::Bool)) => Ok(Type::Bool(span)),
            Some((span, Token::String_)) => Ok(Type::String(span)),

            // list<T>
            // list<T, N>
            Some((span, Token::List)) => {
                tokens.expect(Token::LessThan)?;
                let ty = Type::parse(tokens)?;
                // An optional second argument is an integer fixed length.
                let size = if tokens.eat(Token::Comma)? {
                    let number = tokens.next()?;
                    if let Some((span, Token::Integer)) = number {
                        let size: u32 = tokens.get_span(span).parse()?;
                        Some(size)
                    } else {
                        return Err(err_expected(tokens, "fixed-length", number).into());
                    }
                } else {
                    None
                };
                tokens.expect(Token::GreaterThan)?;
                if let Some(size) = size {
                    Ok(Type::FixedLengthList(FixedLengthList {
                        span,
                        ty: Box::new(ty),
                        size,
                    }))
                } else {
                    Ok(Type::List(List {
                        span,
                        ty: Box::new(ty),
                    }))
                }
            }

            // map<K, V>
            Some((span, Token::Map)) => {
                tokens.expect(Token::LessThan)?;
                let key = Type::parse(tokens)?;
                tokens.expect(Token::Comma)?;
                let value = Type::parse(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::Map(Map {
                    span,
                    key: Box::new(key),
                    value: Box::new(value),
                }))
            }

            // option<T>
            Some((span, Token::Option_)) => {
                tokens.expect(Token::LessThan)?;
                let ty = Type::parse(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::Option(Option_ {
                    span,
                    ty: Box::new(ty),
                }))
            }

            // result<T, E>
            // result<_, E>
            // result<T>
            // result
            Some((span, Token::Result_)) => {
                let mut ok = None;
                let mut err = None;

                if tokens.eat(Token::LessThan)? {
                    // `_` in the ok position means "no ok type, only err".
                    if tokens.eat(Token::Underscore)? {
                        tokens.expect(Token::Comma)?;
                        err = Some(Box::new(Type::parse(tokens)?));
                    } else {
                        ok = Some(Box::new(Type::parse(tokens)?));
                        if tokens.eat(Token::Comma)? {
                            err = Some(Box::new(Type::parse(tokens)?));
                        }
                    };
                    tokens.expect(Token::GreaterThan)?;
                };
                Ok(Type::Result(Result_ { span, ok, err }))
            }

            // future<T>
            // future
            Some((span, Token::Future)) => {
                let mut ty = None;

                if tokens.eat(Token::LessThan)? {
                    ty = Some(Box::new(Type::parse(tokens)?));
                    tokens.expect(Token::GreaterThan)?;
                };
                Ok(Type::Future(Future { span, ty }))
            }

            // stream<T>
            // stream
            Some((span, Token::Stream)) => {
                let mut ty = None;

                if tokens.eat(Token::LessThan)? {
                    ty = Some(Box::new(Type::parse(tokens)?));
                    tokens.expect(Token::GreaterThan)?;
                };
                Ok(Type::Stream(Stream { span, ty }))
            }

            // error-context
            Some((span, Token::ErrorContext)) => Ok(Type::ErrorContext(span)),

            // own<T>
            Some((_span, Token::Own)) => {
                tokens.expect(Token::LessThan)?;
                let resource = parse_id(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::Handle(Handle::Own { resource }))
            }

            // borrow<T>
            Some((_span, Token::Borrow)) => {
                tokens.expect(Token::LessThan)?;
                let resource = parse_id(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::Handle(Handle::Borrow { resource }))
            }

            // `foo` — a reference to a named type, resolved later.
            Some((span, Token::Id)) => Ok(Type::Name(Id {
                name: tokens.parse_id(span)?.into(),
                span,
            })),
            // `%foo`
            Some((span, Token::ExplicitId)) => Ok(Type::Name(Id {
                name: tokens.parse_explicit_id(span)?.into(),
                span,
            })),

            other => Err(err_expected(tokens, "a type", other).into()),
        }
    }

    /// Returns the source span recorded when this type was parsed.
    fn span(&self) -> Span {
        match self {
            Type::Bool(span)
            | Type::U8(span)
            | Type::U16(span)
            | Type::U32(span)
            | Type::U64(span)
            | Type::S8(span)
            | Type::S16(span)
            | Type::S32(span)
            | Type::S64(span)
            | Type::F32(span)
            | Type::F64(span)
            | Type::Char(span)
            | Type::String(span)
            | Type::ErrorContext(span) => *span,
            Type::Name(id) => id.span,
            Type::List(l) => l.span,
            Type::Map(m) => m.span,
            Type::FixedLengthList(l) => l.span,
            Type::Handle(h) => h.span(),
            Type::Resource(r) => r.span,
            Type::Record(r) => r.span,
            Type::Flags(f) => f.span,
            Type::Variant(v) => v.span,
            Type::Tuple(t) => t.span,
            Type::Enum(e) => e.span,
            Type::Option(o) => o.span,
            Type::Result(r) => r.span,
            Type::Future(f) => f.span,
            Type::Stream(s) => s.span,
        }
    }
}
1558
1559fn parse_list<'a, T>(
1560    tokens: &mut Tokenizer<'a>,
1561    start: Token,
1562    end: Token,
1563    parse: impl FnMut(Docs<'a>, &mut Tokenizer<'a>) -> Result<T>,
1564) -> Result<Vec<T>> {
1565    tokens.expect(start)?;
1566    parse_list_trailer(tokens, end, parse)
1567}
1568
1569fn parse_list_trailer<'a, T>(
1570    tokens: &mut Tokenizer<'a>,
1571    end: Token,
1572    mut parse: impl FnMut(Docs<'a>, &mut Tokenizer<'a>) -> Result<T>,
1573) -> Result<Vec<T>> {
1574    let mut items = Vec::new();
1575    loop {
1576        // get docs before we skip them to try to eat the end token
1577        let docs = parse_docs(tokens)?;
1578
1579        // if we found an end token then we're done
1580        if tokens.eat(end)? {
1581            break;
1582        }
1583
1584        let item = parse(docs, tokens)?;
1585        items.push(item);
1586
1587        // if there's no trailing comma then this is required to be the end,
1588        // otherwise we go through the loop to try to get another item
1589        if !tokens.eat(Token::Comma)? {
1590            tokens.expect(end)?;
1591            break;
1592        }
1593    }
1594    Ok(items)
1595}
1596
1597fn err_expected(
1598    tokens: &Tokenizer<'_>,
1599    expected: &'static str,
1600    found: Option<(Span, Token)>,
1601) -> Error {
1602    match found {
1603        Some((span, token)) => Error::new(
1604            span,
1605            format!("expected {}, found {}", expected, token.describe()),
1606        ),
1607        None => Error::new(tokens.eof_span(), format!("expected {expected}, found eof")),
1608    }
1609}
1610
/// A parsed `@...` attribute preceding an item.
enum Attribute<'a> {
    /// `@since(version = <semver>)`
    Since { span: Span, version: Version },
    /// `@unstable(feature = <id>)`
    Unstable { span: Span, feature: Id<'a> },
    /// `@deprecated(version = <semver>)`
    Deprecated { span: Span, version: Version },
}
1616
1617impl<'a> Attribute<'a> {
1618    fn parse_list(tokens: &mut Tokenizer<'a>) -> Result<Vec<Attribute<'a>>> {
1619        let mut ret = Vec::new();
1620        while tokens.eat(Token::At)? {
1621            let id = parse_id(tokens)?;
1622            let attr = match id.name {
1623                "since" => {
1624                    tokens.expect(Token::LeftParen)?;
1625                    eat_id(tokens, "version")?;
1626                    tokens.expect(Token::Equals)?;
1627                    let (_span, version) = parse_version(tokens)?;
1628                    tokens.expect(Token::RightParen)?;
1629                    Attribute::Since {
1630                        span: id.span,
1631                        version,
1632                    }
1633                }
1634                "unstable" => {
1635                    tokens.expect(Token::LeftParen)?;
1636                    eat_id(tokens, "feature")?;
1637                    tokens.expect(Token::Equals)?;
1638                    let feature = parse_id(tokens)?;
1639                    tokens.expect(Token::RightParen)?;
1640                    Attribute::Unstable {
1641                        span: id.span,
1642                        feature,
1643                    }
1644                }
1645                "deprecated" => {
1646                    tokens.expect(Token::LeftParen)?;
1647                    eat_id(tokens, "version")?;
1648                    tokens.expect(Token::Equals)?;
1649                    let (_span, version) = parse_version(tokens)?;
1650                    tokens.expect(Token::RightParen)?;
1651                    Attribute::Deprecated {
1652                        span: id.span,
1653                        version,
1654                    }
1655                }
1656                other => {
1657                    bail!(Error::new(id.span, format!("unknown attribute `{other}`"),))
1658                }
1659            };
1660            ret.push(attr);
1661        }
1662        Ok(ret)
1663    }
1664
1665    fn span(&self) -> Span {
1666        match self {
1667            Attribute::Since { span, .. }
1668            | Attribute::Unstable { span, .. }
1669            | Attribute::Deprecated { span, .. } => *span,
1670        }
1671    }
1672}
1673
1674fn eat_id(tokens: &mut Tokenizer<'_>, expected: &str) -> Result<Span> {
1675    let id = parse_id(tokens)?;
1676    if id.name != expected {
1677        bail!(Error::new(
1678            id.span,
1679            format!("expected `{expected}`, found `{}`", id.name),
1680        ));
1681    }
1682    Ok(id.span)
1683}
1684
/// A listing of source files which are used to get parsed into an
/// [`UnresolvedPackage`].
///
/// [`UnresolvedPackage`]: crate::UnresolvedPackage
#[derive(Clone, Default, Debug)]
pub struct SourceMap {
    // All files added so far, in insertion order.
    sources: Vec<Source>,
    // Total byte length of all contents pushed so far; also the offset that
    // will be assigned to the next file added.
    offset: u32,
}
1694
/// A single file tracked by a [`SourceMap`].
#[derive(Clone, Debug)]
struct Source {
    // Byte offset at which this file's contents begin within the overall map.
    offset: u32,
    // Name used when rendering error messages (typically a filesystem path).
    path: String,
    // File contents, including the extra `\n` appended by `push_str`.
    contents: String,
}
1701
1702impl SourceMap {
1703    /// Creates a new empty source map.
1704    pub fn new() -> SourceMap {
1705        SourceMap::default()
1706    }
1707
1708    /// Reads the file `path` on the filesystem and appends its contents to this
1709    /// [`SourceMap`].
1710    #[cfg(feature = "std")]
1711    pub fn push_file(&mut self, path: &Path) -> Result<()> {
1712        let contents = std::fs::read_to_string(path)
1713            .with_context(|| format!("failed to read file {path:?}"))?;
1714        self.push(path, contents);
1715        Ok(())
1716    }
1717
1718    /// Appends the given contents with the given path into this source map.
1719    ///
1720    /// The `path` provided is not read from the filesystem and is instead only
1721    /// used during error messages. Each file added to a [`SourceMap`] is
1722    /// used to create the final parsed package namely by unioning all the
1723    /// interfaces and worlds defined together. Note that each file has its own
1724    /// personal namespace, however, for top-level `use` and such.
1725    #[cfg(feature = "std")]
1726    pub fn push(&mut self, path: &Path, contents: impl Into<String>) {
1727        self.push_str(&path.display().to_string(), contents);
1728    }
1729
1730    /// Appends the given contents with the given source name into this source map.
1731    ///
1732    /// The `path` provided is not read from the filesystem and is instead only
1733    /// used during error messages. Each file added to a [`SourceMap`] is
1734    /// used to create the final parsed package namely by unioning all the
1735    /// interfaces and worlds defined together. Note that each file has its own
1736    /// personal namespace, however, for top-level `use` and such.
1737    pub fn push_str(&mut self, path: &str, contents: impl Into<String>) {
1738        let mut contents = contents.into();
1739        // Guarantee that there's at least one character in these contents by
1740        // appending a single newline to the end. This is excluded from
1741        // tokenization below so it's only here to ensure that spans which point
1742        // one byte beyond the end of a file (eof) point to the same original
1743        // file.
1744        contents.push('\n');
1745        let new_offset = self.offset + u32::try_from(contents.len()).unwrap();
1746        self.sources.push(Source {
1747            offset: self.offset,
1748            path: path.to_string(),
1749            contents,
1750        });
1751        self.offset = new_offset;
1752    }
1753
1754    /// Appends all sources from another `SourceMap` into this one.
1755    ///
1756    /// Returns the byte offset that should be added to all `Span.start` and
1757    /// `Span.end` values from the appended source map to make them valid
1758    /// in the combined source map.
1759    pub fn append(&mut self, other: SourceMap) -> u32 {
1760        let base = self.offset;
1761        for mut source in other.sources {
1762            source.offset += base;
1763            self.sources.push(source);
1764        }
1765        self.offset += other.offset;
1766        base
1767    }
1768
    /// Parses the files added to this source map into a
    /// [`UnresolvedPackageGroup`].
    pub fn parse(self) -> Result<UnresolvedPackageGroup> {
        let mut nested = Vec::new();
        let main = self.rewrite_error(|| {
            let mut resolver = Resolver::default();
            let mut srcs = self.sources.iter().collect::<Vec<_>>();
            // Sort by path so files are processed in a deterministic order.
            srcs.sort_by_key(|src| &src.path);

            // Parse each source file individually. A tokenizer is created here
            // from settings and then `PackageFile` is used to parse the whole
            // stream of tokens.
            for src in srcs {
                let mut tokens = Tokenizer::new(
                    // chop off the forcibly appended `\n` character when
                    // passing through the source to get tokenized.
                    &src.contents[..src.contents.len() - 1],
                    src.offset,
                )
                .with_context(|| format!("failed to tokenize path: {}", src.path))?;
                let mut file = PackageFile::parse(&mut tokens)?;

                // Filter out any nested packages and resolve them separately.
                // Nested packages have only a single "file" so only one item
                // is pushed into a `Resolver`. Note that a nested `Resolver`
                // is used here, not the outer one.
                //
                // Note that filtering out `Package` items is required due to
                // how the implementation of disallowing nested packages in
                // nested packages currently works.
                for item in mem::take(&mut file.decl_list.items) {
                    match item {
                        AstItem::Package(nested_pkg) => {
                            let mut resolve = Resolver::default();
                            resolve.push(nested_pkg).with_context(|| {
                                format!("failed to handle nested package in: {}", src.path)
                            })?;

                            nested.push(resolve.resolve()?);
                        }
                        // Non-package items are retained for this file.
                        other => file.decl_list.items.push(other),
                    }
                }

                // With nested packages handled push this file into the
                // resolver.
                resolver
                    .push(file)
                    .with_context(|| format!("failed to start resolving path: {}", src.path))?;
            }
            Ok(resolver.resolve()?)
        })?;
        Ok(UnresolvedPackageGroup {
            main,
            nested,
            source_map: self,
        })
    }
1827
    /// Invokes `f` and, if it fails, attaches source-highlighting information
    /// from this map to any recognized error type before propagating it.
    pub(crate) fn rewrite_error<F, T>(&self, f: F) -> Result<T>
    where
        F: FnOnce() -> Result<T>,
    {
        let mut err = match f() {
            Ok(t) => return Ok(t),
            Err(e) => e,
        };
        // Parse errors and package-not-found errors are highlighted in place
        // via their own `highlight` methods.
        if let Some(parse) = err.downcast_mut::<Error>() {
            parse.highlight(self);
            return Err(err);
        }
        if let Some(notfound) = err.downcast_mut::<PackageNotFoundError>() {
            notfound.highlight(self);
            return Err(err);
        }

        // Lexer errors only carry a position, so a fresh error message is
        // rendered from that position instead.
        if let Some(lex) = err.downcast_ref::<lex::Error>() {
            let pos = match lex {
                lex::Error::Unexpected(at, _)
                | lex::Error::UnterminatedComment(at)
                | lex::Error::Wanted { at, .. }
                | lex::Error::InvalidCharInId(at, _)
                | lex::Error::IdPartEmpty(at)
                | lex::Error::InvalidEscape(at, _) => *at,
            };
            let msg = self.highlight_err(pos, None, lex);
            bail!("{msg}")
        }

        if let Some(sort) = err.downcast_mut::<toposort::Error>() {
            sort.highlight(self);
        }

        Err(err)
    }
1864
1865    pub(crate) fn highlight_span(&self, span: Span, err: impl fmt::Display) -> Option<String> {
1866        if !span.is_known() {
1867            return None;
1868        }
1869        Some(self.highlight_err(span.start(), Some(span.end()), err))
1870    }
1871
    /// Renders `err` with a `file:line:col` header, the offending source
    /// line, and a caret (plus optional `-` underline) beneath it.
    ///
    /// `start` and `end` are absolute byte offsets into this source map's
    /// concatenated contents; `end`, when present, extends the `^` marker
    /// across the rest of the span.
    fn highlight_err(&self, start: u32, end: Option<u32>, err: impl fmt::Display) -> String {
        let src = self.source_for_offset(start);
        // Translate absolute offsets into offsets within `src.contents`.
        let start = src.to_relative_offset(start);
        let end = end.map(|end| src.to_relative_offset(end));
        let (line, col) = src.linecol(start);
        let snippet = src.contents.lines().nth(line).unwrap_or("");
        // Shift from zero-based internals to one-based display coordinates.
        let line = line + 1;
        let col = col + 1;

        // If the snippet is too large then don't overload output on a terminal
        // for example and instead just print the error. This also sidesteps
        // Rust's restriction that `>0$` below has to be less than `u16::MAX`.
        if snippet.len() > 500 {
            return format!("{}:{line}:{col}: {err}", src.path);
        }
        // `{marker:>0$}` right-aligns the `^` to column `col` on the marker
        // line, lining it up under the error location in the snippet.
        let mut msg = format!(
            "\
{err}
     --> {file}:{line}:{col}
      |
 {line:4} | {snippet}
      | {marker:>0$}",
            col,
            file = src.path,
            marker = "^",
        );
        // Extend the marker with one `-` per character after the first so
        // the whole span is underlined (skipped if the range is not on a
        // char boundary and `get` returns `None`).
        if let Some(end) = end {
            if let Some(s) = src.contents.get(start..end) {
                for _ in s.chars().skip(1) {
                    msg.push('-');
                }
            }
        }
        return msg;
    }
1907
1908    /// Renders a span as a human-readable location string (e.g., "file.wit:10:5").
1909    pub fn render_location(&self, span: Span) -> String {
1910        if !span.is_known() {
1911            return "<unknown>".to_string();
1912        }
1913        let start = span.start();
1914        let src = self.source_for_offset(start);
1915        let rel_start = src.to_relative_offset(start);
1916        let (line, col) = src.linecol(rel_start);
1917        format!(
1918            "{file}:{line}:{col}",
1919            file = src.path,
1920            line = line + 1,
1921            col = col + 1,
1922        )
1923    }
1924
1925    fn source_for_offset(&self, start: u32) -> &Source {
1926        let i = match self.sources.binary_search_by_key(&start, |src| src.offset) {
1927            Ok(i) => i,
1928            Err(i) => i - 1,
1929        };
1930        &self.sources[i]
1931    }
1932
1933    /// Returns an iterator over all filenames added to this source map.
1934    #[cfg(feature = "std")]
1935    pub fn source_files(&self) -> impl Iterator<Item = &Path> {
1936        self.sources.iter().map(|src| Path::new(&src.path))
1937    }
1938
1939    /// Returns an iterator over all source names added to this source map.
1940    pub fn source_names(&self) -> impl Iterator<Item = &str> {
1941        self.sources.iter().map(|src| src.path.as_str())
1942    }
1943}
1944
1945impl Source {
1946    fn to_relative_offset(&self, offset: u32) -> usize {
1947        usize::try_from(offset - self.offset).unwrap()
1948    }
1949
1950    fn linecol(&self, relative_offset: usize) -> (usize, usize) {
1951        let mut cur = 0;
1952        // Use split_terminator instead of lines so that if there is a `\r`,
1953        // it is included in the offset calculation. The `+1` values below
1954        // account for the `\n`.
1955        for (i, line) in self.contents.split_terminator('\n').enumerate() {
1956            if cur + line.len() + 1 > relative_offset {
1957                return (i, relative_offset - cur);
1958            }
1959            cur += line.len() + 1;
1960        }
1961        (self.contents.lines().count(), 0)
1962    }
1963}
1964
/// Result of [`parse_use_path`]: a use path that is either a bare name or a
/// package-qualified item.
pub enum ParsedUsePath {
    /// A bare identifier with no package qualifier.
    Name(String),
    /// A package name together with the name of the item within that package.
    Package(crate::PackageName, String),
}
1969
1970pub fn parse_use_path(s: &str) -> Result<ParsedUsePath> {
1971    let mut tokens = Tokenizer::new(s, 0)?;
1972    let path = UsePath::parse(&mut tokens)?;
1973    if tokens.next()?.is_some() {
1974        bail!("trailing tokens in path specifier");
1975    }
1976    Ok(match path {
1977        UsePath::Id(id) => ParsedUsePath::Name(id.name.to_string()),
1978        UsePath::Package { id, name } => {
1979            ParsedUsePath::Package(id.package_name(), name.name.to_string())
1980        }
1981    })
1982}