wit_parser/
ast.rs

use crate::{Error, PackageNotFoundError, UnresolvedPackageGroup};
use anyhow::{Context, Result, bail};
use lex::{Span, Token, Tokenizer};
use semver::Version;
use std::borrow::Cow;
use std::fmt;
use std::mem;
use std::path::{Path, PathBuf};

pub mod lex;

pub use resolve::Resolver;
mod resolve;
pub mod toposort;

pub use lex::validate_id;

/// Representation of a single WIT `*.wit` file and nested packages.
struct PackageFile<'a> {
    /// Optional `package foo:bar;` header
    package_id: Option<PackageName<'a>>,
    /// Other AST items.
    decl_list: DeclList<'a>,
}

impl<'a> PackageFile<'a> {
    /// Parse a standalone file represented by `tokens`.
    ///
    /// This will optionally start with `package foo:bar;` and then will have a
    /// list of ast items after it.
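    ///
    /// For example, an illustrative file in the traditional implicit format:
    ///
    /// ```wit
    /// package foo:bar;
    ///
    /// interface i {}
    /// world w {}
    /// ```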
    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
        let mut package_name_tokens_peek = tokens.clone();
        let docs = parse_docs(&mut package_name_tokens_peek)?;

        // Parse `package foo:bar;` but throw it out if it's actually
        // `package foo:bar { ... }` since that's an ast item instead.
        let package_id = if package_name_tokens_peek.eat(Token::Package)? {
            let name = PackageName::parse(&mut package_name_tokens_peek, docs)?;
            if package_name_tokens_peek.eat(Token::Semicolon)? {
                *tokens = package_name_tokens_peek;
                Some(name)
            } else {
                None
            }
        } else {
            None
        };
        let decl_list = DeclList::parse_until(tokens, None)?;
        Ok(PackageFile {
            package_id,
            decl_list,
        })
    }

    /// Parse a nested package of the form `package foo:bar { ... }`
    fn parse_nested(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        let span = tokens.expect(Token::Package)?;
        if !attributes.is_empty() {
            bail!(Error::new(
                span,
                format!("cannot place attributes on nested packages"),
            ));
        }
        let package_id = PackageName::parse(tokens, docs)?;
        tokens.expect(Token::LeftBrace)?;
        let decl_list = DeclList::parse_until(tokens, Some(Token::RightBrace))?;
        Ok(PackageFile {
            package_id: Some(package_id),
            decl_list,
        })
    }
}

/// Stores all of the declarations in a package's scope. In AST terms, this
/// means everything except the `package` declaration that demarcates a package
/// scope. In the traditional implicit format, these are all of the
/// non-`package` declarations in the file:
///
/// ```wit
/// package foo:name;
///
/// /* START DECL LIST */
/// // Some comment...
/// interface i {}
/// world w {}
/// /* END DECL LIST */
/// ```
///
/// In the nested package style, a [`DeclList`] is everything inside of each
/// `package` element's brackets:
///
/// ```wit
/// package foo:name {
///   /* START FIRST DECL LIST */
///   // Some comment...
///   interface i {}
///   world w {}
///   /* END FIRST DECL LIST */
/// }
///
/// package bar:name {
///   /* START SECOND DECL LIST */
///   // Some comment...
///   interface i {}
///   world w {}
///   /* END SECOND DECL LIST */
/// }
/// ```
#[derive(Default)]
pub struct DeclList<'a> {
    items: Vec<AstItem<'a>>,
}

impl<'a> DeclList<'a> {
    fn parse_until(tokens: &mut Tokenizer<'a>, end: Option<Token>) -> Result<DeclList<'a>> {
        let mut items = Vec::new();
        let mut docs = parse_docs(tokens)?;
        loop {
            match end {
                Some(end) => {
                    if tokens.eat(end)? {
                        break;
                    }
                }
                None => {
                    if tokens.clone().next()?.is_none() {
                        break;
                    }
                }
            }
            items.push(AstItem::parse(tokens, docs)?);
            docs = parse_docs(tokens)?;
        }
        Ok(DeclList { items })
    }

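    /// Visits every use/include path referenced by this declaration list,
    /// including paths inside nested packages, worlds, and interfaces.
    ///
    /// For each path, `f` is given the name of the enclosing item (if any),
    /// the attributes attached to the referencing item, the path itself, the
    /// imported names for a `use` (if any), and whether the path is expected
    /// to resolve to a world, an interface, or is not yet known.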
    fn for_each_path<'b>(
        &'b self,
        f: &mut dyn FnMut(
            Option<&'b Id<'a>>,
            &'b [Attribute<'a>],
            &'b UsePath<'a>,
            Option<&'b [UseName<'a>]>,
            WorldOrInterface,
        ) -> Result<()>,
    ) -> Result<()> {
        for item in self.items.iter() {
            match item {
                AstItem::World(world) => {
                    // Visit imports here first before exports to help preserve
                    // round-tripping of documents because printing a world puts
                    // imports first but textually they can be listed with
                    // exports first.
                    let mut imports = Vec::new();
                    let mut exports = Vec::new();
                    for item in world.items.iter() {
                        match item {
                            WorldItem::Use(u) => f(
                                None,
                                &u.attributes,
                                &u.from,
                                Some(&u.names),
                                WorldOrInterface::Interface,
                            )?,
                            WorldItem::Include(i) => f(
                                Some(&world.name),
                                &i.attributes,
                                &i.from,
                                None,
                                WorldOrInterface::World,
                            )?,
                            WorldItem::Type(_) => {}
                            WorldItem::Import(Import {
                                kind, attributes, ..
                            }) => imports.push((kind, attributes)),
                            WorldItem::Export(Export {
                                kind, attributes, ..
                            }) => exports.push((kind, attributes)),
                        }
                    }

                    let mut visit_kind =
                        |kind: &'b ExternKind<'a>, attrs: &'b [Attribute<'a>]| match kind {
                            ExternKind::Interface(_, items) => {
                                for item in items {
                                    match item {
                                        InterfaceItem::Use(u) => f(
                                            None,
                                            &u.attributes,
                                            &u.from,
                                            Some(&u.names),
                                            WorldOrInterface::Interface,
                                        )?,
                                        _ => {}
                                    }
                                }
                                Ok(())
                            }
                            ExternKind::Path(path) => {
                                f(None, attrs, path, None, WorldOrInterface::Interface)
                            }
                            ExternKind::Func(..) => Ok(()),
                        };

                    for (kind, attrs) in imports {
                        visit_kind(kind, attrs)?;
                    }
                    for (kind, attrs) in exports {
                        visit_kind(kind, attrs)?;
                    }
                }
                AstItem::Interface(i) => {
                    for item in i.items.iter() {
                        match item {
                            InterfaceItem::Use(u) => f(
                                Some(&i.name),
                                &u.attributes,
                                &u.from,
                                Some(&u.names),
                                WorldOrInterface::Interface,
                            )?,
                            _ => {}
                        }
                    }
                }
                AstItem::Use(u) => {
                    // At the top level, we don't know if this is a world or an interface.
                    // It is up to the resolver to decide how to handle this ambiguity.
                    f(
                        None,
                        &u.attributes,
                        &u.item,
                        None,
                        WorldOrInterface::Unknown,
                    )?;
                }

                AstItem::Package(pkg) => pkg.decl_list.for_each_path(f)?,
            }
        }
        Ok(())
    }
}

enum AstItem<'a> {
    Interface(Interface<'a>),
    World(World<'a>),
    Use(ToplevelUse<'a>),
    Package(PackageFile<'a>),
}

impl<'a> AstItem<'a> {
    fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
        let attributes = Attribute::parse_list(tokens)?;
        match tokens.clone().next()? {
            Some((_span, Token::Interface)) => {
                Interface::parse(tokens, docs, attributes).map(Self::Interface)
            }
            Some((_span, Token::World)) => World::parse(tokens, docs, attributes).map(Self::World),
            Some((_span, Token::Use)) => ToplevelUse::parse(tokens, attributes).map(Self::Use),
            Some((_span, Token::Package)) => {
                PackageFile::parse_nested(tokens, docs, attributes).map(Self::Package)
            }
            other => Err(err_expected(tokens, "`world`, `interface` or `use`", other).into()),
        }
    }
}

#[derive(Debug, Clone)]
struct PackageName<'a> {
    docs: Docs<'a>,
    span: Span,
    namespace: Id<'a>,
    name: Id<'a>,
    version: Option<(Span, Version)>,
}

impl<'a> PackageName<'a> {
    fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
        let namespace = parse_id(tokens)?;
        tokens.expect(Token::Colon)?;
        let name = parse_id(tokens)?;
        let version = parse_opt_version(tokens)?;
        Ok(PackageName {
            docs,
            span: Span {
                start: namespace.span.start,
                end: version
                    .as_ref()
                    .map(|(s, _)| s.end)
                    .unwrap_or(name.span.end),
            },
            namespace,
            name,
            version,
        })
    }

    fn package_name(&self) -> crate::PackageName {
        crate::PackageName {
            namespace: self.namespace.name.to_string(),
            name: self.name.name.to_string(),
            version: self.version.as_ref().map(|(_, v)| v.clone()),
        }
    }
}

struct ToplevelUse<'a> {
    span: Span,
    attributes: Vec<Attribute<'a>>,
    item: UsePath<'a>,
    as_: Option<Id<'a>>,
}

impl<'a> ToplevelUse<'a> {
    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> {
        let span = tokens.expect(Token::Use)?;
        let item = UsePath::parse(tokens)?;
        let as_ = if tokens.eat(Token::As)? {
            Some(parse_id(tokens)?)
        } else {
            None
        };
        tokens.expect_semicolon()?;
        Ok(ToplevelUse {
            span,
            attributes,
            item,
            as_,
        })
    }
}

struct World<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    items: Vec<WorldItem<'a>>,
}

impl<'a> World<'a> {
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::World)?;
        let name = parse_id(tokens)?;
        let items = Self::parse_items(tokens)?;
        Ok(World {
            docs,
            attributes,
            name,
            items,
        })
    }

    fn parse_items(tokens: &mut Tokenizer<'a>) -> Result<Vec<WorldItem<'a>>> {
        tokens.expect(Token::LeftBrace)?;
        let mut items = Vec::new();
        loop {
            let docs = parse_docs(tokens)?;
            if tokens.eat(Token::RightBrace)? {
                break;
            }
            let attributes = Attribute::parse_list(tokens)?;
            items.push(WorldItem::parse(tokens, docs, attributes)?);
        }
        Ok(items)
    }
}

enum WorldItem<'a> {
    Import(Import<'a>),
    Export(Export<'a>),
    Use(Use<'a>),
    Type(TypeDef<'a>),
    Include(Include<'a>),
}

impl<'a> WorldItem<'a> {
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<WorldItem<'a>> {
        match tokens.clone().next()? {
            Some((_span, Token::Import)) => {
                Import::parse(tokens, docs, attributes).map(WorldItem::Import)
            }
            Some((_span, Token::Export)) => {
                Export::parse(tokens, docs, attributes).map(WorldItem::Export)
            }
            Some((_span, Token::Use)) => Use::parse(tokens, attributes).map(WorldItem::Use),
            Some((_span, Token::Type)) => {
                TypeDef::parse(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Flags)) => {
                TypeDef::parse_flags(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Resource)) => {
                TypeDef::parse_resource(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Record)) => {
                TypeDef::parse_record(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Variant)) => {
                TypeDef::parse_variant(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Enum)) => {
                TypeDef::parse_enum(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Include)) => {
                Include::parse(tokens, attributes).map(WorldItem::Include)
            }
            other => Err(err_expected(
                tokens,
                "`import`, `export`, `include`, `use`, or type definition",
                other,
            )
            .into()),
        }
    }
}

struct Import<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    kind: ExternKind<'a>,
}

impl<'a> Import<'a> {
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Import<'a>> {
        tokens.expect(Token::Import)?;
        let kind = ExternKind::parse(tokens)?;
        Ok(Import {
            docs,
            attributes,
            kind,
        })
    }
}

struct Export<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    kind: ExternKind<'a>,
}

impl<'a> Export<'a> {
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Export<'a>> {
        tokens.expect(Token::Export)?;
        let kind = ExternKind::parse(tokens)?;
        Ok(Export {
            docs,
            attributes,
            kind,
        })
    }
}

enum ExternKind<'a> {
    Interface(Id<'a>, Vec<InterfaceItem<'a>>),
    Path(UsePath<'a>),
    Func(Id<'a>, Func<'a>),
}

impl<'a> ExternKind<'a> {
    fn parse(tokens: &mut Tokenizer<'a>) -> Result<ExternKind<'a>> {
        // Create a copy of the token stream to test out if this is a function
        // or an interface import. In those situations the token stream gets
        // reset to the state of the clone and we continue down those paths.
        //
        // If neither a function nor an interface appears here though then the
        // clone is thrown away and the original token stream is parsed for an
        // interface. This will redo the original ID parse and the original
        // colon parse, but that shouldn't be too bad perf-wise.
        let mut clone = tokens.clone();
        let id = parse_id(&mut clone)?;
        if clone.eat(Token::Colon)? {
            // import foo: async? func(...)
            if clone.clone().eat(Token::Func)? || clone.clone().eat(Token::Async)? {
                *tokens = clone;
                let ret = ExternKind::Func(id, Func::parse(tokens)?);
                tokens.expect_semicolon()?;
                return Ok(ret);
            }

            // import foo: interface { ... }
            if clone.eat(Token::Interface)? {
                *tokens = clone;
                return Ok(ExternKind::Interface(id, Interface::parse_items(tokens)?));
            }
        }

        // import foo
        // import foo/bar
        // import foo:bar/baz
        let ret = ExternKind::Path(UsePath::parse(tokens)?);
        tokens.expect_semicolon()?;
        Ok(ret)
    }

    fn span(&self) -> Span {
        match self {
            ExternKind::Interface(id, _) => id.span,
            ExternKind::Path(UsePath::Id(id)) => id.span,
            ExternKind::Path(UsePath::Package { name, .. }) => name.span,
            ExternKind::Func(id, _) => id.span,
        }
    }
}

struct Interface<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    items: Vec<InterfaceItem<'a>>,
}

impl<'a> Interface<'a> {
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Interface)?;
        let name = parse_id(tokens)?;
        let items = Self::parse_items(tokens)?;
        Ok(Interface {
            docs,
            attributes,
            name,
            items,
        })
    }

    pub(super) fn parse_items(tokens: &mut Tokenizer<'a>) -> Result<Vec<InterfaceItem<'a>>> {
        tokens.expect(Token::LeftBrace)?;
        let mut items = Vec::new();
        loop {
            let docs = parse_docs(tokens)?;
            if tokens.eat(Token::RightBrace)? {
                break;
            }
            let attributes = Attribute::parse_list(tokens)?;
            items.push(InterfaceItem::parse(tokens, docs, attributes)?);
        }
        Ok(items)
    }
}

#[derive(Debug)]
pub enum WorldOrInterface {
    World,
    Interface,
    Unknown,
}

enum InterfaceItem<'a> {
    TypeDef(TypeDef<'a>),
    Func(NamedFunc<'a>),
    Use(Use<'a>),
}

struct Use<'a> {
    attributes: Vec<Attribute<'a>>,
    from: UsePath<'a>,
    names: Vec<UseName<'a>>,
}

#[derive(Debug)]
enum UsePath<'a> {
    Id(Id<'a>),
    Package { id: PackageName<'a>, name: Id<'a> },
}

impl<'a> UsePath<'a> {
    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
        let id = parse_id(tokens)?;
        if tokens.eat(Token::Colon)? {
            // `foo:bar/baz@1.0`
            let namespace = id;
            let pkg_name = parse_id(tokens)?;
            tokens.expect(Token::Slash)?;
            let name = parse_id(tokens)?;
            let version = parse_opt_version(tokens)?;
            Ok(UsePath::Package {
                id: PackageName {
                    docs: Default::default(),
                    span: Span {
                        start: namespace.span.start,
                        end: pkg_name.span.end,
                    },
                    namespace,
                    name: pkg_name,
                    version,
                },
                name,
            })
        } else {
            // `foo`
            Ok(UsePath::Id(id))
        }
    }

    fn name(&self) -> &Id<'a> {
        match self {
            UsePath::Id(id) => id,
            UsePath::Package { name, .. } => name,
        }
    }
}

struct UseName<'a> {
    name: Id<'a>,
    as_: Option<Id<'a>>,
}

impl<'a> Use<'a> {
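    /// Parses a `use` item within a world or interface, for example
    /// `use foo.{a, b as c};` or `use my:dep/iface.{x};` (illustrative names).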
    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> {
        tokens.expect(Token::Use)?;
        let from = UsePath::parse(tokens)?;
        tokens.expect(Token::Period)?;
        tokens.expect(Token::LeftBrace)?;

        let mut names = Vec::new();
        while !tokens.eat(Token::RightBrace)? {
            let mut name = UseName {
                name: parse_id(tokens)?,
                as_: None,
            };
            if tokens.eat(Token::As)? {
                name.as_ = Some(parse_id(tokens)?);
            }
            names.push(name);
            if !tokens.eat(Token::Comma)? {
                tokens.expect(Token::RightBrace)?;
                break;
            }
        }
        tokens.expect_semicolon()?;
        Ok(Use {
            attributes,
            from,
            names,
        })
    }
}

struct Include<'a> {
    from: UsePath<'a>,
    attributes: Vec<Attribute<'a>>,
    names: Vec<IncludeName<'a>>,
}

struct IncludeName<'a> {
    name: Id<'a>,
    as_: Id<'a>,
}

impl<'a> Include<'a> {
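    /// Parses an `include` item within a world, for example `include foo;` or
    /// `include foo with { a as b, c as d }` (illustrative names).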
    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> {
        tokens.expect(Token::Include)?;
        let from = UsePath::parse(tokens)?;

        let names = if tokens.eat(Token::With)? {
            parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |_docs, tokens| {
                    let name = parse_id(tokens)?;
                    tokens.expect(Token::As)?;
                    let as_ = parse_id(tokens)?;
                    Ok(IncludeName { name, as_ })
                },
            )?
        } else {
            tokens.expect_semicolon()?;
            Vec::new()
        };

        Ok(Include {
            attributes,
            from,
            names,
        })
    }
}

#[derive(Debug, Clone)]
pub struct Id<'a> {
    name: &'a str,
    span: Span,
}

impl<'a> From<&'a str> for Id<'a> {
    fn from(s: &'a str) -> Id<'a> {
        Id {
            name: s.into(),
            span: Span { start: 0, end: 0 },
        }
    }
}

#[derive(Debug, Clone)]
pub struct Docs<'a> {
    docs: Vec<Cow<'a, str>>,
    span: Span,
}

impl<'a> Default for Docs<'a> {
    fn default() -> Self {
        Self {
            docs: Default::default(),
            span: Span { start: 0, end: 0 },
        }
    }
}

struct TypeDef<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    ty: Type<'a>,
}

enum Type<'a> {
    Bool(Span),
    U8(Span),
    U16(Span),
    U32(Span),
    U64(Span),
    S8(Span),
    S16(Span),
    S32(Span),
    S64(Span),
    F32(Span),
    F64(Span),
    Char(Span),
    String(Span),
    Name(Id<'a>),
    List(List<'a>),
    FixedSizeList(FixedSizeList<'a>),
    Handle(Handle<'a>),
    Resource(Resource<'a>),
    Record(Record<'a>),
    Flags(Flags<'a>),
    Variant(Variant<'a>),
    Tuple(Tuple<'a>),
    Enum(Enum<'a>),
    Option(Option_<'a>),
    Result(Result_<'a>),
    Future(Future<'a>),
    Stream(Stream<'a>),
    ErrorContext(Span),
}

enum Handle<'a> {
    Own { resource: Id<'a> },
    Borrow { resource: Id<'a> },
}

impl Handle<'_> {
    fn span(&self) -> Span {
        match self {
            Handle::Own { resource } | Handle::Borrow { resource } => resource.span,
        }
    }
}

struct Resource<'a> {
    span: Span,
    funcs: Vec<ResourceFunc<'a>>,
}

enum ResourceFunc<'a> {
    Method(NamedFunc<'a>),
    Static(NamedFunc<'a>),
    Constructor(NamedFunc<'a>),
}

impl<'a> ResourceFunc<'a> {
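    /// Parses one item inside a `resource { ... }` block: a constructor such
    /// as `constructor(a: u32);`, a static function such as
    /// `make: static func() -> t;`, or a method such as `get: func() -> u32;`
    /// (illustrative signatures).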
    fn parse(
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
        tokens: &mut Tokenizer<'a>,
    ) -> Result<Self> {
        match tokens.clone().next()? {
            Some((span, Token::Constructor)) => {
                tokens.expect(Token::Constructor)?;
                tokens.expect(Token::LeftParen)?;
                let params = parse_list_trailer(tokens, Token::RightParen, |_docs, tokens| {
                    let name = parse_id(tokens)?;
                    tokens.expect(Token::Colon)?;
                    let ty = Type::parse(tokens)?;
                    Ok((name, ty))
                })?;
                let result = if tokens.eat(Token::RArrow)? {
                    let ty = Type::parse(tokens)?;
                    Some(ty)
                } else {
                    None
                };
                tokens.expect_semicolon()?;
                Ok(ResourceFunc::Constructor(NamedFunc {
                    docs,
                    attributes,
                    name: Id {
                        span,
                        name: "constructor",
                    },
                    func: Func {
                        span,
                        async_: false,
                        params,
                        result,
                    },
                }))
            }
            Some((_span, Token::Id | Token::ExplicitId)) => {
                let name = parse_id(tokens)?;
                tokens.expect(Token::Colon)?;
                let ctor = if tokens.eat(Token::Static)? {
                    ResourceFunc::Static
                } else {
                    ResourceFunc::Method
                };
                let func = Func::parse(tokens)?;
                tokens.expect_semicolon()?;
                Ok(ctor(NamedFunc {
                    docs,
                    attributes,
                    name,
                    func,
                }))
            }
            other => Err(err_expected(tokens, "`constructor` or identifier", other).into()),
        }
    }

    fn named_func(&self) -> &NamedFunc<'a> {
        use ResourceFunc::*;
        match self {
            Method(f) | Static(f) | Constructor(f) => f,
        }
    }
}

struct Record<'a> {
    span: Span,
    fields: Vec<Field<'a>>,
}

struct Field<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
    ty: Type<'a>,
}

struct Flags<'a> {
    span: Span,
    flags: Vec<Flag<'a>>,
}

struct Flag<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
}

struct Variant<'a> {
    span: Span,
    cases: Vec<Case<'a>>,
}

struct Case<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
    ty: Option<Type<'a>>,
}

struct Enum<'a> {
    span: Span,
    cases: Vec<EnumCase<'a>>,
}

struct EnumCase<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
}

struct Option_<'a> {
    span: Span,
    ty: Box<Type<'a>>,
}

struct List<'a> {
    span: Span,
    ty: Box<Type<'a>>,
}

struct FixedSizeList<'a> {
    span: Span,
    ty: Box<Type<'a>>,
    size: u32,
}

struct Future<'a> {
    span: Span,
    ty: Option<Box<Type<'a>>>,
}

struct Tuple<'a> {
    span: Span,
    types: Vec<Type<'a>>,
}

struct Result_<'a> {
    span: Span,
    ok: Option<Box<Type<'a>>>,
    err: Option<Box<Type<'a>>>,
}

struct Stream<'a> {
    span: Span,
    ty: Option<Box<Type<'a>>>,
}

struct NamedFunc<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    func: Func<'a>,
}

type ParamList<'a> = Vec<(Id<'a>, Type<'a>)>;

struct Func<'a> {
    span: Span,
    async_: bool,
    params: ParamList<'a>,
    result: Option<Type<'a>>,
}

impl<'a> Func<'a> {
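    /// Parses the `async? func(...) -> T` portion of a function declaration
    /// (the part after `name:`), for example `func(a: u32, b: string) -> bool`
    /// or `async func()` (illustrative signatures).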
    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Func<'a>> {
        fn parse_params<'a>(tokens: &mut Tokenizer<'a>, left_paren: bool) -> Result<ParamList<'a>> {
            if left_paren {
                tokens.expect(Token::LeftParen)?;
            };
            parse_list_trailer(tokens, Token::RightParen, |_docs, tokens| {
                let name = parse_id(tokens)?;
                tokens.expect(Token::Colon)?;
                let ty = Type::parse(tokens)?;
                Ok((name, ty))
            })
        }

        let async_ = tokens.eat(Token::Async)?;
        let span = tokens.expect(Token::Func)?;
        let params = parse_params(tokens, true)?;
        let result = if tokens.eat(Token::RArrow)? {
            let ty = Type::parse(tokens)?;
            Some(ty)
        } else {
            None
        };
        Ok(Func {
            span,
            async_,
            params,
            result,
        })
    }
}

impl<'a> InterfaceItem<'a> {
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<InterfaceItem<'a>> {
        match tokens.clone().next()? {
            Some((_span, Token::Type)) => {
                TypeDef::parse(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Flags)) => {
                TypeDef::parse_flags(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Enum)) => {
                TypeDef::parse_enum(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Variant)) => {
                TypeDef::parse_variant(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Resource)) => {
                TypeDef::parse_resource(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Record)) => {
                TypeDef::parse_record(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Id)) | Some((_span, Token::ExplicitId)) => {
                NamedFunc::parse(tokens, docs, attributes).map(InterfaceItem::Func)
            }
            Some((_span, Token::Use)) => Use::parse(tokens, attributes).map(InterfaceItem::Use),
            other => Err(err_expected(tokens, "`type`, `resource` or `func`", other).into()),
        }
    }
}

impl<'a> TypeDef<'a> {
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Type)?;
        let name = parse_id(tokens)?;
        tokens.expect(Token::Equals)?;
        let ty = Type::parse(tokens)?;
        tokens.expect_semicolon()?;
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    fn parse_flags(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Flags)?;
        let name = parse_id(tokens)?;
        let ty = Type::Flags(Flags {
            span: name.span,
            flags: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    Ok(Flag { docs, name })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    fn parse_resource(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Resource)?;
        let name = parse_id(tokens)?;
        let mut funcs = Vec::new();
        if tokens.eat(Token::LeftBrace)? {
            while !tokens.eat(Token::RightBrace)? {
                let docs = parse_docs(tokens)?;
                let attributes = Attribute::parse_list(tokens)?;
                funcs.push(ResourceFunc::parse(docs, attributes, tokens)?);
            }
        } else {
            tokens.expect_semicolon()?;
        }
        let ty = Type::Resource(Resource {
            span: name.span,
            funcs,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    fn parse_record(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Record)?;
        let name = parse_id(tokens)?;
        let ty = Type::Record(Record {
            span: name.span,
            fields: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    tokens.expect(Token::Colon)?;
                    let ty = Type::parse(tokens)?;
                    Ok(Field { docs, name, ty })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    fn parse_variant(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Variant)?;
        let name = parse_id(tokens)?;
        let ty = Type::Variant(Variant {
            span: name.span,
            cases: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    let ty = if tokens.eat(Token::LeftParen)? {
                        let ty = Type::parse(tokens)?;
                        tokens.expect(Token::RightParen)?;
                        Some(ty)
                    } else {
                        None
                    };
                    Ok(Case { docs, name, ty })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    fn parse_enum(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Enum)?;
        let name = parse_id(tokens)?;
        let ty = Type::Enum(Enum {
            span: name.span,
            cases: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    Ok(EnumCase { docs, name })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }
}

impl<'a> NamedFunc<'a> {
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        let name = parse_id(tokens)?;
        tokens.expect(Token::Colon)?;
        let func = Func::parse(tokens)?;
        tokens.expect_semicolon()?;
        Ok(NamedFunc {
            docs,
            attributes,
            name,
            func,
        })
    }
}

fn parse_id<'a>(tokens: &mut Tokenizer<'a>) -> Result<Id<'a>> {
    match tokens.next()? {
        Some((span, Token::Id)) => Ok(Id {
            name: tokens.parse_id(span)?,
            span,
        }),
        Some((span, Token::ExplicitId)) => Ok(Id {
            name: tokens.parse_explicit_id(span)?,
            span,
        }),
        other => Err(err_expected(tokens, "an identifier or string", other).into()),
    }
}

fn parse_opt_version(tokens: &mut Tokenizer<'_>) -> Result<Option<(Span, Version)>> {
    if tokens.eat(Token::At)? {
        parse_version(tokens).map(Some)
    } else {
        Ok(None)
    }
}

fn parse_version(tokens: &mut Tokenizer<'_>) -> Result<(Span, Version)> {
    let start = tokens.expect(Token::Integer)?.start;
    tokens.expect(Token::Period)?;
    tokens.expect(Token::Integer)?;
    tokens.expect(Token::Period)?;
    let end = tokens.expect(Token::Integer)?.end;
    let mut span = Span { start, end };
    eat_ids(tokens, Token::Minus, &mut span)?;
    eat_ids(tokens, Token::Plus, &mut span)?;
    let string = tokens.get_span(span);
    let version = Version::parse(string).map_err(|e| Error::new(span, e.to_string()))?;
    return Ok((span, version));

    // According to `semver.org` this is what we're parsing:
    //
    // ```ebnf
    // <pre-release> ::= <dot-separated pre-release identifiers>
    //
    // <dot-separated pre-release identifiers> ::= <pre-release identifier>
    //                                           | <pre-release identifier> "." <dot-separated pre-release identifiers>
    //
    // <build> ::= <dot-separated build identifiers>
    //
    // <dot-separated build identifiers> ::= <build identifier>
    //                                     | <build identifier> "." <dot-separated build identifiers>
    //
    // <pre-release identifier> ::= <alphanumeric identifier>
    //                            | <numeric identifier>
    //
    // <build identifier> ::= <alphanumeric identifier>
    //                      | <digits>
    //
    // <alphanumeric identifier> ::= <non-digit>
    //                             | <non-digit> <identifier characters>
    //                             | <identifier characters> <non-digit>
    //                             | <identifier characters> <non-digit> <identifier characters>
    //
    // <numeric identifier> ::= "0"
    //                        | <positive digit>
    //                        | <positive digit> <digits>
    //
    // <identifier characters> ::= <identifier character>
    //                           | <identifier character> <identifier characters>
    //
    // <identifier character> ::= <digit>
    //                          | <non-digit>
    //
    // <non-digit> ::= <letter>
    //               | "-"
    //
    // <digits> ::= <digit>
    //            | <digit> <digits>
    // ```
    //
    // This is loosely based on WIT syntax and an approximation is parsed here:
    //
    // * This function starts by parsing the optional leading `-` and `+` which
    //   indicate pre-release and build metadata.
    // * Afterwards all of $id, $integer, `-`, and `.` are chomped. The only
    //   exception here is that if `.` isn't followed by $id, $integer, or `-`
    //   then it's assumed that it's something like `use a:b@1.0.0-a.{...}`
    //   where the `.` is part of WIT syntax, not semver.
    //
    // Note that this additionally doesn't try to return any first-class errors.
    // Instead it stops at the first unrecognized token and leaves it to
    // something else in the system to return an error.
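    //
    // As a result, version suffixes such as `1.0.0`, `1.2.3-rc.1`, and
    // `2.0.0+build.5` (illustrative examples) are all accepted above by
    // `Version::parse`.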
    fn eat_ids(tokens: &mut Tokenizer<'_>, prefix: Token, end: &mut Span) -> Result<()> {
        if !tokens.eat(prefix)? {
            return Ok(());
        }
        loop {
            let mut clone = tokens.clone();
            match clone.next()? {
                Some((span, Token::Id | Token::Integer | Token::Minus)) => {
                    end.end = span.end;
                    *tokens = clone;
                }
                Some((_span, Token::Period)) => match clone.next()? {
                    Some((span, Token::Id | Token::Integer | Token::Minus)) => {
                        end.end = span.end;
                        *tokens = clone;
                    }
                    _ => break Ok(()),
                },
                _ => break Ok(()),
            }
        }
    }
}

fn parse_docs<'a>(tokens: &mut Tokenizer<'a>) -> Result<Docs<'a>> {
    let mut docs = Docs::default();
    let mut clone = tokens.clone();
    let mut started = false;
    while let Some((span, token)) = clone.next_raw()? {
        match token {
            Token::Whitespace => {}
            Token::Comment => {
                let comment = tokens.get_span(span);
                if !started {
                    docs.span.start = span.start;
                    started = true;
                }
                let trailing_ws = comment
                    .bytes()
                    .rev()
                    .take_while(|ch| ch.is_ascii_whitespace())
                    .count();
                docs.span.end = span.end - (trailing_ws as u32);
                docs.docs.push(comment.into());
            }
            _ => break,
        };
        *tokens = clone.clone();
    }
    Ok(docs)
}

impl<'a> Type<'a> {
    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
        match tokens.next()? {
            Some((span, Token::U8)) => Ok(Type::U8(span)),
            Some((span, Token::U16)) => Ok(Type::U16(span)),
            Some((span, Token::U32)) => Ok(Type::U32(span)),
            Some((span, Token::U64)) => Ok(Type::U64(span)),
            Some((span, Token::S8)) => Ok(Type::S8(span)),
            Some((span, Token::S16)) => Ok(Type::S16(span)),
            Some((span, Token::S32)) => Ok(Type::S32(span)),
            Some((span, Token::S64)) => Ok(Type::S64(span)),
            Some((span, Token::F32)) => Ok(Type::F32(span)),
            Some((span, Token::F64)) => Ok(Type::F64(span)),
            Some((span, Token::Char)) => Ok(Type::Char(span)),

            // tuple<T, U, ...>
            Some((span, Token::Tuple)) => {
                let types = parse_list(
                    tokens,
                    Token::LessThan,
                    Token::GreaterThan,
                    |_docs, tokens| Type::parse(tokens),
                )?;
                Ok(Type::Tuple(Tuple { span, types }))
            }

            Some((span, Token::Bool)) => Ok(Type::Bool(span)),
            Some((span, Token::String_)) => Ok(Type::String(span)),

            // list<T>
            // list<T, N>
            Some((span, Token::List)) => {
                tokens.expect(Token::LessThan)?;
                let ty = Type::parse(tokens)?;
                let size = if tokens.eat(Token::Comma)? {
                    let number = tokens.next()?;
                    if let Some((span, Token::Integer)) = number {
                        let size: u32 = tokens.get_span(span).parse()?;
                        Some(size)
                    } else {
                        return Err(err_expected(tokens, "fixed size", number).into());
                    }
                } else {
                    None
                };
                tokens.expect(Token::GreaterThan)?;
                if let Some(size) = size {
                    Ok(Type::FixedSizeList(FixedSizeList {
                        span,
                        ty: Box::new(ty),
                        size,
                    }))
                } else {
                    Ok(Type::List(List {
                        span,
                        ty: Box::new(ty),
                    }))
                }
            }

            // option<T>
            Some((span, Token::Option_)) => {
                tokens.expect(Token::LessThan)?;
                let ty = Type::parse(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::Option(Option_ {
                    span,
                    ty: Box::new(ty),
                }))
            }

            // result<T, E>
            // result<_, E>
            // result<T>
            // result
            Some((span, Token::Result_)) => {
                let mut ok = None;
                let mut err = None;

                if tokens.eat(Token::LessThan)? {
                    if tokens.eat(Token::Underscore)? {
                        tokens.expect(Token::Comma)?;
                        err = Some(Box::new(Type::parse(tokens)?));
                    } else {
                        ok = Some(Box::new(Type::parse(tokens)?));
                        if tokens.eat(Token::Comma)? {
                            err = Some(Box::new(Type::parse(tokens)?));
                        }
                    };
                    tokens.expect(Token::GreaterThan)?;
                };
                Ok(Type::Result(Result_ { span, ok, err }))
            }

            // future<T>
            // future
            Some((span, Token::Future)) => {
                let mut ty = None;

                if tokens.eat(Token::LessThan)? {
                    ty = Some(Box::new(Type::parse(tokens)?));
                    tokens.expect(Token::GreaterThan)?;
                };
                Ok(Type::Future(Future { span, ty }))
            }

            // stream<T>
            // stream
            Some((span, Token::Stream)) => {
                let mut ty = None;

                if tokens.eat(Token::LessThan)? {
                    ty = Some(Box::new(Type::parse(tokens)?));
                    tokens.expect(Token::GreaterThan)?;
                };
                Ok(Type::Stream(Stream { span, ty }))
            }

            // error-context
            Some((span, Token::ErrorContext)) => Ok(Type::ErrorContext(span)),

            // own<T>
            Some((_span, Token::Own)) => {
                tokens.expect(Token::LessThan)?;
                let resource = parse_id(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::Handle(Handle::Own { resource }))
            }

            // borrow<T>
            Some((_span, Token::Borrow)) => {
                tokens.expect(Token::LessThan)?;
                let resource = parse_id(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::Handle(Handle::Borrow { resource }))
            }

            // `foo`
            Some((span, Token::Id)) => Ok(Type::Name(Id {
                name: tokens.parse_id(span)?.into(),
                span,
            })),
            // `%foo`
            Some((span, Token::ExplicitId)) => Ok(Type::Name(Id {
                name: tokens.parse_explicit_id(span)?.into(),
                span,
            })),

            other => Err(err_expected(tokens, "a type", other).into()),
        }
    }

    fn span(&self) -> Span {
        match self {
            Type::Bool(span)
            | Type::U8(span)
            | Type::U16(span)
            | Type::U32(span)
            | Type::U64(span)
            | Type::S8(span)
            | Type::S16(span)
            | Type::S32(span)
            | Type::S64(span)
            | Type::F32(span)
            | Type::F64(span)
            | Type::Char(span)
            | Type::String(span)
            | Type::ErrorContext(span) => *span,
            Type::Name(id) => id.span,
            Type::List(l) => l.span,
            Type::FixedSizeList(l) => l.span,
            Type::Handle(h) => h.span(),
            Type::Resource(r) => r.span,
            Type::Record(r) => r.span,
            Type::Flags(f) => f.span,
            Type::Variant(v) => v.span,
            Type::Tuple(t) => t.span,
            Type::Enum(e) => e.span,
            Type::Option(o) => o.span,
            Type::Result(r) => r.span,
            Type::Future(f) => f.span,
            Type::Stream(s) => s.span,
        }
    }
}

fn parse_list<'a, T>(
    tokens: &mut Tokenizer<'a>,
    start: Token,
    end: Token,
    parse: impl FnMut(Docs<'a>, &mut Tokenizer<'a>) -> Result<T>,
) -> Result<Vec<T>> {
    tokens.expect(start)?;
    parse_list_trailer(tokens, end, parse)
}

fn parse_list_trailer<'a, T>(
    tokens: &mut Tokenizer<'a>,
    end: Token,
    mut parse: impl FnMut(Docs<'a>, &mut Tokenizer<'a>) -> Result<T>,
) -> Result<Vec<T>> {
    let mut items = Vec::new();
    loop {
        // get docs before we skip them to try to eat the end token
        let docs = parse_docs(tokens)?;

        // if we found an end token then we're done
        if tokens.eat(end)? {
            break;
        }

        let item = parse(docs, tokens)?;
        items.push(item);

        // if there's no trailing comma then this is required to be the end,
        // otherwise we go through the loop to try to get another item
        if !tokens.eat(Token::Comma)? {
            tokens.expect(end)?;
            break;
        }
    }
    Ok(items)
}

fn err_expected(
    tokens: &Tokenizer<'_>,
    expected: &'static str,
    found: Option<(Span, Token)>,
) -> Error {
    match found {
        Some((span, token)) => Error::new(
            span,
            format!("expected {}, found {}", expected, token.describe()),
        ),
        None => Error::new(tokens.eof_span(), format!("expected {expected}, found eof")),
    }
}

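/// Gate attributes that may precede items, written as `@since(version = 1.2.3)`,
/// `@unstable(feature = some-feature)`, or `@deprecated(version = 1.2.3)`
/// (illustrative values).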
enum Attribute<'a> {
    Since { span: Span, version: Version },
    Unstable { span: Span, feature: Id<'a> },
    Deprecated { span: Span, version: Version },
}

impl<'a> Attribute<'a> {
    fn parse_list(tokens: &mut Tokenizer<'a>) -> Result<Vec<Attribute<'a>>> {
        let mut ret = Vec::new();
        while tokens.eat(Token::At)? {
            let id = parse_id(tokens)?;
            let attr = match id.name {
                "since" => {
                    tokens.expect(Token::LeftParen)?;
                    eat_id(tokens, "version")?;
                    tokens.expect(Token::Equals)?;
                    let (_span, version) = parse_version(tokens)?;
                    tokens.expect(Token::RightParen)?;
                    Attribute::Since {
                        span: id.span,
                        version,
                    }
                }
                "unstable" => {
                    tokens.expect(Token::LeftParen)?;
                    eat_id(tokens, "feature")?;
                    tokens.expect(Token::Equals)?;
                    let feature = parse_id(tokens)?;
                    tokens.expect(Token::RightParen)?;
                    Attribute::Unstable {
                        span: id.span,
                        feature,
                    }
                }
                "deprecated" => {
                    tokens.expect(Token::LeftParen)?;
                    eat_id(tokens, "version")?;
                    tokens.expect(Token::Equals)?;
                    let (_span, version) = parse_version(tokens)?;
                    tokens.expect(Token::RightParen)?;
                    Attribute::Deprecated {
                        span: id.span,
                        version,
                    }
                }
                other => {
                    bail!(Error::new(id.span, format!("unknown attribute `{other}`"),))
                }
            };
            ret.push(attr);
        }
        Ok(ret)
    }

    fn span(&self) -> Span {
        match self {
            Attribute::Since { span, .. }
            | Attribute::Unstable { span, .. }
            | Attribute::Deprecated { span, .. } => *span,
        }
    }
}

fn eat_id(tokens: &mut Tokenizer<'_>, expected: &str) -> Result<Span> {
    let id = parse_id(tokens)?;
    if id.name != expected {
        bail!(Error::new(
            id.span,
            format!("expected `{expected}`, found `{}`", id.name),
        ));
    }
    Ok(id.span)
}

/// A listing of source files which are used to get parsed into an
/// [`UnresolvedPackage`].
///
/// [`UnresolvedPackage`]: crate::UnresolvedPackage
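///
/// # Example
///
/// A minimal usage sketch (the path and WIT contents here are illustrative,
/// and the crate-root `wit_parser::SourceMap` re-export is assumed):
///
/// ```no_run
/// use std::path::Path;
/// use wit_parser::SourceMap;
///
/// fn main() -> anyhow::Result<()> {
///     let mut map = SourceMap::new();
///     // Contents are supplied directly; the path is only used in error messages.
///     map.push(
///         Path::new("example.wit"),
///         "package example:inline;\n\ninterface i {}",
///     );
///     let _group = map.parse()?;
///     Ok(())
/// }
/// ```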
#[derive(Clone, Default)]
pub struct SourceMap {
    sources: Vec<Source>,
    offset: u32,
    require_f32_f64: Option<bool>,
}

#[derive(Clone)]
struct Source {
    offset: u32,
    path: PathBuf,
    contents: String,
}

impl SourceMap {
    /// Creates a new empty source map.
    pub fn new() -> SourceMap {
        SourceMap::default()
    }

    #[doc(hidden)] // NB: only here for a transitionary period
    pub fn set_require_f32_f64(&mut self, enable: bool) {
        self.require_f32_f64 = Some(enable);
    }

    /// Reads the file `path` on the filesystem and appends its contents to this
    /// [`SourceMap`].
    pub fn push_file(&mut self, path: &Path) -> Result<()> {
        let contents = std::fs::read_to_string(path)
            .with_context(|| format!("failed to read file {path:?}"))?;
        self.push(path, contents);
        Ok(())
    }

    /// Appends the given contents with the given path into this source map.
    ///
    /// The `path` provided is not read from the filesystem and is instead only
    /// used during error messages. Each file added to a [`SourceMap`]
    /// contributes to the final parsed package by unioning together all of the
    /// interfaces and worlds defined across the files. Note, however, that
    /// each file has its own namespace for top-level `use` statements and the
    /// like.
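    ///
    /// For example, pushing two files with roughly the following contents
    /// yields a single `foo:name` package containing both `i` and `w`
    /// (illustrative sketch only):
    ///
    /// ```wit
    /// // first file
    /// package foo:name;
    /// interface i {}
    /// ```
    ///
    /// ```wit
    /// // second file
    /// package foo:name;
    /// world w {}
    /// ```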
    pub fn push(&mut self, path: &Path, contents: impl Into<String>) {
        let mut contents = contents.into();
        // Guarantee that there's at least one character in these contents by
        // appending a single newline to the end. The newline is excluded from
        // tokenization below; it's only here to ensure that spans which point
        // one byte beyond the end of a file (eof) still map back to this
        // original file.
        contents.push('\n');
        let new_offset = self.offset + u32::try_from(contents.len()).unwrap();
        self.sources.push(Source {
            offset: self.offset,
            path: path.to_path_buf(),
            contents,
        });
        self.offset = new_offset;
    }

    /// Parses the files added to this source map into an
    /// [`UnresolvedPackageGroup`].
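    ///
    /// Nested `package ... { ... }` blocks found in any source are resolved
    /// separately from their surrounding file and returned alongside the main
    /// package in the resulting group. For example (illustrative only), a
    /// single file may contain both:
    ///
    /// ```wit
    /// package foo:main;
    ///
    /// package foo:nested {
    ///   interface i {}
    /// }
    /// ```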
    pub fn parse(self) -> Result<UnresolvedPackageGroup> {
        let mut nested = Vec::new();
        let main = self.rewrite_error(|| {
            let mut resolver = Resolver::default();
            let mut srcs = self.sources.iter().collect::<Vec<_>>();
            srcs.sort_by_key(|src| &src.path);

            // Parse each source file individually. A tokenizer is created here
            // from this map's settings and then `PackageFile` is used to parse
            // the whole stream of tokens.
            for src in srcs {
                let mut tokens = Tokenizer::new(
                    // chop off the forcibly appended `\n` character when
                    // passing through the source to get tokenized.
                    &src.contents[..src.contents.len() - 1],
                    src.offset,
                    self.require_f32_f64,
                )
                .with_context(|| format!("failed to tokenize path: {}", src.path.display()))?;
                let mut file = PackageFile::parse(&mut tokens)?;

                // Filter out any nested packages and resolve them separately.
                // Nested packages have only a single "file" so only one item
                // is pushed into a `Resolver`. Note that a nested `Resolver`
                // is used here, not the outer one.
                //
                // Note that filtering out `Package` items is required by how
                // the implementation currently disallows nested packages
                // within nested packages.
                for item in mem::take(&mut file.decl_list.items) {
                    match item {
                        AstItem::Package(nested_pkg) => {
                            let mut resolve = Resolver::default();
                            resolve.push(nested_pkg).with_context(|| {
                                format!(
                                    "failed to handle nested package in: {}",
                                    src.path.display()
                                )
                            })?;

                            nested.push(resolve.resolve()?);
                        }
                        other => file.decl_list.items.push(other),
                    }
                }

                // With nested packages handled, push this file into the
                // outer resolver.
                resolver.push(file).with_context(|| {
                    format!("failed to start resolving path: {}", src.path.display())
                })?;
            }
            Ok(resolver.resolve()?)
        })?;
        Ok(UnresolvedPackageGroup {
            main,
            nested,
            source_map: self,
        })
    }

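    /// Runs `f` and, if it returns an error, attaches source-highlighted
    /// context (file, line, column, and a `^---` style marker) to the known
    /// error types produced during parsing: parse errors, missing-package
    /// errors, lexer errors, and topological-sort errors.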
    pub(crate) fn rewrite_error<F, T>(&self, f: F) -> Result<T>
    where
        F: FnOnce() -> Result<T>,
    {
        let mut err = match f() {
            Ok(t) => return Ok(t),
            Err(e) => e,
        };
        if let Some(parse) = err.downcast_mut::<Error>() {
            if parse.highlighted.is_none() {
                let msg = self.highlight_err(parse.span.start, Some(parse.span.end), &parse.msg);
                parse.highlighted = Some(msg);
            }
        }
        if err.is::<Error>() {
            return Err(err);
        }
        if let Some(notfound) = err.downcast_mut::<PackageNotFoundError>() {
            if notfound.highlighted.is_none() {
                let msg = self.highlight_err(
                    notfound.span.start,
                    Some(notfound.span.end),
                    &format!("{notfound}"),
                );
                notfound.highlighted = Some(msg);
            }
        }
        if err.is::<PackageNotFoundError>() {
            return Err(err);
        }

        if let Some(lex) = err.downcast_ref::<lex::Error>() {
            let pos = match lex {
                lex::Error::Unexpected(at, _)
                | lex::Error::UnterminatedComment(at)
                | lex::Error::Wanted { at, .. }
                | lex::Error::InvalidCharInId(at, _)
                | lex::Error::IdPartEmpty(at)
                | lex::Error::InvalidEscape(at, _) => *at,
            };
            let msg = self.highlight_err(pos, None, lex);
            bail!("{msg}")
        }

        if let Some(sort) = err.downcast_mut::<toposort::Error>() {
            if sort.highlighted().is_none() {
                let span = match sort {
                    toposort::Error::NonexistentDep { span, .. }
                    | toposort::Error::Cycle { span, .. } => *span,
                };
                let highlighted = self.highlight_err(span.start, Some(span.end), &sort);
                sort.set_highlighted(highlighted);
            }
        }

        Err(err)
    }

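    // Renders a rustc-style diagnostic pointing at `start` (and optionally
    // extending to `end`) within the owning source. The output looks roughly
    // like the following sketch (spacing is approximate):
    //
    //     expected `;`, found `}`
    //          --> demo.wit:3:1
    //           |
    //        3 | }
    //           | ^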
    fn highlight_err(&self, start: u32, end: Option<u32>, err: impl fmt::Display) -> String {
        let src = self.source_for_offset(start);
        let start = src.to_relative_offset(start);
        let end = end.map(|end| src.to_relative_offset(end));
        let (line, col) = src.linecol(start);
        let snippet = src.contents.lines().nth(line).unwrap_or("");
        let mut msg = format!(
            "\
{err}
     --> {file}:{line}:{col}
      |
 {line:4} | {snippet}
      | {marker:>0$}",
            col + 1,
            file = src.path.display(),
            line = line + 1,
            col = col + 1,
            marker = "^",
        );
        if let Some(end) = end {
            if let Some(s) = src.contents.get(start..end) {
                for _ in s.chars().skip(1) {
                    msg.push('-');
                }
            }
        }
        msg
    }

    pub(crate) fn render_location(&self, span: Span) -> String {
        let src = self.source_for_offset(span.start);
        let start = src.to_relative_offset(span.start);
        let (line, col) = src.linecol(start);
        format!(
            "{file}:{line}:{col}",
            file = src.path.display(),
            line = line + 1,
            col = col + 1,
        )
    }

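    /// Returns the [`Source`] whose offset range contains the absolute offset
    /// `start`.
    ///
    /// The binary search either finds a source starting exactly at `start`,
    /// or it returns the insertion index, in which case the preceding source
    /// is the one that contains the offset.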
    fn source_for_offset(&self, start: u32) -> &Source {
        let i = match self.sources.binary_search_by_key(&start, |src| src.offset) {
            Ok(i) => i,
            Err(i) => i - 1,
        };
        &self.sources[i]
    }

    /// Returns an iterator over all filenames added to this source map.
    pub fn source_files(&self) -> impl Iterator<Item = &Path> {
        self.sources.iter().map(|src| src.path.as_path())
    }
}

impl Source {
    fn to_relative_offset(&self, offset: u32) -> usize {
        usize::try_from(offset - self.offset).unwrap()
    }

    fn linecol(&self, relative_offset: usize) -> (usize, usize) {
        let mut cur = 0;
        // Use split_terminator instead of lines so that if there is a `\r`,
        // it is included in the offset calculation. The `+1` values below
        // account for the `\n`.
        for (i, line) in self.contents.split_terminator('\n').enumerate() {
            if cur + line.len() + 1 > relative_offset {
                return (i, relative_offset - cur);
            }
            cur += line.len() + 1;
        }
        (self.contents.lines().count(), 0)
    }
}

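/// The result of [`parse_use_path`]: either a bare name referring to an item
/// in the local package, or a fully-qualified `namespace:package/item` path.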
pub enum ParsedUsePath {
    Name(String),
    Package(crate::PackageName, String),
}

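/// Parses a `use`-style path string such as `foo:bar/baz` into its
/// components.
///
/// A short sketch of the two accepted shapes (inputs are hypothetical and the
/// example is not compiled as a doctest):
///
/// ```ignore
/// // A bare identifier names an item in the local package.
/// let local = parse_use_path("types")?;
/// assert!(matches!(local, ParsedUsePath::Name(_)));
///
/// // A fully-qualified path names an item in another package; a trailing
/// // `@version` is accepted as well.
/// let remote = parse_use_path("wasi:http/types@0.2.0")?;
/// assert!(matches!(remote, ParsedUsePath::Package(..)));
/// ```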
pub fn parse_use_path(s: &str) -> Result<ParsedUsePath> {
    let mut tokens = Tokenizer::new(s, 0, None)?;
    let path = UsePath::parse(&mut tokens)?;
    if tokens.next()?.is_some() {
        bail!("trailing tokens in path specifier");
    }
    Ok(match path {
        UsePath::Id(id) => ParsedUsePath::Name(id.name.to_string()),
        UsePath::Package { id, name } => {
            ParsedUsePath::Package(id.package_name(), name.name.to_string())
        }
    })
}